
Fast Batch Inference can't run on Google Colab

#11
by JustCleanMan - opened

I was attempting to run the Fast Batch Inference example on Google Colab, but I get the following error:

ModuleNotFoundError                       Traceback (most recent call last)
/usr/local/lib/python3.11/dist-packages/transformers/utils/import_utils.py in _get_module(self, module_name)
   1816         try:
-> 1817             return importlib.import_module("." + module_name, self.__name__)
   1818         except Exception as e:

52 frames
/usr/lib/python3.11/importlib/__init__.py in import_module(name, package)
    125             level += 1
--> 126     return _bootstrap._gcd_import(name[level:], package, level)
    127 

/usr/lib/python3.11/importlib/_bootstrap.py in _gcd_import(name, package, level)

/usr/lib/python3.11/importlib/_bootstrap.py in _find_and_load(name, import_)

/usr/lib/python3.11/importlib/_bootstrap.py in _find_and_load_unlocked(name, import_)

/usr/lib/python3.11/importlib/_bootstrap.py in _load_unlocked(spec)

/usr/lib/python3.11/importlib/_bootstrap_external.py in exec_module(self, module)

/usr/lib/python3.11/importlib/_bootstrap.py in _call_with_frames_removed(f, *args, **kwds)

/usr/local/lib/python3.11/dist-packages/transformers/generation/utils.py in <module>
     52 from .beam_search import BeamScorer, BeamSearchScorer, ConstrainedBeamSearchScorer
---> 53 from .candidate_generator import (
     54     AssistedCandidateGenerator,

/usr/local/lib/python3.11/dist-packages/transformers/generation/candidate_generator.py in <module>
     25 if is_sklearn_available():
---> 26     from sklearn.metrics import roc_curve
     27 

/usr/local/lib/python3.11/dist-packages/sklearn/__init__.py in <module>
     72 )
---> 73 from .base import clone  # noqa: E402
     74 from .utils._show_versions import show_versions  # noqa: E402

/usr/local/lib/python3.11/dist-packages/sklearn/base.py in <module>
     18 from .exceptions import InconsistentVersionWarning
---> 19 from .utils._estimator_html_repr import _HTMLDocumentationLinkMixin, estimator_html_repr
     20 from .utils._metadata_requests import _MetadataRequester, _routing_enabled

/usr/local/lib/python3.11/dist-packages/sklearn/utils/__init__.py in <module>
     14 from ._bunch import Bunch
---> 15 from ._chunking import gen_batches, gen_even_slices
     16 from ._estimator_html_repr import estimator_html_repr

/usr/local/lib/python3.11/dist-packages/sklearn/utils/_chunking.py in <module>
     10 from .._config import get_config
---> 11 from ._param_validation import Interval, validate_params
     12 

/usr/local/lib/python3.11/dist-packages/sklearn/utils/_param_validation.py in <module>
     13 import numpy as np
---> 14 from scipy.sparse import csr_matrix, issparse
     15 

/usr/local/lib/python3.11/dist-packages/scipy/sparse/__init__.py in <module>
    293 
--> 294 from ._base import *
    295 from ._csr import *

/usr/local/lib/python3.11/dist-packages/scipy/sparse/_base.py in <module>
      4 import numpy as np
----> 5 from scipy._lib._util import VisibleDeprecationWarning
      6 

/usr/local/lib/python3.11/dist-packages/scipy/_lib/_util.py in <module>
     17 import numpy as np
---> 18 from scipy._lib._array_api import array_namespace
     19 

/usr/local/lib/python3.11/dist-packages/scipy/_lib/_array_api.py in <module>
     17 from scipy._lib.array_api_compat.array_api_compat import size
---> 18 import scipy._lib.array_api_compat.array_api_compat.numpy as array_api_compat_numpy
     19 

/usr/local/lib/python3.11/dist-packages/scipy/_lib/array_api_compat/array_api_compat/numpy/__init__.py in <module>
----> 1 from numpy import *
      2 
      3 # from numpy import * doesn't overwrite these builtin names

/usr/local/lib/python3.11/dist-packages/numpy/__init__.py in __getattr__(attr)
    366         except AssertionError:
--> 367             msg = ("The current Numpy installation ({!r}) fails to "
    368                    "pass simple sanity checks. This can be caused for example "

ModuleNotFoundError: No module named 'numpy.char'

The above exception was the direct cause of the following exception:

RuntimeError                              Traceback (most recent call last)
/usr/local/lib/python3.11/dist-packages/transformers/utils/import_utils.py in _get_module(self, module_name)
   1816         try:
-> 1817             return importlib.import_module("." + module_name, self.__name__)
   1818         except Exception as e:

/usr/lib/python3.11/importlib/__init__.py in import_module(name, package)
    125             level += 1
--> 126     return _bootstrap._gcd_import(name[level:], package, level)
    127 

/usr/lib/python3.11/importlib/_bootstrap.py in _gcd_import(name, package, level)

/usr/lib/python3.11/importlib/_bootstrap.py in _find_and_load(name, import_)

/usr/lib/python3.11/importlib/_bootstrap.py in _find_and_load_unlocked(name, import_)

/usr/lib/python3.11/importlib/_bootstrap.py in _load_unlocked(spec)

/usr/lib/python3.11/importlib/_bootstrap_external.py in exec_module(self, module)

/usr/lib/python3.11/importlib/_bootstrap.py in _call_with_frames_removed(f, *args, **kwds)

/usr/local/lib/python3.11/dist-packages/transformers/models/auto/tokenization_auto.py in <module>
     37 from ..encoder_decoder import EncoderDecoderConfig
---> 38 from .auto_factory import _LazyAutoMapping
     39 from .configuration_auto import (

/usr/local/lib/python3.11/dist-packages/transformers/models/auto/auto_factory.py in <module>
     39 if is_torch_available():
---> 40     from ...generation import GenerationMixin
     41 

/usr/lib/python3.11/importlib/_bootstrap.py in _handle_fromlist(module, fromlist, import_, recursive)

/usr/local/lib/python3.11/dist-packages/transformers/utils/import_utils.py in __getattr__(self, name)
   1804         elif name in self._class_to_module.keys():
-> 1805             module = self._get_module(self._class_to_module[name])
   1806             value = getattr(module, name)

/usr/local/lib/python3.11/dist-packages/transformers/utils/import_utils.py in _get_module(self, module_name)
   1818         except Exception as e:
-> 1819             raise RuntimeError(
   1820                 f"Failed to import {self.__name__}.{module_name} because of the following error (look up to see its"

RuntimeError: Failed to import transformers.generation.utils because of the following error (look up to see its traceback):
No module named 'numpy.char'

The above exception was the direct cause of the following exception:

RuntimeError                              Traceback (most recent call last)
<ipython-input-21-627f4f87ee20> in <cell line: 0>()
      1 import time
      2 import os
----> 3 from vllm import LLM, SamplingParams
      4 from PIL import Image
      5 from docling_core.types.doc import DoclingDocument

/usr/local/lib/python3.11/dist-packages/vllm/__init__.py in <module>
      9 import torch
     10 
---> 11 from vllm.engine.arg_utils import AsyncEngineArgs, EngineArgs
     12 from vllm.engine.async_llm_engine import AsyncLLMEngine
     13 from vllm.engine.llm_engine import LLMEngine

/usr/local/lib/python3.11/dist-packages/vllm/engine/arg_utils.py in <module>
     13 import vllm.envs as envs
     14 from vllm import version
---> 15 from vllm.config import (CacheConfig, CompilationConfig, ConfigFormat,
     16                          DecodingConfig, DeviceConfig, HfOverrides,
     17                          KVTransferConfig, LoadConfig, LoadFormat, LoRAConfig,

/usr/local/lib/python3.11/dist-packages/vllm/config.py in <module>
     25 from vllm.compilation.inductor_pass import CallableInductorPass, InductorPass
     26 from vllm.logger import init_logger
---> 27 from vllm.model_executor.layers.quantization import (QUANTIZATION_METHODS,
     28                                                      get_quantization_config)
     29 from vllm.model_executor.models import ModelRegistry

/usr/local/lib/python3.11/dist-packages/vllm/model_executor/__init__.py in <module>
      1 # SPDX-License-Identifier: Apache-2.0
      2 
----> 3 from vllm.model_executor.parameter import (BasevLLMParameter,
      4                                            PackedvLLMParameter)
      5 from vllm.model_executor.sampling_metadata import (SamplingMetadata,

/usr/local/lib/python3.11/dist-packages/vllm/model_executor/parameter.py in <module>
      7 from torch.nn import Parameter
      8 
----> 9 from vllm.distributed import get_tensor_model_parallel_rank
     10 from vllm.logger import init_logger
     11 from vllm.model_executor.utils import _make_synced_weight_loader

/usr/local/lib/python3.11/dist-packages/vllm/distributed/__init__.py in <module>
      1 # SPDX-License-Identifier: Apache-2.0
      2 
----> 3 from .communication_op import *
      4 from .parallel_state import *
      5 from .utils import *

/usr/local/lib/python3.11/dist-packages/vllm/distributed/communication_op.py in <module>
      6 import torch.distributed
      7 
----> 8 from .parallel_state import get_tp_group
      9 
     10 

/usr/local/lib/python3.11/dist-packages/vllm/distributed/parallel_state.py in <module>
     38 from torch.distributed import Backend, ProcessGroup
     39 
---> 40 import vllm.distributed.kv_transfer.kv_transfer_agent as kv_transfer
     41 import vllm.envs as envs
     42 from vllm.distributed.device_communicators.base_device_communicator import (

/usr/local/lib/python3.11/dist-packages/vllm/distributed/kv_transfer/kv_transfer_agent.py in <module>
     14 import torch
     15 
---> 16 from vllm.distributed.kv_transfer.kv_connector.factory import (
     17     KVConnectorFactory)
     18 from vllm.logger import init_logger

/usr/local/lib/python3.11/dist-packages/vllm/distributed/kv_transfer/kv_connector/factory.py in <module>
      4 from typing import TYPE_CHECKING, Callable, Dict, Type
      5 
----> 6 from .base import KVConnectorBase
      7 
      8 if TYPE_CHECKING:

/usr/local/lib/python3.11/dist-packages/vllm/distributed/kv_transfer/kv_connector/base.py in <module>
     13 import torch
     14 
---> 15 from vllm.sequence import IntermediateTensors
     16 
     17 if TYPE_CHECKING:

/usr/local/lib/python3.11/dist-packages/vllm/sequence.py in <module>
     15 import torch
     16 
---> 17 from vllm.inputs import SingletonInputs, SingletonInputsAdapter
     18 from vllm.lora.request import LoRARequest
     19 from vllm.multimodal import MultiModalDataDict, MultiModalPlaceholderDict

/usr/local/lib/python3.11/dist-packages/vllm/inputs/__init__.py in <module>
      7                    build_explicit_enc_dec_prompt, to_enc_dec_tuple_list,
      8                    token_inputs, zip_enc_dec_prompts)
----> 9 from .registry import (DummyData, InputContext, InputProcessingContext,
     10                        InputRegistry)
     11 

/usr/local/lib/python3.11/dist-packages/vllm/inputs/registry.py in <module>
     14 from vllm.logger import init_logger
     15 from vllm.transformers_utils.processor import cached_processor_from_config
---> 16 from vllm.transformers_utils.tokenizer import (AnyTokenizer,
     17                                                cached_tokenizer_from_config)
     18 from vllm.utils import (ClassRegistry, get_allowed_kwarg_only_overrides,

/usr/local/lib/python3.11/dist-packages/vllm/transformers_utils/tokenizer.py in <module>
     10 
     11 import huggingface_hub
---> 12 from transformers import (AutoTokenizer, PreTrainedTokenizer,
     13                           PreTrainedTokenizerFast)
     14 

/usr/lib/python3.11/importlib/_bootstrap.py in _handle_fromlist(module, fromlist, import_, recursive)

/usr/local/lib/python3.11/dist-packages/transformers/utils/import_utils.py in __getattr__(self, name)
   1804         elif name in self._class_to_module.keys():
   1805             module = self._get_module(self._class_to_module[name])
-> 1806             value = getattr(module, name)
   1807         elif name in self._modules:
   1808             value = self._get_module(name)

/usr/local/lib/python3.11/dist-packages/transformers/utils/import_utils.py in __getattr__(self, name)
   1803             value = Placeholder
   1804         elif name in self._class_to_module.keys():
-> 1805             module = self._get_module(self._class_to_module[name])
   1806             value = getattr(module, name)
   1807         elif name in self._modules:

/usr/local/lib/python3.11/dist-packages/transformers/utils/import_utils.py in _get_module(self, module_name)
   1817             return importlib.import_module("." + module_name, self.__name__)
   1818         except Exception as e:
-> 1819             raise RuntimeError(
   1820                 f"Failed to import {self.__name__}.{module_name} because of the following error (look up to see its"
   1821                 f" traceback):\n{e}"

RuntimeError: Failed to import transformers.models.auto.tokenization_auto because of the following error (look up to see its traceback):
Failed to import transformers.generation.utils because of the following error (look up to see its traceback):
No module named 'numpy.char'

The script appears to install a 1.x version of NumPy and then attempt to import numpy.char. According to this GitHub discussion, numpy.char cannot be imported as a module on NumPy 1.x but can be on 2.x, yet I can't get the script to work with a 2.x NumPy either. I also tried installing SciPy 1.12 as suggested in the GitHub discussion, but that didn't work either. I would appreciate any help.
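
For reference, the workarounds above were attempted with cells roughly like the following (a sketch; the exact version pins are from memory and may differ from what I actually ran), restarting the Colab runtime after each install before re-running the imports:

# attempt 1 (hypothetical pin): force a NumPy 2.x, where numpy.char should be importable as a module
!pip install --force-reinstall "numpy>=2.0"

# attempt 2 (from the GitHub discussion): pin SciPy to 1.12
!pip install "scipy==1.12.*"

# then Runtime -> Restart runtime and re-run:
import numpy.char                      # check that the submodule resolves
from vllm import LLM, SamplingParams   # the import that originally failed

In both cases the vLLM import cell still fails.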
