Missing configuration_bitnet.py.
microsoft/bitnet-b1.58-2B-4T does not appear to have a file named configuration_bitnet.py. Checkout 'https://huggingface.co/microsoft/bitnet-b1.58-2B-4T/tree/main' for available files.
Please use this version of transformers:
pip install git+https://github.com/shumingma/transformers.git
and ensure that 'trust_remote_code=True' is NOT passed in the from_pretrained call.
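A minimal load along those lines might look like this (a sketch assuming the fork above registers the BitNet architecture natively, so trust_remote_code is not needed):

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "microsoft/bitnet-b1.58-2B-4T"

# The fork ships BitNet support inside transformers itself, so no trust_remote_code here
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    torch_dtype=torch.bfloat16,
)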
Could not locate the configuration_bitnet.py inside microsoft/bitnet-b1.58-2B-4T.
Traceback (most recent call last):
File "", line 198, in _run_module_as_main
File "", line 88, in _run_code
File "/usr/local/lib/python3.12/dist-packages/vllm/entrypoints/openai/api_server.py", line 1121, in
uvloop.run(run_server(args))
File "/usr/local/lib/python3.12/dist-packages/uvloop/init.py", line 109, in run
return __asyncio.run(
^^^^^^^^^^^^^^
File "/usr/lib/python3.12/asyncio/runners.py", line 195, in run
return runner.run(main)
^^^^^^^^^^^^^^^^
File "/usr/lib/python3.12/asyncio/runners.py", line 118, in run
return self._loop.run_until_complete(task)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "uvloop/loop.pyx", line 1518, in uvloop.loop.Loop.run_until_complete
File "/usr/local/lib/python3.12/dist-packages/uvloop/init.py", line 61, in wrapper
return await main
^^^^^^^^^^
File "/usr/local/lib/python3.12/dist-packages/vllm/entrypoints/openai/api_server.py", line 1069, in run_server
async with build_async_engine_client(args) as engine_client:
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/lib/python3.12/contextlib.py", line 210, in aenter
return await anext(self.gen)
^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.12/dist-packages/vllm/entrypoints/openai/api_server.py", line 146, in build_async_engine_client
async with build_async_engine_client_from_engine_args(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/lib/python3.12/contextlib.py", line 210, in aenter
return await anext(self.gen)
^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.12/dist-packages/vllm/entrypoints/openai/api_server.py", line 166, in build_async_engine_client_from_engine_args
vllm_config = engine_args.create_engine_config(usage_context=usage_context)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.12/dist-packages/vllm/engine/arg_utils.py", line 1154, in create_engine_config
model_config = self.create_model_config()
^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.12/dist-packages/vllm/engine/arg_utils.py", line 1042, in create_model_config
return ModelConfig(
^^^^^^^^^^^^
File "/usr/local/lib/python3.12/dist-packages/vllm/config.py", line 423, in init
hf_config = get_config(self.hf_config_path or self.model,
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.12/dist-packages/vllm/transformers_utils/config.py", line 307, in get_config
config = AutoConfig.from_pretrained(
^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.12/dist-packages/transformers/models/auto/configuration_auto.py", line 1144, in from_pretrained
config_class = get_class_from_dynamic_module(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.12/dist-packages/transformers/dynamic_module_utils.py", line 558, in get_class_from_dynamic_module
final_module = get_cached_module_file(
^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.12/dist-packages/transformers/dynamic_module_utils.py", line 362, in get_cached_module_file
resolved_module_file = cached_file(
^^^^^^^^^^^^
File "/usr/local/lib/python3.12/dist-packages/transformers/utils/hub.py", line 312, in cached_file
file = cached_files(path_or_repo_id=path_or_repo_id, filenames=[filename], **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.12/dist-packages/transformers/utils/hub.py", line 573, in cached_files
raise EnvironmentError(
OSError: microsoft/bitnet-b1.58-2B-4T does not appear to have a file named configuration_bitnet.py. Checkout 'https://huggingface.co/microsoft/bitnet-b1.58-2B-4T/tree/main' for available files.
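For context, the vLLM traceback above is what you get when the OpenAI-compatible server is started against the repo with remote code enabled, roughly like this (the exact flags are an assumption, not copied from the report); the dynamic-module path it takes is what looks for the missing configuration_bitnet.py:

python -m vllm.entrypoints.openai.api_server \
    --model microsoft/bitnet-b1.58-2B-4T \
    --trust-remote-code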
ValueError Traceback (most recent call last)
File ~/dl-env/lib/python3.10/site-packages/transformers/utils/import_utils.py:1982, in _LazyModule._get_module(self, module_name)
1981 try:
-> 1982 return importlib.import_module("." + module_name, self.__name__)
1983 except Exception as e:
File /opt/homebrew/Cellar/python@3.10/3.10.16/Frameworks/Python.framework/Versions/3.10/lib/python3.10/importlib/__init__.py:126, in import_module(name, package)
125 level += 1
--> 126 return _bootstrap._gcd_import(name[level:], package, level)
File <frozen importlib._bootstrap>:1050, in _gcd_import(name, package, level)
File <frozen importlib._bootstrap>:1027, in _find_and_load(name, import_)
File <frozen importlib._bootstrap>:1006, in _find_and_load_unlocked(name, import_)
File <frozen importlib._bootstrap>:688, in _load_unlocked(spec)
File <frozen importlib._bootstrap_external>:883, in exec_module(self, module)
File <frozen importlib._bootstrap>:241, in _call_with_frames_removed(f, *args, **kwds)
File ~/dl-env/lib/python3.10/site-packages/transformers/generation/utils.py:30
28 from torch.nn import functional as F
---> 30 from transformers.generation.candidate_generator import AssistantVocabTranslatorCache
32 from ..cache_utils import (
33 Cache,
34 DynamicCache,
(...)
40 StaticCache,
41 )
File ~/dl-env/lib/python3.10/site-packages/transformers/generation/candidate_generator.py:29
28 if is_sklearn_available():
---> 29 from sklearn.metrics import roc_curve
31 from ..cache_utils import DynamicCache
File ~/dl-env/lib/python3.10/site-packages/sklearn/__init__.py:84
80 from . import (
81 __check_build, # noqa: F401
82 _distributor_init, # noqa: F401
83 )
---> 84 from .base import clone
85 from .utils._show_versions import show_versions
File ~/dl-env/lib/python3.10/site-packages/sklearn/base.py:19
18 from .exceptions import InconsistentVersionWarning
---> 19 from .utils._estimator_html_repr import _HTMLDocumentationLinkMixin, estimator_html_repr
20 from .utils._metadata_requests import _MetadataRequester, _routing_enabled
File ~/dl-env/lib/python3.10/site-packages/sklearn/utils/__init__.py:11
10 from ._bunch import Bunch
---> 11 from ._chunking import gen_batches, gen_even_slices
12 from ._estimator_html_repr import estimator_html_repr
File ~/dl-env/lib/python3.10/site-packages/sklearn/utils/_chunking.py:8
7 from .._config import get_config
----> 8 from ._param_validation import Interval, validate_params
11 def chunk_generator(gen, chunksize):
File ~/dl-env/lib/python3.10/site-packages/sklearn/utils/_param_validation.py:14
13 from .._config import config_context, get_config
---> 14 from .validation import _is_arraylike_not_scalar
17 class InvalidParameterError(ValueError, TypeError):
File ~/dl-env/lib/python3.10/site-packages/sklearn/utils/validation.py:26
25 from ..exceptions import DataConversionWarning, NotFittedError, PositiveSpectrumWarning
---> 26 from ..utils._array_api import _asarray_with_order, _is_numpy_namespace, get_namespace
27 from ..utils.fixes import ComplexWarning, _preserve_dia_indices_dtype
File ~/dl-env/lib/python3.10/site-packages/sklearn/utils/_array_api.py:11
10 from .._config import get_config
---> 11 from .fixes import parse_version
13 _NUMPY_NAMESPACE_NAMES = {"numpy", "array_api_compat.numpy"}
File ~/dl-env/lib/python3.10/site-packages/sklearn/utils/fixes.py:24
23 try:
---> 24 import pandas as pd
25 except ImportError:
File ~/dl-env/lib/python3.10/site-packages/pandas/__init__.py:22
21 # numpy compat
---> 22 from pandas.compat import (
23 np_version_under1p18 as _np_version_under1p18,
24 is_numpy_dev as _is_numpy_dev,
25 )
27 try:
File ~/dl-env/lib/python3.10/site-packages/pandas/compat/__init__.py:15
14 from pandas._typing import F
---> 15 from pandas.compat.numpy import (
16 is_numpy_dev,
17 np_array_datetime64_compat,
18 np_datetime64_compat,
19 np_version_under1p18,
20 np_version_under1p19,
21 np_version_under1p20,
22 )
23 from pandas.compat.pyarrow import (
24 pa_version_under1p0,
25 pa_version_under2p0,
26 pa_version_under3p0,
27 pa_version_under4p0,
28 )
File ~/dl-env/lib/python3.10/site-packages/pandas/compat/numpy/__init__.py:7
5 import numpy as np
----> 7 from pandas.util.version import Version
9 # numpy versioning
File ~/dl-env/lib/python3.10/site-packages/pandas/util/__init__.py:1
----> 1 from pandas.util._decorators import ( # noqa
2 Appender,
3 Substitution,
4 cache_readonly,
5 )
7 from pandas.core.util.hashing import ( # noqa
8 hash_array,
9 hash_pandas_object,
10 )
File ~/dl-env/lib/python3.10/site-packages/pandas/util/_decorators.py:14
12 import warnings
---> 14 from pandas._libs.properties import cache_readonly # noqa
15 from pandas._typing import F
File ~/dl-env/lib/python3.10/site-packages/pandas/_libs/__init__.py:13
1 __all__ = [
2 "NaT",
3 "NaTType",
(...)
9 "Interval",
10 ]
---> 13 from pandas._libs.interval import Interval
14 from pandas._libs.tslibs import (
15 NaT,
16 NaTType,
(...)
21 iNaT,
22 )
File ~/dl-env/lib/python3.10/site-packages/pandas/_libs/interval.pyx:1, in init pandas._libs.interval()
ValueError: numpy.dtype size changed, may indicate binary incompatibility. Expected 96 from C header, got 88 from PyObject
The above exception was the direct cause of the following exception:
RuntimeError Traceback (most recent call last)
File ~/dl-env/lib/python3.10/site-packages/transformers/utils/import_utils.py:1982, in _LazyModule._get_module(self, module_name)
1981 try:
-> 1982 return importlib.import_module("." + module_name, self.__name__)
1983 except Exception as e:
File /opt/homebrew/Cellar/python@3.10/3.10.16/Frameworks/Python.framework/Versions/3.10/lib/python3.10/importlib/__init__.py:126, in import_module(name, package)
125 level += 1
--> 126 return _bootstrap._gcd_import(name[level:], package, level)
File <frozen importlib._bootstrap>:1050, in _gcd_import(name, package, level)
File <frozen importlib._bootstrap>:1027, in _find_and_load(name, import_)
File <frozen importlib._bootstrap>:1006, in _find_and_load_unlocked(name, import_)
File <frozen importlib._bootstrap>:688, in _load_unlocked(spec)
File <frozen importlib._bootstrap_external>:883, in exec_module(self, module)
File <frozen importlib._bootstrap>:241, in _call_with_frames_removed(f, *args, **kwds)
File ~/dl-env/lib/python3.10/site-packages/transformers/models/auto/modeling_auto.py:21
20 from ...utils import logging
---> 21 from .auto_factory import (
22 _BaseAutoBackboneClass,
23 _BaseAutoModelClass,
24 _LazyAutoMapping,
25 auto_class_update,
26 )
27 from .configuration_auto import CONFIG_MAPPING_NAMES
File ~/dl-env/lib/python3.10/site-packages/transformers/models/auto/auto_factory.py:40
39 if is_torch_available():
---> 40 from ...generation import GenerationMixin
43 logger = logging.get_logger(__name__)
File <frozen importlib._bootstrap>:1075, in _handle_fromlist(module, fromlist, import_, recursive)
File ~/dl-env/lib/python3.10/site-packages/transformers/utils/import_utils.py:1970, in _LazyModule.__getattr__(self, name)
1969 elif name in self._class_to_module.keys():
-> 1970 module = self._get_module(self._class_to_module[name])
1971 value = getattr(module, name)
File ~/dl-env/lib/python3.10/site-packages/transformers/utils/import_utils.py:1984, in _LazyModule._get_module(self, module_name)
1983 except Exception as e:
-> 1984 raise RuntimeError(
1985 f"Failed to import {self.name}.{module_name} because of the following error (look up to see its"
1986 f" traceback):\n{e}"
1987 ) from e
RuntimeError: Failed to import transformers.generation.utils because of the following error (look up to see its traceback):
numpy.dtype size changed, may indicate binary incompatibility. Expected 96 from C header, got 88 from PyObject
The above exception was the direct cause of the following exception:
RuntimeError Traceback (most recent call last)
Cell In[3], line 2
1 import torch
----> 2 from transformers import AutoModelForCausalLM, AutoTokenizer
4 model_id = "microsoft/bitnet-b1.58-2B-4T"
6 # Load tokenizer and model
File <frozen importlib._bootstrap>:1075, in _handle_fromlist(module, fromlist, import_, recursive)
File ~/dl-env/lib/python3.10/site-packages/transformers/utils/import_utils.py:1970, in _LazyModule.__getattr__(self, name)
1968 value = Placeholder
1969 elif name in self._class_to_module.keys():
-> 1970 module = self._get_module(self._class_to_module[name])
1971 value = getattr(module, name)
1972 elif name in self._modules:
File ~/dl-env/lib/python3.10/site-packages/transformers/utils/import_utils.py:1984, in _LazyModule._get_module(self, module_name)
1982 return importlib.import_module("." + module_name, self.name)
1983 except Exception as e:
-> 1984 raise RuntimeError(
1985 f"Failed to import {self.name}.{module_name} because of the following error (look up to see its"
1986 f" traceback):\n{e}"
1987 ) from e
RuntimeError: Failed to import transformers.models.auto.modeling_auto because of the following error (look up to see its traceback):
Failed to import transformers.generation.utils because of the following error (look up to see its traceback):
numpy.dtype size changed, may indicate binary incompatibility. Expected 96 from C header, got 88 from PyObject
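This second traceback is unrelated to the missing configuration_bitnet.py: the pandas and scikit-learn wheels in that environment were built against a different NumPy ABI than the NumPy currently installed. A typical remedy (a suggestion, not something verified in this thread) is to reinstall the compiled packages against the current NumPy:

pip install --upgrade --force-reinstall numpy pandas scikit-learn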
from transformers.models.bitnet.modular_bitnet import BitNetForCausalLM
import torch
from transformers import AutoTokenizer

model_id = "microsoft/bitnet-b1.58-2B-4T"
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = BitNetForCausalLM.from_pretrained(
    model_id,
    torch_dtype=torch.bfloat16,
    trust_remote_code=True,
)
Try this together with !pip install git+https://github.com/shumingma/transformers.git. You may also need !pip install accelerate.
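To sanity-check the loaded model, a short generation along these lines should work (a sketch using the standard transformers chat-template API; the prompt is made up):

# Build a chat prompt and generate a short reply
messages = [{"role": "user", "content": "What does 1.58-bit quantization mean?"}]
input_ids = tokenizer.apply_chat_template(
    messages, add_generation_prompt=True, return_tensors="pt"
).to(model.device)

outputs = model.generate(input_ids, max_new_tokens=64)
print(tokenizer.decode(outputs[0][input_ids.shape[-1]:], skip_special_tokens=True))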