Skip to content

Commit ef7350d

Browse files
Isotr0py and lk-chen
authored and committed
[Bugfix] Fix failing transformers dynamic module resolving with spawn multiproc method (vllm-project#13403)
Signed-off-by: Isotr0py <2037008807@qq.com> Signed-off-by: Linkun Chen <github@lkchen.net>
1 parent 4931ea0 commit ef7350d

File tree

1 file changed

+17
-8
lines changed
  • vllm/model_executor/model_loader

1 file changed

+17
-8
lines changed

vllm/model_executor/model_loader/utils.py

+17-8
Original file line number | Diff line number | Diff line change
@@ -47,22 +47,31 @@ def resolve_transformers_fallback(model_config: ModelConfig,
4747
for i, arch in enumerate(architectures):
4848
if arch == "TransformersModel":
4949
continue
50-
custom_module = None
51-
auto_map = getattr(model_config.hf_config, "auto_map", None)
52-
if auto_map is not None and "AutoModel" in auto_map:
53-
custom_module = get_class_from_dynamic_module(
54-
model_config.hf_config.auto_map["AutoModel"],
55-
model_config.model)
50+
auto_map: dict[str, str] = getattr(model_config.hf_config, "auto_map",
51+
None) or dict()
52+
# Make sure that config class is always initialized before model class,
53+
# otherwise the model class won't be able to access the config class,
54+
# the expected auto_map should have correct order like:
55+
# "auto_map": {
56+
# "AutoConfig": "<your-repo-name>--<config-name>",
57+
# "AutoModel": "<your-repo-name>--<config-name>",
58+
# "AutoModelFor<Task>": "<your-repo-name>--<config-name>",
59+
# },
60+
auto_modules = {
61+
name: get_class_from_dynamic_module(module, model_config.model)
62+
for name, module in sorted(auto_map.items(), key=lambda x: x[0])
63+
}
64+
custom_model_module = auto_modules.get("AutoModel")
5665
# TODO(Isotr0py): Further clean up these raises.
5766
# perhaps handled them in _ModelRegistry._raise_for_unsupported?
5867
if model_config.model_impl == ModelImpl.TRANSFORMERS:
59-
if not is_transformers_impl_compatible(arch, custom_module):
68+
if not is_transformers_impl_compatible(arch, custom_model_module):
6069
raise ValueError(
6170
f"The Transformers implementation of {arch} is not "
6271
"compatible with vLLM.")
6372
architectures[i] = "TransformersModel"
6473
if model_config.model_impl == ModelImpl.AUTO:
65-
if not is_transformers_impl_compatible(arch, custom_module):
74+
if not is_transformers_impl_compatible(arch, custom_model_module):
6675
raise ValueError(
6776
f"{arch} has no vLLM implementation and the Transformers "
6877
"implementation is not compatible with vLLM.")

0 commit comments

Comments
 (0)