[tpu][misc] fix typo (vllm-project#8260)
youkaichao authored and dtrifiro committed Sep 12, 2024
1 parent b17d45c commit bbee60b
Showing 3 changed files with 5 additions and 5 deletions.
4 changes: 2 additions & 2 deletions tests/compile/test_wrapper.py
@@ -2,7 +2,7 @@

 import torch

-from vllm.compilation.wrapper import TorchCompileWrapperWithCustomDispacther
+from vllm.compilation.wrapper import TorchCompileWrapperWithCustomDispatcher


 class MyMod(torch.nn.Module):
@@ -13,7 +13,7 @@ def forward(self, x: torch.Tensor, cache: Optional[torch.Tensor] = None):
         return x * 2


-class MyWrapper(TorchCompileWrapperWithCustomDispacther):
+class MyWrapper(TorchCompileWrapperWithCustomDispatcher):

     def __init__(self, model):
         self.model = model
2 changes: 1 addition & 1 deletion vllm/compilation/wrapper.py
@@ -10,7 +10,7 @@
 import vllm.envs as envs


-class TorchCompileWrapperWithCustomDispacther:
+class TorchCompileWrapperWithCustomDispatcher:
     """
     A wrapper class for torch.compile, with a custom dispatch logic.
     Subclasses should:
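
The docstring above describes the renamed class as "a wrapper class for torch.compile, with a custom dispatch logic." Below is a minimal, self-contained sketch of that general pattern, for illustration only: it is not vLLM's TorchCompileWrapperWithCustomDispatcher, and the name CompileWrapperSketch and the use_custom_dispatch flag are assumptions introduced here. The MyMod module mirrors the one in tests/compile/test_wrapper.py above.

# Generic illustration only: a small wrapper that compiles a callable with
# torch.compile and dispatches between the compiled and eager versions.
# CompileWrapperSketch and use_custom_dispatch are hypothetical names; they
# do not mirror vLLM's TorchCompileWrapperWithCustomDispatcher API.
from typing import Optional

import torch


class CompileWrapperSketch:

    def __init__(self, callable_fn, use_custom_dispatch: bool = True):
        self.eager_callable = callable_fn
        self.compiled_callable = torch.compile(callable_fn)
        self.use_custom_dispatch = use_custom_dispatch

    def __call__(self, *args, **kwargs):
        # Custom dispatch: run the compiled path when enabled, otherwise
        # fall back to the original eager callable.
        if self.use_custom_dispatch:
            return self.compiled_callable(*args, **kwargs)
        return self.eager_callable(*args, **kwargs)


class MyMod(torch.nn.Module):

    def forward(self, x: torch.Tensor, cache: Optional[torch.Tensor] = None):
        return x * 2


if __name__ == "__main__":
    mod = MyMod()
    wrapper = CompileWrapperSketch(mod.forward)
    print(wrapper(torch.ones(4)))  # tensor([2., 2., 2., 2.]) via the compiled path
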
4 changes: 2 additions & 2 deletions vllm/worker/tpu_model_runner.py
@@ -11,7 +11,7 @@
 import torch_xla.runtime as xr

 from vllm.attention import AttentionMetadata, get_attn_backend
-from vllm.compilation.wrapper import TorchCompileWrapperWithCustomDispacther
+from vllm.compilation.wrapper import TorchCompileWrapperWithCustomDispatcher
 from vllm.config import (CacheConfig, DeviceConfig, LoadConfig, ModelConfig,
                          ParallelConfig, SchedulerConfig)
 from vllm.logger import init_logger
@@ -611,7 +611,7 @@ def _execute_model(*args):
         return [SamplerOutput(sampler_outputs)]


-class ModelWrapper(TorchCompileWrapperWithCustomDispacther):
+class ModelWrapper(TorchCompileWrapperWithCustomDispatcher):

     def __init__(self, model: nn.Module):
         self.model = model
