From 322d2a27d66eb1896272e76640e5dfdec6fb4fcc Mon Sep 17 00:00:00 2001
From: Andy Lo
Date: Sun, 23 Feb 2025 00:51:13 +0000
Subject: [PATCH] [BugFix] Minor: logger import in attention backend (#13706)

Signed-off-by: Andy Lo
---
 vllm/attention/backends/utils.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/vllm/attention/backends/utils.py b/vllm/attention/backends/utils.py
index 5c1f9916..baf01c92 100644
--- a/vllm/attention/backends/utils.py
+++ b/vllm/attention/backends/utils.py
@@ -12,12 +12,12 @@ from vllm import envs
 from vllm.attention import (AttentionMetadata, AttentionMetadataBuilder,
                             AttentionState)
 from vllm.attention.backends.abstract import AttentionType
-from vllm.logger import logging
+from vllm.logger import init_logger
 from vllm.multimodal import MultiModalPlaceholderMap
 from vllm.platforms import current_platform
 from vllm.utils import async_tensor_h2d, make_tensor_with_pad
 
-logger = logging.getLogger(__name__)
+logger = init_logger(__name__)
 
 if TYPE_CHECKING:
     from vllm.worker.model_runner_base import ModelRunnerBase
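
For context, a minimal sketch of the module-level logging pattern this patch restores: the old code pulled the stdlib `logging` module out of `vllm.logger` and created an unconfigured logger, whereas `init_logger` returns a logger wired into vLLM's logging setup. The sketch assumes an installed vLLM checkout; the example warning call is illustrative and not taken from the patched file.

```python
# Corrected pattern used in vllm/attention/backends/utils.py:
# init_logger is vLLM's helper for obtaining a logger that follows the
# project's logging configuration, instead of a bare stdlib logger.
from vllm.logger import init_logger

logger = init_logger(__name__)

# Illustrative usage elsewhere in a backend module (hypothetical message):
logger.warning("Falling back to a slower attention path: %s", "unsupported head size")
```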