From ca5d5d1c0446b526a853a33520b3ebc9fe58f908 Mon Sep 17 00:00:00 2001
From: Chen Zhang
Date: Tue, 14 Jan 2025 20:59:32 +0800
Subject: [PATCH] [Kernel] Revert the API change of Attention.forward (#12038)

Signed-off-by: Chen Zhang
---
 vllm/attention/layer.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/vllm/attention/layer.py b/vllm/attention/layer.py
index c7e7a4d52e5a7..a06db075f334d 100644
--- a/vllm/attention/layer.py
+++ b/vllm/attention/layer.py
@@ -134,8 +134,8 @@ def forward(
         query: torch.Tensor,
         key: torch.Tensor,
         value: torch.Tensor,
-        _kv_cache: torch.Tensor,
-        _attn_metadata: AttentionMetadata,
+        kv_cache: torch.Tensor,
+        attn_metadata: AttentionMetadata,
     ) -> torch.Tensor:
         if self.use_output:
             output = torch.empty_like(query)
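
Note (not part of the patch): the revert restores the public parameter names kv_cache and attn_metadata on Attention.forward, so call sites that pass them by keyword keep working. Below is a minimal sketch of such a call site; the helper name run_attention and the exact import path are assumptions for illustration, not code from this patch.

# Minimal sketch of a caller relying on the restored keyword names.
# run_attention and the import path are illustrative assumptions.
import torch
from vllm.attention import Attention, AttentionMetadata


def run_attention(attn: Attention,
                  query: torch.Tensor,
                  key: torch.Tensor,
                  value: torch.Tensor,
                  kv_cache: torch.Tensor,
                  attn_metadata: AttentionMetadata) -> torch.Tensor:
    # With the leading underscores removed, keyword arguments named
    # kv_cache / attn_metadata bind to the forward() parameters again.
    return attn(query,
                key,
                value,
                kv_cache=kv_cache,
                attn_metadata=attn_metadata)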