
Commit

fixes pre-commit errors
Signed-off-by: Gabriel Marinho <gmarinho@ibm.com>
gmarinho2 committed Feb 7, 2025
1 parent cfbc4b9 commit c4762c7
Showing 1 changed file with 4 additions and 4 deletions.
8 changes: 4 additions & 4 deletions vllm/entrypoints/openai/serving_score.py
@@ -17,8 +17,7 @@
 from vllm.inputs.data import TokensPrompt
 from vllm.logger import init_logger
 from vllm.lora.request import LoRARequest
-from vllm.outputs import (PoolingOutput, PoolingRequestOutput,
-                          ScoringRequestOutput)
+from vllm.outputs import PoolingRequestOutput, ScoringRequestOutput
 from vllm.prompt_adapter.request import PromptAdapterRequest
 from vllm.transformers_utils.tokenizer import (AnyTokenizer, MistralTokenizer,
                                                PreTrainedTokenizer,
@@ -187,12 +186,13 @@ async def _embedding_score(
 
         # Non-streaming response
         final_res_batch: List[Optional[PoolingRequestOutput]] = []
-        embeddings: List[PoolingRequestOutput[PoolingOutput]]
 
         num_embeddings = len(engine_prompts) * 2
 
+        embeddings: List[Optional[PoolingRequestOutput]]
+        embeddings = [None] * num_embeddings
+
         try:
-            embeddings = [None] * num_embeddings
 
             async for i, res in result_generator:
                 embeddings[i] = res
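For context, the annotation change is the most likely source of the pre-commit failure: `embeddings` is pre-filled with `None`, so it has to be typed `List[Optional[PoolingRequestOutput]]` rather than `List[PoolingRequestOutput[PoolingOutput]]` for mypy to accept the assignment, and once `PoolingOutput` is no longer referenced, the leftover import would trip an unused-import lint check. The following is a minimal standalone sketch of the same typing pattern; `FakeOutput` and `collect` are placeholder names invented for illustration, not vLLM code.

    from typing import List, Optional


    class FakeOutput:
        """Placeholder standing in for vLLM's PoolingRequestOutput (illustrative only)."""

        def __init__(self, score: float) -> None:
            self.score = score


    def collect(num_slots: int, results: List[FakeOutput]) -> List[Optional[FakeOutput]]:
        # Pre-allocate the list and annotate it as Optional[...]: slots hold None
        # until the corresponding result arrives, mirroring why the commit changes
        # the annotation before assigning [None] * num_embeddings.
        outputs: List[Optional[FakeOutput]] = [None] * num_slots
        for i, res in enumerate(results):
            outputs[i] = res
        return outputs


    if __name__ == "__main__":
        filled = collect(3, [FakeOutput(0.12), FakeOutput(0.87)])
        print([o.score if o is not None else None for o in filled])
        # [0.12, 0.87, None]

Hoisting the pre-allocation out of the try block also keeps the try focused on consuming the awaited result generator rather than on list construction.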
