Apply suggestions from code review
Co-authored-by: Daniele <[email protected]>
prashantgupta24 and dtrifiro authored Jun 27, 2024
1 parent 5b3fc82 · commit 442e2a7
Showing 1 changed file with 18 additions and 12 deletions.
src/vllm_tgis_adapter/grpc/grpc_server.py: 18 additions & 12 deletions
@@ -20,11 +20,16 @@
 from vllm.engine.async_llm_engine import _AsyncLLMEngine
 from vllm.entrypoints.openai.serving_completion import merge_async_iterators
 from vllm.inputs import TextTokensPrompt
-from vllm.tracing import (
-    contains_trace_headers,
-    extract_trace_headers,
-    log_tracing_disabled_warning,
+try:
+    from vllm.tracing import (
+        contains_trace_headers,
+        extract_trace_headers,
+        log_tracing_disabled_warning,
 )
+except ImportError:
+    _vllm_tracing_available = False
+else:
+    _vllm_tracing_available = True

 from vllm_tgis_adapter.logging import init_logger
 from vllm_tgis_adapter.tgis_utils import logs
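Read on its own, the import change in this hunk is the standard optional-dependency pattern: attempt the import and record whether it succeeded in a module-level flag. A minimal sketch of that pattern, using only the names that appear in the diff (the rest of the module is omitted):

try:
    from vllm.tracing import (
        contains_trace_headers,
        extract_trace_headers,
        log_tracing_disabled_warning,
    )
except ImportError:
    # vllm.tracing could not be imported (e.g. a vllm build without tracing
    # support): remember that tracing is unavailable instead of failing here.
    _vllm_tracing_available = False
else:
    # The import succeeded, so the tracing helpers above are safe to use.
    _vllm_tracing_available = True

Code elsewhere in the module can then branch on _vllm_tracing_available rather than re-attempting the import.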
@@ -229,19 +234,20 @@ async def Generate(
                 prompt=req.text,
                 prompt_token_ids=input_ids,
             )
-            is_tracing_enabled = await self.engine.is_tracing_enabled()
-            headers = dict(context.invocation_metadata())
-            trace_headers = None
-            if is_tracing_enabled:
-                trace_headers = extract_trace_headers(headers)
-            elif contains_trace_headers(headers):
-                log_tracing_disabled_warning()
+            if _vllm_tracing_available:
+                is_tracing_enabled = await self.engine.is_tracing_enabled()
+                headers = dict(context.invocation_metadata())
+                trace_headers = None
+                if is_tracing_enabled:
+                    trace_headers = extract_trace_headers(headers)
+                elif contains_trace_headers(headers):
+                    log_tracing_disabled_warning()
             generators.append(
                 self.engine.generate(
                     inputs=inputs,
                     sampling_params=sampling_params,
                     request_id=f"{request_id}-{i}",
                     trace_headers=trace_headers,
-                    **kwargs,
+                    **adapter_kwargs,
                 ),
             )
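The second hunk uses that flag at the call site: trace headers are only inspected when vllm.tracing imported successfully. A hypothetical helper (not code from this repository) that captures the same gated logic, assuming the flag and tracing imports from the first hunk are in scope:

async def _trace_headers_if_available(engine, invocation_metadata):
    # Hypothetical sketch mirroring the gated block above: return None when the
    # optional vllm.tracing module is unavailable so the caller can always pass
    # the result straight to engine.generate(trace_headers=...).
    if not _vllm_tracing_available:
        return None
    headers = dict(invocation_metadata)
    if await engine.is_tracing_enabled():
        return extract_trace_headers(headers)
    if contains_trace_headers(headers):
        log_tracing_disabled_warning()
    return None

Returning None in the unavailable case matches the previous behaviour, where trace_headers defaulted to None before the headers were inspected.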
