Skip to content

Commit

Permalink
fix test
Browse files — browse the repository at this point in the history
  • Loading branch information
comaniac committed Jul 16, 2024
1 parent b33d9a4 commit 5680452
Showing 1 changed file with 9 additions and 5 deletions.
14 changes: 9 additions & 5 deletions tests/spec_decode/e2e/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -185,7 +185,8 @@ def generator_inner():

# Override logging interval to 0 for spec decode test run to
# log all metrics in time.
if baseline_or_test == "test" and not use_async:
if (baseline_or_test == "test" and not use_async
and llm.llm_engine.log_stats):
for sate_logger in llm.llm_engine.stat_loggers.values():
sate_logger.local_interval = 0
set_random_seed(seed)
Expand Down Expand Up @@ -228,10 +229,13 @@ def get_output_from_llm_generator(
outputs = llm.generate(prompts, sampling_params, use_tqdm=True)
token_ids = [output.outputs[0].token_ids for output in outputs]
tokens = [output.outputs[0].text for output in outputs]
stat_logger = llm.llm_engine.stat_loggers["prometheus"]
acceptance_rate = (
stat_logger.metrics.gauge_spec_decode_draft_acceptance_rate.labels(
**stat_logger.labels)._value.get())

# Fetch acceptance rate if logging is enabled.
if llm.llm_engine.log_stats:
stat_logger = llm.llm_engine.stat_loggers["prometheus"]
acceptance_rate = (stat_logger.metrics.
gauge_spec_decode_draft_acceptance_rate.labels(
**stat_logger.labels)._value.get())
del llm

return tokens, token_ids, acceptance_rate
Expand Down

0 comments on commit 5680452

Please sign in to comment.