Fix model_worker error (#2404)
wangxiyuan authored Sep 12, 2023
1 parent 11b05bb commit a8088ba
Showing 1 changed file with 4 additions and 4 deletions.
fastchat/serve/inference.py (8 changes: 4 additions & 4 deletions)
@@ -108,6 +108,7 @@ def generate_stream(
 
     past_key_values = out = None
     sent_interrupt = False
+    finish_reason = None
     for i in range(max_new_tokens):
         if i == 0:  # prefill
             if model.config.is_encoder_decoder:
@@ -240,12 +241,11 @@ def generate_stream(
             break
 
     # Finish stream event, which contains finish reason
-    if i == max_new_tokens - 1:
+    else:
         finish_reason = "length"
-    elif stopped:
+
+    if stopped:
         finish_reason = "stop"
-    else:
-        finish_reason = None
 
     yield {
         "text": output,
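The fix leans on Python's for/else construct: the else branch runs only when the loop ends without hitting break, so "length" is reported without reading the loop variable i after the loop, and finish_reason is pre-initialized as an extra safeguard. The following is a minimal sketch of the resulting control flow, not FastChat's actual generate_stream code; check_stop is a hypothetical stand-in for the real stop-token/stop-string checks.

# Minimal sketch of the fixed finish_reason control flow (illustrative only).
# `check_stop` is a hypothetical stand-in for the real stopping logic.
def sketch_finish_reason(max_new_tokens, check_stop):
    finish_reason = None  # initialized up front, as the first hunk adds
    stopped = False

    for i in range(max_new_tokens):
        stopped = check_stop(i)
        if stopped:
            break
    else:
        # Runs only when the loop ends without a break, i.e. the token
        # budget was exhausted; never reads `i` outside the loop.
        finish_reason = "length"

    if stopped:
        finish_reason = "stop"

    return finish_reason


print(sketch_finish_reason(3, lambda i: i == 1))  # -> stop   (early break)
print(sketch_finish_reason(3, lambda i: False))   # -> length (budget used up)
print(sketch_finish_reason(0, lambda i: False))   # -> length (for/else also runs for an empty range)

Unlike the pre-fix check `if i == max_new_tokens - 1:`, this layout never reads i after the loop, so it stays well defined even when the loop body never runs (e.g. max_new_tokens == 0).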
