Merge branch 'main' into chat-embeddings-api
DarkLight1337 committed Oct 31, 2024
2 parents 8c8ee96 + 890ca36 commit c3ba030
Showing 2 changed files with 2 additions and 2 deletions.

vllm/entrypoints/openai/api_server.py (2 changes: 1 addition & 1 deletion)
@@ -545,7 +545,7 @@ async def run_server(args, **uvicorn_kwargs) -> None:
     # This avoids race conditions with ray.
     # see https://github.com/vllm-project/vllm/issues/8204
     sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-    sock.bind((args.host, args.port))
+    sock.bind(("", args.port))

     def signal_handler(*_) -> None:
         # Interrupt server on sigterm while initializing
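For context, a minimal standalone sketch (not vLLM's actual startup code) of the binding behavior the new line relies on: passing an empty host string to socket.bind listens on all IPv4 interfaces (INADDR_ANY), so the early bind that sidesteps the ray race condition no longer depends on args.host. The port value and the SO_REUSEADDR option below are assumptions added for illustration only.

    import socket

    port = 8000  # assumed port, for illustration only

    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)  # assumed option, not from the diff
    sock.bind(("", port))      # "" is equivalent to binding 0.0.0.0 (all interfaces)
    sock.listen()
    print(sock.getsockname())  # -> ('0.0.0.0', 8000)
    sock.close()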

vllm/entrypoints/openai/cli_args.py (2 changes: 1 addition & 1 deletion)
@@ -77,7 +77,7 @@ def __call__(
def make_arg_parser(parser: FlexibleArgumentParser) -> FlexibleArgumentParser:
    parser.add_argument("--host",
                        type=nullable_str,
-                       default="0.0.0.0",
+                       default=None,
                        help="host name")
    parser.add_argument("--port", type=int, default=8000, help="port number")
    parser.add_argument(
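A short sketch of the resulting CLI behavior, assuming nullable_str simply maps an empty or literal "None" value to None (its actual definition is not part of this diff): with default=None, omitting --host leaves args.host unset, which pairs with the api_server.py change above to bind on all interfaces via ("", args.port).

    import argparse

    def nullable_str(val: str):
        # assumed behavior of the helper: treat "" or "None" as no value
        return None if (not val or val == "None") else val

    parser = argparse.ArgumentParser()
    parser.add_argument("--host", type=nullable_str, default=None, help="host name")
    parser.add_argument("--port", type=int, default=8000, help="port number")

    print(parser.parse_args([]))                       # Namespace(host=None, port=8000)
    print(parser.parse_args(["--host", "127.0.0.1"]))  # Namespace(host='127.0.0.1', port=8000)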
