
Commit

[Bugfix] Fix encoding_format in examples/openai_embedding_client.py (v…
CatherineSue authored Jul 25, 2024
1 parent 0310029 commit 316a41a
Showing 2 changed files with 8 additions and 6 deletions.
13 changes: 8 additions & 5 deletions examples/openai_embedding_client.py
@@ -13,11 +13,14 @@
 models = client.models.list()
 model = models.data[0].id
 
-responses = client.embeddings.create(input=[
-    "Hello my name is",
-    "The best thing about vLLM is that it supports many different models"
-],
-                                      model=model)
+responses = client.embeddings.create(
+    input=[
+        "Hello my name is",
+        "The best thing about vLLM is that it supports many different models"
+    ],
+    model=model,
+    encoding_format="float",
+)
 
 for data in responses.data:
     print(data.embedding)  # list of float of len 4096
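
The encoding_format argument added above selects how embeddings are serialized in the response: "float" returns plain lists of floats (as the print comment expects), while "base64" typically returns a base64-encoded buffer of float32 values. A minimal decoding sketch for the base64 case, assuming a packed little-endian float32 payload (the helper name and the payload-layout assumption are illustrative, not part of this commit):

import base64
import struct

def decode_base64_embedding(b64_payload: str) -> list[float]:
    # Hypothetical helper: turn a base64-encoded embedding payload back into floats.
    # Assumes the payload packs little-endian float32 values.
    raw = base64.b64decode(b64_payload)
    return list(struct.unpack(f"<{len(raw) // 4}f", raw))
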
1 change: 0 additions & 1 deletion tests/entrypoints/openai/test_embedding.py
@@ -18,7 +18,6 @@ def embedding_server():
         "--enforce-eager",
         "--max-model-len",
         "8192",
-        "--enforce-eager",
     ]
 
     with RemoteOpenAIServer(EMBEDDING_MODEL_NAME, args) as remote_server:
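
For reference, the example script touched in this commit talks to a server like the one this fixture launches; a condensed version of that client flow, assuming the server is reachable at http://localhost:8000/v1 with a dummy API key (both values are assumptions for illustration):

from openai import OpenAI

# Base URL and API key are assumptions for a locally running vLLM OpenAI-compatible server.
client = OpenAI(base_url="http://localhost:8000/v1", api_key="EMPTY")

model = client.models.list().data[0].id
response = client.embeddings.create(
    input=["Hello my name is"],
    model=model,
    encoding_format="float",
)
print(len(response.data[0].embedding))  # embedding dimensionality, e.g. 4096
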
