change name to match REST
magdyksaleh committed Feb 21, 2024
1 parent bd4137a commit 6f00aae
Showing 1 changed file with 12 additions and 12 deletions.
24 changes: 12 additions & 12 deletions clients/python/lorax/client.py
@@ -81,7 +81,7 @@ def generate(
watermark: bool = False,
response_format: Optional[Union[Dict[str, Any], ResponseFormat]] = None,
decoder_input_details: bool = False,
- generated_token_details: bool = True,
+ details: bool = True,
) -> Response:
"""
Given a prompt, generate the following text
@@ -139,7 +139,7 @@ def generate(
```
decoder_input_details (`bool`):
Return the decoder input token logprobs and ids
- generated_token_details (`bool`):
+ details (`bool`):
Return the token logprobs and ids for generated tokens
Returns:
@@ -152,7 +152,7 @@ def generate(
merged_adapters=merged_adapters,
api_token=api_token,
best_of=best_of,
- details=generated_token_details,
+ details=details,
do_sample=do_sample,
max_new_tokens=max_new_tokens,
repetition_penalty=repetition_penalty,
@@ -205,7 +205,7 @@ def generate_stream(
typical_p: Optional[float] = None,
watermark: bool = False,
response_format: Optional[Union[Dict[str, Any], ResponseFormat]] = None,
- generated_token_details: bool = True,
+ details: bool = True,
) -> Iterator[StreamResponse]:
"""
Given a prompt, generate the following stream of tokens
@@ -259,7 +259,7 @@ def generate_stream(
}
}
```
- generated_token_details (`bool`):
+ details (`bool`):
Return the token logprobs and ids for generated tokens
Returns:
@@ -272,7 +272,7 @@ def generate_stream(
merged_adapters=merged_adapters,
api_token=api_token,
best_of=None,
- details=generated_token_details,
+ details=details,
decoder_input_details=False,
do_sample=do_sample,
max_new_tokens=max_new_tokens,
@@ -390,7 +390,7 @@ async def generate(
watermark: bool = False,
response_format: Optional[Union[Dict[str, Any], ResponseFormat]] = None,
decoder_input_details: bool = False,
- generated_token_details: bool = True,
+ details: bool = True,
) -> Response:
"""
Given a prompt, generate the following text asynchronously
@@ -448,7 +448,7 @@ async def generate(
```
decoder_input_details (`bool`):
Return the decoder input token logprobs and ids
- generated_token_details (`bool`):
+ details (`bool`):
Return the token logprobs and ids for generated tokens
Returns:
@@ -461,7 +461,7 @@ async def generate(
merged_adapters=merged_adapters,
api_token=api_token,
best_of=best_of,
- details=generated_token_details,
+ details=details,
decoder_input_details=decoder_input_details,
do_sample=do_sample,
max_new_tokens=max_new_tokens,
@@ -509,7 +509,7 @@ async def generate_stream(
typical_p: Optional[float] = None,
watermark: bool = False,
response_format: Optional[Union[Dict[str, Any], ResponseFormat]] = None,
- generated_token_details: bool = True,
+ details: bool = True,
) -> AsyncIterator[StreamResponse]:
"""
Given a prompt, generate the following stream of tokens asynchronously
@@ -563,7 +563,7 @@ async def generate_stream(
}
}
```
- generated_token_details (`bool`):
+ details (`bool`):
Return the token logprobs and ids for generated tokens
Returns:
@@ -576,7 +576,7 @@ async def generate_stream(
merged_adapters=merged_adapters,
api_token=api_token,
best_of=None,
- details=generated_token_details,
+ details=details,
decoder_input_details=False,
do_sample=do_sample,
max_new_tokens=max_new_tokens,
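The commit only renames the keyword argument from `generated_token_details` to `details` across the four generation methods; behavior is unchanged. For reference, a minimal usage sketch of the renamed flag on the synchronous client follows. The `lorax` import path, the localhost URL, the example prompt, and the `token.text` / `token.special` attribute access are assumptions for illustration, not part of this commit.

```python
# Hypothetical usage sketch: `details=True` replaces the old
# `generated_token_details=True`. Endpoint URL and prompt are placeholders.
from lorax import Client  # assumed import path for the sync client

client = Client("http://127.0.0.1:8080")

# Non-streaming call; `details=True` requests per-token logprobs and ids.
response = client.generate("What is deep learning?", max_new_tokens=32, details=True)
print(response.generated_text)

# Streaming call; the same renamed flag applies to generate_stream.
text = ""
for chunk in client.generate_stream("What is deep learning?", max_new_tokens=32, details=True):
    if not chunk.token.special:  # assumes StreamResponse exposes token.text / token.special
        text += chunk.token.text
print(text)
```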

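A similar hedged sketch for the async variants changed above, assuming an `AsyncClient` that mirrors the synchronous client; the class name, URL, and response attributes are likewise assumptions rather than part of this commit.

```python
# Hypothetical async usage sketch; class name, URL, and attributes are assumptions.
import asyncio

from lorax import AsyncClient  # assumed async counterpart of Client


async def main() -> None:
    client = AsyncClient("http://127.0.0.1:8080")

    # Async non-streaming call with the renamed `details` flag.
    response = await client.generate("What is deep learning?", max_new_tokens=32, details=True)
    print(response.generated_text)

    # Async streaming call; `details=True` replaces `generated_token_details=True`.
    async for chunk in client.generate_stream("What is deep learning?", max_new_tokens=32, details=True):
        print(chunk.token.text, end="")


asyncio.run(main())
```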