diff --git a/llama-index-integrations/llms/llama-index-llms-mistralai/llama_index/llms/mistralai/base.py b/llama-index-integrations/llms/llama-index-llms-mistralai/llama_index/llms/mistralai/base.py index 59b24cf945c62..2496cc2e0c7e0 100644 --- a/llama-index-integrations/llms/llama-index-llms-mistralai/llama_index/llms/mistralai/base.py +++ b/llama-index-integrations/llms/llama-index-llms-mistralai/llama_index/llms/mistralai/base.py @@ -94,6 +94,10 @@ class MistralAI(FunctionCallingLLM): # otherwise it will lookup MISTRAL_API_KEY from your env variable # llm = MistralAI(api_key="") + # You can specify a custom endpoint by passing the `endpoint` parameter or setting + # MISTRAL_ENDPOINT in your environment + # llm = MistralAI(endpoint="") + llm = MistralAI() resp = llm.complete("Paul Graham is ") @@ -163,7 +167,9 @@ def __init__( ) # Use the custom endpoint if provided, otherwise default to DEFAULT_MISTRALAI_ENDPOINT - endpoint = endpoint or DEFAULT_MISTRALAI_ENDPOINT + endpoint = get_from_param_or_env( + "endpoint", endpoint, "MISTRAL_ENDPOINT", DEFAULT_MISTRALAI_ENDPOINT + ) super().__init__( temperature=temperature, diff --git a/llama-index-integrations/llms/llama-index-llms-mistralai/pyproject.toml b/llama-index-integrations/llms/llama-index-llms-mistralai/pyproject.toml index 985addcebdbfb..85e85ff4c96c2 100644 --- a/llama-index-integrations/llms/llama-index-llms-mistralai/pyproject.toml +++ b/llama-index-integrations/llms/llama-index-llms-mistralai/pyproject.toml @@ -27,7 +27,7 @@ exclude = ["**/BUILD"] license = "MIT" name = "llama-index-llms-mistralai" readme = "README.md" -version = "0.2.4" +version = "0.2.5" [tool.poetry.dependencies] python = ">=3.9,<4.0"