Add docstrings for GoogleAIGeminiGenerator and GoogleAIGeminiChatGenerator
silvanocerza committed Jan 5, 2024
1 parent 3e314ea commit 1bc67db
Showing 2 changed files with 170 additions and 3 deletions.
103 changes: 102 additions & 1 deletion integrations/google_ai/src/google_ai_haystack/generators/chat/gemini.py

@@ -15,6 +15,83 @@

@component
class GoogleAIGeminiChatGenerator:
"""
GoogleAIGeminiChatGenerator is a multi-modal generator supporting Gemini via Google Makersuite.
Sample usage:
```python
from haystack.dataclasses.chat_message import ChatMessage
from google_ai_haystack.generators.chat.gemini import GoogleAIGeminiChatGenerator

gemini_chat = GoogleAIGeminiChatGenerator(model="gemini-pro", api_key="<MY_API_KEY>")

messages = [ChatMessage.from_user("What is the most interesting thing you know?")]
res = gemini_chat.run(messages=messages)
for reply in res["replies"]:
    print(reply.content)

messages += res["replies"] + [ChatMessage.from_user("Tell me more about it")]
res = gemini_chat.run(messages=messages)
for reply in res["replies"]:
    print(reply.content)
```
This is a more advanced usage that also uses function calls:
```python
from haystack.dataclasses.chat_message import ChatMessage
from google.ai.generativelanguage import FunctionDeclaration, Tool
from google_ai_haystack.generators.chat.gemini import GoogleAIGeminiChatGenerator

# Example function to get the current weather
def get_current_weather(location: str, unit: str = "celsius") -> str:
    # Call a weather API and return some text
    ...

# Define the function interface so that Gemini can call it
get_current_weather_func = FunctionDeclaration(
    name="get_current_weather",
    description="Get the current weather in a given location",
    parameters={
        "type": "object",
        "properties": {
            "location": {"type": "string", "description": "The city and state, e.g. San Francisco, CA"},
            "unit": {
                "type": "string",
                "enum": [
                    "celsius",
                    "fahrenheit",
                ],
            },
        },
        "required": ["location"],
    },
)
tool = Tool(function_declarations=[get_current_weather_func])

gemini_chat = GoogleAIGeminiChatGenerator(model="gemini-pro", api_key="<MY_API_KEY>", tools=[tool])

messages = [ChatMessage.from_user("What is the temperature in celsius in Berlin?")]
res = gemini_chat.run(messages=messages)

weather = get_current_weather(**res["replies"][0].content)
messages += res["replies"] + [ChatMessage.from_function(content=weather, name="get_current_weather")]
res = gemini_chat.run(messages=messages)
for reply in res["replies"]:
    print(reply.content)
```
Input:
- **messages** A list of ChatMessage objects.
Output:
- **replies** A list of ChatMessage objects containing one or more replies from the model.
"""

def __init__(
self,
*,
@@ -25,7 +102,31 @@ def __init__(
tools: Optional[List[Tool]] = None,
):
"""
Initialize a GoogleAIGeminiChatGenerator instance.
If `api_key` is `None` it will use the `GOOGLE_API_KEY` env variable for authentication.
To get an API key, visit: https://makersuite.google.com
It supports the following models:
* `gemini-pro`
* `gemini-pro-vision`
* `gemini-ultra`
:param api_key: Google Makersuite API key, defaults to None
:param model: Name of the model to use, defaults to "gemini-pro-vision"
:param generation_config: The generation config to use, defaults to None.
Can either be a GenerationConfig object or a dictionary of parameters.
Accepted fields are:
- temperature
- top_p
- top_k
- candidate_count
- max_output_tokens
- stop_sequences
:param safety_settings: The safety settings to use, defaults to None.
A dictionary of HarmCategory to HarmBlockThreshold; see the example below.
:param tools: The tools to use, defaults to None.
A list of Tool objects that can be used to modify the generation process.
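A minimal sketch of passing both parameters (not part of the original example; it assumes the
`HarmCategory` and `HarmBlockThreshold` enums are importable from `google.generativeai.types`):
```python
from google.generativeai.types import HarmBlockThreshold, HarmCategory
from google_ai_haystack.generators.chat.gemini import GoogleAIGeminiChatGenerator

# Dictionary form of the generation config, using only the accepted fields listed above
generation_config = {
    "temperature": 0.2,
    "top_p": 0.9,
    "max_output_tokens": 512,
    "stop_sequences": ["Observation:"],
}

# Map each harm category to the threshold at which candidates are blocked
safety_settings = {
    HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT: HarmBlockThreshold.BLOCK_ONLY_HIGH,
}

gemini_chat = GoogleAIGeminiChatGenerator(
    model="gemini-pro",
    api_key="<MY_API_KEY>",
    generation_config=generation_config,
    safety_settings=safety_settings,
)
```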
"""

# Authenticate, if api_key is None it will use the GOOGLE_API_KEY env variable
70 changes: 68 additions & 2 deletions integrations/google_ai/src/google_ai_haystack/generators/gemini.py
@@ -15,6 +15,49 @@

@component
class GoogleAIGeminiGenerator:
"""
GoogleAIGeminiGenerator is a multi-modal generator supporting Gemini via Google Makersuite.
Sample usage:
```python
from google_ai_haystack.generators.gemini import GoogleAIGeminiGenerator

gemini = GoogleAIGeminiGenerator(model="gemini-pro", api_key="<MY_API_KEY>")
res = gemini.run(parts=["What is the most interesting thing you know?"])
for answer in res["answers"]:
    print(answer)
```
This is a more advanced usage that also uses text and images as input:
```python
import requests
from haystack.dataclasses.byte_stream import ByteStream
from google_ai_haystack.generators.gemini import GoogleAIGeminiGenerator

URLS = [
    "https://raw.githubusercontent.com/silvanocerza/robots/main/robot1.jpg",
    "https://raw.githubusercontent.com/silvanocerza/robots/main/robot2.jpg",
    "https://raw.githubusercontent.com/silvanocerza/robots/main/robot3.jpg",
    "https://raw.githubusercontent.com/silvanocerza/robots/main/robot4.jpg",
]
images = [
    ByteStream(data=requests.get(url).content, mime_type="image/jpeg")
    for url in URLS
]

gemini = GoogleAIGeminiGenerator(model="gemini-pro-vision", api_key="<MY_API_KEY>")
result = gemini.run(parts=["What can you tell me about these robots?", *images])
for answer in result["answers"]:
    print(answer)
```
Input:
- **parts** A heterogeneous list of strings, ByteStream or Part objects.
Output:
- **answers** A list of strings or dictionaries with function calls (see the sketch below).
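Because an answer can be either a string or a function-call dictionary, a caller may want to
branch on its type. A minimal, assumed sketch (the exact dictionary shape depends on the tools
passed to the generator):
```python
res = gemini.run(parts=["What is the weather in Berlin?"])
for answer in res["answers"]:
    if isinstance(answer, dict):
        # Assumed shape: the arguments of the function call requested by the model
        print("Function call arguments:", answer)
    else:
        print(answer)
```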
"""

def __init__(
self,
*,
@@ -25,9 +68,32 @@ def __init__(
tools: Optional[List[Tool]] = None,
):
"""
Multi modal generator using Gemini model via Makersuite
"""
Initialize a GoogleAIGeminiGenerator instance.
If `api_key` is `None` it will use the `GOOGLE_API_KEY` env variable for authentication.
To get an API key, visit: https://makersuite.google.com
It supports the following models:
* `gemini-pro`
* `gemini-pro-vision`
* `gemini-ultra`
:param api_key: Google Makersuite API key, defaults to None
:param model: Name of the model to use, defaults to "gemini-pro-vision"
:param generation_config: The generation config to use, defaults to None.
Can either be a GenerationConfig object or a dictionary of parameters; see the example below.
Accepted fields are:
- temperature
- top_p
- top_k
- candidate_count
- max_output_tokens
- stop_sequences
:param safety_settings: The safety settings to use, defaults to None.
A dictionary of HarmCategory to HarmBlockThreshold.
:param tools: The tools to use, defaults to None.
A list of Tool objects that can be used to modify the generation process.
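As a sketch (not in the original commit), the generation config could also be passed as a typed
object instead of a dictionary, assuming `GenerationConfig` is importable from
`google.generativeai.types`:
```python
from google.generativeai.types import GenerationConfig
from google_ai_haystack.generators.gemini import GoogleAIGeminiGenerator

# Typed config object carrying the same fields accepted in dictionary form
config = GenerationConfig(
    temperature=0.2,
    top_p=0.9,
    max_output_tokens=256,
)

gemini = GoogleAIGeminiGenerator(
    model="gemini-pro",
    api_key="<MY_API_KEY>",
    generation_config=config,
)
res = gemini.run(parts=["In one sentence, what is a Large Language Model?"])
for answer in res["answers"]:
    print(answer)
```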
"""
# Authenticate, if api_key is None it will use the GOOGLE_API_KEY env variable
genai.configure(api_key=api_key)
