Skip to content

Commit

Permalink
Fix Task Memory sensitive data example (#1087)
Browse files Browse the repository at this point in the history
  • Loading branch information
collindutter authored Aug 20, 2024
1 parent 1d84b7f commit d6dde68
Show file tree
Hide file tree
Showing 3 changed files with 15 additions and 20 deletions.
20 changes: 5 additions & 15 deletions docs/griptape-framework/structures/src/task_memory_6.py
Original file line number Diff line number Diff line change
@@ -1,37 +1,27 @@
"""Example: keep sensitive page content out of the orchestrating LLM.

GPT-4 orchestrates the Tools and stores tool output in Task Memory
(``off_prompt=True``); Anthropic's Claude 3 Haiku (via Amazon Bedrock)
queries the raw content, so GPT-4 never sees the page contents — only
references to them in Task Memory.
"""

from griptape.artifacts import TextArtifact
from griptape.configs import Defaults
from griptape.configs.drivers import OpenAiDriversConfig
from griptape.drivers import (
    AmazonBedrockPromptDriver,
    LocalVectorStoreDriver,
    OpenAiChatPromptDriver,
    OpenAiEmbeddingDriver,
)
from griptape.memory import TaskMemory
from griptape.memory.task.storage import TextArtifactStorage
from griptape.structures import Agent
from griptape.tools import FileManagerTool, QueryTool, WebScraperTool

# GPT-4 is the default prompt driver used for orchestration.
# (Previously this assignment was duplicated verbatim; one copy suffices.)
Defaults.drivers_config = OpenAiDriversConfig(
    prompt_driver=OpenAiChatPromptDriver(model="gpt-4"),
)

vector_store_driver = LocalVectorStoreDriver(embedding_driver=OpenAiEmbeddingDriver())

agent = Agent(
    task_memory=TaskMemory(
        artifact_storages={
            TextArtifact: TextArtifactStorage(
                vector_store_driver=vector_store_driver,
            )
        }
    ),
    tools=[
        WebScraperTool(off_prompt=True),
        # Claude 3 Haiku queries the raw content; because off_prompt=True,
        # even the query results stay in Task Memory rather than going back
        # to GPT-4's prompt.
        QueryTool(
            off_prompt=True,
            prompt_driver=AmazonBedrockPromptDriver(model="anthropic.claude-3-haiku-20240307-v1:0"),
        ),
        FileManagerTool(off_prompt=True),
    ],
)
Expand Down
4 changes: 2 additions & 2 deletions docs/griptape-framework/structures/task-memory.md
Original file line number Diff line number Diff line change
Expand Up @@ -198,8 +198,8 @@ And now we get the expected output:

Because Task Memory splits up the storage and retrieval of data, you can use different models for each step.

Here is an example where we use GPT-4 to orchestrate the Tools and store the data in Task Memory, and Amazon Bedrock's Titan model to query the raw content.
In this example, GPT-4 _never_ sees the contents of the page, only that it was stored in Task Memory. Even the query results generated by the Titan model are stored in Task Memory so that the `FileManagerTool` can save the results to disk without GPT-4 ever seeing them.
Here is an example where we use GPT-4 to orchestrate the Tools and store the data in Task Memory, and Anthropic's Claude 3 Haiku model to query the raw content.
In this example, GPT-4 _never_ sees the contents of the page, only that it was stored in Task Memory. Even the query results generated by the Haiku model are stored in Task Memory so that the `FileManagerTool` can save the results to disk without GPT-4 ever seeing them.

```python
--8<-- "docs/griptape-framework/structures/src/task_memory_6.py"
Expand Down
11 changes: 8 additions & 3 deletions griptape/tools/query/tool.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,7 @@
from __future__ import annotations

from typing import TYPE_CHECKING

from attrs import Factory, define, field
from schema import Literal, Or, Schema

Expand All @@ -15,19 +17,22 @@
from griptape.tools.base_tool import BaseTool
from griptape.utils.decorators import activity

if TYPE_CHECKING:
from griptape.drivers.prompt.base_prompt_driver import BasePromptDriver


@define(kw_only=True)
class QueryTool(BaseTool, RuleMixin):
"""Tool for performing a query against data."""

prompt_driver: BasePromptDriver = field(default=Factory(lambda: Defaults.drivers_config.prompt_driver))

_rag_engine: RagEngine = field(
default=Factory(
lambda self: RagEngine(
response_stage=ResponseRagStage(
response_modules=[
PromptResponseRagModule(
prompt_driver=Defaults.drivers_config.prompt_driver, rulesets=self.rulesets
)
PromptResponseRagModule(prompt_driver=self.prompt_driver, rulesets=self.rulesets)
],
),
),
Expand Down

0 comments on commit d6dde68

Please sign in to comment.