Skip to content

Commit

Permalink
Sentry Support
Browse files Browse the repository at this point in the history
  • Loading branch information
VVoruganti committed Sep 19, 2023
1 parent 14920bb commit 010778f
Show file tree
Hide file tree
Showing 7 changed files with 88 additions and 2 deletions.
5 changes: 5 additions & 0 deletions .env.template
Original file line number Diff line number Diff line change
Expand Up @@ -13,3 +13,8 @@ OPENAI_API_BASE=
OPENAI_API_VERSION=
OPENAI_API_DEPLOYMENT_NAME=

# Sentry
SENTRY_DSN=
SENTRY_ENVIRONMENT=
SENTRY_RELEASE=

6 changes: 6 additions & 0 deletions cache.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,26 +8,32 @@
from typing import List
from langchain.schema import BaseMessage, Document
from pydantic import BaseModel
import sentry_sdk

class Conversation:
    """Wrapper class for storing contexts between channels.

    An object is used so the context is passed by reference, which avoids
    additional cache hits when the same conversation is shared between
    call sites.
    """

    @sentry_sdk.trace
    def __init__(self, mediator: SupabaseMediator, user_id: str, conversation_id: str | None = None, location_id: str = "web"):
        """Bind a conversation to its persistence layer.

        BUG FIX: the previous signature used ``conversation_id: str = str(uuid.uuid4())``.
        A default argument is evaluated once at definition time, so every
        Conversation constructed without an explicit id silently shared the
        SAME UUID. Using a ``None`` sentinel generates a fresh id per instance.

        Args:
            mediator: Supabase-backed storage for messages and vectors.
            user_id: Owner of this conversation.
            conversation_id: Existing conversation id; a new UUID is generated
                per instance when omitted.
            location_id: Channel the conversation originates from (default "web").
        """
        self.mediator: SupabaseMediator = mediator
        self.user_id: str = user_id
        self.conversation_id: str = conversation_id if conversation_id is not None else str(uuid.uuid4())
        self.location_id: str = location_id

    @sentry_sdk.trace
    def add_message(self, message_type: str, message: BaseMessage) -> None:
        """Persist one message of the given type to this conversation."""
        self.mediator.add_message(self.conversation_id, self.user_id, message_type, message)

    @sentry_sdk.trace
    def messages(self, message_type: str) -> List[BaseMessage]:
        """Return the stored messages of the given type for this conversation."""
        return self.mediator.messages(self.conversation_id, self.user_id, message_type)

    # vector DB fn
    @sentry_sdk.trace
    def add_texts(self, texts: List[str]) -> None:
        """Embed and store raw texts, tagged with this conversation/user for later filtering."""
        # One metadata dict per text; iterate texts directly instead of range(len(...)).
        metadatas = [{"conversation_id": self.conversation_id, "user_id": self.user_id} for _ in texts]
        self.mediator.vector_table.add_texts(texts, metadatas)

    # vector DB fn
    @sentry_sdk.trace
    def similarity_search(self, query: str, match_count: int = 5) -> List[Document]:
        """Return up to `match_count` stored documents similar to `query`, scoped to this user."""
        return self.mediator.vector_table.similarity_search(query=query, k=match_count, filter={"user_id": self.user_id})
11 changes: 11 additions & 0 deletions chain.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
from collections.abc import AsyncIterator
from cache import Conversation
from typing import List
import sentry_sdk

load_dotenv()

Expand Down Expand Up @@ -40,6 +41,7 @@ def __init__(self) -> None:
pass

@classmethod
@sentry_sdk.trace
def think(cls, cache: Conversation, input: str):
"""Generate Bloom's thought on the user."""
# load message history
Expand All @@ -58,6 +60,7 @@ def think(cls, cache: Conversation, input: str):
)

@classmethod
@sentry_sdk.trace
def revise_thought(cls, cache: Conversation, input: str, thought: str):
"""Revise Bloom's thought about the user with retrieved personal data"""

Expand All @@ -80,6 +83,7 @@ def revise_thought(cls, cache: Conversation, input: str, thought: str):
)

@classmethod
@sentry_sdk.trace
def respond(cls, cache: Conversation, thought: str, input: str):
"""Generate Bloom's response to the user."""
response_prompt = ChatPromptTemplate.from_messages([
Expand All @@ -97,6 +101,7 @@ def respond(cls, cache: Conversation, thought: str, input: str):
)

@classmethod
@sentry_sdk.trace
async def think_user_prediction(cls, cache: Conversation, input: str):
"""Generate a thought about what the user is going to say"""

Expand All @@ -117,6 +122,7 @@ async def think_user_prediction(cls, cache: Conversation, input: str):
return user_prediction_thought.content

@classmethod
@sentry_sdk.trace
async def revise_user_prediction_thought(cls, cache: Conversation, user_prediction_thought: str, input: str):
"""Revise the thought about what the user is going to say based on retrieval of VoE facts"""

Expand All @@ -142,6 +148,7 @@ async def revise_user_prediction_thought(cls, cache: Conversation, user_predicti


@classmethod
@sentry_sdk.trace
async def think_violation_of_expectation(cls, cache: Conversation, inp: str, user_prediction_thought_revision: str) -> None:
"""Assess whether expectation was violated, derive and store facts"""

Expand All @@ -161,6 +168,7 @@ async def think_violation_of_expectation(cls, cache: Conversation, inp: str, use
return voe_thought.content

@classmethod
@sentry_sdk.trace
async def violation_of_expectation(cls, cache: Conversation, inp: str, user_prediction_thought_revision: str, voe_thought: str) -> None:
"""Assess whether expectation was violated, derive and store facts"""

Expand All @@ -185,6 +193,7 @@ async def violation_of_expectation(cls, cache: Conversation, inp: str, user_pred
return facts

@classmethod
@sentry_sdk.trace
async def check_voe_list(cls, cache: Conversation, facts: List[str]):
"""Filter the facts to just new ones"""

Expand Down Expand Up @@ -218,6 +227,7 @@ async def check_voe_list(cls, cache: Conversation, facts: List[str]):
cache.add_texts(data)

@classmethod
@sentry_sdk.trace
async def chat(cls, cache: Conversation, inp: str ) -> tuple[str, str]:
# VoE has to happen first. If there's user prediction history, derive and store fact(s)
if cache.messages('user_prediction_thought_revision'):
Expand Down Expand Up @@ -246,6 +256,7 @@ async def chat(cls, cache: Conversation, inp: str ) -> tuple[str, str]:
return thought, response

@classmethod
@sentry_sdk.trace
async def stream(cls, cache: Conversation, inp: str ):
# VoE has to happen first. If there's user prediction history, derive and store fact(s)
try:
Expand Down
9 changes: 9 additions & 0 deletions main.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,9 +8,18 @@
from mediator import SupabaseMediator
from cache import Conversation

import os
from dotenv import load_dotenv
import sentry_sdk
load_dotenv()

# Sample traces/profiles at 20% in production to limit overhead; sample
# everything in other environments (local/dev) for full visibility.
rate = 0.2 if os.getenv("SENTRY_ENVIRONMENT") == "production" else 1.0
sentry_sdk.init(
    # ROBUSTNESS FIX: os.environ['SENTRY_DSN'] raised KeyError at import time
    # when the variable was unset. os.getenv returns None instead, and the
    # Sentry SDK treats a missing/empty DSN as "monitoring disabled", which is
    # the desired behavior for local development without a .env file.
    dsn=os.getenv("SENTRY_DSN"),
    traces_sample_rate=rate,
    profiles_sample_rate=rate,
)

app = FastAPI()

MEDIATOR = SupabaseMediator()
Expand Down
9 changes: 8 additions & 1 deletion mediator.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
from langchain.embeddings.base import Embeddings
from langchain.embeddings.openai import OpenAIEmbeddings
import uuid
import urllib
import sentry_sdk
import os
from dotenv import load_dotenv
# Supabase for Postgres Management
Expand All @@ -16,6 +16,8 @@
load_dotenv()

class SupabaseMediator:

@sentry_sdk.trace
def __init__(self):
self.supabase: Client = create_client(os.environ['SUPABASE_URL'], os.environ['SUPABASE_KEY'])
self.memory_table = os.environ["MEMORY_TABLE"]
Expand All @@ -42,27 +44,32 @@ def __init__(self):

# self.vector_table.add_documents(seed_docs)

@sentry_sdk.trace
def messages(self, session_id: str, user_id: str, message_type: str) -> List[BaseMessage]: # type: ignore
    """Fetch the 10 most recent messages of `message_type` for a session/user, oldest first."""
    query = (
        self.supabase.table(self.memory_table)
        .select("message")
        .eq("session_id", session_id)
        .eq("user_id", user_id)
        .eq("message_type", message_type)
        .order("id", desc=True)
        .limit(10)
    )
    result = query.execute()
    raw_records = [row["message"] for row in result.data]
    # Rows come back newest-first; reverse so callers see chronological order.
    return list(reversed(messages_from_dict(raw_records)))

@sentry_sdk.trace
def add_message(self, session_id: str, user_id: str, message_type: str, message: BaseMessage) -> None:
    """Insert one serialized message row for the given session/user."""
    row = {
        "session_id": session_id,
        "user_id": user_id,
        "message_type": message_type,
        "message": _message_to_dict(message),
    }
    self.supabase.table(self.memory_table).insert(row).execute()

@sentry_sdk.trace
def conversations(self, location_id: str, user_id: str) -> str | None:
    """Return the id of the user's active conversation at `location_id`, or None if absent."""
    query = (
        self.supabase.table(self.conversation_table)
        .select("id")
        .eq("location_id", location_id)
        .eq("user_id", user_id)
        .eq("isActive", True)
        .maybe_single()
    )
    response = query.execute()
    # Guard clause: a falsy response means no active conversation exists.
    if not response:
        return None
    # NOTE(review): assumes response.data is the matched row dict — verify against client version.
    return response.data["id"]

@sentry_sdk.trace
def add_conversation(self, location_id: str, user_id: str) -> str:
    """Create a new conversation row and return its freshly generated id."""
    new_id = str(uuid.uuid4())
    row = {"id": new_id, "user_id": user_id, "location_id": location_id}
    self.supabase.table(self.conversation_table).insert(row).execute()
    return new_id

@sentry_sdk.trace
def delete_conversation(self, conversation_id: str) -> None:
    """Soft-delete: mark the conversation inactive rather than removing the row."""
    payload = {"isActive": False}
    self.supabase.table(self.conversation_table).update(payload).eq("id", conversation_id).execute()

49 changes: 48 additions & 1 deletion poetry.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@ openai = "^0.28.0"
pydantic = "^2.3.0"
python-dotenv = "^1.0.0"
uvicorn = "^0.23.2"
sentry-sdk = {extras = ["fastapi"], version = "^1.31.0"}


[build-system]
Expand Down

0 comments on commit 010778f

Please sign in to comment.