Honchofication (#141)
* Basic Bloom

* Working VOE

* Remove comments and unused imports

* Checkpoint commit: restructuring API with new Honcho

* chore: application now builds

* Working Conversation Turns

* Metacognition steps use metamessages

* Move to Azure

* Working state
VVoruganti committed Aug 28, 2024
1 parent cb489b5 commit a955768
Showing 31 changed files with 12,025 additions and 1,992 deletions.
2 changes: 2 additions & 0 deletions .gitignore
@@ -72,3 +72,5 @@ www/.vercel
 www/*.tsbuildinfo
 www/next-env.d.ts
 .vercel
+
+.yarn/
11 changes: 11 additions & 0 deletions CHANGELOG.md
@@ -5,6 +5,17 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/)
 and this project adheres to [Semantic Versioning](http://semver.org/).


+## [0.6.0]
+
+### Added
+
+- Honcho
+
+### Changed
+
+- Using mirascope instead of langchain for a smaller package size
+
+
 ## [0.5.2] - 2023-12-20

 ### Fixed
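The changelog entry above swaps langchain's chain/prompt stack for mirascope's lighter typed call classes. As a rough sketch of what that call style looks like, assuming the mirascope 0.x-era API (OpenAICall, prompt_template, and OpenAICallParams; the exact names and model are assumptions, not taken from this commit):

from mirascope.openai import OpenAICall, OpenAICallParams

class Greeting(OpenAICall):
    # The {name} placeholder in the template is filled from the typed field below.
    prompt_template = "Say hello to {name}."
    name: str
    call_params = OpenAICallParams(model="gpt-4o-mini")

response = Greeting(name="Bloom").call()  # 0.x also exposes call_async() for async code
print(response.content)

Because prompts are plain classes rather than chained runnables, the dependency footprint stays small, which is the stated motivation for the swap.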
86 changes: 36 additions & 50 deletions agent/cache.py
@@ -3,44 +3,10 @@
 in OrderedDict data structure.
 """
 from collections import OrderedDict
-from .mediator import SupabaseMediator
-import uuid
-from typing import List, Tuple, Dict
-from langchain.schema import BaseMessage
-import sentry_sdk
-
-class Conversation:
-    "Wrapper Class for storing contexts between channels. Using an object to pass by reference avoid additional cache hits"
-    @sentry_sdk.trace
-    def __init__(self, mediator: SupabaseMediator, user_id: str, conversation_id: str = str(uuid.uuid4()), location_id: str = "web", metadata: Dict = {}):
-        self.mediator: SupabaseMediator = mediator
-        self.user_id: str = user_id
-        self.conversation_id: str = conversation_id
-        self.location_id: str = location_id
-        self.metadata: Dict = metadata
-
-    @sentry_sdk.trace
-    def add_message(self, message_type: str, message: BaseMessage,) -> None:
-        self.mediator.add_message(self.conversation_id, self.user_id, message_type, message)
-
-    @sentry_sdk.trace
-    def messages(self, message_type: str, limit: Tuple[bool, int | None] = (True, 10)) -> List[BaseMessage]:
-        return self.mediator.messages(self.conversation_id, self.user_id, message_type, limit=limit)
-
-    @sentry_sdk.trace
-    def delete(self) -> None:
-        self.mediator.delete_conversation(self.conversation_id)
-
-    @sentry_sdk.trace
-    def messages(self, message_type: str, limit: Tuple[bool, int | None] = (True, 10)) -> List[BaseMessage]:
-        return self.mediator.messages(self.conversation_id, self.user_id, message_type, limit=limit)
-
-    @sentry_sdk.trace
-    def restart(self) -> None:
-        self.delete()
-        representation = self.mediator.add_conversation(user_id=self.user_id, location_id=self.location_id)
-        self.conversation_id: str = representation["id"]
-        self.metadata = representation["metadata"]
+from typing import Dict
+import sentry_sdk
+from honcho import AsyncSession, AsyncHoncho as Honcho


 class LRUCache:
@@ -59,7 +25,7 @@ def get(self, key: str):
         return self.cache[key]

     @sentry_sdk.trace
-    def put(self, key: str, value: Conversation):
+    def put(self, key: str, value: AsyncSession):
         if key in self.cache:
             # If the key already exists, move it to the end and update the value
             self.cache.move_to_end(key)
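For context, the put method above follows the textbook OrderedDict LRU recipe: recently used keys live at the end, and writes evict from the front once capacity is reached. A minimal self-contained sketch of the same pattern (names are illustrative, not from this commit):

from collections import OrderedDict

class SimpleLRU:
    """Minimal LRU cache: most recently used keys sit at the end of the OrderedDict."""

    def __init__(self, capacity: int):
        self.capacity = capacity
        self.cache: OrderedDict = OrderedDict()

    def get(self, key: str):
        if key not in self.cache:
            return None
        self.cache.move_to_end(key)  # mark as most recently used
        return self.cache[key]

    def put(self, key: str, value) -> None:
        if key in self.cache:
            self.cache.move_to_end(key)
        elif len(self.cache) >= self.capacity:
            self.cache.popitem(last=False)  # evict the least recently used entry
        self.cache[key] = value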
@@ -71,47 +37,67 @@ def put(self, key: str, value: Conversation):
         # Add or update the key-value pair at the end of the OrderedDict
         self.cache[key] = value

+
 class LayeredLRUCache:
     """A Conversation LRU Cache that bases keys on the location of a conversation. The assumption is that the location is a unique identifier"""

     @sentry_sdk.trace
-    def __init__(self, capacity, mediator: SupabaseMediator):
+    def __init__(self, capacity: int, honcho: Honcho):  # TODO add type indicator
+        # def __init__(self, capacity, mediator: SupabaseMediator):
         self.capacity = capacity
         self.memory_cache = OrderedDict()
-        self.mediator = mediator
+        self.honcho = honcho
+        # self.mediator = mediator

     @sentry_sdk.trace
-    def get(self, user_id: str, location_id: str) -> None | Conversation:
-        key = location_id+user_id
+    def get(self, user_id: str, location_id: str) -> None | AsyncSession:
+        key = location_id + user_id
         if key in self.memory_cache:
             return self.memory_cache[key]
-        conversation = self.mediator.conversations(location_id=location_id, user_id=user_id)
+        conversation = self.mediator.conversations(
+            location_id=location_id, user_id=user_id
+        )
         if conversation:
             conversation_id = conversation[0]["id"]
             metadata = conversation[0]["metadata"]
             # Add the conversation data to the memory_cache
             if len(self.memory_cache) >= self.capacity:
                 self.memory_cache.popitem(last=False)
-            self.memory_cache[key] = Conversation(self.mediator, location_id=location_id, user_id=user_id, conversation_id=conversation_id, metadata=metadata)
+            self.memory_cache[key] = AsyncSession(
+                location_id=location_id,
+                user_id=user_id,
+                conversation_id=conversation_id,
+                metadata=metadata,
+            )
             return self.memory_cache[key]
         return None

     @sentry_sdk.trace
-    def put(self, user_id: str, location_id: str) -> Conversation:
+    def put(self, user_id: str, location_id: str) -> AsyncSession:
         # Add the conversation data to the postgres via the mediator
-        representation: Dict = self.mediator.add_conversation(location_id=location_id, user_id=user_id)
+        representation: Dict = self.mediator.add_conversation(
+            location_id=location_id, user_id=user_id
+        )
         conversation_id = representation["id"]
         metadata = representation["metadata"]
-        key: str = location_id+user_id
+        key: str = location_id + user_id

         if len(self.memory_cache) >= self.capacity:
             # Remove the least recently used item from the memory cache
             self.memory_cache.popitem(last=False)
-        self.memory_cache[key] = Conversation(self.mediator, location_id=location_id, user_id=user_id, conversation_id=conversation_id, metadata=metadata)
+        self.memory_cache[key] = AsyncSession(
+            location_id=location_id,
+            user_id=user_id,
+            conversation_id=conversation_id,
+            metadata=metadata,
+        )
         return self.memory_cache[key]

     @sentry_sdk.trace
-    def get_or_create(self, user_id: str, location_id: str, restart: bool = False) -> Conversation:
-        cache: None | Conversation = self.get(location_id=location_id, user_id=user_id)
+    def get_or_create(
+        self, user_id: str, location_id: str, restart: bool = False
+    ) -> AsyncSession:
+        cache: None | AsyncSession = self.get(location_id=location_id, user_id=user_id)
         if cache is None:
             cache = self.put(location_id=location_id, user_id=user_id)
         elif restart:
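As the diff shows, LayeredLRUCache keys its in-memory layer on location_id + user_id and falls through to storage on a miss. A hypothetical call site, sketched using only the methods visible above (the Honcho client construction itself is an assumption, not part of this diff):

from honcho import AsyncHoncho as Honcho  # same import alias as in the diff above

honcho = Honcho()  # hypothetical: see Honcho's own docs for real constructor arguments
cache = LayeredLRUCache(capacity=50, honcho=honcho)

# Hit: returns the cached AsyncSession for this (location, user) pair.
# Miss: put() creates, caches, and returns a new session. Note that in this
# checkpoint the miss path still calls self.mediator, which __init__ no longer
# sets, so the Supabase-to-Honcho migration is only partially complete here.
session = cache.get_or_create(user_id="user-123", location_id="web")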