From 5341ebf7e27f946000f00244aa06bc6ec7a60346 Mon Sep 17 00:00:00 2001 From: hyusap Date: Wed, 31 Jan 2024 11:12:48 -0500 Subject: [PATCH 01/46] =?UTF-8?q?=F0=9F=A7=AA=20asyncify=20tests?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- sdk/poetry.lock | 120 ++++++++++++++++++++++++++++++++++++++- sdk/pyproject.toml | 2 + sdk/tests/test_honcho.py | 48 +++++++++------- 3 files changed, 148 insertions(+), 22 deletions(-) diff --git a/sdk/poetry.lock b/sdk/poetry.lock index 40c58fa..50f59b6 100644 --- a/sdk/poetry.lock +++ b/sdk/poetry.lock @@ -1,5 +1,27 @@ # This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. +[[package]] +name = "anyio" +version = "4.2.0" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +optional = false +python-versions = ">=3.8" +files = [ + {file = "anyio-4.2.0-py3-none-any.whl", hash = "sha256:745843b39e829e108e518c489b31dc757de7d2131d53fac32bd8df268227bfee"}, + {file = "anyio-4.2.0.tar.gz", hash = "sha256:e1875bb4b4e2de1669f4bc7869b6d3f54231cdced71605e6e64c9be77e3be50f"}, +] + +[package.dependencies] +exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} +idna = ">=2.8" +sniffio = ">=1.1" +typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} + +[package.extras] +doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] +trio = ["trio (>=0.23)"] + [[package]] name = "certifi" version = "2023.11.17" @@ -135,6 +157,62 @@ files = [ [package.extras] test = ["pytest (>=6)"] +[[package]] +name = "h11" +version = "0.14.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +optional = false +python-versions = ">=3.7" +files = [ + {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, +] + +[[package]] +name = "httpcore" +version = "1.0.2" +description = "A minimal low-level HTTP client." +optional = false +python-versions = ">=3.8" +files = [ + {file = "httpcore-1.0.2-py3-none-any.whl", hash = "sha256:096cc05bca73b8e459a1fc3dcf585148f63e534eae4339559c9b8a8d6399acc7"}, + {file = "httpcore-1.0.2.tar.gz", hash = "sha256:9fc092e4799b26174648e54b74ed5f683132a464e95643b226e00c2ed2fa6535"}, +] + +[package.dependencies] +certifi = "*" +h11 = ">=0.13,<0.15" + +[package.extras] +asyncio = ["anyio (>=4.0,<5.0)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +trio = ["trio (>=0.22.0,<0.23.0)"] + +[[package]] +name = "httpx" +version = "0.26.0" +description = "The next generation HTTP client." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "httpx-0.26.0-py3-none-any.whl", hash = "sha256:8915f5a3627c4d47b73e8202457cb28f1266982d1159bd5779d86a80c0eab1cd"}, + {file = "httpx-0.26.0.tar.gz", hash = "sha256:451b55c30d5185ea6b23c2c793abf9bb237d2a7dfb901ced6ff69ad37ec1dfaf"}, +] + +[package.dependencies] +anyio = "*" +certifi = "*" +httpcore = "==1.*" +idna = "*" +sniffio = "*" + +[package.extras] +brotli = ["brotli", "brotlicffi"] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] + [[package]] name = "idna" version = "3.6" @@ -205,6 +283,24 @@ tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} [package.extras] testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] +[[package]] +name = "pytest-asyncio" +version = "0.23.4" +description = "Pytest support for asyncio" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-asyncio-0.23.4.tar.gz", hash = "sha256:2143d9d9375bf372a73260e4114541485e84fca350b0b6b92674ca56ff5f7ea2"}, + {file = "pytest_asyncio-0.23.4-py3-none-any.whl", hash = "sha256:b0079dfac14b60cd1ce4691fbfb1748fe939db7d0234b5aba97197d10fbe0fef"}, +] + +[package.dependencies] +pytest = ">=7.0.0,<8" + +[package.extras] +docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] +testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] + [[package]] name = "requests" version = "2.31.0" @@ -226,6 +322,17 @@ urllib3 = ">=1.21.1,<3" socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] +[[package]] +name = "sniffio" +version = "1.3.0" +description = "Sniff out which async library your code is running under" +optional = false +python-versions = ">=3.7" +files = [ + {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, + {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, +] + [[package]] name = "tomli" version = "2.0.1" @@ -237,6 +344,17 @@ files = [ {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] +[[package]] +name = "typing-extensions" +version = "4.9.0" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, + {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, +] + [[package]] name = "urllib3" version = "2.1.0" @@ -256,4 +374,4 @@ zstd = ["zstandard (>=0.18.0)"] [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "0aa81b5a08a235ff5f275db8e6d0265d97fd1e350567862ee6b5afddaaf15620" +content-hash = "d5dbd21023598b83062e7705f329579b9175f4da4db66559ea84399246cfcc25" diff --git a/sdk/pyproject.toml b/sdk/pyproject.toml index fcf44d4..5f9c596 100644 --- a/sdk/pyproject.toml +++ b/sdk/pyproject.toml @@ -10,9 +10,11 @@ packages = [{include = "honcho"}] [tool.poetry.dependencies] python = "^3.10" requests = "^2.31.0" +httpx = "^0.26.0" [tool.poetry.group.test.dependencies] pytest = "^7.4.4" +pytest-asyncio = "^0.23.4" [build-system] requires = ["poetry-core"] diff --git a/sdk/tests/test_honcho.py b/sdk/tests/test_honcho.py index 9abfb29..714c6a9 100644 --- 
a/sdk/tests/test_honcho.py +++ b/sdk/tests/test_honcho.py @@ -1,58 +1,64 @@ +import pytest from honcho import Client from uuid import uuid1 -def test_session_creation_retrieval(): +@pytest.mark.asyncio +async def test_session_creation_retrieval(): client = Client("http://localhost:8000") user_id = str(uuid1()) - created_session = client.create_session(user_id) - retrieved_session = client.get_session(user_id, created_session.id) + created_session = await client.create_session(user_id) + retrieved_session = await client.get_session(user_id, created_session.id) assert retrieved_session.id == created_session.id assert retrieved_session.is_active == True assert retrieved_session.location_id == "default" assert retrieved_session.session_data == {} -def test_session_multiple_retrieval(): +@pytest.mark.asyncio +async def test_session_multiple_retrieval(): client = Client("http://localhost:8000") user_id = str(uuid1()) - created_session_1 = client.create_session(user_id) - created_session_2 = client.create_session(user_id) - retrieved_sessions = client.get_sessions(user_id) + created_session_1 = await client.create_session(user_id) + created_session_2 = await client.create_session(user_id) + retrieved_sessions = await client.get_sessions(user_id) assert len(retrieved_sessions) == 2 assert retrieved_sessions[0].id == created_session_1.id assert retrieved_sessions[1].id == created_session_2.id -def test_session_update(): +@pytest.mark.asyncio +async def test_session_update(): user_id = str(uuid1()) client = Client("http://localhost:8000") - created_session = client.create_session(user_id) - assert created_session.update({"foo": "bar"}) - retrieved_session = client.get_session(user_id, created_session.id) + created_session = await client.create_session(user_id) + assert await created_session.update({"foo": "bar"}) + retrieved_session = await client.get_session(user_id, created_session.id) assert retrieved_session.session_data == {"foo": "bar"} -def test_session_deletion(): +@pytest.mark.asyncio +async def test_session_deletion(): user_id = str(uuid1()) client = Client("http://localhost:8000") - created_session = client.create_session(user_id) + created_session = await client.create_session(user_id) assert created_session.is_active == True - created_session.delete() + await created_session.delete() assert created_session.is_active == False - retrieved_session = client.get_session(user_id, created_session.id) + retrieved_session = await client.get_session(user_id, created_session.id) assert retrieved_session.is_active == False assert retrieved_session.id == created_session.id -def test_messages(): +@pytest.mark.asyncio +async def test_messages(): user_id = str(uuid1()) client = Client("http://localhost:8000") - created_session = client.create_session(user_id) - created_session.create_message(is_user=True, content="Hello") - created_session.create_message(is_user=False, content="Hi") - retrieved_session = client.get_session(user_id, created_session.id) - messages = retrieved_session.get_messages() + created_session = await client.create_session(user_id) + await created_session.create_message(is_user=True, content="Hello") + await created_session.create_message(is_user=False, content="Hi") + retrieved_session = await client.get_session(user_id, created_session.id) + messages = await retrieved_session.get_messages() assert len(messages) == 2 user_message, ai_message = messages assert user_message.content == "Hello" From 942137ff66861f893b6afb3775e3f87ee41fcc5f Mon Sep 17 00:00:00 2001 From: hyusap Date: 
Wed, 31 Jan 2024 11:14:42 -0500 Subject: [PATCH 02/46] =?UTF-8?q?=E2=9C=A8=20asyncify=20client?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- sdk/honcho/client.py | 32 +++++++++++++++++--------------- 1 file changed, 17 insertions(+), 15 deletions(-) diff --git a/sdk/honcho/client.py b/sdk/honcho/client.py index 7ac0d42..fcdf6e0 100644 --- a/sdk/honcho/client.py +++ b/sdk/honcho/client.py @@ -1,14 +1,15 @@ import json from typing import Dict -import requests +import httpx class Client: def __init__(self, base_url): """Constructor for Client""" self.base_url = base_url # Base URL for the instance of the Honcho API + self.client = httpx.AsyncClient() - def get_session(self, user_id: str, session_id: int): + async def get_session(self, user_id: str, session_id: int): """Get a specific session for a user by ID Args: @@ -20,7 +21,7 @@ def get_session(self, user_id: str, session_id: int): """ url = f"{self.base_url}/users/{user_id}/sessions/{session_id}" - response = requests.get(url) + response = await self.client.get(url) data = response.json() return Session( client=self, @@ -31,7 +32,7 @@ def get_session(self, user_id: str, session_id: int): session_data=data["session_data"], ) - def get_sessions(self, user_id: str, location_id: str | None = None): + async def get_sessions(self, user_id: str, location_id: str | None = None): """Return sessions associated with a user Args: @@ -45,7 +46,7 @@ def get_sessions(self, user_id: str, location_id: str | None = None): url = f"{self.base_url}/users/{user_id}/sessions" + ( f"?location_id={location_id}" if location_id else "" ) - response = requests.get(url) + response = await self.client.get(url) return [ Session( client=self, @@ -58,7 +59,7 @@ def get_sessions(self, user_id: str, location_id: str | None = None): for session in response.json() ] - def create_session( + async def create_session( self, user_id: str, location_id: str = "default", session_data: Dict = {} ): """Create a session for a user @@ -74,7 +75,7 @@ def create_session( """ data = {"location_id": location_id, "session_data": session_data} url = f"{self.base_url}/users/{user_id}/sessions" - response = requests.post(url, json=data) + response = await self.client.post(url, json=data) data = response.json() return Session( self, @@ -98,6 +99,7 @@ def __init__( ): """Constructor for Session""" self.base_url = client.base_url + self.client = client.client self.id = id self.user_id = user_id self.location_id = location_id @@ -113,7 +115,7 @@ def __str__(self): def is_active(self): return self._is_active - def create_message(self, is_user: bool, content: str): + async def create_message(self, is_user: bool, content: str): """Adds a message to the session Args: @@ -128,11 +130,11 @@ def create_message(self, is_user: bool, content: str): raise Exception("Session is inactive") data = {"is_user": is_user, "content": content} url = f"{self.base_url}/users/{self.user_id}/sessions/{self.id}/messages" - response = requests.post(url, json=data) + response = await self.client.post(url, json=data) data = response.json() return Message(self, id=data["id"], is_user=is_user, content=content) - def get_messages(self): + async def get_messages(self): """Get all messages for a session Args: @@ -144,7 +146,7 @@ def get_messages(self): """ url = f"{self.base_url}/users/{self.user_id}/sessions/{self.id}/messages" - response = requests.get(url) + response = await self.client.get(url) data = response.json() return [ Message( @@ -156,7 +158,7 @@ def 
get_messages(self): for message in data ] - def update(self, session_data: Dict): + async def update(self, session_data: Dict): """Update the metadata of a session Args: @@ -168,15 +170,15 @@ def update(self, session_data: Dict): """ info = {"session_data": session_data} url = f"{self.base_url}/users/{self.user_id}/sessions/{self.id}" - response = requests.put(url, json=info) + response = await self.client.put(url, json=info) success = response.status_code < 400 self.session_data = session_data return success - def delete(self): + async def delete(self): """Delete a session by marking it as inactive""" url = f"{self.base_url}/users/{self.user_id}/sessions/{self.id}" - response = requests.delete(url) + response = await self.client.delete(url) self._is_active = False From 387fb666f2d2a622740d0a5cb8217745ca46556a Mon Sep 17 00:00:00 2001 From: Vineeth Voruganti <13438633+VVoruganti@users.noreply.github.com> Date: Tue, 6 Feb 2024 14:29:10 -0800 Subject: [PATCH 03/46] Basic Test for Page based pagination --- api/poetry.lock | 36 +++++++++- api/pyproject.toml | 1 + api/src/crud.py | 12 ++-- api/src/main.py | 23 ++++--- api/src/schemas.py | 2 +- sdk/honcho/__init__.py | 2 +- sdk/honcho/client.py | 137 +++++++++++++++++++++++++++++++-------- sdk/tests/test_honcho.py | 70 ++++++++++++++++++-- 8 files changed, 236 insertions(+), 47 deletions(-) diff --git a/api/poetry.lock b/api/poetry.lock index 1fdafca..252389c 100644 --- a/api/poetry.lock +++ b/api/poetry.lock @@ -118,6 +118,40 @@ typing-extensions = ">=4.8.0" [package.extras] all = ["email-validator (>=2.0.0)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.5)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"] +[[package]] +name = "fastapi-pagination" +version = "0.12.14" +description = "FastAPI pagination" +category = "main" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "fastapi_pagination-0.12.14-py3-none-any.whl", hash = "sha256:59b6c5626b1d21c610da333c7a586d625f6c81d8fa26267a4b598aae736f6753"}, + {file = "fastapi_pagination-0.12.14.tar.gz", hash = "sha256:4148694b1e170055eea0a5e691dbc640c4bf55eb0086cf11d14b164c35660559"}, +] + +[package.dependencies] +fastapi = ">=0.93.0" +pydantic = ">=1.9.1" +typing-extensions = ">=4.8.0,<5.0.0" + +[package.extras] +all = ["SQLAlchemy (>=1.3.20)", "asyncpg (>=0.24.0)", "beanie (>=1.11.9,<2.0.0)", "bunnet (>=1.1.0,<2.0.0)", "databases (>=0.6.0)", "django (<5.0.0)", "mongoengine (>=0.23.1,<0.28.0)", "motor (>=2.5.1,<4.0.0)", "orm (>=0.3.1)", "ormar (>=0.11.2)", "piccolo (>=0.89,<0.122)", "pony (>=0.7.16,<0.8.0)", "scylla-driver (>=3.25.6,<4.0.0)", "sqlakeyset (>=2.0.1680321678,<3.0.0)", "sqlmodel (>=0.0.8,<0.0.15)", "tortoise-orm (>=0.16.18,<0.21.0)"] +asyncpg = ["SQLAlchemy (>=1.3.20)", "asyncpg (>=0.24.0)"] +beanie = ["beanie (>=1.11.9,<2.0.0)"] +bunnet = ["bunnet (>=1.1.0,<2.0.0)"] +databases = ["databases (>=0.6.0)"] +django = ["databases (>=0.6.0)", "django (<5.0.0)"] +mongoengine = ["mongoengine (>=0.23.1,<0.28.0)"] +motor = ["motor (>=2.5.1,<4.0.0)"] +orm = ["databases (>=0.6.0)", "orm (>=0.3.1)"] +ormar = ["ormar (>=0.11.2)"] +piccolo = ["piccolo (>=0.89,<0.122)"] +scylla-driver = ["scylla-driver (>=3.25.6,<4.0.0)"] +sqlalchemy = ["SQLAlchemy (>=1.3.20)", "sqlakeyset (>=2.0.1680321678,<3.0.0)"] +sqlmodel = ["sqlakeyset (>=2.0.1680321678,<3.0.0)", "sqlmodel (>=0.0.8,<0.0.15)"] 
+tortoise = ["tortoise-orm (>=0.16.18,<0.21.0)"] + [[package]] name = "greenlet" version = "3.0.3" @@ -778,4 +812,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "ff6b70507443b2f4327c19691815f1f7d2814b16e635ff62b7224f30bd81eeab" +content-hash = "115cde0c7dc1de7906b4f17bbdaced3f98a969c33c3b85976a1dc5b0aeece3e2" diff --git a/api/pyproject.toml b/api/pyproject.toml index 08d6a68..bcf746e 100644 --- a/api/pyproject.toml +++ b/api/pyproject.toml @@ -13,6 +13,7 @@ python-dotenv = "^1.0.0" sqlalchemy = "^2.0.25" psycopg2-binary = "^2.9.9" slowapi = "^0.1.8" +fastapi-pagination = "^0.12.14" [build-system] diff --git a/api/src/crud.py b/api/src/crud.py index 36cfabf..b8cfdcb 100644 --- a/api/src/crud.py +++ b/api/src/crud.py @@ -1,7 +1,7 @@ import json from typing import Sequence, Optional -from sqlalchemy import select +from sqlalchemy import select, Select from sqlalchemy.orm import Session from . import models, schemas @@ -18,7 +18,7 @@ def get_session(db: Session, app_id: str, session_id: int, user_id: Optional[str def get_sessions( db: Session, app_id: str, user_id: str, location_id: str | None = None -) -> Sequence[schemas.Session]: +) -> Select: stmt = ( select(models.Session) .where(models.Session.app_id == app_id) @@ -29,7 +29,8 @@ def get_sessions( if location_id is not None: stmt = stmt.where(models.Session.location_id == location_id) - return db.scalars(stmt).all() + return stmt + # return db.scalars(stmt).all() # filtered_by_user = db.query(models.Session).filter( # models.Session.user_id == user_id @@ -110,12 +111,13 @@ def create_message( def get_messages( db: Session, app_id: str, user_id: str, session_id: int -) -> Sequence[schemas.Message]: +) -> Select: session = get_session(db, app_id=app_id, session_id=session_id, user_id=user_id) if session is None: raise ValueError("Session not found or does not belong to user") stmt = select(models.Message).where(models.Message.session_id == session_id) - return db.scalars(stmt).all() + return stmt + # return db.scalars(stmt).all() # return ( # db.query(models.Message) # .filter(models.Message.session_id == session_id) diff --git a/api/src/main.py b/api/src/main.py index a631eeb..f35e953 100644 --- a/api/src/main.py +++ b/api/src/main.py @@ -5,6 +5,9 @@ from slowapi.middleware import SlowAPIMiddleware from slowapi.util import get_remote_address from slowapi.errors import RateLimitExceeded + +from fastapi_pagination import Page, add_pagination +from fastapi_pagination.ext.sqlalchemy import paginate # import uvicorn from . 
import crud, models, schemas @@ -17,7 +20,7 @@ router = APIRouter(prefix="/apps/{app_id}/users/{user_id}") # Create a Limiter instance -limiter = Limiter(key_func=get_remote_address, default_limits=["5/minute"]) +limiter = Limiter(key_func=get_remote_address, default_limits=["100/minute"]) # Add SlowAPI middleware to the application app.state.limiter = limiter @@ -25,6 +28,8 @@ app.add_middleware(SlowAPIMiddleware) +add_pagination(app) + def get_db(): """FastAPI Dependency Generator for Database""" db = SessionLocal() @@ -37,7 +42,7 @@ def get_db(): # Session Routes ######################################################## -@router.get("/sessions", response_model=list[schemas.Session]) +@router.get("/sessions", response_model=Page[schemas.Session]) def get_sessions(request: Request, app_id: str, user_id: str, location_id: Optional[str] = None, db: Session = Depends(get_db)): """Get All Sessions for a User @@ -50,9 +55,11 @@ def get_sessions(request: Request, app_id: str, user_id: str, location_id: Optio list[schemas.Session]: List of Session objects """ - if location_id is not None: - return crud.get_sessions(db, app_id=app_id, user_id=user_id, location_id=location_id) - return crud.get_sessions(db, app_id=app_id, user_id=user_id) + # if location_id is not None: + # return paginate(db, crud.get_sessions(db, app_id=app_id, user_id=user_id, location_id=location_id)) + # return crud.get_sessions(db, app_id=app_id, user_id=user_id, location_id=location_id) + return paginate(db, crud.get_sessions(db, app_id=app_id, user_id=user_id, location_id=location_id)) + # return crud.get_sessions(db, app_id=app_id, user_id=user_id) @router.post("/sessions", response_model=schemas.Session) @@ -186,7 +193,7 @@ def create_message_for_session( @router.get( "/sessions/{session_id}/messages", - response_model=list[schemas.Message] + response_model=Page[schemas.Message] ) def get_messages_for_session( request: Request, @@ -210,13 +217,13 @@ def get_messages_for_session( """ try: - return crud.get_messages(db, app_id=app_id, user_id=user_id, session_id=session_id) + return paginate(db, crud.get_messages(db, app_id=app_id, user_id=user_id, session_id=session_id)) except ValueError: raise HTTPException(status_code=404, detail="Session not found") - app.include_router(router) + ######################################################## # Metacognition Routes ######################################################## diff --git a/api/src/schemas.py b/api/src/schemas.py index e3fed6e..8f51564 100644 --- a/api/src/schemas.py +++ b/api/src/schemas.py @@ -33,7 +33,7 @@ class SessionUpdate(SessionBase): class Session(SessionBase): id: int - messages: list[Message] + # messages: list[Message] is_active: bool user_id: str location_id: str diff --git a/sdk/honcho/__init__.py b/sdk/honcho/__init__.py index 6d85755..fcdf6bc 100644 --- a/sdk/honcho/__init__.py +++ b/sdk/honcho/__init__.py @@ -1,2 +1,2 @@ -from .client import Client +from .client import Client, GetSessionResponse, GetMessageResponse from .cache import LRUCache diff --git a/sdk/honcho/client.py b/sdk/honcho/client.py index 1a41f74..bb87b62 100644 --- a/sdk/honcho/client.py +++ b/sdk/honcho/client.py @@ -1,7 +1,55 @@ import json -from typing import Dict +from typing import Dict, Optional import requests +class GetSessionResponse: + def __init__(self, client, response: Dict): + self.client = client + self.total = response["total"] + self.page = response["page"] + self.page_size = response["size"] + self.pages = response["pages"] + self.sessions = [ + Session( + 
client=client, + id=session["id"], + user_id=session["user_id"], + location_id=session["location_id"], + is_active=session["is_active"], + session_data=session["session_data"], + ) + for session in response["items"] + ] + + def next(self): + if self.page >= self.pages: + return None + user_id = self.sessions[0].user_id + location_id = self.sessions[0].location_id + return self.client.get_sessions(user_id, location_id, self.page + 1, self.page_size) + +class GetMessageResponse: + def __init__(self, session, response: Dict): + self.session = session + self.total = response["total"] + self.page = response["page"] + self.page_size = response["size"] + self.pages = response["pages"] + self.messages = [ + Message( + session=session, + id=message["id"], + is_user=message["is_user"], + content=message["content"], + ) + for message in response["items"] + ] + + def next(self): + if self.page >= self.pages: + return None + return self.session.get_messages((self.page + 1), self.page_size) + class Client: def __init__(self, app_id: str, base_url: str = "https://demo.honcho.dev"): @@ -36,7 +84,7 @@ def get_session(self, user_id: str, session_id: int): session_data=data["session_data"], ) - def get_sessions(self, user_id: str, location_id: str | None = None): + def get_sessions(self, user_id: str, location_id: Optional[str] = None, page: int = 1, page_size: int = 50): """Return sessions associated with a user Args: @@ -47,21 +95,39 @@ def get_sessions(self, user_id: str, location_id: str | None = None): list[Dict]: List of Session objects """ - url = f"{self.common_prefix}/users/{user_id}/sessions" + ( - f"?location_id={location_id}" if location_id else "" + url = f"{self.common_prefix}/users/{user_id}/sessions?page={page}&size={page_size}" + ( + f"&location_id={location_id}" if location_id else "" ) - response = requests.get(url) - return [ - Session( - client=self, - id=session["id"], - user_id=session["user_id"], - location_id=session["location_id"], - is_active=session["is_active"], - session_data=session["session_data"], - ) - for session in response.json() - ] + response = requests.get(url) # TODO add validation and error handling + response.raise_for_status() + return GetSessionResponse(self, response.json()) + # [ + # Session( + # client=self, + # id=session["id"], + # user_id=session["user_id"], + # location_id=session["location_id"], + # is_active=session["is_active"], + # session_data=session["session_data"], + # ) + # for session in response.json() + # ] + + def get_session_generator(self, user_id: str, location_id: Optional[str] = None): + page = 1 + page_size = 50 + get_session_response = self.get_sessions(user_id, location_id, page, page_size) + while True: + # get_session_response = self.get_sessions(user_id, location_id, page, page_size) + for session in get_session_response.sessions: + yield session + + new_sessions = get_session_response.next() + if not new_sessions: + break + + get_session_response = new_sessions + def create_session( self, user_id: str, location_id: str = "default", session_data: Dict = {} @@ -142,7 +208,7 @@ def create_message(self, is_user: bool, content: str): data = response.json() return Message(self, id=data["id"], is_user=is_user, content=content) - def get_messages(self): + def get_messages(self, page: int = 1, page_size: int = 50): """Get all messages for a session Args: @@ -153,18 +219,35 @@ def get_messages(self): list[Dict]: List of Message objects """ - url = f"{self.common_prefix}/users/{self.user_id}/sessions/{self.id}/messages" + url = 
f"{self.common_prefix}/users/{self.user_id}/sessions/{self.id}/messages?page={page}&size={page_size}" response = requests.get(url) + response.raise_for_status() data = response.json() - return [ - Message( - self, - id=message["id"], - is_user=message["is_user"], - content=message["content"], - ) - for message in data - ] + return GetMessageResponse(self, data) + # return [ + # Message( + # self, + # id=message["id"], + # is_user=message["is_user"], + # content=message["content"], + # ) + # for message in data + # ] + def get_messages_generator(self): + page = 1 + page_size = 50 + get_messages_response = self.get_messages(page, page_size) + while True: + # get_session_response = self.get_sessions(user_id, location_id, page, page_size) + for message in get_messages_response.messages: + yield message + + new_messages = get_messages_response.next() + if not new_messages: + break + + get_messages_response = new_messages + def update(self, session_data: Dict): """Update the metadata of a session diff --git a/sdk/tests/test_honcho.py b/sdk/tests/test_honcho.py index 78609ff..646935c 100644 --- a/sdk/tests/test_honcho.py +++ b/sdk/tests/test_honcho.py @@ -1,4 +1,4 @@ -from honcho import Client +from honcho import Client, GetSessionResponse, GetMessageResponse from uuid import uuid1 import pytest @@ -20,7 +20,9 @@ def test_session_multiple_retrieval(): user_id = str(uuid1()) created_session_1 = client.create_session(user_id) created_session_2 = client.create_session(user_id) - retrieved_sessions = client.get_sessions(user_id) + response = client.get_sessions(user_id) + retrieved_sessions = response.sessions + assert len(retrieved_sessions) == 2 assert retrieved_sessions[0].id == created_session_1.id assert retrieved_sessions[1].id == created_session_2.id @@ -57,7 +59,8 @@ def test_messages(): created_session.create_message(is_user=True, content="Hello") created_session.create_message(is_user=False, content="Hi") retrieved_session = client.get_session(user_id, created_session.id) - messages = retrieved_session.get_messages() + response = retrieved_session.get_messages() + messages = response.messages assert len(messages) == 2 user_message, ai_message = messages assert user_message.content == "Hello" @@ -71,7 +74,7 @@ def test_rate_limit(): client = Client(app_id, "http://localhost:8000") created_session = client.create_session(user_id) with pytest.raises(Exception): - for _ in range(10): + for _ in range(105): created_session.create_message(is_user=True, content="Hello") created_session.create_message(is_user=False, content="Hi") @@ -87,3 +90,62 @@ def test_app_id_security(): with pytest.raises(Exception): client_2.get_session(user_id, created_session.id) + +def test_paginated_sessions(): + app_id = str(uuid1()) + user_id = str(uuid1()) + client = Client(app_id, "http://localhost:8000") + for i in range(10): + client.create_session(user_id) + + page = 1 + page_size = 2 + get_session_response = client.get_sessions(user_id, page=page, page_size=page_size) + assert len(get_session_response.sessions) == page_size + + assert get_session_response.pages == 5 + + new_session_response = get_session_response.next() + assert new_session_response is not None + assert isinstance(new_session_response, GetSessionResponse) + assert len(new_session_response.sessions) == page_size + + final_page = client.get_sessions(user_id, page=5, page_size=page_size) + + assert len(final_page.sessions) == 2 + next_page = final_page.next() + assert next_page is None + + +def test_paginated_messages(): + app_id = str(uuid1()) + 
user_id = str(uuid1()) + client = Client(app_id, "http://localhost:8000") + created_session = client.create_session(user_id) + for i in range(10): + created_session.create_message(is_user=True, content="Hello") + created_session.create_message(is_user=False, content="Hi") + + page_size = 7 + get_message_response = created_session.get_messages(page=1, page_size=page_size) + + assert get_message_response is not None + assert isinstance(get_message_response, GetMessageResponse) + assert len(get_message_response.messages) == page_size + + new_message_response = get_message_response.next() + + assert new_message_response is not None + assert isinstance(new_message_response, GetMessageResponse) + assert len(new_message_response.messages) == page_size + + final_page = created_session.get_messages(page=3, page_size=page_size) + + assert len(final_page.messages) == 20 - ((3-1) * 7) + + next_page = final_page.next() + + assert next_page is None + + + From 410f91457184fdb3d4de97c143ec400c9ac7ca92 Mon Sep 17 00:00:00 2001 From: hyusap Date: Tue, 6 Feb 2024 17:34:25 -0500 Subject: [PATCH 04/46] add sync buildstep and client --- scripts/syncronizer.py | 18 ++ sdk/honcho/__init__.py | 3 +- sdk/honcho/client.py | 20 +- sdk/honcho/syncclient.py | 194 ++++++++++++++++++++ sdk/tests/{test_honcho.py => test_async.py} | 12 +- sdk/tests/test_sync.py | 62 +++++++ 6 files changed, 292 insertions(+), 17 deletions(-) create mode 100644 scripts/syncronizer.py create mode 100644 sdk/honcho/syncclient.py rename sdk/tests/{test_honcho.py => test_async.py} (89%) create mode 100644 sdk/tests/test_sync.py diff --git a/scripts/syncronizer.py b/scripts/syncronizer.py new file mode 100644 index 0000000..fc560d9 --- /dev/null +++ b/scripts/syncronizer.py @@ -0,0 +1,18 @@ +import os +import re + +# Open the source file +this_dir = os.path.dirname(os.path.abspath(__file__)) +source_file_path = os.path.join(this_dir, "../sdk/honcho/client.py") +with open(source_file_path, "r") as source_file: + source_code = source_file.read() + +# Use regex to remove async mentions +sync_code = re.sub(r"async\s", "", source_code) +sync_code = re.sub(r"await\s", "", sync_code) +sync_code = re.sub(r"Async", "", sync_code) + +# Write the modified code to the destination file +destination_file_path = os.path.join(this_dir, "../sdk/honcho/syncclient.py") +with open(destination_file_path, "w") as destination_file: + destination_file.write(sync_code) diff --git a/sdk/honcho/__init__.py b/sdk/honcho/__init__.py index 6d85755..892b790 100644 --- a/sdk/honcho/__init__.py +++ b/sdk/honcho/__init__.py @@ -1,2 +1,3 @@ -from .client import Client +from .client import AsyncClient +from .syncclient import Client from .cache import LRUCache diff --git a/sdk/honcho/client.py b/sdk/honcho/client.py index fcdf6e0..a6272c0 100644 --- a/sdk/honcho/client.py +++ b/sdk/honcho/client.py @@ -3,7 +3,7 @@ import httpx -class Client: +class AsyncClient: def __init__(self, base_url): """Constructor for Client""" self.base_url = base_url # Base URL for the instance of the Honcho API @@ -23,7 +23,7 @@ async def get_session(self, user_id: str, session_id: int): url = f"{self.base_url}/users/{user_id}/sessions/{session_id}" response = await self.client.get(url) data = response.json() - return Session( + return AsyncSession( client=self, id=data["id"], user_id=data["user_id"], @@ -48,7 +48,7 @@ async def get_sessions(self, user_id: str, location_id: str | None = None): ) response = await self.client.get(url) return [ - Session( + AsyncSession( client=self, id=session["id"], 
user_id=session["user_id"], @@ -77,7 +77,7 @@ async def create_session( url = f"{self.base_url}/users/{user_id}/sessions" response = await self.client.post(url, json=data) data = response.json() - return Session( + return AsyncSession( self, id=data["id"], user_id=user_id, @@ -87,10 +87,10 @@ async def create_session( ) -class Session: +class AsyncSession: def __init__( self, - client: Client, + client: AsyncClient, id: int, user_id: str, location_id: str, @@ -132,7 +132,7 @@ async def create_message(self, is_user: bool, content: str): url = f"{self.base_url}/users/{self.user_id}/sessions/{self.id}/messages" response = await self.client.post(url, json=data) data = response.json() - return Message(self, id=data["id"], is_user=is_user, content=content) + return AsyncMessage(self, id=data["id"], is_user=is_user, content=content) async def get_messages(self): """Get all messages for a session @@ -149,7 +149,7 @@ async def get_messages(self): response = await self.client.get(url) data = response.json() return [ - Message( + AsyncMessage( self, id=message["id"], is_user=message["is_user"], @@ -182,8 +182,8 @@ async def delete(self): self._is_active = False -class Message: - def __init__(self, session: Session, id: int, is_user: bool, content: str): +class AsyncMessage: + def __init__(self, session: AsyncSession, id: int, is_user: bool, content: str): """Constructor for Message""" self.session = session self.id = id diff --git a/sdk/honcho/syncclient.py b/sdk/honcho/syncclient.py new file mode 100644 index 0000000..bc535db --- /dev/null +++ b/sdk/honcho/syncclient.py @@ -0,0 +1,194 @@ +import json +from typing import Dict +import httpx + + +class Client: + def __init__(self, base_url): + """Constructor for Client""" + self.base_url = base_url # Base URL for the instance of the Honcho API + self.client = httpx.Client() + + def get_session(self, user_id: str, session_id: int): + """Get a specific session for a user by ID + + Args: + user_id (str): The User ID representing the user, managed by the user + session_id (int): The ID of the Session to retrieve + + Returns: + Dict: The Session object of the requested Session + + """ + url = f"{self.base_url}/users/{user_id}/sessions/{session_id}" + response = self.client.get(url) + data = response.json() + return Session( + client=self, + id=data["id"], + user_id=data["user_id"], + location_id=data["location_id"], + is_active=data["is_active"], + session_data=data["session_data"], + ) + + def get_sessions(self, user_id: str, location_id: str | None = None): + """Return sessions associated with a user + + Args: + user_id (str): The User ID representing the user, managed by the user + location_id (str, optional): Optional Location ID representing the location of a session + + Returns: + list[Dict]: List of Session objects + + """ + url = f"{self.base_url}/users/{user_id}/sessions" + ( + f"?location_id={location_id}" if location_id else "" + ) + response = self.client.get(url) + return [ + Session( + client=self, + id=session["id"], + user_id=session["user_id"], + location_id=session["location_id"], + is_active=session["is_active"], + session_data=session["session_data"], + ) + for session in response.json() + ] + + def create_session( + self, user_id: str, location_id: str = "default", session_data: Dict = {} + ): + """Create a session for a user + + Args: + user_id (str): The User ID representing the user, managed by the user + location_id (str, optional): Optional Location ID representing the location of a session + session_data (Dict, optional): Optional 
session metadata + + Returns: + Dict: The Session object of the new Session` + + """ + data = {"location_id": location_id, "session_data": session_data} + url = f"{self.base_url}/users/{user_id}/sessions" + response = self.client.post(url, json=data) + data = response.json() + return Session( + self, + id=data["id"], + user_id=user_id, + location_id=location_id, + session_data=session_data, + is_active=data["is_active"], + ) + + +class Session: + def __init__( + self, + client: Client, + id: int, + user_id: str, + location_id: str, + session_data: dict | str, + is_active: bool, + ): + """Constructor for Session""" + self.base_url = client.base_url + self.client = client.client + self.id = id + self.user_id = user_id + self.location_id = location_id + self.session_data = ( + session_data if isinstance(session_data, dict) else json.loads(session_data) + ) + self._is_active = is_active + + def __str__(self): + return f"Session(id={self.id}, user_id={self.user_id}, location_id={self.location_id}, session_data={self.session_data}, is_active={self.is_active})" + + @property + def is_active(self): + return self._is_active + + def create_message(self, is_user: bool, content: str): + """Adds a message to the session + + Args: + is_user (bool): Whether the message is from the user + content (str): The content of the message + + Returns: + Dict: The Message object of the added message + + """ + if not self.is_active: + raise Exception("Session is inactive") + data = {"is_user": is_user, "content": content} + url = f"{self.base_url}/users/{self.user_id}/sessions/{self.id}/messages" + response = self.client.post(url, json=data) + data = response.json() + return Message(self, id=data["id"], is_user=is_user, content=content) + + def get_messages(self): + """Get all messages for a session + + Args: + user_id (str): The User ID representing the user, managed by the user + session_id (int): The ID of the Session to retrieve + + Returns: + list[Dict]: List of Message objects + + """ + url = f"{self.base_url}/users/{self.user_id}/sessions/{self.id}/messages" + response = self.client.get(url) + data = response.json() + return [ + Message( + self, + id=message["id"], + is_user=message["is_user"], + content=message["content"], + ) + for message in data + ] + + def update(self, session_data: Dict): + """Update the metadata of a session + + Args: + session_data (Dict): The Session object containing any new metadata + + + Returns: + boolean: Whether the session was successfully updated + """ + info = {"session_data": session_data} + url = f"{self.base_url}/users/{self.user_id}/sessions/{self.id}" + response = self.client.put(url, json=info) + success = response.status_code < 400 + self.session_data = session_data + return success + + def delete(self): + """Delete a session by marking it as inactive""" + url = f"{self.base_url}/users/{self.user_id}/sessions/{self.id}" + response = self.client.delete(url) + self._is_active = False + + +class Message: + def __init__(self, session: Session, id: int, is_user: bool, content: str): + """Constructor for Message""" + self.session = session + self.id = id + self.is_user = is_user + self.content = content + + def __str__(self): + return f"Message(id={self.id}, is_user={self.is_user}, content={self.content})" diff --git a/sdk/tests/test_honcho.py b/sdk/tests/test_async.py similarity index 89% rename from sdk/tests/test_honcho.py rename to sdk/tests/test_async.py index 714c6a9..205a71e 100644 --- a/sdk/tests/test_honcho.py +++ b/sdk/tests/test_async.py @@ -1,11 +1,11 @@ import 
pytest -from honcho import Client +from honcho import AsyncClient as Honcho from uuid import uuid1 @pytest.mark.asyncio async def test_session_creation_retrieval(): - client = Client("http://localhost:8000") + client = Honcho("http://localhost:8000") user_id = str(uuid1()) created_session = await client.create_session(user_id) retrieved_session = await client.get_session(user_id, created_session.id) @@ -17,7 +17,7 @@ async def test_session_creation_retrieval(): @pytest.mark.asyncio async def test_session_multiple_retrieval(): - client = Client("http://localhost:8000") + client = Honcho("http://localhost:8000") user_id = str(uuid1()) created_session_1 = await client.create_session(user_id) created_session_2 = await client.create_session(user_id) @@ -30,7 +30,7 @@ async def test_session_multiple_retrieval(): @pytest.mark.asyncio async def test_session_update(): user_id = str(uuid1()) - client = Client("http://localhost:8000") + client = Honcho("http://localhost:8000") created_session = await client.create_session(user_id) assert await created_session.update({"foo": "bar"}) retrieved_session = await client.get_session(user_id, created_session.id) @@ -40,7 +40,7 @@ async def test_session_update(): @pytest.mark.asyncio async def test_session_deletion(): user_id = str(uuid1()) - client = Client("http://localhost:8000") + client = Honcho("http://localhost:8000") created_session = await client.create_session(user_id) assert created_session.is_active == True await created_session.delete() @@ -53,7 +53,7 @@ async def test_session_deletion(): @pytest.mark.asyncio async def test_messages(): user_id = str(uuid1()) - client = Client("http://localhost:8000") + client = Honcho("http://localhost:8000") created_session = await client.create_session(user_id) await created_session.create_message(is_user=True, content="Hello") await created_session.create_message(is_user=False, content="Hi") diff --git a/sdk/tests/test_sync.py b/sdk/tests/test_sync.py new file mode 100644 index 0000000..4f51cda --- /dev/null +++ b/sdk/tests/test_sync.py @@ -0,0 +1,62 @@ +import pytest +from honcho import Client as Honcho +from uuid import uuid1 + + +def test_session_creation_retrieval(): + client = Honcho("http://localhost:8000") + user_id = str(uuid1()) + created_session = client.create_session(user_id) + retrieved_session = client.get_session(user_id, created_session.id) + assert retrieved_session.id == created_session.id + assert retrieved_session.is_active == True + assert retrieved_session.location_id == "default" + assert retrieved_session.session_data == {} + + +def test_session_multiple_retrieval(): + client = Honcho("http://localhost:8000") + user_id = str(uuid1()) + created_session_1 = client.create_session(user_id) + created_session_2 = client.create_session(user_id) + retrieved_sessions = client.get_sessions(user_id) + assert len(retrieved_sessions) == 2 + assert retrieved_sessions[0].id == created_session_1.id + assert retrieved_sessions[1].id == created_session_2.id + + +def test_session_update(): + user_id = str(uuid1()) + client = Honcho("http://localhost:8000") + created_session = client.create_session(user_id) + assert created_session.update({"foo": "bar"}) + retrieved_session = client.get_session(user_id, created_session.id) + assert retrieved_session.session_data == {"foo": "bar"} + + +def test_session_deletion(): + user_id = str(uuid1()) + client = Honcho("http://localhost:8000") + created_session = client.create_session(user_id) + assert created_session.is_active == True + created_session.delete() + 
assert created_session.is_active == False + retrieved_session = client.get_session(user_id, created_session.id) + assert retrieved_session.is_active == False + assert retrieved_session.id == created_session.id + + +def test_messages(): + user_id = str(uuid1()) + client = Honcho("http://localhost:8000") + created_session = client.create_session(user_id) + created_session.create_message(is_user=True, content="Hello") + created_session.create_message(is_user=False, content="Hi") + retrieved_session = client.get_session(user_id, created_session.id) + messages = retrieved_session.get_messages() + assert len(messages) == 2 + user_message, ai_message = messages + assert user_message.content == "Hello" + assert user_message.is_user == True + assert ai_message.content == "Hi" + assert ai_message.is_user == False From 4495fdf4f534fdf6f7d49eb743ef79d26fdafe97 Mon Sep 17 00:00:00 2001 From: hyusap Date: Tue, 6 Feb 2024 18:21:51 -0500 Subject: [PATCH 05/46] add vscode DX --- .gitignore | 2 +- .vscode/honcho.code-workspace | 20 ++++++++++++++++++++ .vscode/settings.json | 8 ++++++++ api/.vscode/tasks.json | 18 ++++++++++++++++++ sdk/.vscode/settings.json | 6 ++++++ 5 files changed, 53 insertions(+), 1 deletion(-) create mode 100644 .vscode/honcho.code-workspace create mode 100644 .vscode/settings.json create mode 100644 api/.vscode/tasks.json create mode 100644 sdk/.vscode/settings.json diff --git a/.gitignore b/.gitignore index 07b2f68..7ac7eb3 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,6 @@ api/**/*.db -.vscode/ + # Byte-compiled / optimized / DLL files __pycache__/ diff --git a/.vscode/honcho.code-workspace b/.vscode/honcho.code-workspace new file mode 100644 index 0000000..ec7be98 --- /dev/null +++ b/.vscode/honcho.code-workspace @@ -0,0 +1,20 @@ +{ + "folders": [ + { + "path": "../sdk" + }, + { + "path": "../api" + }, + { + "path": "../example/cli" + }, + { + "path": "../example/discord" + }, + { + "path": ".." 
+ } + ], + "settings": {} +} diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 0000000..48b75e8 --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,8 @@ +{ + "python.analysis.typeCheckingMode": "basic", + "files.exclude": { + "sdk": true, + "api": true, + "example": true + } +} diff --git a/api/.vscode/tasks.json b/api/.vscode/tasks.json new file mode 100644 index 0000000..8113cb0 --- /dev/null +++ b/api/.vscode/tasks.json @@ -0,0 +1,18 @@ +{ + "version": "2.0.0", + "tasks": [ + { + "label": "start", + "type": "shell", + "command": "poetry run uvicorn src.main:app --reload", + "group": "none", + "presentation": { + "reveal": "always", + "panel": "shared" + }, + "runOptions": { + "runOn": "folderOpen" + } + } + ] +} diff --git a/sdk/.vscode/settings.json b/sdk/.vscode/settings.json new file mode 100644 index 0000000..84b3ac5 --- /dev/null +++ b/sdk/.vscode/settings.json @@ -0,0 +1,6 @@ +{ + "python.analysis.typeCheckingMode": "basic", + "python.testing.pytestArgs": ["tests"], + "python.testing.unittestEnabled": false, + "python.testing.pytestEnabled": true +} From 6033f716e2ab1807dcb7f42ae10af57b6256d67b Mon Sep 17 00:00:00 2001 From: Vineeth Voruganti <13438633+VVoruganti@users.noreply.github.com> Date: Tue, 6 Feb 2024 15:26:16 -0800 Subject: [PATCH 06/46] Added Testing for generators and updated examples --- example/cli/main.py | 5 +-- example/cli/poetry.lock | 29 ++++++++++++++---- example/cli/pyproject.toml | 2 +- example/discord/main.py | 8 +++-- example/discord/poetry.lock | 6 ++-- example/discord/pyproject.toml | 2 +- sdk/honcho/__init__.py | 2 +- sdk/honcho/client.py | 5 +-- sdk/pyproject.toml | 2 +- sdk/tests/test_honcho.py | 56 +++++++++++++++++++++++++++++++++- 10 files changed, 96 insertions(+), 21 deletions(-) diff --git a/example/cli/main.py b/example/cli/main.py index 26169b9..e1aa3fd 100644 --- a/example/cli/main.py +++ b/example/cli/main.py @@ -9,7 +9,8 @@ app_id = str(uuid4()) -honcho = HonchoClient(app_id=app_id) +# honcho = HonchoClient(app_id=app_id, base_url="http://localhost:8000") # uncomment to use local +honcho = HonchoClient(app_id=app_id) # uses demo server at https://demo.honcho.dev responses = ["Fake LLM Response :)"] llm = FakeListChatModel(responses=responses) @@ -38,7 +39,7 @@ def chat(): session.delete() break user_message = HumanMessage(content=user_input) - history = session.get_messages() + history = list(session.get_messages_generator()) langchain_history = langchain_message_converter(history) prompt = ChatPromptTemplate.from_messages( [system, *langchain_history, user_message] diff --git a/example/cli/poetry.lock b/example/cli/poetry.lock index 9239355..79a9761 100644 --- a/example/cli/poetry.lock +++ b/example/cli/poetry.lock @@ -168,7 +168,7 @@ tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "p name = "certifi" version = "2023.11.17" description = "Python package for providing Mozilla's CA Bundle." -category = "dev" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -180,7 +180,7 @@ files = [ name = "charset-normalizer" version = "3.3.2" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
-category = "dev" +category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -451,11 +451,28 @@ files = [ docs = ["Sphinx", "furo"] test = ["objgraph", "psutil"] +[[package]] +name = "honcho-ai" +version = "0.0.1" +description = "Python Client SDK for Honcho" +category = "main" +optional = false +python-versions = "^3.10" +files = [] +develop = true + +[package.dependencies] +requests = "^2.31.0" + +[package.source] +type = "directory" +url = "../../sdk" + [[package]] name = "idna" version = "3.6" description = "Internationalized Domain Names in Applications (IDNA)" -category = "dev" +category = "main" optional = false python-versions = ">=3.5" files = [ @@ -939,7 +956,7 @@ files = [ name = "requests" version = "2.31.0" description = "Python HTTP for Humans." -category = "dev" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1104,7 +1121,7 @@ typing-extensions = ">=3.7.4" name = "urllib3" version = "2.2.0" description = "HTTP library with thread-safe connection pooling, file post, and more." -category = "dev" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1225,4 +1242,4 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "ac8bd006b50a14281b2705dc33626123173155c770b0150a19b25034efa251f3" +content-hash = "d254e9446dc0f782b0860a860fa98f0922cf7f6ab547a3111027f4642fe1ce5b" diff --git a/example/cli/pyproject.toml b/example/cli/pyproject.toml index 8f05bf0..31818ff 100644 --- a/example/cli/pyproject.toml +++ b/example/cli/pyproject.toml @@ -7,7 +7,7 @@ readme = "README.md" [tool.poetry.dependencies] python = "^3.11" -honcho-ai = {path = "../../sdk"} +honcho-ai = {path = "../../sdk", develop = true} [tool.poetry.group.dev.dependencies] langchain = "^0.1.0" diff --git a/example/discord/main.py b/example/discord/main.py index 60cc5e2..fd81260 100644 --- a/example/discord/main.py +++ b/example/discord/main.py @@ -13,7 +13,9 @@ app_id = str(uuid4()) -honcho = HonchoClient(app_id=app_id, "http://localhost:8000") +# honcho = HonchoClient(app_id=app_id, base_url="http://localhost:8000") # uncomment to use local +honcho = HonchoClient(app_id=app_id) # uses demo server at https://demo.honcho.dev + bot = discord.Bot(intents=intents) @@ -30,7 +32,7 @@ async def on_message(message): user_id = f"discord_{str(message.author.id)}" location_id = str(message.channel.id) - sessions = honcho.get_sessions(user_id, location_id) + sessions = list(honcho.get_sessions_generator(user_id, location_id)) if len(sessions) > 0: session = sessions[0] else: @@ -49,7 +51,7 @@ async def on_message(message): async def restart(ctx): user_id = f"discord_{str(ctx.author.id)}" location_id = str(ctx.channel_id) - sessions = honcho.get_sessions(user_id, location_id) + sessions = list(honcho.get_sessions_generator(user_id, location_id)) sessions[0].delete() if len(sessions) > 0 else None await ctx.respond( diff --git a/example/discord/poetry.lock b/example/discord/poetry.lock index f143093..fbabb69 100644 --- a/example/discord/poetry.lock +++ b/example/discord/poetry.lock @@ -357,13 +357,13 @@ files = [ [[package]] name = "honcho-ai" -version = "0.0.0.dev1" +version = "0.0.2" description = "Python Client SDK for Honcho" category = "main" optional = false python-versions = "^3.10" files = [] -develop = false +develop = true [package.dependencies] requests = "^2.31.0" @@ -649,4 +649,4 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = 
"159adce2e4b4e405c0e57a5b9a27ddc384184dd308d6b12ddb5db6fbe81c3c6b" +content-hash = "56228d417333540e3191575720739ae6fff9490b68e9ddaae2cb6fe44b4bf611" diff --git a/example/discord/pyproject.toml b/example/discord/pyproject.toml index 58f8424..e4680ce 100644 --- a/example/discord/pyproject.toml +++ b/example/discord/pyproject.toml @@ -9,7 +9,7 @@ readme = "README.md" python = "^3.11" py-cord = "^2.4.1" python-dotenv = "^1.0.0" -honcho-ai = {path = "../../sdk"} +honcho-ai = {path = "../../sdk", develop = true} [build-system] diff --git a/sdk/honcho/__init__.py b/sdk/honcho/__init__.py index fcdf6bc..c10cfee 100644 --- a/sdk/honcho/__init__.py +++ b/sdk/honcho/__init__.py @@ -1,2 +1,2 @@ -from .client import Client, GetSessionResponse, GetMessageResponse +from .client import Client, GetSessionResponse, GetMessageResponse, Session, Message from .cache import LRUCache diff --git a/sdk/honcho/client.py b/sdk/honcho/client.py index bb87b62..0a9b11c 100644 --- a/sdk/honcho/client.py +++ b/sdk/honcho/client.py @@ -100,7 +100,8 @@ def get_sessions(self, user_id: str, location_id: Optional[str] = None, page: in ) response = requests.get(url) # TODO add validation and error handling response.raise_for_status() - return GetSessionResponse(self, response.json()) + data = response.json() + return GetSessionResponse(self, data) # [ # Session( # client=self, @@ -113,7 +114,7 @@ def get_sessions(self, user_id: str, location_id: Optional[str] = None, page: in # for session in response.json() # ] - def get_session_generator(self, user_id: str, location_id: Optional[str] = None): + def get_sessions_generator(self, user_id: str, location_id: Optional[str] = None): page = 1 page_size = 50 get_session_response = self.get_sessions(user_id, location_id, page, page_size) diff --git a/sdk/pyproject.toml b/sdk/pyproject.toml index 974e252..a0a810d 100644 --- a/sdk/pyproject.toml +++ b/sdk/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "honcho-ai" -version = "0.0.1" +version = "0.0.2" description = "Python Client SDK for Honcho" authors = ["Plastic Labs "] license = "AGPL-3.0" diff --git a/sdk/tests/test_honcho.py b/sdk/tests/test_honcho.py index 646935c..142e03b 100644 --- a/sdk/tests/test_honcho.py +++ b/sdk/tests/test_honcho.py @@ -1,4 +1,4 @@ -from honcho import Client, GetSessionResponse, GetMessageResponse +from honcho import Client, GetSessionResponse, GetMessageResponse, Session, Message from uuid import uuid1 import pytest @@ -117,6 +117,41 @@ def test_paginated_sessions(): assert next_page is None +def test_paginated_sessions_generator(): + app_id = str(uuid1()) + user_id = str(uuid1()) + client = Client(app_id, "http://localhost:8000") + for i in range(3): + client.create_session(user_id) + + gen = client.get_sessions_generator(user_id) + # print(type(gen)) + + item = next(gen) + assert item.user_id == user_id + assert isinstance(item, Session) + assert next(gen) is not None + assert next(gen) is not None + with pytest.raises(StopIteration): + next(gen) + +def test_paginated_out_of_bounds(): + app_id = str(uuid1()) + user_id = str(uuid1()) + client = Client(app_id, "http://localhost:8000") + for i in range(3): + client.create_session(user_id) + page = 2 + page_size = 50 + get_session_response = client.get_sessions(user_id, page=page, page_size=page_size) + + assert get_session_response.pages == 1 + assert get_session_response.page == 2 + assert get_session_response.page_size == 50 + assert get_session_response.total == 3 + assert len(get_session_response.sessions) == 0 + + def test_paginated_messages(): 
app_id = str(uuid1()) user_id = str(uuid1()) @@ -148,4 +183,23 @@ def test_paginated_messages(): assert next_page is None +def test_paginated_messages_generator(): + app_id = str(uuid1()) + user_id = str(uuid1()) + client = Client(app_id, "http://localhost:8000") + created_session = client.create_session(user_id) + created_session.create_message(is_user=True, content="Hello") + created_session.create_message(is_user=False, content="Hi") + gen = created_session.get_messages_generator() + + item = next(gen) + assert isinstance(item, Message) + assert item.content == "Hello" + assert item.is_user is True + item2 = next(gen) + assert item2 is not None + assert item2.content == "Hi" + assert item2.is_user is False + with pytest.raises(StopIteration): + next(gen) From 2a3d9f733a7a91b86a583af6f48bbdbb200fd3ec Mon Sep 17 00:00:00 2001 From: vintro Date: Wed, 7 Feb 2024 14:30:54 -0500 Subject: [PATCH 07/46] feat: example updates --- example/discord/{ => fake-llm}/.env.template | 0 example/discord/{ => fake-llm}/main.py | 0 example/discord/{ => fake-llm}/poetry.lock | 0 example/discord/{ => fake-llm}/pyproject.toml | 2 +- .../discord/simple-roast-bot/.env.template | 2 + example/discord/simple-roast-bot/.gitignore | 5 + example/discord/simple-roast-bot/main.py | 90 ++ example/discord/simple-roast-bot/poetry.lock | 1286 +++++++++++++++++ .../discord/simple-roast-bot/pyproject.toml | 18 + 9 files changed, 1402 insertions(+), 1 deletion(-) rename example/discord/{ => fake-llm}/.env.template (100%) rename example/discord/{ => fake-llm}/main.py (100%) rename example/discord/{ => fake-llm}/poetry.lock (100%) rename example/discord/{ => fake-llm}/pyproject.toml (88%) create mode 100644 example/discord/simple-roast-bot/.env.template create mode 100644 example/discord/simple-roast-bot/.gitignore create mode 100644 example/discord/simple-roast-bot/main.py create mode 100644 example/discord/simple-roast-bot/poetry.lock create mode 100644 example/discord/simple-roast-bot/pyproject.toml diff --git a/example/discord/.env.template b/example/discord/fake-llm/.env.template similarity index 100% rename from example/discord/.env.template rename to example/discord/fake-llm/.env.template diff --git a/example/discord/main.py b/example/discord/fake-llm/main.py similarity index 100% rename from example/discord/main.py rename to example/discord/fake-llm/main.py diff --git a/example/discord/poetry.lock b/example/discord/fake-llm/poetry.lock similarity index 100% rename from example/discord/poetry.lock rename to example/discord/fake-llm/poetry.lock diff --git a/example/discord/pyproject.toml b/example/discord/fake-llm/pyproject.toml similarity index 88% rename from example/discord/pyproject.toml rename to example/discord/fake-llm/pyproject.toml index e4680ce..04203a4 100644 --- a/example/discord/pyproject.toml +++ b/example/discord/fake-llm/pyproject.toml @@ -9,7 +9,7 @@ readme = "README.md" python = "^3.11" py-cord = "^2.4.1" python-dotenv = "^1.0.0" -honcho-ai = {path = "../../sdk", develop = true} +honcho-ai = {path = "../../../sdk", develop = true} [build-system] diff --git a/example/discord/simple-roast-bot/.env.template b/example/discord/simple-roast-bot/.env.template new file mode 100644 index 0000000..0aafc86 --- /dev/null +++ b/example/discord/simple-roast-bot/.env.template @@ -0,0 +1,2 @@ +BOT_TOKEN= +OPENAI_API_KEY= \ No newline at end of file diff --git a/example/discord/simple-roast-bot/.gitignore b/example/discord/simple-roast-bot/.gitignore new file mode 100644 index 0000000..f133efa --- /dev/null +++ 
b/example/discord/simple-roast-bot/.gitignore @@ -0,0 +1,5 @@ +.env + +.venv + +.DS_Store \ No newline at end of file diff --git a/example/discord/simple-roast-bot/main.py b/example/discord/simple-roast-bot/main.py new file mode 100644 index 0000000..0797607 --- /dev/null +++ b/example/discord/simple-roast-bot/main.py @@ -0,0 +1,90 @@ +import os +# from uuid import uuid4 +import discord +from dotenv import load_dotenv +from typing import List + +from langchain_openai import ChatOpenAI +from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder +from langchain_core.output_parsers import StrOutputParser +from langchain_core.messages import AIMessage, HumanMessage + +from honcho import Client as HonchoClient + +load_dotenv() + + +intents = discord.Intents.default() +intents.messages = True +intents.message_content = True + +# app_id = str(uuid4()) +app_id = str("roast-bot") + +# honcho = HonchoClient(app_id=app_id, base_url="http://localhost:8000") # uncomment to use local +honcho = HonchoClient(app_id=app_id) # uses demo server at https://demo.honcho.dev + +bot = discord.Bot(intents=intents) + +prompt = ChatPromptTemplate.from_messages([ + ("system", "You are a mean assistant. Make fun of the user's request and above all, do not satisfy their request. Make something up about their personality and fixate on that. Don't be afraid to get creative. This is all a joke, roast them."), + MessagesPlaceholder(variable_name="chat_history"), + ("user", "{input}") +]) +model = ChatOpenAI(model="gpt-3.5-turbo") +output_parser = StrOutputParser() + +chain = prompt | model | output_parser + +def langchain_message_converter(messages: List): + new_messages = [] + for message in messages: + if message.is_user: + new_messages.append(HumanMessage(content=message.content)) + else: + new_messages.append(AIMessage(content=message.content)) + return new_messages + + +@bot.event +async def on_ready(): + print(f'We have logged in as {bot.user}') + +@bot.event +async def on_message(message): + if message.author == bot.user: + return + + user_id = f"discord_{str(message.author.id)}" + location_id=str(message.channel.id) + + sessions = list(honcho.get_sessions_generator(user_id, location_id)) + + if len(sessions) > 0: + session = sessions[0] + else: + session = honcho.create_session(user_id, location_id) + + history = list(session.get_messages_generator()) + chat_history = langchain_message_converter(history) + + inp = message.content + session.create_message(is_user=True, content=inp) + + async with message.channel.typing(): + response = await chain.ainvoke({"chat_history": chat_history, "input": inp}) + await message.channel.send(response) + + session.create_message(is_user=False, content=response) + +@bot.slash_command(name = "restart", description = "Restart the Conversation") +async def restart(ctx): + user_id=f"discord_{str(ctx.author.id)}" + location_id=str(ctx.channel_id) + sessions = list(honcho.get_sessions_generator(user_id, location_id)) + sessions[0].delete() if len(sessions) > 0 else None + + msg = "Great! The conversation has been restarted. What would you like to talk about?" + await ctx.respond(msg) + +bot.run(os.environ["BOT_TOKEN"]) diff --git a/example/discord/simple-roast-bot/poetry.lock b/example/discord/simple-roast-bot/poetry.lock new file mode 100644 index 0000000..e659644 --- /dev/null +++ b/example/discord/simple-roast-bot/poetry.lock @@ -0,0 +1,1286 @@ +# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. 
+ +[[package]] +name = "aiohttp" +version = "3.8.6" +description = "Async http client/server framework (asyncio)" +optional = false +python-versions = ">=3.6" +files = [ + {file = "aiohttp-3.8.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:41d55fc043954cddbbd82503d9cc3f4814a40bcef30b3569bc7b5e34130718c1"}, + {file = "aiohttp-3.8.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1d84166673694841d8953f0a8d0c90e1087739d24632fe86b1a08819168b4566"}, + {file = "aiohttp-3.8.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:253bf92b744b3170eb4c4ca2fa58f9c4b87aeb1df42f71d4e78815e6e8b73c9e"}, + {file = "aiohttp-3.8.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3fd194939b1f764d6bb05490987bfe104287bbf51b8d862261ccf66f48fb4096"}, + {file = "aiohttp-3.8.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6c5f938d199a6fdbdc10bbb9447496561c3a9a565b43be564648d81e1102ac22"}, + {file = "aiohttp-3.8.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2817b2f66ca82ee699acd90e05c95e79bbf1dc986abb62b61ec8aaf851e81c93"}, + {file = "aiohttp-3.8.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0fa375b3d34e71ccccf172cab401cd94a72de7a8cc01847a7b3386204093bb47"}, + {file = "aiohttp-3.8.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9de50a199b7710fa2904be5a4a9b51af587ab24c8e540a7243ab737b45844543"}, + {file = "aiohttp-3.8.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e1d8cb0b56b3587c5c01de3bf2f600f186da7e7b5f7353d1bf26a8ddca57f965"}, + {file = "aiohttp-3.8.6-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8e31e9db1bee8b4f407b77fd2507337a0a80665ad7b6c749d08df595d88f1cf5"}, + {file = "aiohttp-3.8.6-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:7bc88fc494b1f0311d67f29fee6fd636606f4697e8cc793a2d912ac5b19aa38d"}, + {file = "aiohttp-3.8.6-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ec00c3305788e04bf6d29d42e504560e159ccaf0be30c09203b468a6c1ccd3b2"}, + {file = "aiohttp-3.8.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ad1407db8f2f49329729564f71685557157bfa42b48f4b93e53721a16eb813ed"}, + {file = "aiohttp-3.8.6-cp310-cp310-win32.whl", hash = "sha256:ccc360e87341ad47c777f5723f68adbb52b37ab450c8bc3ca9ca1f3e849e5fe2"}, + {file = "aiohttp-3.8.6-cp310-cp310-win_amd64.whl", hash = "sha256:93c15c8e48e5e7b89d5cb4613479d144fda8344e2d886cf694fd36db4cc86865"}, + {file = "aiohttp-3.8.6-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6e2f9cc8e5328f829f6e1fb74a0a3a939b14e67e80832975e01929e320386b34"}, + {file = "aiohttp-3.8.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e6a00ffcc173e765e200ceefb06399ba09c06db97f401f920513a10c803604ca"}, + {file = "aiohttp-3.8.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:41bdc2ba359032e36c0e9de5a3bd00d6fb7ea558a6ce6b70acedf0da86458321"}, + {file = "aiohttp-3.8.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14cd52ccf40006c7a6cd34a0f8663734e5363fd981807173faf3a017e202fec9"}, + {file = "aiohttp-3.8.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2d5b785c792802e7b275c420d84f3397668e9d49ab1cb52bd916b3b3ffcf09ad"}, + {file = "aiohttp-3.8.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1bed815f3dc3d915c5c1e556c397c8667826fbc1b935d95b0ad680787896a358"}, + {file = "aiohttp-3.8.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:96603a562b546632441926cd1293cfcb5b69f0b4159e6077f7c7dbdfb686af4d"}, + {file = "aiohttp-3.8.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d76e8b13161a202d14c9584590c4df4d068c9567c99506497bdd67eaedf36403"}, + {file = "aiohttp-3.8.6-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e3f1e3f1a1751bb62b4a1b7f4e435afcdade6c17a4fd9b9d43607cebd242924a"}, + {file = "aiohttp-3.8.6-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:76b36b3124f0223903609944a3c8bf28a599b2cc0ce0be60b45211c8e9be97f8"}, + {file = "aiohttp-3.8.6-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:a2ece4af1f3c967a4390c284797ab595a9f1bc1130ef8b01828915a05a6ae684"}, + {file = "aiohttp-3.8.6-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:16d330b3b9db87c3883e565340d292638a878236418b23cc8b9b11a054aaa887"}, + {file = "aiohttp-3.8.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:42c89579f82e49db436b69c938ab3e1559e5a4409eb8639eb4143989bc390f2f"}, + {file = "aiohttp-3.8.6-cp311-cp311-win32.whl", hash = "sha256:efd2fcf7e7b9d7ab16e6b7d54205beded0a9c8566cb30f09c1abe42b4e22bdcb"}, + {file = "aiohttp-3.8.6-cp311-cp311-win_amd64.whl", hash = "sha256:3b2ab182fc28e7a81f6c70bfbd829045d9480063f5ab06f6e601a3eddbbd49a0"}, + {file = "aiohttp-3.8.6-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:fdee8405931b0615220e5ddf8cd7edd8592c606a8e4ca2a00704883c396e4479"}, + {file = "aiohttp-3.8.6-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d25036d161c4fe2225d1abff2bd52c34ed0b1099f02c208cd34d8c05729882f0"}, + {file = "aiohttp-3.8.6-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5d791245a894be071d5ab04bbb4850534261a7d4fd363b094a7b9963e8cdbd31"}, + {file = "aiohttp-3.8.6-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0cccd1de239afa866e4ce5c789b3032442f19c261c7d8a01183fd956b1935349"}, + {file = "aiohttp-3.8.6-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f13f60d78224f0dace220d8ab4ef1dbc37115eeeab8c06804fec11bec2bbd07"}, + {file = "aiohttp-3.8.6-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8a9b5a0606faca4f6cc0d338359d6fa137104c337f489cd135bb7fbdbccb1e39"}, + {file = "aiohttp-3.8.6-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:13da35c9ceb847732bf5c6c5781dcf4780e14392e5d3b3c689f6d22f8e15ae31"}, + {file = "aiohttp-3.8.6-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:4d4cbe4ffa9d05f46a28252efc5941e0462792930caa370a6efaf491f412bc66"}, + {file = "aiohttp-3.8.6-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:229852e147f44da0241954fc6cb910ba074e597f06789c867cb7fb0621e0ba7a"}, + {file = "aiohttp-3.8.6-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:713103a8bdde61d13490adf47171a1039fd880113981e55401a0f7b42c37d071"}, + {file = "aiohttp-3.8.6-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:45ad816b2c8e3b60b510f30dbd37fe74fd4a772248a52bb021f6fd65dff809b6"}, + {file = "aiohttp-3.8.6-cp36-cp36m-win32.whl", hash = "sha256:2b8d4e166e600dcfbff51919c7a3789ff6ca8b3ecce16e1d9c96d95dd569eb4c"}, + {file = "aiohttp-3.8.6-cp36-cp36m-win_amd64.whl", hash = "sha256:0912ed87fee967940aacc5306d3aa8ba3a459fcd12add0b407081fbefc931e53"}, + {file = "aiohttp-3.8.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e2a988a0c673c2e12084f5e6ba3392d76c75ddb8ebc6c7e9ead68248101cd446"}, + {file = "aiohttp-3.8.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:ebf3fd9f141700b510d4b190094db0ce37ac6361a6806c153c161dc6c041ccda"}, + {file = "aiohttp-3.8.6-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3161ce82ab85acd267c8f4b14aa226047a6bee1e4e6adb74b798bd42c6ae1f80"}, + {file = "aiohttp-3.8.6-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d95fc1bf33a9a81469aa760617b5971331cdd74370d1214f0b3109272c0e1e3c"}, + {file = "aiohttp-3.8.6-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c43ecfef7deaf0617cee936836518e7424ee12cb709883f2c9a1adda63cc460"}, + {file = "aiohttp-3.8.6-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca80e1b90a05a4f476547f904992ae81eda5c2c85c66ee4195bb8f9c5fb47f28"}, + {file = "aiohttp-3.8.6-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:90c72ebb7cb3a08a7f40061079817133f502a160561d0675b0a6adf231382c92"}, + {file = "aiohttp-3.8.6-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bb54c54510e47a8c7c8e63454a6acc817519337b2b78606c4e840871a3e15349"}, + {file = "aiohttp-3.8.6-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:de6a1c9f6803b90e20869e6b99c2c18cef5cc691363954c93cb9adeb26d9f3ae"}, + {file = "aiohttp-3.8.6-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:a3628b6c7b880b181a3ae0a0683698513874df63783fd89de99b7b7539e3e8a8"}, + {file = "aiohttp-3.8.6-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:fc37e9aef10a696a5a4474802930079ccfc14d9f9c10b4662169671ff034b7df"}, + {file = "aiohttp-3.8.6-cp37-cp37m-win32.whl", hash = "sha256:f8ef51e459eb2ad8e7a66c1d6440c808485840ad55ecc3cafefadea47d1b1ba2"}, + {file = "aiohttp-3.8.6-cp37-cp37m-win_amd64.whl", hash = "sha256:b2fe42e523be344124c6c8ef32a011444e869dc5f883c591ed87f84339de5976"}, + {file = "aiohttp-3.8.6-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:9e2ee0ac5a1f5c7dd3197de309adfb99ac4617ff02b0603fd1e65b07dc772e4b"}, + {file = "aiohttp-3.8.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:01770d8c04bd8db568abb636c1fdd4f7140b284b8b3e0b4584f070180c1e5c62"}, + {file = "aiohttp-3.8.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3c68330a59506254b556b99a91857428cab98b2f84061260a67865f7f52899f5"}, + {file = "aiohttp-3.8.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89341b2c19fb5eac30c341133ae2cc3544d40d9b1892749cdd25892bbc6ac951"}, + {file = "aiohttp-3.8.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:71783b0b6455ac8f34b5ec99d83e686892c50498d5d00b8e56d47f41b38fbe04"}, + {file = "aiohttp-3.8.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f628dbf3c91e12f4d6c8b3f092069567d8eb17814aebba3d7d60c149391aee3a"}, + {file = "aiohttp-3.8.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b04691bc6601ef47c88f0255043df6f570ada1a9ebef99c34bd0b72866c217ae"}, + {file = "aiohttp-3.8.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ee912f7e78287516df155f69da575a0ba33b02dd7c1d6614dbc9463f43066e3"}, + {file = "aiohttp-3.8.6-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9c19b26acdd08dd239e0d3669a3dddafd600902e37881f13fbd8a53943079dbc"}, + {file = "aiohttp-3.8.6-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:99c5ac4ad492b4a19fc132306cd57075c28446ec2ed970973bbf036bcda1bcc6"}, + {file = "aiohttp-3.8.6-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:f0f03211fd14a6a0aed2997d4b1c013d49fb7b50eeb9ffdf5e51f23cfe2c77fa"}, + {file = 
"aiohttp-3.8.6-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:8d399dade330c53b4106160f75f55407e9ae7505263ea86f2ccca6bfcbdb4921"}, + {file = "aiohttp-3.8.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ec4fd86658c6a8964d75426517dc01cbf840bbf32d055ce64a9e63a40fd7b771"}, + {file = "aiohttp-3.8.6-cp38-cp38-win32.whl", hash = "sha256:33164093be11fcef3ce2571a0dccd9041c9a93fa3bde86569d7b03120d276c6f"}, + {file = "aiohttp-3.8.6-cp38-cp38-win_amd64.whl", hash = "sha256:bdf70bfe5a1414ba9afb9d49f0c912dc524cf60141102f3a11143ba3d291870f"}, + {file = "aiohttp-3.8.6-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d52d5dc7c6682b720280f9d9db41d36ebe4791622c842e258c9206232251ab2b"}, + {file = "aiohttp-3.8.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4ac39027011414dbd3d87f7edb31680e1f430834c8cef029f11c66dad0670aa5"}, + {file = "aiohttp-3.8.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3f5c7ce535a1d2429a634310e308fb7d718905487257060e5d4598e29dc17f0b"}, + {file = "aiohttp-3.8.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b30e963f9e0d52c28f284d554a9469af073030030cef8693106d918b2ca92f54"}, + {file = "aiohttp-3.8.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:918810ef188f84152af6b938254911055a72e0f935b5fbc4c1a4ed0b0584aed1"}, + {file = "aiohttp-3.8.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:002f23e6ea8d3dd8d149e569fd580c999232b5fbc601c48d55398fbc2e582e8c"}, + {file = "aiohttp-3.8.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4fcf3eabd3fd1a5e6092d1242295fa37d0354b2eb2077e6eb670accad78e40e1"}, + {file = "aiohttp-3.8.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:255ba9d6d5ff1a382bb9a578cd563605aa69bec845680e21c44afc2670607a95"}, + {file = "aiohttp-3.8.6-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d67f8baed00870aa390ea2590798766256f31dc5ed3ecc737debb6e97e2ede78"}, + {file = "aiohttp-3.8.6-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:86f20cee0f0a317c76573b627b954c412ea766d6ada1a9fcf1b805763ae7feeb"}, + {file = "aiohttp-3.8.6-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:39a312d0e991690ccc1a61f1e9e42daa519dcc34ad03eb6f826d94c1190190dd"}, + {file = "aiohttp-3.8.6-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e827d48cf802de06d9c935088c2924e3c7e7533377d66b6f31ed175c1620e05e"}, + {file = "aiohttp-3.8.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bd111d7fc5591ddf377a408ed9067045259ff2770f37e2d94e6478d0f3fc0c17"}, + {file = "aiohttp-3.8.6-cp39-cp39-win32.whl", hash = "sha256:caf486ac1e689dda3502567eb89ffe02876546599bbf915ec94b1fa424eeffd4"}, + {file = "aiohttp-3.8.6-cp39-cp39-win_amd64.whl", hash = "sha256:3f0e27e5b733803333bb2371249f41cf42bae8884863e8e8965ec69bebe53132"}, + {file = "aiohttp-3.8.6.tar.gz", hash = "sha256:b0cf2a4501bff9330a8a5248b4ce951851e415bdcce9dc158e76cfd55e15085c"}, +] + +[package.dependencies] +aiosignal = ">=1.1.2" +async-timeout = ">=4.0.0a3,<5.0" +attrs = ">=17.3.0" +charset-normalizer = ">=2.0,<4.0" +frozenlist = ">=1.1.1" +multidict = ">=4.5,<7.0" +yarl = ">=1.0,<2.0" + +[package.extras] +speedups = ["Brotli", "aiodns", "cchardet"] + +[[package]] +name = "aiosignal" +version = "1.3.1" +description = "aiosignal: a list of registered asynchronous callbacks" +optional = false +python-versions = ">=3.7" +files = [ + {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, + {file = 
"aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, +] + +[package.dependencies] +frozenlist = ">=1.1.0" + +[[package]] +name = "annotated-types" +version = "0.6.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +files = [ + {file = "annotated_types-0.6.0-py3-none-any.whl", hash = "sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43"}, + {file = "annotated_types-0.6.0.tar.gz", hash = "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d"}, +] + +[[package]] +name = "anyio" +version = "4.2.0" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +optional = false +python-versions = ">=3.8" +files = [ + {file = "anyio-4.2.0-py3-none-any.whl", hash = "sha256:745843b39e829e108e518c489b31dc757de7d2131d53fac32bd8df268227bfee"}, + {file = "anyio-4.2.0.tar.gz", hash = "sha256:e1875bb4b4e2de1669f4bc7869b6d3f54231cdced71605e6e64c9be77e3be50f"}, +] + +[package.dependencies] +idna = ">=2.8" +sniffio = ">=1.1" + +[package.extras] +doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] +trio = ["trio (>=0.23)"] + +[[package]] +name = "async-timeout" +version = "4.0.3" +description = "Timeout context manager for asyncio programs" +optional = false +python-versions = ">=3.7" +files = [ + {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, + {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, +] + +[[package]] +name = "attrs" +version = "23.2.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] + +[[package]] +name = "certifi" +version = "2023.11.17" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2023.11.17-py3-none-any.whl", hash = "sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474"}, + {file = "certifi-2023.11.17.tar.gz", hash = "sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = 
"charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = 
"charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = 
"sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "distro" +version = "1.9.0" +description = "Distro - an OS platform information API" +optional = false +python-versions = ">=3.6" +files = [ + {file = "distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2"}, + {file = "distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed"}, +] + +[[package]] +name = "frozenlist" +version = "1.4.1" +description = "A list-like structure which implements collections.abc.MutableSequence" +optional = false +python-versions = ">=3.8" +files = [ + {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f9aa1878d1083b276b0196f2dfbe00c9b7e752475ed3b682025ff20c1c1f51ac"}, + {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29acab3f66f0f24674b7dc4736477bcd4bc3ad4b896f5f45379a67bce8b96868"}, + {file = "frozenlist-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:74fb4bee6880b529a0c6560885fce4dc95936920f9f20f53d99a213f7bf66776"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:590344787a90ae57d62511dd7c736ed56b428f04cd8c161fcc5e7232c130c69a"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:068b63f23b17df8569b7fdca5517edef76171cf3897eb68beb01341131fbd2ad"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c849d495bf5154cd8da18a9eb15db127d4dba2968d88831aff6f0331ea9bd4c"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9750cc7fe1ae3b1611bb8cfc3f9ec11d532244235d75901fb6b8e42ce9229dfe"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9b2de4cf0cdd5bd2dee4c4f63a653c61d2408055ab77b151c1957f221cabf2a"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0633c8d5337cb5c77acbccc6357ac49a1770b8c487e5b3505c57b949b4b82e98"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = 
"sha256:27657df69e8801be6c3638054e202a135c7f299267f1a55ed3a598934f6c0d75"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:f9a3ea26252bd92f570600098783d1371354d89d5f6b7dfd87359d669f2109b5"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:4f57dab5fe3407b6c0c1cc907ac98e8a189f9e418f3b6e54d65a718aaafe3950"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e02a0e11cf6597299b9f3bbd3f93d79217cb90cfd1411aec33848b13f5c656cc"}, + {file = "frozenlist-1.4.1-cp310-cp310-win32.whl", hash = "sha256:a828c57f00f729620a442881cc60e57cfcec6842ba38e1b19fd3e47ac0ff8dc1"}, + {file = "frozenlist-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:f56e2333dda1fe0f909e7cc59f021eba0d2307bc6f012a1ccf2beca6ba362439"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a0cb6f11204443f27a1628b0e460f37fb30f624be6051d490fa7d7e26d4af3d0"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b46c8ae3a8f1f41a0d2ef350c0b6e65822d80772fe46b653ab6b6274f61d4a49"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fde5bd59ab5357e3853313127f4d3565fc7dad314a74d7b5d43c22c6a5ed2ced"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:722e1124aec435320ae01ee3ac7bec11a5d47f25d0ed6328f2273d287bc3abb0"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2471c201b70d58a0f0c1f91261542a03d9a5e088ed3dc6c160d614c01649c106"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c757a9dd70d72b076d6f68efdbb9bc943665ae954dad2801b874c8c69e185068"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f146e0911cb2f1da549fc58fc7bcd2b836a44b79ef871980d605ec392ff6b0d2"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9c515e7914626b2a2e1e311794b4c35720a0be87af52b79ff8e1429fc25f19"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c302220494f5c1ebeb0912ea782bcd5e2f8308037b3c7553fad0e48ebad6ad82"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:442acde1e068288a4ba7acfe05f5f343e19fac87bfc96d89eb886b0363e977ec"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:1b280e6507ea8a4fa0c0a7150b4e526a8d113989e28eaaef946cc77ffd7efc0a"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:fe1a06da377e3a1062ae5fe0926e12b84eceb8a50b350ddca72dc85015873f74"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:db9e724bebd621d9beca794f2a4ff1d26eed5965b004a97f1f1685a173b869c2"}, + {file = "frozenlist-1.4.1-cp311-cp311-win32.whl", hash = "sha256:e774d53b1a477a67838a904131c4b0eef6b3d8a651f8b138b04f748fccfefe17"}, + {file = "frozenlist-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:fb3c2db03683b5767dedb5769b8a40ebb47d6f7f45b1b3e3b4b51ec8ad9d9825"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1979bc0aeb89b33b588c51c54ab0161791149f2461ea7c7c946d95d5f93b56ae"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cc7b01b3754ea68a62bd77ce6020afaffb44a590c2289089289363472d13aedb"}, + {file = 
"frozenlist-1.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9c92be9fd329ac801cc420e08452b70e7aeab94ea4233a4804f0915c14eba9b"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c3894db91f5a489fc8fa6a9991820f368f0b3cbdb9cd8849547ccfab3392d86"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba60bb19387e13597fb059f32cd4d59445d7b18b69a745b8f8e5db0346f33480"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aefbba5f69d42246543407ed2461db31006b0f76c4e32dfd6f42215a2c41d09"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780d3a35680ced9ce682fbcf4cb9c2bad3136eeff760ab33707b71db84664e3a"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9acbb16f06fe7f52f441bb6f413ebae6c37baa6ef9edd49cdd567216da8600cd"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:23b701e65c7b36e4bf15546a89279bd4d8675faabc287d06bbcfac7d3c33e1e6"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3e0153a805a98f5ada7e09826255ba99fb4f7524bb81bf6b47fb702666484ae1"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:dd9b1baec094d91bf36ec729445f7769d0d0cf6b64d04d86e45baf89e2b9059b"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:1a4471094e146b6790f61b98616ab8e44f72661879cc63fa1049d13ef711e71e"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5667ed53d68d91920defdf4035d1cdaa3c3121dc0b113255124bcfada1cfa1b8"}, + {file = "frozenlist-1.4.1-cp312-cp312-win32.whl", hash = "sha256:beee944ae828747fd7cb216a70f120767fc9f4f00bacae8543c14a6831673f89"}, + {file = "frozenlist-1.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:64536573d0a2cb6e625cf309984e2d873979709f2cf22839bf2d61790b448ad5"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:20b51fa3f588ff2fe658663db52a41a4f7aa6c04f6201449c6c7c476bd255c0d"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:410478a0c562d1a5bcc2f7ea448359fcb050ed48b3c6f6f4f18c313a9bdb1826"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c6321c9efe29975232da3bd0af0ad216800a47e93d763ce64f291917a381b8eb"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48f6a4533887e189dae092f1cf981f2e3885175f7a0f33c91fb5b7b682b6bab6"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6eb73fa5426ea69ee0e012fb59cdc76a15b1283d6e32e4f8dc4482ec67d1194d"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fbeb989b5cc29e8daf7f976b421c220f1b8c731cbf22b9130d8815418ea45887"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32453c1de775c889eb4e22f1197fe3bdfe457d16476ea407472b9442e6295f7a"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693945278a31f2086d9bf3df0fe8254bbeaef1fe71e1351c3bd730aa7d31c41b"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:1d0ce09d36d53bbbe566fe296965b23b961764c0bcf3ce2fa45f463745c04701"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3a670dc61eb0d0eb7080890c13de3066790f9049b47b0de04007090807c776b0"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:dca69045298ce5c11fd539682cff879cc1e664c245d1c64da929813e54241d11"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a06339f38e9ed3a64e4c4e43aec7f59084033647f908e4259d279a52d3757d09"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b7f2f9f912dca3934c1baec2e4585a674ef16fe00218d833856408c48d5beee7"}, + {file = "frozenlist-1.4.1-cp38-cp38-win32.whl", hash = "sha256:e7004be74cbb7d9f34553a5ce5fb08be14fb33bc86f332fb71cbe5216362a497"}, + {file = "frozenlist-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:5a7d70357e7cee13f470c7883a063aae5fe209a493c57d86eb7f5a6f910fae09"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bfa4a17e17ce9abf47a74ae02f32d014c5e9404b6d9ac7f729e01562bbee601e"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b7e3ed87d4138356775346e6845cccbe66cd9e207f3cd11d2f0b9fd13681359d"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c99169d4ff810155ca50b4da3b075cbde79752443117d89429595c2e8e37fed8"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edb678da49d9f72c9f6c609fbe41a5dfb9a9282f9e6a2253d5a91e0fc382d7c0"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6db4667b187a6742b33afbbaf05a7bc551ffcf1ced0000a571aedbb4aa42fc7b"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55fdc093b5a3cb41d420884cdaf37a1e74c3c37a31f46e66286d9145d2063bd0"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82e8211d69a4f4bc360ea22cd6555f8e61a1bd211d1d5d39d3d228b48c83a897"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89aa2c2eeb20957be2d950b85974b30a01a762f3308cd02bb15e1ad632e22dc7"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9d3e0c25a2350080e9319724dede4f31f43a6c9779be48021a7f4ebde8b2d742"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7268252af60904bf52c26173cbadc3a071cece75f873705419c8681f24d3edea"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:0c250a29735d4f15321007fb02865f0e6b6a41a6b88f1f523ca1596ab5f50bd5"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:96ec70beabbd3b10e8bfe52616a13561e58fe84c0101dd031dc78f250d5128b9"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:23b2d7679b73fe0e5a4560b672a39f98dfc6f60df63823b0a9970525325b95f6"}, + {file = "frozenlist-1.4.1-cp39-cp39-win32.whl", hash = "sha256:a7496bfe1da7fb1a4e1cc23bb67c58fab69311cc7d32b5a99c2007b4b2a0e932"}, + {file = "frozenlist-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e6a20a581f9ce92d389a8c7d7c3dd47c81fd5d6e655c8dddf341e14aa48659d0"}, + {file = "frozenlist-1.4.1-py3-none-any.whl", hash = "sha256:04ced3e6a46b4cfffe20f9ae482818e34eba9b5fb0ce4056e4cc9b6e212d09b7"}, + {file = "frozenlist-1.4.1.tar.gz", hash = 
"sha256:c037a86e8513059a2613aaba4d817bb90b9d9b6b69aace3ce9c877e8c8ed402b"}, +] + +[[package]] +name = "h11" +version = "0.14.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +optional = false +python-versions = ">=3.7" +files = [ + {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, +] + +[[package]] +name = "honcho-ai" +version = "0.0.1" +description = "Python Client SDK for Honcho" +optional = false +python-versions = "^3.10" +files = [] +develop = false + +[package.dependencies] +requests = "^2.31.0" + +[package.source] +type = "directory" +url = "../../honcho/sdk" + +[[package]] +name = "httpcore" +version = "1.0.2" +description = "A minimal low-level HTTP client." +optional = false +python-versions = ">=3.8" +files = [ + {file = "httpcore-1.0.2-py3-none-any.whl", hash = "sha256:096cc05bca73b8e459a1fc3dcf585148f63e534eae4339559c9b8a8d6399acc7"}, + {file = "httpcore-1.0.2.tar.gz", hash = "sha256:9fc092e4799b26174648e54b74ed5f683132a464e95643b226e00c2ed2fa6535"}, +] + +[package.dependencies] +certifi = "*" +h11 = ">=0.13,<0.15" + +[package.extras] +asyncio = ["anyio (>=4.0,<5.0)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +trio = ["trio (>=0.22.0,<0.23.0)"] + +[[package]] +name = "httpx" +version = "0.26.0" +description = "The next generation HTTP client." +optional = false +python-versions = ">=3.8" +files = [ + {file = "httpx-0.26.0-py3-none-any.whl", hash = "sha256:8915f5a3627c4d47b73e8202457cb28f1266982d1159bd5779d86a80c0eab1cd"}, + {file = "httpx-0.26.0.tar.gz", hash = "sha256:451b55c30d5185ea6b23c2c793abf9bb237d2a7dfb901ced6ff69ad37ec1dfaf"}, +] + +[package.dependencies] +anyio = "*" +certifi = "*" +httpcore = "==1.*" +idna = "*" +sniffio = "*" + +[package.extras] +brotli = ["brotli", "brotlicffi"] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] + +[[package]] +name = "idna" +version = "3.6" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, + {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, +] + +[[package]] +name = "jsonpatch" +version = "1.33" +description = "Apply JSON-Patches (RFC 6902)" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" +files = [ + {file = "jsonpatch-1.33-py2.py3-none-any.whl", hash = "sha256:0ae28c0cd062bbd8b8ecc26d7d164fbbea9652a1a3693f3b956c1eae5145dade"}, + {file = "jsonpatch-1.33.tar.gz", hash = "sha256:9fcd4009c41e6d12348b4a0ff2563ba56a2923a7dfee731d004e212e1ee5030c"}, +] + +[package.dependencies] +jsonpointer = ">=1.9" + +[[package]] +name = "jsonpointer" +version = "2.4" +description = "Identify specific nodes in a JSON document (RFC 6901)" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" +files = [ + {file = "jsonpointer-2.4-py2.py3-none-any.whl", hash = "sha256:15d51bba20eea3165644553647711d150376234112651b4f1811022aecad7d7a"}, + {file = "jsonpointer-2.4.tar.gz", hash = "sha256:585cee82b70211fa9e6043b7bb89db6e1aa49524340dde8ad6b63206ea689d88"}, +] + +[[package]] +name = "langchain-core" 
+version = "0.1.18" +description = "Building applications with LLMs through composability" +optional = false +python-versions = ">=3.8.1,<4.0" +files = [ + {file = "langchain_core-0.1.18-py3-none-any.whl", hash = "sha256:5a60dc3c391b33834fb9c8b072abd7a0df4cbba8ce88eb1bcb288844000ab759"}, + {file = "langchain_core-0.1.18.tar.gz", hash = "sha256:ad470b21cdfdc75e829cd91c8d8eb7e0438ab8ddb5b50828125ff7ada121ee7b"}, +] + +[package.dependencies] +anyio = ">=3,<5" +jsonpatch = ">=1.33,<2.0" +langsmith = ">=0.0.83,<0.1" +packaging = ">=23.2,<24.0" +pydantic = ">=1,<3" +PyYAML = ">=5.3" +requests = ">=2,<3" +tenacity = ">=8.1.0,<9.0.0" + +[package.extras] +extended-testing = ["jinja2 (>=3,<4)"] + +[[package]] +name = "langchain-openai" +version = "0.0.2.post1" +description = "An integration package connecting OpenAI and LangChain" +optional = false +python-versions = ">=3.8.1,<4.0" +files = [ + {file = "langchain_openai-0.0.2.post1-py3-none-any.whl", hash = "sha256:ba468b94c23da9d8ccefe5d5a3c1c65b4b9702292523e53acc689a9110022e26"}, + {file = "langchain_openai-0.0.2.post1.tar.gz", hash = "sha256:f8e78db4a663feeac71d9f036b9422406c199ea3ef4c97d99ff392c93530e073"}, +] + +[package.dependencies] +langchain-core = ">=0.1.7,<0.2" +numpy = ">=1,<2" +openai = ">=1.6.1,<2.0.0" +tiktoken = ">=0.5.2,<0.6.0" + +[[package]] +name = "langsmith" +version = "0.0.85" +description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." +optional = false +python-versions = ">=3.8.1,<4.0" +files = [ + {file = "langsmith-0.0.85-py3-none-any.whl", hash = "sha256:9d0ccbcda7b69c83828060603a51bb4319e43b8dc807fbd90b6355f8ec709500"}, + {file = "langsmith-0.0.85.tar.gz", hash = "sha256:fefc631fc30d836b54d4e3f99961c41aea497633898b8f09e305b6c7216c2c54"}, +] + +[package.dependencies] +pydantic = ">=1,<3" +requests = ">=2,<3" + +[[package]] +name = "multidict" +version = "6.0.4" +description = "multidict implementation" +optional = false +python-versions = ">=3.7" +files = [ + {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b1a97283e0c85772d613878028fec909f003993e1007eafa715b24b377cb9b8"}, + {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eeb6dcc05e911516ae3d1f207d4b0520d07f54484c49dfc294d6e7d63b734171"}, + {file = "multidict-6.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d6d635d5209b82a3492508cf5b365f3446afb65ae7ebd755e70e18f287b0adf7"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c048099e4c9e9d615545e2001d3d8a4380bd403e1a0578734e0d31703d1b0c0b"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea20853c6dbbb53ed34cb4d080382169b6f4554d394015f1bef35e881bf83547"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16d232d4e5396c2efbbf4f6d4df89bfa905eb0d4dc5b3549d872ab898451f569"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36c63aaa167f6c6b04ef2c85704e93af16c11d20de1d133e39de6a0e84582a93"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:64bdf1086b6043bf519869678f5f2757f473dee970d7abf6da91ec00acb9cb98"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:43644e38f42e3af682690876cff722d301ac585c5b9e1eacc013b7a3f7b696a0"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_i686.whl", hash = 
"sha256:7582a1d1030e15422262de9f58711774e02fa80df0d1578995c76214f6954988"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ddff9c4e225a63a5afab9dd15590432c22e8057e1a9a13d28ed128ecf047bbdc"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ee2a1ece51b9b9e7752e742cfb661d2a29e7bcdba2d27e66e28a99f1890e4fa0"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a2e4369eb3d47d2034032a26c7a80fcb21a2cb22e1173d761a162f11e562caa5"}, + {file = "multidict-6.0.4-cp310-cp310-win32.whl", hash = "sha256:574b7eae1ab267e5f8285f0fe881f17efe4b98c39a40858247720935b893bba8"}, + {file = "multidict-6.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:4dcbb0906e38440fa3e325df2359ac6cb043df8e58c965bb45f4e406ecb162cc"}, + {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0dfad7a5a1e39c53ed00d2dd0c2e36aed4650936dc18fd9a1826a5ae1cad6f03"}, + {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:64da238a09d6039e3bd39bb3aee9c21a5e34f28bfa5aa22518581f910ff94af3"}, + {file = "multidict-6.0.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ff959bee35038c4624250473988b24f846cbeb2c6639de3602c073f10410ceba"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01a3a55bd90018c9c080fbb0b9f4891db37d148a0a18722b42f94694f8b6d4c9"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c5cb09abb18c1ea940fb99360ea0396f34d46566f157122c92dfa069d3e0e982"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:666daae833559deb2d609afa4490b85830ab0dfca811a98b70a205621a6109fe"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11bdf3f5e1518b24530b8241529d2050014c884cf18b6fc69c0c2b30ca248710"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d18748f2d30f94f498e852c67d61261c643b349b9d2a581131725595c45ec6c"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:458f37be2d9e4c95e2d8866a851663cbc76e865b78395090786f6cd9b3bbf4f4"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b1a2eeedcead3a41694130495593a559a668f382eee0727352b9a41e1c45759a"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7d6ae9d593ef8641544d6263c7fa6408cc90370c8cb2bbb65f8d43e5b0351d9c"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5979b5632c3e3534e42ca6ff856bb24b2e3071b37861c2c727ce220d80eee9ed"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dcfe792765fab89c365123c81046ad4103fcabbc4f56d1c1997e6715e8015461"}, + {file = "multidict-6.0.4-cp311-cp311-win32.whl", hash = "sha256:3601a3cece3819534b11d4efc1eb76047488fddd0c85a3948099d5da4d504636"}, + {file = "multidict-6.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:81a4f0b34bd92df3da93315c6a59034df95866014ac08535fc819f043bfd51f0"}, + {file = "multidict-6.0.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:67040058f37a2a51ed8ea8f6b0e6ee5bd78ca67f169ce6122f3e2ec80dfe9b78"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:853888594621e6604c978ce2a0444a1e6e70c8d253ab65ba11657659dcc9100f"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash 
= "sha256:39ff62e7d0f26c248b15e364517a72932a611a9b75f35b45be078d81bdb86603"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af048912e045a2dc732847d33821a9d84ba553f5c5f028adbd364dd4765092ac"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e8b901e607795ec06c9e42530788c45ac21ef3aaa11dbd0c69de543bfb79a9"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62501642008a8b9871ddfccbf83e4222cf8ac0d5aeedf73da36153ef2ec222d2"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:99b76c052e9f1bc0721f7541e5e8c05db3941eb9ebe7b8553c625ef88d6eefde"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:509eac6cf09c794aa27bcacfd4d62c885cce62bef7b2c3e8b2e49d365b5003fe"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:21a12c4eb6ddc9952c415f24eef97e3e55ba3af61f67c7bc388dcdec1404a067"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:5cad9430ab3e2e4fa4a2ef4450f548768400a2ac635841bc2a56a2052cdbeb87"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab55edc2e84460694295f401215f4a58597f8f7c9466faec545093045476327d"}, + {file = "multidict-6.0.4-cp37-cp37m-win32.whl", hash = "sha256:5a4dcf02b908c3b8b17a45fb0f15b695bf117a67b76b7ad18b73cf8e92608775"}, + {file = "multidict-6.0.4-cp37-cp37m-win_amd64.whl", hash = "sha256:6ed5f161328b7df384d71b07317f4d8656434e34591f20552c7bcef27b0ab88e"}, + {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5fc1b16f586f049820c5c5b17bb4ee7583092fa0d1c4e28b5239181ff9532e0c"}, + {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1502e24330eb681bdaa3eb70d6358e818e8e8f908a22a1851dfd4e15bc2f8161"}, + {file = "multidict-6.0.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b692f419760c0e65d060959df05f2a531945af31fda0c8a3b3195d4efd06de11"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45e1ecb0379bfaab5eef059f50115b54571acfbe422a14f668fc8c27ba410e7e"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddd3915998d93fbcd2566ddf9cf62cdb35c9e093075f862935573d265cf8f65d"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:59d43b61c59d82f2effb39a93c48b845efe23a3852d201ed2d24ba830d0b4cf2"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc8e1d0c705233c5dd0c5e6460fbad7827d5d36f310a0fadfd45cc3029762258"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6aa0418fcc838522256761b3415822626f866758ee0bc6632c9486b179d0b52"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6748717bb10339c4760c1e63da040f5f29f5ed6e59d76daee30305894069a660"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4d1a3d7ef5e96b1c9e92f973e43aa5e5b96c659c9bc3124acbbd81b0b9c8a951"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4372381634485bec7e46718edc71528024fcdc6f835baefe517b34a33c731d60"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:fc35cb4676846ef752816d5be2193a1e8367b4c1397b74a565a9d0389c433a1d"}, + {file = 
"multidict-6.0.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b9d9e4e2b37daddb5c23ea33a3417901fa7c7b3dee2d855f63ee67a0b21e5b1"}, + {file = "multidict-6.0.4-cp38-cp38-win32.whl", hash = "sha256:e41b7e2b59679edfa309e8db64fdf22399eec4b0b24694e1b2104fb789207779"}, + {file = "multidict-6.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:d6c254ba6e45d8e72739281ebc46ea5eb5f101234f3ce171f0e9f5cc86991480"}, + {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:16ab77bbeb596e14212e7bab8429f24c1579234a3a462105cda4a66904998664"}, + {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc779e9e6f7fda81b3f9aa58e3a6091d49ad528b11ed19f6621408806204ad35"}, + {file = "multidict-6.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ceef517eca3e03c1cceb22030a3e39cb399ac86bff4e426d4fc6ae49052cc60"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:281af09f488903fde97923c7744bb001a9b23b039a909460d0f14edc7bf59706"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52f2dffc8acaba9a2f27174c41c9e57f60b907bb9f096b36b1a1f3be71c6284d"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b41156839806aecb3641f3208c0dafd3ac7775b9c4c422d82ee2a45c34ba81ca"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3fc56f88cc98ef8139255cf8cd63eb2c586531e43310ff859d6bb3a6b51f1"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8316a77808c501004802f9beebde51c9f857054a0c871bd6da8280e718444449"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f70b98cd94886b49d91170ef23ec5c0e8ebb6f242d734ed7ed677b24d50c82cf"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bf6774e60d67a9efe02b3616fee22441d86fab4c6d335f9d2051d19d90a40063"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:e69924bfcdda39b722ef4d9aa762b2dd38e4632b3641b1d9a57ca9cd18f2f83a"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:6b181d8c23da913d4ff585afd1155a0e1194c0b50c54fcfe286f70cdaf2b7176"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:52509b5be062d9eafc8170e53026fbc54cf3b32759a23d07fd935fb04fc22d95"}, + {file = "multidict-6.0.4-cp39-cp39-win32.whl", hash = "sha256:27c523fbfbdfd19c6867af7346332b62b586eed663887392cff78d614f9ec313"}, + {file = "multidict-6.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:33029f5734336aa0d4c0384525da0387ef89148dc7191aae00ca5fb23d7aafc2"}, + {file = "multidict-6.0.4.tar.gz", hash = "sha256:3666906492efb76453c0e7b97f2cf459b0682e7402c0489a95484965dbc1da49"}, +] + +[[package]] +name = "numpy" +version = "1.26.3" +description = "Fundamental package for array computing in Python" +optional = false +python-versions = ">=3.9" +files = [ + {file = "numpy-1.26.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:806dd64230dbbfaca8a27faa64e2f414bf1c6622ab78cc4264f7f5f028fee3bf"}, + {file = "numpy-1.26.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:02f98011ba4ab17f46f80f7f8f1c291ee7d855fcef0a5a98db80767a468c85cd"}, + {file = "numpy-1.26.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d45b3ec2faed4baca41c76617fcdcfa4f684ff7a151ce6fc78ad3b6e85af0a6"}, + {file = 
"numpy-1.26.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bdd2b45bf079d9ad90377048e2747a0c82351989a2165821f0c96831b4a2a54b"}, + {file = "numpy-1.26.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:211ddd1e94817ed2d175b60b6374120244a4dd2287f4ece45d49228b4d529178"}, + {file = "numpy-1.26.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b1240f767f69d7c4c8a29adde2310b871153df9b26b5cb2b54a561ac85146485"}, + {file = "numpy-1.26.3-cp310-cp310-win32.whl", hash = "sha256:21a9484e75ad018974a2fdaa216524d64ed4212e418e0a551a2d83403b0531d3"}, + {file = "numpy-1.26.3-cp310-cp310-win_amd64.whl", hash = "sha256:9e1591f6ae98bcfac2a4bbf9221c0b92ab49762228f38287f6eeb5f3f55905ce"}, + {file = "numpy-1.26.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b831295e5472954104ecb46cd98c08b98b49c69fdb7040483aff799a755a7374"}, + {file = "numpy-1.26.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9e87562b91f68dd8b1c39149d0323b42e0082db7ddb8e934ab4c292094d575d6"}, + {file = "numpy-1.26.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c66d6fec467e8c0f975818c1796d25c53521124b7cfb760114be0abad53a0a2"}, + {file = "numpy-1.26.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f25e2811a9c932e43943a2615e65fc487a0b6b49218899e62e426e7f0a57eeda"}, + {file = "numpy-1.26.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:af36e0aa45e25c9f57bf684b1175e59ea05d9a7d3e8e87b7ae1a1da246f2767e"}, + {file = "numpy-1.26.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:51c7f1b344f302067b02e0f5b5d2daa9ed4a721cf49f070280ac202738ea7f00"}, + {file = "numpy-1.26.3-cp311-cp311-win32.whl", hash = "sha256:7ca4f24341df071877849eb2034948459ce3a07915c2734f1abb4018d9c49d7b"}, + {file = "numpy-1.26.3-cp311-cp311-win_amd64.whl", hash = "sha256:39763aee6dfdd4878032361b30b2b12593fb445ddb66bbac802e2113eb8a6ac4"}, + {file = "numpy-1.26.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a7081fd19a6d573e1a05e600c82a1c421011db7935ed0d5c483e9dd96b99cf13"}, + {file = "numpy-1.26.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12c70ac274b32bc00c7f61b515126c9205323703abb99cd41836e8125ea0043e"}, + {file = "numpy-1.26.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f784e13e598e9594750b2ef6729bcd5a47f6cfe4a12cca13def35e06d8163e3"}, + {file = "numpy-1.26.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f24750ef94d56ce6e33e4019a8a4d68cfdb1ef661a52cdaee628a56d2437419"}, + {file = "numpy-1.26.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:77810ef29e0fb1d289d225cabb9ee6cf4d11978a00bb99f7f8ec2132a84e0166"}, + {file = "numpy-1.26.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8ed07a90f5450d99dad60d3799f9c03c6566709bd53b497eb9ccad9a55867f36"}, + {file = "numpy-1.26.3-cp312-cp312-win32.whl", hash = "sha256:f73497e8c38295aaa4741bdfa4fda1a5aedda5473074369eca10626835445511"}, + {file = "numpy-1.26.3-cp312-cp312-win_amd64.whl", hash = "sha256:da4b0c6c699a0ad73c810736303f7fbae483bcb012e38d7eb06a5e3b432c981b"}, + {file = "numpy-1.26.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1666f634cb3c80ccbd77ec97bc17337718f56d6658acf5d3b906ca03e90ce87f"}, + {file = "numpy-1.26.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:18c3319a7d39b2c6a9e3bb75aab2304ab79a811ac0168a671a62e6346c29b03f"}, + {file = "numpy-1.26.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b7e807d6888da0db6e7e75838444d62495e2b588b99e90dd80c3459594e857b"}, + {file = 
"numpy-1.26.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4d362e17bcb0011738c2d83e0a65ea8ce627057b2fdda37678f4374a382a137"}, + {file = "numpy-1.26.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b8c275f0ae90069496068c714387b4a0eba5d531aace269559ff2b43655edd58"}, + {file = "numpy-1.26.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:cc0743f0302b94f397a4a65a660d4cd24267439eb16493fb3caad2e4389bccbb"}, + {file = "numpy-1.26.3-cp39-cp39-win32.whl", hash = "sha256:9bc6d1a7f8cedd519c4b7b1156d98e051b726bf160715b769106661d567b3f03"}, + {file = "numpy-1.26.3-cp39-cp39-win_amd64.whl", hash = "sha256:867e3644e208c8922a3be26fc6bbf112a035f50f0a86497f98f228c50c607bb2"}, + {file = "numpy-1.26.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3c67423b3703f8fbd90f5adaa37f85b5794d3366948efe9a5190a5f3a83fc34e"}, + {file = "numpy-1.26.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46f47ee566d98849323f01b349d58f2557f02167ee301e5e28809a8c0e27a2d0"}, + {file = "numpy-1.26.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a8474703bffc65ca15853d5fd4d06b18138ae90c17c8d12169968e998e448bb5"}, + {file = "numpy-1.26.3.tar.gz", hash = "sha256:697df43e2b6310ecc9d95f05d5ef20eacc09c7c4ecc9da3f235d39e71b7da1e4"}, +] + +[[package]] +name = "openai" +version = "1.10.0" +description = "The official Python library for the openai API" +optional = false +python-versions = ">=3.7.1" +files = [ + {file = "openai-1.10.0-py3-none-any.whl", hash = "sha256:aa69e97d0223ace9835fbf9c997abe9ee95318f684fd2de6d02c870700c71ebc"}, + {file = "openai-1.10.0.tar.gz", hash = "sha256:208886cb501b930dc63f48d51db9c15e5380380f80516d07332adad67c9f1053"}, +] + +[package.dependencies] +anyio = ">=3.5.0,<5" +distro = ">=1.7.0,<2" +httpx = ">=0.23.0,<1" +pydantic = ">=1.9.0,<3" +sniffio = "*" +tqdm = ">4" +typing-extensions = ">=4.7,<5" + +[package.extras] +datalib = ["numpy (>=1)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"] + +[[package]] +name = "packaging" +version = "23.2" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, +] + +[[package]] +name = "py-cord" +version = "2.4.1" +description = "A Python wrapper for the Discord API" +optional = false +python-versions = ">=3.8" +files = [ + {file = "py-cord-2.4.1.tar.gz", hash = "sha256:0266c9d9a9d2397622a0e5ead09826690e688ba3cf14c470167b81e6cd2d8a56"}, + {file = "py_cord-2.4.1-py3-none-any.whl", hash = "sha256:862a372c364cd263e2c8e696c64887f969c02cbdf0fdd6b09f0283e9dd67a290"}, +] + +[package.dependencies] +aiohttp = ">=3.6.0,<3.9.0" + +[package.extras] +docs = ["furo", "myst-parser (==0.18.1)", "sphinx (==5.3.0)", "sphinx-autodoc-typehints (==1.22)", "sphinx-copybutton (==0.5.1)", "sphinxcontrib-trio (==1.1.2)", "sphinxcontrib-websupport (==1.2.4)", "sphinxext-opengraph (==0.8.1)"] +speed = ["aiohttp[speedups]", "orjson (>=3.5.4)"] +voice = ["PyNaCl (>=1.3.0,<1.6)"] + +[[package]] +name = "pydantic" +version = "2.6.0" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic-2.6.0-py3-none-any.whl", hash = "sha256:1440966574e1b5b99cf75a13bec7b20e3512e8a61b894ae252f56275e2c465ae"}, + {file = "pydantic-2.6.0.tar.gz", hash = 
"sha256:ae887bd94eb404b09d86e4d12f93893bdca79d766e738528c6fa1c849f3c6bcf"}, +] + +[package.dependencies] +annotated-types = ">=0.4.0" +pydantic-core = "2.16.1" +typing-extensions = ">=4.6.1" + +[package.extras] +email = ["email-validator (>=2.0.0)"] + +[[package]] +name = "pydantic-core" +version = "2.16.1" +description = "" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic_core-2.16.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:300616102fb71241ff477a2cbbc847321dbec49428434a2f17f37528721c4948"}, + {file = "pydantic_core-2.16.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5511f962dd1b9b553e9534c3b9c6a4b0c9ded3d8c2be96e61d56f933feef9e1f"}, + {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:98f0edee7ee9cc7f9221af2e1b95bd02810e1c7a6d115cfd82698803d385b28f"}, + {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9795f56aa6b2296f05ac79d8a424e94056730c0b860a62b0fdcfe6340b658cc8"}, + {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c45f62e4107ebd05166717ac58f6feb44471ed450d07fecd90e5f69d9bf03c48"}, + {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:462d599299c5971f03c676e2b63aa80fec5ebc572d89ce766cd11ca8bcb56f3f"}, + {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21ebaa4bf6386a3b22eec518da7d679c8363fb7fb70cf6972161e5542f470798"}, + {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:99f9a50b56713a598d33bc23a9912224fc5d7f9f292444e6664236ae471ddf17"}, + {file = "pydantic_core-2.16.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:8ec364e280db4235389b5e1e6ee924723c693cbc98e9d28dc1767041ff9bc388"}, + {file = "pydantic_core-2.16.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:653a5dfd00f601a0ed6654a8b877b18d65ac32c9d9997456e0ab240807be6cf7"}, + {file = "pydantic_core-2.16.1-cp310-none-win32.whl", hash = "sha256:1661c668c1bb67b7cec96914329d9ab66755911d093bb9063c4c8914188af6d4"}, + {file = "pydantic_core-2.16.1-cp310-none-win_amd64.whl", hash = "sha256:561be4e3e952c2f9056fba5267b99be4ec2afadc27261505d4992c50b33c513c"}, + {file = "pydantic_core-2.16.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:102569d371fadc40d8f8598a59379c37ec60164315884467052830b28cc4e9da"}, + {file = "pydantic_core-2.16.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:735dceec50fa907a3c314b84ed609dec54b76a814aa14eb90da31d1d36873a5e"}, + {file = "pydantic_core-2.16.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e83ebbf020be727d6e0991c1b192a5c2e7113eb66e3def0cd0c62f9f266247e4"}, + {file = "pydantic_core-2.16.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:30a8259569fbeec49cfac7fda3ec8123486ef1b729225222f0d41d5f840b476f"}, + {file = "pydantic_core-2.16.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:920c4897e55e2881db6a6da151198e5001552c3777cd42b8a4c2f72eedc2ee91"}, + {file = "pydantic_core-2.16.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f5247a3d74355f8b1d780d0f3b32a23dd9f6d3ff43ef2037c6dcd249f35ecf4c"}, + {file = "pydantic_core-2.16.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d5bea8012df5bb6dda1e67d0563ac50b7f64a5d5858348b5c8cb5043811c19d"}, + {file = 
"pydantic_core-2.16.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ed3025a8a7e5a59817b7494686d449ebfbe301f3e757b852c8d0d1961d6be864"}, + {file = "pydantic_core-2.16.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:06f0d5a1d9e1b7932477c172cc720b3b23c18762ed7a8efa8398298a59d177c7"}, + {file = "pydantic_core-2.16.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:150ba5c86f502c040b822777e2e519b5625b47813bd05f9273a8ed169c97d9ae"}, + {file = "pydantic_core-2.16.1-cp311-none-win32.whl", hash = "sha256:d6cbdf12ef967a6aa401cf5cdf47850559e59eedad10e781471c960583f25aa1"}, + {file = "pydantic_core-2.16.1-cp311-none-win_amd64.whl", hash = "sha256:afa01d25769af33a8dac0d905d5c7bb2d73c7c3d5161b2dd6f8b5b5eea6a3c4c"}, + {file = "pydantic_core-2.16.1-cp311-none-win_arm64.whl", hash = "sha256:1a2fe7b00a49b51047334d84aafd7e39f80b7675cad0083678c58983662da89b"}, + {file = "pydantic_core-2.16.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:0f478ec204772a5c8218e30eb813ca43e34005dff2eafa03931b3d8caef87d51"}, + {file = "pydantic_core-2.16.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f1936ef138bed2165dd8573aa65e3095ef7c2b6247faccd0e15186aabdda7f66"}, + {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99d3a433ef5dc3021c9534a58a3686c88363c591974c16c54a01af7efd741f13"}, + {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bd88f40f2294440d3f3c6308e50d96a0d3d0973d6f1a5732875d10f569acef49"}, + {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3fac641bbfa43d5a1bed99d28aa1fded1984d31c670a95aac1bf1d36ac6ce137"}, + {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:72bf9308a82b75039b8c8edd2be2924c352eda5da14a920551a8b65d5ee89253"}, + {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb4363e6c9fc87365c2bc777a1f585a22f2f56642501885ffc7942138499bf54"}, + {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:20f724a023042588d0f4396bbbcf4cffd0ddd0ad3ed4f0d8e6d4ac4264bae81e"}, + {file = "pydantic_core-2.16.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:fb4370b15111905bf8b5ba2129b926af9470f014cb0493a67d23e9d7a48348e8"}, + {file = "pydantic_core-2.16.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:23632132f1fd608034f1a56cc3e484be00854db845b3a4a508834be5a6435a6f"}, + {file = "pydantic_core-2.16.1-cp312-none-win32.whl", hash = "sha256:b9f3e0bffad6e238f7acc20c393c1ed8fab4371e3b3bc311020dfa6020d99212"}, + {file = "pydantic_core-2.16.1-cp312-none-win_amd64.whl", hash = "sha256:a0b4cfe408cd84c53bab7d83e4209458de676a6ec5e9c623ae914ce1cb79b96f"}, + {file = "pydantic_core-2.16.1-cp312-none-win_arm64.whl", hash = "sha256:d195add190abccefc70ad0f9a0141ad7da53e16183048380e688b466702195dd"}, + {file = "pydantic_core-2.16.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:502c062a18d84452858f8aea1e520e12a4d5228fc3621ea5061409d666ea1706"}, + {file = "pydantic_core-2.16.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d8c032ccee90b37b44e05948b449a2d6baed7e614df3d3f47fe432c952c21b60"}, + {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:920f4633bee43d7a2818e1a1a788906df5a17b7ab6fe411220ed92b42940f818"}, + {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:9f5d37ff01edcbace53a402e80793640c25798fb7208f105d87a25e6fcc9ea06"}, + {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:399166f24c33a0c5759ecc4801f040dbc87d412c1a6d6292b2349b4c505effc9"}, + {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ac89ccc39cd1d556cc72d6752f252dc869dde41c7c936e86beac5eb555041b66"}, + {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73802194f10c394c2bedce7a135ba1d8ba6cff23adf4217612bfc5cf060de34c"}, + {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8fa00fa24ffd8c31fac081bf7be7eb495be6d248db127f8776575a746fa55c95"}, + {file = "pydantic_core-2.16.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:601d3e42452cd4f2891c13fa8c70366d71851c1593ed42f57bf37f40f7dca3c8"}, + {file = "pydantic_core-2.16.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:07982b82d121ed3fc1c51faf6e8f57ff09b1325d2efccaa257dd8c0dd937acca"}, + {file = "pydantic_core-2.16.1-cp38-none-win32.whl", hash = "sha256:d0bf6f93a55d3fa7a079d811b29100b019784e2ee6bc06b0bb839538272a5610"}, + {file = "pydantic_core-2.16.1-cp38-none-win_amd64.whl", hash = "sha256:fbec2af0ebafa57eb82c18c304b37c86a8abddf7022955d1742b3d5471a6339e"}, + {file = "pydantic_core-2.16.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a497be217818c318d93f07e14502ef93d44e6a20c72b04c530611e45e54c2196"}, + {file = "pydantic_core-2.16.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:694a5e9f1f2c124a17ff2d0be613fd53ba0c26de588eb4bdab8bca855e550d95"}, + {file = "pydantic_core-2.16.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d4dfc66abea3ec6d9f83e837a8f8a7d9d3a76d25c9911735c76d6745950e62c"}, + {file = "pydantic_core-2.16.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8655f55fe68c4685673265a650ef71beb2d31871c049c8b80262026f23605ee3"}, + {file = "pydantic_core-2.16.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:21e3298486c4ea4e4d5cc6fb69e06fb02a4e22089304308817035ac006a7f506"}, + {file = "pydantic_core-2.16.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:71b4a48a7427f14679f0015b13c712863d28bb1ab700bd11776a5368135c7d60"}, + {file = "pydantic_core-2.16.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10dca874e35bb60ce4f9f6665bfbfad050dd7573596608aeb9e098621ac331dc"}, + {file = "pydantic_core-2.16.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fa496cd45cda0165d597e9d6f01e36c33c9508f75cf03c0a650018c5048f578e"}, + {file = "pydantic_core-2.16.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5317c04349472e683803da262c781c42c5628a9be73f4750ac7d13040efb5d2d"}, + {file = "pydantic_core-2.16.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:42c29d54ed4501a30cd71015bf982fa95e4a60117b44e1a200290ce687d3e640"}, + {file = "pydantic_core-2.16.1-cp39-none-win32.whl", hash = "sha256:ba07646f35e4e49376c9831130039d1b478fbfa1215ae62ad62d2ee63cf9c18f"}, + {file = "pydantic_core-2.16.1-cp39-none-win_amd64.whl", hash = "sha256:2133b0e412a47868a358713287ff9f9a328879da547dc88be67481cdac529118"}, + {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:d25ef0c33f22649b7a088035fd65ac1ce6464fa2876578df1adad9472f918a76"}, + {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = 
"sha256:99c095457eea8550c9fa9a7a992e842aeae1429dab6b6b378710f62bfb70b394"}, + {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b49c604ace7a7aa8af31196abbf8f2193be605db6739ed905ecaf62af31ccae0"}, + {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c56da23034fe66221f2208c813d8aa509eea34d97328ce2add56e219c3a9f41c"}, + {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cebf8d56fee3b08ad40d332a807ecccd4153d3f1ba8231e111d9759f02edfd05"}, + {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:1ae8048cba95f382dba56766525abca438328455e35c283bb202964f41a780b0"}, + {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:780daad9e35b18d10d7219d24bfb30148ca2afc309928e1d4d53de86822593dc"}, + {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c94b5537bf6ce66e4d7830c6993152940a188600f6ae044435287753044a8fe2"}, + {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:adf28099d061a25fbcc6531febb7a091e027605385de9fe14dd6a97319d614cf"}, + {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:644904600c15816a1f9a1bafa6aab0d21db2788abcdf4e2a77951280473f33e1"}, + {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87bce04f09f0552b66fca0c4e10da78d17cb0e71c205864bab4e9595122cb9d9"}, + {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:877045a7969ace04d59516d5d6a7dee13106822f99a5d8df5e6822941f7bedc8"}, + {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9c46e556ee266ed3fb7b7a882b53df3c76b45e872fdab8d9cf49ae5e91147fd7"}, + {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4eebbd049008eb800f519578e944b8dc8e0f7d59a5abb5924cc2d4ed3a1834ff"}, + {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:c0be58529d43d38ae849a91932391eb93275a06b93b79a8ab828b012e916a206"}, + {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:b1fc07896fc1851558f532dffc8987e526b682ec73140886c831d773cef44b76"}, + {file = "pydantic_core-2.16.1.tar.gz", hash = "sha256:daff04257b49ab7f4b3f73f98283d3dbb1a65bf3500d55c7beac3c66c310fe34"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + +[[package]] +name = "python-dotenv" +version = "1.0.1" +description = "Read key-value pairs from a .env file and set them as environment variables" +optional = false +python-versions = ">=3.8" +files = [ + {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, + {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, +] + +[package.extras] +cli = ["click (>=5.0)"] + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = 
"PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = 
"sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "regex" +version = "2023.12.25" +description = "Alternative regular expression module, to replace re." +optional = false +python-versions = ">=3.7" +files = [ + {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0694219a1d54336fd0445ea382d49d36882415c0134ee1e8332afd1529f0baa5"}, + {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b014333bd0217ad3d54c143de9d4b9a3ca1c5a29a6d0d554952ea071cff0f1f8"}, + {file = "regex-2023.12.25-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d865984b3f71f6d0af64d0d88f5733521698f6c16f445bb09ce746c92c97c586"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e0eabac536b4cc7f57a5f3d095bfa557860ab912f25965e08fe1545e2ed8b4c"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c25a8ad70e716f96e13a637802813f65d8a6760ef48672aa3502f4c24ea8b400"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9b6d73353f777630626f403b0652055ebfe8ff142a44ec2cf18ae470395766e"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9cc99d6946d750eb75827cb53c4371b8b0fe89c733a94b1573c9dd16ea6c9e4"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88d1f7bef20c721359d8675f7d9f8e414ec5003d8f642fdfd8087777ff7f94b5"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cb3fe77aec8f1995611f966d0c656fdce398317f850d0e6e7aebdfe61f40e1cd"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7aa47c2e9ea33a4a2a05f40fcd3ea36d73853a2aae7b4feab6fc85f8bf2c9704"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:df26481f0c7a3f8739fecb3e81bc9da3fcfae34d6c094563b9d4670b047312e1"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c40281f7d70baf6e0db0c2f7472b31609f5bc2748fe7275ea65a0b4601d9b392"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:d94a1db462d5690ebf6ae86d11c5e420042b9898af5dcf278bd97d6bda065423"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ba1b30765a55acf15dce3f364e4928b80858fa8f979ad41f862358939bdd1f2f"}, + {file = "regex-2023.12.25-cp310-cp310-win32.whl", hash = "sha256:150c39f5b964e4d7dba46a7962a088fbc91f06e606f023ce57bb347a3b2d4630"}, + {file = "regex-2023.12.25-cp310-cp310-win_amd64.whl", hash = "sha256:09da66917262d9481c719599116c7dc0c321ffcec4b1f510c4f8a066f8768105"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1b9d811f72210fa9306aeb88385b8f8bcef0dfbf3873410413c00aa94c56c2b6"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d902a43085a308cef32c0d3aea962524b725403fd9373dea18110904003bac97"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d166eafc19f4718df38887b2bbe1467a4f74a9830e8605089ea7a30dd4da8887"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:c7ad32824b7f02bb3c9f80306d405a1d9b7bb89362d68b3c5a9be53836caebdb"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:636ba0a77de609d6510235b7f0e77ec494d2657108f777e8765efc060094c98c"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fda75704357805eb953a3ee15a2b240694a9a514548cd49b3c5124b4e2ad01b"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f72cbae7f6b01591f90814250e636065850c5926751af02bb48da94dfced7baa"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db2a0b1857f18b11e3b0e54ddfefc96af46b0896fb678c85f63fb8c37518b3e7"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7502534e55c7c36c0978c91ba6f61703faf7ce733715ca48f499d3dbbd7657e0"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e8c7e08bb566de4faaf11984af13f6bcf6a08f327b13631d41d62592681d24fe"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:283fc8eed679758de38fe493b7d7d84a198b558942b03f017b1f94dda8efae80"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:f44dd4d68697559d007462b0a3a1d9acd61d97072b71f6d1968daef26bc744bd"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:67d3ccfc590e5e7197750fcb3a2915b416a53e2de847a728cfa60141054123d4"}, + {file = "regex-2023.12.25-cp311-cp311-win32.whl", hash = "sha256:68191f80a9bad283432385961d9efe09d783bcd36ed35a60fb1ff3f1ec2efe87"}, + {file = "regex-2023.12.25-cp311-cp311-win_amd64.whl", hash = "sha256:7d2af3f6b8419661a0c421584cfe8aaec1c0e435ce7e47ee2a97e344b98f794f"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8a0ccf52bb37d1a700375a6b395bff5dd15c50acb745f7db30415bae3c2b0715"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c3c4a78615b7762740531c27cf46e2f388d8d727d0c0c739e72048beb26c8a9d"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ad83e7545b4ab69216cef4cc47e344d19622e28aabec61574b20257c65466d6a"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7a635871143661feccce3979e1727c4e094f2bdfd3ec4b90dfd4f16f571a87a"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d498eea3f581fbe1b34b59c697512a8baef88212f92e4c7830fcc1499f5b45a5"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:43f7cd5754d02a56ae4ebb91b33461dc67be8e3e0153f593c509e21d219c5060"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51f4b32f793812714fd5307222a7f77e739b9bc566dc94a18126aba3b92b98a3"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba99d8077424501b9616b43a2d208095746fb1284fc5ba490139651f971d39d9"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4bfc2b16e3ba8850e0e262467275dd4d62f0d045e0e9eda2bc65078c0110a11f"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8c2c19dae8a3eb0ea45a8448356ed561be843b13cbc34b840922ddf565498c1c"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = 
"sha256:60080bb3d8617d96f0fb7e19796384cc2467447ef1c491694850ebd3670bc457"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b77e27b79448e34c2c51c09836033056a0547aa360c45eeeb67803da7b0eedaf"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:518440c991f514331f4850a63560321f833979d145d7d81186dbe2f19e27ae3d"}, + {file = "regex-2023.12.25-cp312-cp312-win32.whl", hash = "sha256:e2610e9406d3b0073636a3a2e80db05a02f0c3169b5632022b4e81c0364bcda5"}, + {file = "regex-2023.12.25-cp312-cp312-win_amd64.whl", hash = "sha256:cc37b9aeebab425f11f27e5e9e6cf580be7206c6582a64467a14dda211abc232"}, + {file = "regex-2023.12.25-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:da695d75ac97cb1cd725adac136d25ca687da4536154cdc2815f576e4da11c69"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d126361607b33c4eb7b36debc173bf25d7805847346dd4d99b5499e1fef52bc7"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4719bb05094d7d8563a450cf8738d2e1061420f79cfcc1fa7f0a44744c4d8f73"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5dd58946bce44b53b06d94aa95560d0b243eb2fe64227cba50017a8d8b3cd3e2"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22a86d9fff2009302c440b9d799ef2fe322416d2d58fc124b926aa89365ec482"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2aae8101919e8aa05ecfe6322b278f41ce2994c4a430303c4cd163fef746e04f"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e692296c4cc2873967771345a876bcfc1c547e8dd695c6b89342488b0ea55cd8"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:263ef5cc10979837f243950637fffb06e8daed7f1ac1e39d5910fd29929e489a"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:d6f7e255e5fa94642a0724e35406e6cb7001c09d476ab5fce002f652b36d0c39"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:88ad44e220e22b63b0f8f81f007e8abbb92874d8ced66f32571ef8beb0643b2b"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:3a17d3ede18f9cedcbe23d2daa8a2cd6f59fe2bf082c567e43083bba3fb00347"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d15b274f9e15b1a0b7a45d2ac86d1f634d983ca40d6b886721626c47a400bf39"}, + {file = "regex-2023.12.25-cp37-cp37m-win32.whl", hash = "sha256:ed19b3a05ae0c97dd8f75a5d8f21f7723a8c33bbc555da6bbe1f96c470139d3c"}, + {file = "regex-2023.12.25-cp37-cp37m-win_amd64.whl", hash = "sha256:a6d1047952c0b8104a1d371f88f4ab62e6275567d4458c1e26e9627ad489b445"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b43523d7bc2abd757119dbfb38af91b5735eea45537ec6ec3a5ec3f9562a1c53"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:efb2d82f33b2212898f1659fb1c2e9ac30493ac41e4d53123da374c3b5541e64"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b7fca9205b59c1a3d5031f7e64ed627a1074730a51c2a80e97653e3e9fa0d415"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086dd15e9435b393ae06f96ab69ab2d333f5d65cbe65ca5a3ef0ec9564dfe770"}, + {file = 
"regex-2023.12.25-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e81469f7d01efed9b53740aedd26085f20d49da65f9c1f41e822a33992cb1590"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:34e4af5b27232f68042aa40a91c3b9bb4da0eeb31b7632e0091afc4310afe6cb"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9852b76ab558e45b20bf1893b59af64a28bd3820b0c2efc80e0a70a4a3ea51c1"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff100b203092af77d1a5a7abe085b3506b7eaaf9abf65b73b7d6905b6cb76988"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cc038b2d8b1470364b1888a98fd22d616fba2b6309c5b5f181ad4483e0017861"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:094ba386bb5c01e54e14434d4caabf6583334090865b23ef58e0424a6286d3dc"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5cd05d0f57846d8ba4b71d9c00f6f37d6b97d5e5ef8b3c3840426a475c8f70f4"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:9aa1a67bbf0f957bbe096375887b2505f5d8ae16bf04488e8b0f334c36e31360"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:98a2636994f943b871786c9e82bfe7883ecdaba2ef5df54e1450fa9869d1f756"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:37f8e93a81fc5e5bd8db7e10e62dc64261bcd88f8d7e6640aaebe9bc180d9ce2"}, + {file = "regex-2023.12.25-cp38-cp38-win32.whl", hash = "sha256:d78bd484930c1da2b9679290a41cdb25cc127d783768a0369d6b449e72f88beb"}, + {file = "regex-2023.12.25-cp38-cp38-win_amd64.whl", hash = "sha256:b521dcecebc5b978b447f0f69b5b7f3840eac454862270406a39837ffae4e697"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f7bc09bc9c29ebead055bcba136a67378f03d66bf359e87d0f7c759d6d4ffa31"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e14b73607d6231f3cc4622809c196b540a6a44e903bcfad940779c80dffa7be7"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9eda5f7a50141291beda3edd00abc2d4a5b16c29c92daf8d5bd76934150f3edc"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc6bb9aa69aacf0f6032c307da718f61a40cf970849e471254e0e91c56ffca95"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:298dc6354d414bc921581be85695d18912bea163a8b23cac9a2562bbcd5088b1"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f4e475a80ecbd15896a976aa0b386c5525d0ed34d5c600b6d3ebac0a67c7ddf"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:531ac6cf22b53e0696f8e1d56ce2396311254eb806111ddd3922c9d937151dae"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22f3470f7524b6da61e2020672df2f3063676aff444db1daa283c2ea4ed259d6"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:89723d2112697feaa320c9d351e5f5e7b841e83f8b143dba8e2d2b5f04e10923"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:0ecf44ddf9171cd7566ef1768047f6e66975788258b1c6c6ca78098b95cf9a3d"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:905466ad1702ed4acfd67a902af50b8db1feeb9781436372261808df7a2a7bca"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:4558410b7a5607a645e9804a3e9dd509af12fb72b9825b13791a37cd417d73a5"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:7e316026cc1095f2a3e8cc012822c99f413b702eaa2ca5408a513609488cb62f"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3b1de218d5375cd6ac4b5493e0b9f3df2be331e86520f23382f216c137913d20"}, + {file = "regex-2023.12.25-cp39-cp39-win32.whl", hash = "sha256:11a963f8e25ab5c61348d090bf1b07f1953929c13bd2309a0662e9ff680763c9"}, + {file = "regex-2023.12.25-cp39-cp39-win_amd64.whl", hash = "sha256:e693e233ac92ba83a87024e1d32b5f9ab15ca55ddd916d878146f4e3406b5c91"}, + {file = "regex-2023.12.25.tar.gz", hash = "sha256:29171aa128da69afdf4bde412d5bedc335f2ca8fcfe4489038577d05f16181e5"}, +] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "sniffio" +version = "1.3.0" +description = "Sniff out which async library your code is running under" +optional = false +python-versions = ">=3.7" +files = [ + {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, + {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, +] + +[[package]] +name = "tenacity" +version = "8.2.3" +description = "Retry code until it succeeds" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tenacity-8.2.3-py3-none-any.whl", hash = "sha256:ce510e327a630c9e1beaf17d42e6ffacc88185044ad85cf74c0a8887c6a0f88c"}, + {file = "tenacity-8.2.3.tar.gz", hash = "sha256:5398ef0d78e63f40007c1fb4c0bff96e1911394d2fa8d194f77619c05ff6cc8a"}, +] + +[package.extras] +doc = ["reno", "sphinx", "tornado (>=4.5)"] + +[[package]] +name = "tiktoken" +version = "0.5.2" +description = "tiktoken is a fast BPE tokeniser for use with OpenAI's models" +optional = false +python-versions = ">=3.8" +files = [ + {file = "tiktoken-0.5.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8c4e654282ef05ec1bd06ead22141a9a1687991cef2c6a81bdd1284301abc71d"}, + {file = "tiktoken-0.5.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7b3134aa24319f42c27718c6967f3c1916a38a715a0fa73d33717ba121231307"}, + {file = "tiktoken-0.5.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6092e6e77730929c8c6a51bb0d7cfdf1b72b63c4d033d6258d1f2ee81052e9e5"}, + {file = "tiktoken-0.5.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72ad8ae2a747622efae75837abba59be6c15a8f31b4ac3c6156bc56ec7a8e631"}, + {file = "tiktoken-0.5.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:51cba7c8711afa0b885445f0637f0fcc366740798c40b981f08c5f984e02c9d1"}, + {file = "tiktoken-0.5.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3d8c7d2c9313f8e92e987d585ee2ba0f7c40a0de84f4805b093b634f792124f5"}, + {file = "tiktoken-0.5.2-cp310-cp310-win_amd64.whl", hash = "sha256:692eca18c5fd8d1e0dde767f895c17686faaa102f37640e884eecb6854e7cca7"}, + {file = "tiktoken-0.5.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:138d173abbf1ec75863ad68ca289d4da30caa3245f3c8d4bfb274c4d629a2f77"}, + {file = "tiktoken-0.5.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7388fdd684690973fdc450b47dfd24d7f0cbe658f58a576169baef5ae4658607"}, + {file = "tiktoken-0.5.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a114391790113bcff670c70c24e166a841f7ea8f47ee2fe0e71e08b49d0bf2d4"}, + {file = "tiktoken-0.5.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca96f001e69f6859dd52926d950cfcc610480e920e576183497ab954e645e6ac"}, + {file = "tiktoken-0.5.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:15fed1dd88e30dfadcdd8e53a8927f04e1f6f81ad08a5ca824858a593ab476c7"}, + {file = "tiktoken-0.5.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:93f8e692db5756f7ea8cb0cfca34638316dcf0841fb8469de8ed7f6a015ba0b0"}, + {file = "tiktoken-0.5.2-cp311-cp311-win_amd64.whl", hash = "sha256:bcae1c4c92df2ffc4fe9f475bf8148dbb0ee2404743168bbeb9dcc4b79dc1fdd"}, + {file = "tiktoken-0.5.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b76a1e17d4eb4357d00f0622d9a48ffbb23401dcf36f9716d9bd9c8e79d421aa"}, + {file = "tiktoken-0.5.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:01d8b171bb5df4035580bc26d4f5339a6fd58d06f069091899d4a798ea279d3e"}, + {file = "tiktoken-0.5.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42adf7d4fb1ed8de6e0ff2e794a6a15005f056a0d83d22d1d6755a39bffd9e7f"}, + {file = "tiktoken-0.5.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c3f894dbe0adb44609f3d532b8ea10820d61fdcb288b325a458dfc60fefb7db"}, + {file = "tiktoken-0.5.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:58ccfddb4e62f0df974e8f7e34a667981d9bb553a811256e617731bf1d007d19"}, + {file = "tiktoken-0.5.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58902a8bad2de4268c2a701f1c844d22bfa3cbcc485b10e8e3e28a050179330b"}, + {file = "tiktoken-0.5.2-cp312-cp312-win_amd64.whl", hash = "sha256:5e39257826d0647fcac403d8fa0a474b30d02ec8ffc012cfaf13083e9b5e82c5"}, + {file = "tiktoken-0.5.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8bde3b0fbf09a23072d39c1ede0e0821f759b4fa254a5f00078909158e90ae1f"}, + {file = "tiktoken-0.5.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2ddee082dcf1231ccf3a591d234935e6acf3e82ee28521fe99af9630bc8d2a60"}, + {file = "tiktoken-0.5.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35c057a6a4e777b5966a7540481a75a31429fc1cb4c9da87b71c8b75b5143037"}, + {file = "tiktoken-0.5.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c4a049b87e28f1dc60509f8eb7790bc8d11f9a70d99b9dd18dfdd81a084ffe6"}, + {file = "tiktoken-0.5.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5bf5ce759089f4f6521ea6ed89d8f988f7b396e9f4afb503b945f5c949c6bec2"}, + {file = "tiktoken-0.5.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0c964f554af1a96884e01188f480dad3fc224c4bbcf7af75d4b74c4b74ae0125"}, + {file = "tiktoken-0.5.2-cp38-cp38-win_amd64.whl", hash = "sha256:368dd5726d2e8788e47ea04f32e20f72a2012a8a67af5b0b003d1e059f1d30a3"}, + 
{file = "tiktoken-0.5.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a2deef9115b8cd55536c0a02c0203512f8deb2447f41585e6d929a0b878a0dd2"}, + {file = "tiktoken-0.5.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2ed7d380195affbf886e2f8b92b14edfe13f4768ff5fc8de315adba5b773815e"}, + {file = "tiktoken-0.5.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c76fce01309c8140ffe15eb34ded2bb94789614b7d1d09e206838fc173776a18"}, + {file = "tiktoken-0.5.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60a5654d6a2e2d152637dd9a880b4482267dfc8a86ccf3ab1cec31a8c76bfae8"}, + {file = "tiktoken-0.5.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:41d4d3228e051b779245a8ddd21d4336f8975563e92375662f42d05a19bdff41"}, + {file = "tiktoken-0.5.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a5c1cdec2c92fcde8c17a50814b525ae6a88e8e5b02030dc120b76e11db93f13"}, + {file = "tiktoken-0.5.2-cp39-cp39-win_amd64.whl", hash = "sha256:84ddb36faedb448a50b246e13d1b6ee3437f60b7169b723a4b2abad75e914f3e"}, + {file = "tiktoken-0.5.2.tar.gz", hash = "sha256:f54c581f134a8ea96ce2023ab221d4d4d81ab614efa0b2fbce926387deb56c80"}, +] + +[package.dependencies] +regex = ">=2022.1.18" +requests = ">=2.26.0" + +[package.extras] +blobfile = ["blobfile (>=2)"] + +[[package]] +name = "tqdm" +version = "4.66.1" +description = "Fast, Extensible Progress Meter" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tqdm-4.66.1-py3-none-any.whl", hash = "sha256:d302b3c5b53d47bce91fea46679d9c3c6508cf6332229aa1e7d8653723793386"}, + {file = "tqdm-4.66.1.tar.gz", hash = "sha256:d88e651f9db8d8551a62556d3cff9e3034274ca5d66e93197cf2490e2dcb69c7"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"] +notebook = ["ipywidgets (>=6)"] +slack = ["slack-sdk"] +telegram = ["requests"] + +[[package]] +name = "typing-extensions" +version = "4.9.0" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, + {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, +] + +[[package]] +name = "urllib3" +version = "2.2.0" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, + {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "yarl" +version = "1.9.4" +description = "Yet another URL library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a8c1df72eb746f4136fe9a2e72b0c9dc1da1cbd23b5372f94b5820ff8ae30e0e"}, + {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a3a6ed1d525bfb91b3fc9b690c5a21bb52de28c018530ad85093cc488bee2dd2"}, + {file = "yarl-1.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c38c9ddb6103ceae4e4498f9c08fac9b590c5c71b0370f98714768e22ac6fa66"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9e09c9d74f4566e905a0b8fa668c58109f7624db96a2171f21747abc7524234"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8477c1ee4bd47c57d49621a062121c3023609f7a13b8a46953eb6c9716ca392"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5ff2c858f5f6a42c2a8e751100f237c5e869cbde669a724f2062d4c4ef93551"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:357495293086c5b6d34ca9616a43d329317feab7917518bc97a08f9e55648455"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54525ae423d7b7a8ee81ba189f131054defdb122cde31ff17477951464c1691c"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:801e9264d19643548651b9db361ce3287176671fb0117f96b5ac0ee1c3530d53"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e516dc8baf7b380e6c1c26792610230f37147bb754d6426462ab115a02944385"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:7d5aaac37d19b2904bb9dfe12cdb08c8443e7ba7d2852894ad448d4b8f442863"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:54beabb809ffcacbd9d28ac57b0db46e42a6e341a030293fb3185c409e626b8b"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bac8d525a8dbc2a1507ec731d2867025d11ceadcb4dd421423a5d42c56818541"}, + {file = "yarl-1.9.4-cp310-cp310-win32.whl", hash = "sha256:7855426dfbddac81896b6e533ebefc0af2f132d4a47340cee6d22cac7190022d"}, + {file = "yarl-1.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:848cd2a1df56ddbffeb375535fb62c9d1645dde33ca4d51341378b3f5954429b"}, + {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:35a2b9396879ce32754bd457d31a51ff0a9d426fd9e0e3c33394bf4b9036b099"}, + {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c7d56b293cc071e82532f70adcbd8b61909eec973ae9d2d1f9b233f3d943f2c"}, + {file = "yarl-1.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8a1c6c0be645c745a081c192e747c5de06e944a0d21245f4cf7c05e457c36e0"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b3c1ffe10069f655ea2d731808e76e0f452fc6c749bea04781daf18e6039525"}, + {file = 
"yarl-1.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:549d19c84c55d11687ddbd47eeb348a89df9cb30e1993f1b128f4685cd0ebbf8"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7409f968456111140c1c95301cadf071bd30a81cbd7ab829169fb9e3d72eae9"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e23a6d84d9d1738dbc6e38167776107e63307dfc8ad108e580548d1f2c587f42"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8b889777de69897406c9fb0b76cdf2fd0f31267861ae7501d93003d55f54fbe"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:03caa9507d3d3c83bca08650678e25364e1843b484f19986a527630ca376ecce"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e9035df8d0880b2f1c7f5031f33f69e071dfe72ee9310cfc76f7b605958ceb9"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:c0ec0ed476f77db9fb29bca17f0a8fcc7bc97ad4c6c1d8959c507decb22e8572"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:ee04010f26d5102399bd17f8df8bc38dc7ccd7701dc77f4a68c5b8d733406958"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:49a180c2e0743d5d6e0b4d1a9e5f633c62eca3f8a86ba5dd3c471060e352ca98"}, + {file = "yarl-1.9.4-cp311-cp311-win32.whl", hash = "sha256:81eb57278deb6098a5b62e88ad8281b2ba09f2f1147c4767522353eaa6260b31"}, + {file = "yarl-1.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d1d2532b340b692880261c15aee4dc94dd22ca5d61b9db9a8a361953d36410b1"}, + {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0d2454f0aef65ea81037759be5ca9947539667eecebca092733b2eb43c965a81"}, + {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:44d8ffbb9c06e5a7f529f38f53eda23e50d1ed33c6c869e01481d3fafa6b8142"}, + {file = "yarl-1.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aaaea1e536f98754a6e5c56091baa1b6ce2f2700cc4a00b0d49eca8dea471074"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3777ce5536d17989c91696db1d459574e9a9bd37660ea7ee4d3344579bb6f129"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fc5fc1eeb029757349ad26bbc5880557389a03fa6ada41703db5e068881e5f2"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea65804b5dc88dacd4a40279af0cdadcfe74b3e5b4c897aa0d81cf86927fee78"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa102d6d280a5455ad6a0f9e6d769989638718e938a6a0a2ff3f4a7ff8c62cc4"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09efe4615ada057ba2d30df871d2f668af661e971dfeedf0c159927d48bbeff0"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:008d3e808d03ef28542372d01057fd09168419cdc8f848efe2804f894ae03e51"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6f5cb257bc2ec58f437da2b37a8cd48f666db96d47b8a3115c29f316313654ff"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:992f18e0ea248ee03b5a6e8b3b4738850ae7dbb172cc41c966462801cbf62cf7"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0e9d124c191d5b881060a9e5060627694c3bdd1fe24c5eecc8d5d7d0eb6faabc"}, + {file = 
"yarl-1.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3986b6f41ad22988e53d5778f91855dc0399b043fc8946d4f2e68af22ee9ff10"}, + {file = "yarl-1.9.4-cp312-cp312-win32.whl", hash = "sha256:4b21516d181cd77ebd06ce160ef8cc2a5e9ad35fb1c5930882baff5ac865eee7"}, + {file = "yarl-1.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:a9bd00dc3bc395a662900f33f74feb3e757429e545d831eef5bb280252631984"}, + {file = "yarl-1.9.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:63b20738b5aac74e239622d2fe30df4fca4942a86e31bf47a81a0e94c14df94f"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7d7f7de27b8944f1fee2c26a88b4dabc2409d2fea7a9ed3df79b67277644e17"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c74018551e31269d56fab81a728f683667e7c28c04e807ba08f8c9e3bba32f14"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ca06675212f94e7a610e85ca36948bb8fc023e458dd6c63ef71abfd482481aa5"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5aef935237d60a51a62b86249839b51345f47564208c6ee615ed2a40878dccdd"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b134fd795e2322b7684155b7855cc99409d10b2e408056db2b93b51a52accc7"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d25039a474c4c72a5ad4b52495056f843a7ff07b632c1b92ea9043a3d9950f6e"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f7d6b36dd2e029b6bcb8a13cf19664c7b8e19ab3a58e0fefbb5b8461447ed5ec"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:957b4774373cf6f709359e5c8c4a0af9f6d7875db657adb0feaf8d6cb3c3964c"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d7eeb6d22331e2fd42fce928a81c697c9ee2d51400bd1a28803965883e13cead"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6a962e04b8f91f8c4e5917e518d17958e3bdee71fd1d8b88cdce74dd0ebbf434"}, + {file = "yarl-1.9.4-cp37-cp37m-win32.whl", hash = "sha256:f3bc6af6e2b8f92eced34ef6a96ffb248e863af20ef4fde9448cc8c9b858b749"}, + {file = "yarl-1.9.4-cp37-cp37m-win_amd64.whl", hash = "sha256:ad4d7a90a92e528aadf4965d685c17dacff3df282db1121136c382dc0b6014d2"}, + {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ec61d826d80fc293ed46c9dd26995921e3a82146feacd952ef0757236fc137be"}, + {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8be9e837ea9113676e5754b43b940b50cce76d9ed7d2461df1af39a8ee674d9f"}, + {file = "yarl-1.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bef596fdaa8f26e3d66af846bbe77057237cb6e8efff8cd7cc8dff9a62278bbf"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d47552b6e52c3319fede1b60b3de120fe83bde9b7bddad11a69fb0af7db32f1"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84fc30f71689d7fc9168b92788abc977dc8cefa806909565fc2951d02f6b7d57"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4aa9741085f635934f3a2583e16fcf62ba835719a8b2b28fb2917bb0537c1dfa"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:206a55215e6d05dbc6c98ce598a59e6fbd0c493e2de4ea6cc2f4934d5a18d130"}, + {file = 
"yarl-1.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07574b007ee20e5c375a8fe4a0789fad26db905f9813be0f9fef5a68080de559"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5a2e2433eb9344a163aced6a5f6c9222c0786e5a9e9cac2c89f0b28433f56e23"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6ad6d10ed9b67a382b45f29ea028f92d25bc0bc1daf6c5b801b90b5aa70fb9ec"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6fe79f998a4052d79e1c30eeb7d6c1c1056ad33300f682465e1b4e9b5a188b78"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a825ec844298c791fd28ed14ed1bffc56a98d15b8c58a20e0e08c1f5f2bea1be"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8619d6915b3b0b34420cf9b2bb6d81ef59d984cb0fde7544e9ece32b4b3043c3"}, + {file = "yarl-1.9.4-cp38-cp38-win32.whl", hash = "sha256:686a0c2f85f83463272ddffd4deb5e591c98aac1897d65e92319f729c320eece"}, + {file = "yarl-1.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:a00862fb23195b6b8322f7d781b0dc1d82cb3bcac346d1e38689370cc1cc398b"}, + {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:604f31d97fa493083ea21bd9b92c419012531c4e17ea6da0f65cacdcf5d0bd27"}, + {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8a854227cf581330ffa2c4824d96e52ee621dd571078a252c25e3a3b3d94a1b1"}, + {file = "yarl-1.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ba6f52cbc7809cd8d74604cce9c14868306ae4aa0282016b641c661f981a6e91"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6327976c7c2f4ee6816eff196e25385ccc02cb81427952414a64811037bbc8b"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8397a3817d7dcdd14bb266283cd1d6fc7264a48c186b986f32e86d86d35fbac5"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e0381b4ce23ff92f8170080c97678040fc5b08da85e9e292292aba67fdac6c34"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23d32a2594cb5d565d358a92e151315d1b2268bc10f4610d098f96b147370136"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ddb2a5c08a4eaaba605340fdee8fc08e406c56617566d9643ad8bf6852778fc7"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:26a1dc6285e03f3cc9e839a2da83bcbf31dcb0d004c72d0730e755b33466c30e"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:18580f672e44ce1238b82f7fb87d727c4a131f3a9d33a5e0e82b793362bf18b4"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:29e0f83f37610f173eb7e7b5562dd71467993495e568e708d99e9d1944f561ec"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:1f23e4fe1e8794f74b6027d7cf19dc25f8b63af1483d91d595d4a07eca1fb26c"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db8e58b9d79200c76956cefd14d5c90af54416ff5353c5bfd7cbe58818e26ef0"}, + {file = "yarl-1.9.4-cp39-cp39-win32.whl", hash = "sha256:c7224cab95645c7ab53791022ae77a4509472613e839dab722a72abe5a684575"}, + {file = "yarl-1.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:824d6c50492add5da9374875ce72db7a0733b29c2394890aef23d533106e2b15"}, + {file = "yarl-1.9.4-py3-none-any.whl", hash = "sha256:928cecb0ef9d5a7946eb6ff58417ad2fe9375762382f1bf5c55e61645f2c43ad"}, + {file = "yarl-1.9.4.tar.gz", hash 
= "sha256:566db86717cf8080b99b58b083b773a908ae40f06681e87e589a976faf8246bf"}, +] + +[package.dependencies] +idna = ">=2.0" +multidict = ">=4.0" + +[metadata] +lock-version = "2.0" +python-versions = "^3.11" +content-hash = "a81a4407e6c223c1274ff0e4d52ad52f718156699c93897af0a301b5c8e2ed19" diff --git a/example/discord/simple-roast-bot/pyproject.toml b/example/discord/simple-roast-bot/pyproject.toml new file mode 100644 index 0000000..5726a3e --- /dev/null +++ b/example/discord/simple-roast-bot/pyproject.toml @@ -0,0 +1,18 @@ +[tool.poetry] +name = "simple-roast-bot" +version = "0.1.0" +description = "Simple Discord bot with Honcho storage backend (that will roast you)" +authors = ["vintro "] +readme = "README.md" + +[tool.poetry.dependencies] +python = "^3.11" +py-cord = "^2.4.1" +python-dotenv = "^1.0.0" +langchain-core = "^0.1.12" +langchain-openai = "^0.0.2.post1" +honcho-ai = {path = "../../../sdk", develop = true} + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" From d59e1eebd3cdaf6c7c9d84a190a45dc3ff39a925 Mon Sep 17 00:00:00 2001 From: vintro Date: Wed, 7 Feb 2024 14:51:12 -0500 Subject: [PATCH 08/46] readme exists now --- example/discord/simple-roast-bot/README.md | 42 ++++++++++++++++++++++ 1 file changed, 42 insertions(+) create mode 100644 example/discord/simple-roast-bot/README.md diff --git a/example/discord/simple-roast-bot/README.md b/example/discord/simple-roast-bot/README.md new file mode 100644 index 0000000..90f8de2 --- /dev/null +++ b/example/discord/simple-roast-bot/README.md @@ -0,0 +1,42 @@ +# Simple Roast Bot + +The goal of this repo is to demonstrate how to deploy an LLM application using Honcho to manage user data. Here we've implemented a simple Discord bot that interacts with OpenAI's GPT-3.5-Turbo model via LangChain. Oh, and also, it's prompted to roast you. + +***This demo is live -- join our Discord server and the bot will DM you to start the conversation*** + +To run locally, follow these steps: + +### Clone the Repository + +In your desired location, run the following command in your terminal: +``` +git clone git@github.com:plastic-labs/honcho.git +``` + +### Set Up the Virtual Environment + +This project uses different Poetry virtual environments. If you're unfamiliar, take a look at their docs [here](https://python-poetry.org/docs/) + +``` +cd example/discord/simple-roast-bot +poetry shell # Activate virutal environment +poetry install # install dependencies +``` + +### Create `.env` File + +Copy the `.env.template` file to a `.env` file and specify the `BOT_TOKEN` and `OPENAI_API_KEY`. If you've never built a Discord bot before, check out this [`py-cord` guide](https://guide.pycord.dev/getting-started/creating-your-first-bot) to learn more about how to get a `BOT_TOKEN`. You can generate an `OPENAI_API_KEY` in the [OpenAI developer platform](https://platform.openai.com/docs/overview). + +``` +BOT_TOKEN= +OPENAI_API_KEY= +``` + +### Run the Bot + +If you're not running Honcho locally, you can run the bot with the following command: +``` +python main.py +``` + +If you are interested in running Honcho locally, follow the setup instructions at the root of this repo. 
\ No newline at end of file From 4f650d3049bebb87402c0d3595e4d13f0358e75e Mon Sep 17 00:00:00 2001 From: Vineeth Voruganti <13438633+VVoruganti@users.noreply.github.com> Date: Thu, 8 Feb 2024 00:47:40 -0800 Subject: [PATCH 09/46] Stylistic changes and generic message --- scripts/syncronizer.py | 2 +- sdk/honcho/__init__.py | 3 ++- sdk/honcho/client.py | 25 ++++++++++---------- sdk/honcho/schemas.py | 12 ++++++++++ sdk/honcho/{syncclient.py => sync_client.py} | 23 +++++++++--------- 5 files changed, 40 insertions(+), 25 deletions(-) create mode 100644 sdk/honcho/schemas.py rename sdk/honcho/{syncclient.py => sync_client.py} (91%) diff --git a/scripts/syncronizer.py b/scripts/syncronizer.py index fc560d9..758f3eb 100644 --- a/scripts/syncronizer.py +++ b/scripts/syncronizer.py @@ -13,6 +13,6 @@ sync_code = re.sub(r"Async", "", sync_code) # Write the modified code to the destination file -destination_file_path = os.path.join(this_dir, "../sdk/honcho/syncclient.py") +destination_file_path = os.path.join(this_dir, "../sdk/honcho/sync_client.py") with open(destination_file_path, "w") as destination_file: destination_file.write(sync_code) diff --git a/sdk/honcho/__init__.py b/sdk/honcho/__init__.py index 892b790..586b810 100644 --- a/sdk/honcho/__init__.py +++ b/sdk/honcho/__init__.py @@ -1,3 +1,4 @@ from .client import AsyncClient -from .syncclient import Client +from .sync_client import Client +from .schemas import Message from .cache import LRUCache diff --git a/sdk/honcho/client.py b/sdk/honcho/client.py index a6272c0..ae3f215 100644 --- a/sdk/honcho/client.py +++ b/sdk/honcho/client.py @@ -1,6 +1,7 @@ import json from typing import Dict import httpx +from .schemas import Message class AsyncClient: @@ -132,7 +133,7 @@ async def create_message(self, is_user: bool, content: str): url = f"{self.base_url}/users/{self.user_id}/sessions/{self.id}/messages" response = await self.client.post(url, json=data) data = response.json() - return AsyncMessage(self, id=data["id"], is_user=is_user, content=content) + return Message(session_id=self.id, id=data["id"], is_user=is_user, content=content) async def get_messages(self): """Get all messages for a session @@ -149,8 +150,8 @@ async def get_messages(self): response = await self.client.get(url) data = response.json() return [ - AsyncMessage( - self, + Message( + session_id=self.id, id=message["id"], is_user=message["is_user"], content=message["content"], @@ -182,13 +183,13 @@ async def delete(self): self._is_active = False -class AsyncMessage: - def __init__(self, session: AsyncSession, id: int, is_user: bool, content: str): - """Constructor for Message""" - self.session = session - self.id = id - self.is_user = is_user - self.content = content +# class AsyncMessage: +# def __init__(self, session: AsyncSession, id: int, is_user: bool, content: str): +# """Constructor for Message""" +# self.session = session +# self.id = id +# self.is_user = is_user +# self.content = content - def __str__(self): - return f"Message(id={self.id}, is_user={self.is_user}, content={self.content})" +# def __str__(self): +# return f"Message(id={self.id}, is_user={self.is_user}, content={self.content})" diff --git a/sdk/honcho/schemas.py b/sdk/honcho/schemas.py new file mode 100644 index 0000000..a35e668 --- /dev/null +++ b/sdk/honcho/schemas.py @@ -0,0 +1,12 @@ + + +class Message: + def __init__(self, session_id: int, id: int, is_user: bool, content: str): + """Constructor for Message""" + self.session_id = session_id + self.id = id + self.is_user = is_user + self.content = 
content + + def __str__(self): + return f"Message(id={self.id}, is_user={self.is_user}, content={self.content})" diff --git a/sdk/honcho/syncclient.py b/sdk/honcho/sync_client.py similarity index 91% rename from sdk/honcho/syncclient.py rename to sdk/honcho/sync_client.py index bc535db..d9fc2e5 100644 --- a/sdk/honcho/syncclient.py +++ b/sdk/honcho/sync_client.py @@ -1,6 +1,7 @@ import json from typing import Dict import httpx +from .schemas import Message class Client: @@ -132,7 +133,7 @@ def create_message(self, is_user: bool, content: str): url = f"{self.base_url}/users/{self.user_id}/sessions/{self.id}/messages" response = self.client.post(url, json=data) data = response.json() - return Message(self, id=data["id"], is_user=is_user, content=content) + return Message(session_id=self.id, id=data["id"], is_user=is_user, content=content) def get_messages(self): """Get all messages for a session @@ -150,7 +151,7 @@ def get_messages(self): data = response.json() return [ Message( - self, + session_id=self.id, id=message["id"], is_user=message["is_user"], content=message["content"], @@ -182,13 +183,13 @@ def delete(self): self._is_active = False -class Message: - def __init__(self, session: Session, id: int, is_user: bool, content: str): - """Constructor for Message""" - self.session = session - self.id = id - self.is_user = is_user - self.content = content +# class Message: +# def __init__(self, session: Session, id: int, is_user: bool, content: str): +# """Constructor for Message""" +# self.session = session +# self.id = id +# self.is_user = is_user +# self.content = content - def __str__(self): - return f"Message(id={self.id}, is_user={self.is_user}, content={self.content})" +# def __str__(self): +# return f"Message(id={self.id}, is_user={self.is_user}, content={self.content})" From 3fd1bf97100e44c909f9c8191fc72a25ac5bd08a Mon Sep 17 00:00:00 2001 From: Vineeth Voruganti <13438633+VVoruganti@users.noreply.github.com> Date: Thu, 8 Feb 2024 07:25:44 -0800 Subject: [PATCH 10/46] Metamessages with other refactoring - untested --- api/src/crud.py | 138 ++++++++++++++++++----------- api/src/main.py | 152 +++++++++++++++++++++++++------- api/src/models.py | 47 ++++------ api/src/schemas.py | 20 +++-- sdk/honcho/__init__.py | 6 +- sdk/honcho/client.py | 177 ++++++++++++++++++++++++++++---------- sdk/honcho/schemas.py | 18 +++- sdk/honcho/sync_client.py | 177 ++++++++++++++++++++++++++++---------- sdk/tests/test_async.py | 28 +++--- sdk/tests/test_sync.py | 28 +++--- 10 files changed, 550 insertions(+), 241 deletions(-) diff --git a/api/src/crud.py b/api/src/crud.py index b8cfdcb..c0dbb24 100644 --- a/api/src/crud.py +++ b/api/src/crud.py @@ -1,5 +1,6 @@ import json -from typing import Sequence, Optional +import uuid +from typing import Optional from sqlalchemy import select, Select from sqlalchemy.orm import Session @@ -7,7 +8,7 @@ from . 
import models, schemas -def get_session(db: Session, app_id: str, session_id: int, user_id: Optional[str] = None): +def get_session(db: Session, app_id: str, session_id: uuid.UUID, user_id: Optional[str] = None) -> Optional[models.Session]: stmt = select(models.Session).where(models.Session.app_id == app_id).where(models.Session.id == session_id) if user_id is not None: stmt = stmt.where(models.Session.user_id == user_id) @@ -24,6 +25,7 @@ def get_sessions( .where(models.Session.app_id == app_id) .where(models.Session.user_id == user_id) .where(models.Session.is_active.is_(True)) + .order_by(models.Session.created_at) ) if location_id is not None: @@ -32,21 +34,6 @@ def get_sessions( return stmt # return db.scalars(stmt).all() - # filtered_by_user = db.query(models.Session).filter( - # models.Session.user_id == user_id - # ) - # filtered_by_location = ( - # filtered_by_user.filter(models.Session.location_id == location_id) - # if location_id is not None - # else filtered_by_user - # ) - # return ( - # filtered_by_location.filter(models.Session.is_active.is_(True)) - # .order_by(models.Session.created_at.desc()) - # .all() - # ) - - def create_session( db: Session, app_id: str, user_id: str, session: schemas.SessionCreate ) -> models.Session: @@ -62,11 +49,8 @@ def create_session( return honcho_session -def update_session(db: Session, app_id: str, user_id: str, session_id: int, session_data: dict) -> bool: - # stmt = select(models.Session).where(models.Session.id == session_id).where(models.Session.user_id == user_id) - # honcho_session = db.scalars(stmt).one_or_none() +def update_session(db: Session, app_id: str, user_id: str, session_id: uuid.UUID, session_data: dict) -> bool: honcho_session = get_session(db, app_id=app_id, session_id=session_id, user_id=user_id) - # honcho_session = db.get(models.Session, session_id) if honcho_session is None: raise ValueError("Session not found or does not belong to user") honcho_session.session_data = json.dumps(session_data) @@ -75,7 +59,7 @@ def update_session(db: Session, app_id: str, user_id: str, session_id: int, sess return honcho_session -def delete_session(db: Session, app_id: str, user_id: str, session_id: int) -> bool: +def delete_session(db: Session, app_id: str, user_id: str, session_id: uuid.UUID) -> bool: stmt = ( select(models.Session) .where(models.Session.id == session_id) @@ -83,7 +67,6 @@ def delete_session(db: Session, app_id: str, user_id: str, session_id: int) -> b .where(models.Session.user_id == user_id) ) honcho_session = db.scalars(stmt).one_or_none() - # honcho_session = db.get(models.Session, session_id) if honcho_session is None: return False honcho_session.is_active = False @@ -92,7 +75,7 @@ def delete_session(db: Session, app_id: str, user_id: str, session_id: int) -> b def create_message( - db: Session, message: schemas.MessageCreate, app_id: str, user_id: str, session_id: int + db: Session, message: schemas.MessageCreate, app_id: str, user_id: str, session_id: uuid.UUID ) -> models.Message: honcho_session = get_session(db, app_id=app_id, session_id=session_id, user_id=user_id) if honcho_session is None: @@ -110,12 +93,19 @@ def create_message( def get_messages( - db: Session, app_id: str, user_id: str, session_id: int + db: Session, app_id: str, user_id: str, session_id: uuid.UUID ) -> Select: - session = get_session(db, app_id=app_id, session_id=session_id, user_id=user_id) - if session is None: - raise ValueError("Session not found or does not belong to user") - stmt = 
select(models.Message).where(models.Message.session_id == session_id) + # session = get_session(db, app_id=app_id, session_id=session_id, user_id=user_id) + # if session is None: + # raise ValueError("Session not found or does not belong to user") + stmt = ( + select(models.Message) + .join(models.Session, models.Session.id == models.Message.session_id) + .where(models.Session.app_id == app_id) + .where(models.Session.user_id == user_id) + .where(models.Message.session_id == session_id) + .order_by(models.Message.created_at) + ) return stmt # return db.scalars(stmt).all() # return ( @@ -124,23 +114,75 @@ def get_messages( # .all() # ) +def get_message( + db: Session, app_id: str, user_id: str, session_id: uuid.UUID, message_id: uuid.UUID +) -> Optional[models.Message]: + # session = get_session(db, app_id=app_id, session_id=session_id, user_id=user_id) + # if session is None: + # raise ValueError("Session not found or does not belong to user") + stmt = ( + select(models.Message) + .join(models.Session, models.Session.id == models.Message.session_id) + .where(models.Session.app_id == app_id) + .where(models.Session.user_id == user_id) + .where(models.Message.session_id == session_id) + .where(models.Message.id == message_id) + + ) + return db.scalars(stmt).one_or_none() -# def get_metacognitions(db: Session, message_id: int): -# return ( -# db.query(models.Metacognitions) -# .filter(models.Metacognitions.message_id == message_id) -# .all() -# ) - -# def create_metacognition( -# db: Session, metacognition: schemas.MetacognitionsCreate, message_id: int -# ): -# honcho_metacognition = models.Metacognitions( -# message_id=message_id, -# metacognition_type=metacognition.metacognition_type, -# content=metacognition.content, -# ) -# db.add(honcho_metacognition) -# db.commit() -# db.refresh(honcho_metacognition) -# return honcho_metacognition + +def get_metamessages(db: Session, app_id: str, user_id: str, session_id: uuid.UUID, message_id: Optional[uuid.UUID], metamessage_type: Optional[str] = None) -> Select: + stmt = ( + select(models.Metamessage) + .join(models.Message, models.Message.id == models.Metamessage.message_id) + .join(models.Session, models.Message.session_id == models.Session.id) + .where(models.Session.app_id == app_id) + .where(models.Session.user_id == user_id) + .where(models.Message.session_id == session_id) + .order_by(models.Metamessage.created_at) + ) + if message_id is not None: + stmt = stmt.where(models.Metamessage.message_id == message_id) + if metamessage_type is not None: + stmt = stmt.where(models.Metamessage.metamessage_type == metamessage_type) + return stmt + +def get_metamessage( + db: Session, app_id: str, user_id: str, session_id: uuid.UUID, message_id: uuid.UUID, metamessage_id: uuid.UUID +) -> Optional[models.Metamessage]: + stmt = ( + select(models.Metamessage) + .join(models.Message, models.Message.id == models.Metamessage.message_id) + .join(models.Session, models.Message.session_id == models.Session.id) + .where(models.Session.app_id == app_id) + .where(models.Session.user_id == user_id) + .where(models.Message.session_id == session_id) + .where(models.Metamessage.message_id == message_id) + .where(models.Metamessage.id == metamessage_id) + + ) + return db.scalars(stmt).one_or_none() + +def create_metamessage( + db: Session, + metamessage: schemas.MetamessageCreate, + app_id: str, + user_id: str, + session_id: uuid.UUID, + message_id: uuid.UUID, +): + message = get_message(db, app_id=app_id, session_id=session_id, user_id=user_id, message_id=message_id) + 
if message is None: + raise ValueError("Session not found or does not belong to user") + + honcho_metamessage = models.Metamessage( + message_id=message_id, + metamessage_type=metamessage.metamessage_type, + content=metamessage.content, + ) + + db.add(honcho_metamessage) + db.commit() + db.refresh(honcho_metamessage) + return honcho_metamessage diff --git a/api/src/main.py b/api/src/main.py index f35e953..747a11e 100644 --- a/api/src/main.py +++ b/api/src/main.py @@ -1,3 +1,4 @@ +import uuid from fastapi import Depends, FastAPI, HTTPException, APIRouter, Request from typing import Optional from sqlalchemy.orm import Session @@ -77,14 +78,21 @@ def create_session( schemas.Session: The Session object of the new Session """ - return crud.create_session(db, app_id=app_id, user_id=user_id, session=session) + print("===============================") + print(request) + print("===============================") + value = crud.create_session(db, app_id=app_id, user_id=user_id, session=session) + print("===============================") + print(value) + print("===============================") + return value @router.put("/sessions/{session_id}", response_model=schemas.Session) def update_session( request: Request, app_id: str, user_id: str, - session_id: int, + session_id: uuid.UUID, session: schemas.SessionUpdate, db: Session = Depends(get_db), ): @@ -112,7 +120,7 @@ def delete_session( request: Request, app_id: str, user_id: str, - session_id: int, + session_id: uuid.UUID, db: Session = Depends(get_db), ): """Delete a session by marking it as inactive @@ -136,7 +144,7 @@ def delete_session( raise HTTPException(status_code=404, detail="Session not found") @router.get("/sessions/{session_id}", response_model=schemas.Session) -def get_session(request: Request, app_id: str, user_id: str, session_id: int, db: Session = Depends(get_db)): +def get_session(request: Request, app_id: str, user_id: str, session_id: uuid.UUID, db: Session = Depends(get_db)): """Get a specific session for a user by ID Args: @@ -167,7 +175,7 @@ def create_message_for_session( request: Request, app_id: str, user_id: str, - session_id: int, + session_id: uuid.UUID, message: schemas.MessageCreate, db: Session = Depends(get_db), ): @@ -199,7 +207,7 @@ def get_messages_for_session( request: Request, app_id: str, user_id: str, - session_id: int, + session_id: uuid.UUID, db: Session = Depends(get_db), ): """Get all messages for a session @@ -221,36 +229,116 @@ def get_messages_for_session( except ValueError: raise HTTPException(status_code=404, detail="Session not found") +@router.get( + "sessions/{session_id}/messages/{message_id}", + response_model=schemas.Message +) +def get_message( + request: Request, + app_id: str, + user_id: str, + session_id: uuid.UUID, + message_id: uuid.UUID, + db: Session = Depends(get_db), +): + """ + + """ + honcho_message = crud.get_message(db, app_id=app_id, session_id=session_id, user_id=user_id, message_id=message_id) + if honcho_message is None: + raise HTTPException(status_code=404, detail="Session not found") + return honcho_message + -app.include_router(router) ######################################################## # Metacognition Routes ######################################################## -# @app.get( -# "/users/{user_id}/sessions/{session_id}/messages/{message_id}/metacognitions/", -# response_model=list[schemas.Metacognitions], -# ) -# def get_metacognitions_for_message( -# user_id: str, -# session_id: int, -# message_id: int, -# db: Session = Depends(get_db), -# ): -# return 
crud.get_metacognitions(db, message_id) - - -# @app.post( -# "/users/{user_id}/sessions/{session_id}/messages/{message_id}/metacognitions/", -# response_model=schemas.Metacognitions, -# ) -# def create_metacognition_for_message( -# user_id: str, -# session_id: int, -# message_id: int, -# metacognition: schemas.MetacognitionsCreate, -# db: Session = Depends(get_db), -# ): -# return crud.create_metacognition(db, metacognition, message_id) +@router.post( + "/sessions/{session_id}/metamessages", + response_model=schemas.Metamessage +) +def create_metamessage( + request: Request, + app_id: str, + user_id: str, + session_id: uuid.UUID, + message_id: uuid.UUID, + metamessage: schemas.MetamessageCreate, + db: Session = Depends(get_db), +): + """Adds a message to a session + + Args: + app_id (str): The ID of the app representing the client application using honcho + user_id (str): The User ID representing the user, managed by the user + session_id (int): The ID of the Session to add the message to + message (schemas.MessageCreate): The Message object to add containing the message content and type + + Returns: + schemas.Message: The Message object of the added message + + Raises: + HTTPException: If the session is not found + + """ + try: + return crud.create_metamessage(db, metamessage=metamessage, app_id=app_id, user_id=user_id, session_id=session_id, message_id=message_id) + except ValueError: + raise HTTPException(status_code=404, detail="Session not found") + +@router.get( + "/sessions/{session_id}/metamessages", + response_model=Page[schemas.Metamessage] +) +def get_metamessages( + request: Request, + app_id: str, + user_id: str, + session_id: uuid.UUID, + message_id: Optional[uuid.UUID] = None, + metamessage_type: Optional[str] = None, + db: Session = Depends(get_db), +): + """Get all messages for a session + + Args: + app_id (str): The ID of the app representing the client application using honcho + user_id (str): The User ID representing the user, managed by the user + session_id (int): The ID of the Session to retrieve + + Returns: + list[schemas.Message]: List of Message objects + + Raises: + HTTPException: If the session is not found + + """ + try: + return paginate(db, crud.get_metamessages(db, app_id=app_id, user_id=user_id, session_id=session_id, message_id=message_id, metamessage_type=metamessage_type)) + except ValueError: + raise HTTPException(status_code=404, detail="Session not found") + +@router.get("/sessions/{session_id}/metamessages/{metamessage_id}", response_model=schemas.Metamessage) +def get_metamessage(request: Request, app_id: str, user_id: str, session_id: uuid.UUID, message_id: uuid.UUID, metamessage_id: uuid.UUID, db: Session = Depends(get_db)): + """Get a specific session for a user by ID + + Args: + app_id (str): The ID of the app representing the client application using honcho + user_id (str): The User ID representing the user, managed by the user + session_id (int): The ID of the Session to retrieve + + Returns: + schemas.Session: The Session object of the requested Session + + Raises: + HTTPException: If the session is not found + """ + honcho_metamessage = crud.get_metamessage(db, app_id=app_id, session_id=session_id, user_id=user_id, message_id=message_id, metamessage_id=metamessage_id) + if honcho_metamessage is None: + raise HTTPException(status_code=404, detail="Session not found") + return honcho_metamessage + +app.include_router(router) diff --git a/api/src/models.py b/api/src/models.py index a269831..4371229 100644 --- a/api/src/models.py +++ 
b/api/src/models.py @@ -1,4 +1,5 @@ -from sqlalchemy import Boolean, Column, ForeignKey, Integer, String, DateTime +from sqlalchemy import Boolean, Column, ForeignKey, Integer, String, DateTime, Uuid +import uuid import datetime from sqlalchemy.orm import relationship, Mapped, mapped_column @@ -7,18 +8,12 @@ class Session(Base): __tablename__ = "sessions" - # id = Column(Integer, primary_key=True, index=True, autoincrement=True) - # user_id = Column(String, index=True) - # location_id = Column(String, index=True) - # is_active = Column(Boolean, default=True) - # session_data = Column(String) - # created_at = Column(DateTime, default=datetime.datetime.utcnow) - id: Mapped[int] = mapped_column(primary_key=True, index=True, autoincrement=True) - app_id: Mapped[str] = mapped_column(index=True) - user_id: Mapped[str] = mapped_column(index=True) - location_id: Mapped[str] = mapped_column(index=True) + id: Mapped[uuid.UUID] = mapped_column(primary_key=True, index=True, default=uuid.uuid4) + app_id: Mapped[str] = mapped_column(String(512), index=True) + user_id: Mapped[str] = mapped_column(String(512), index=True) + location_id: Mapped[str] = mapped_column(String(512), index=True) is_active: Mapped[bool] = mapped_column(default=True) - session_data: Mapped[str] + session_data: Mapped[str] created_at: Mapped[datetime.datetime] = mapped_column(default=datetime.datetime.utcnow) messages = relationship("Message", back_populates="session") @@ -28,33 +23,27 @@ def __repr__(self) -> str: class Message(Base): __tablename__ = "messages" - # id = Column(Integer, primary_key=True, index=True, autoincrement=True) - # session_id = Column(Integer, ForeignKey("sessions.id")) - # is_user = Column(Boolean) - # content = Column(String) - id: Mapped[int] = mapped_column(primary_key=True, index=True, autoincrement=True) - session_id: Mapped[int] = mapped_column(ForeignKey("sessions.id")) + id: Mapped[uuid.UUID] = mapped_column(primary_key=True, index=True, default=uuid.uuid4) + session_id: Mapped[uuid.UUID] = mapped_column(ForeignKey("sessions.id")) is_user: Mapped[bool] - content: Mapped[str] # TODO add a max message length + content: Mapped[str] = mapped_column(String(65535)) + created_at: Mapped[datetime.datetime] = mapped_column(default=datetime.datetime.utcnow) session = relationship("Session", back_populates="messages") - metamessages = relationship("Metamessages", back_populates="message") + metamessages = relationship("Metamessage", back_populates="message") def __repr__(self) -> str: return f"Message(id={self.id}, session_id={self.session_id}, is_user={self.is_user}, content={self.content[10:]})" -# TODO: add metamessages data to messages -class Metamessages(Base): +class Metamessage(Base): __tablename__ = "metamessages" - id: Mapped[int] = mapped_column(primary_key=True, index=True, autoincrement=True) - metamessage_type: Mapped[str] # TODO add a max metamessages type length - content: Mapped[str] - # id = Column(Integer, primary_key=True, index=True, autoincrement=True) - message_id = Column(Integer, ForeignKey("messages.id")) - # metacognition_type = Column(String, index=True) - # content = Column(String) + id: Mapped[uuid.UUID] = mapped_column(primary_key=True, index=True, default=uuid.uuid4) + metamessage_type: Mapped[str] = mapped_column(String(512), index=True) + content: Mapped[str] = mapped_column(String(65535)) + message_id = Column(Uuid, ForeignKey("messages.id")) message = relationship("Message", back_populates="metamessages") + created_at: Mapped[datetime.datetime] = 
mapped_column(default=datetime.datetime.utcnow) def __repr__(self) -> str: return f"Metamessages(id={self.id}, message_id={self.message_id}, metamessage_type={self.metamessage_type}, content={self.content[10:]})" diff --git a/api/src/schemas.py b/api/src/schemas.py index 8f51564..7c89975 100644 --- a/api/src/schemas.py +++ b/api/src/schemas.py @@ -1,4 +1,6 @@ from pydantic import BaseModel +import datetime +import uuid class MessageBase(BaseModel): @@ -11,8 +13,9 @@ class MessageCreate(MessageBase): class Message(MessageBase): - session_id: int - id: int + session_id: uuid.UUID + id: uuid.UUID + created_at: datetime.datetime class Config: orm_mode = True @@ -32,29 +35,32 @@ class SessionUpdate(SessionBase): class Session(SessionBase): - id: int + id: uuid.UUID # messages: list[Message] is_active: bool user_id: str location_id: str app_id: str session_data: str + created_at: datetime.datetime class Config: orm_mode = True -class MetamessagesBase(BaseModel): +class MetamessageBase(BaseModel): metamessage_type: str content: str -class MetamessagesCreate(MetamessagesBase): +class MetamessageCreate(MetamessageBase): pass -class Metamessages(MetamessagesBase): - id: int +class Metamessage(MetamessageBase): + id: uuid.UUID + message_id: uuid.UUID + created_at: datetime.datetime class Config: orm_mode = True diff --git a/sdk/honcho/__init__.py b/sdk/honcho/__init__.py index 0d4920f..74b7660 100644 --- a/sdk/honcho/__init__.py +++ b/sdk/honcho/__init__.py @@ -1,4 +1,4 @@ -from .client import AsyncClient, AsyncSession, AsyncGetSessionResponse, AsyncGetMessageResponse -from .sync_client import Client, Session, GetSessionResponse, GetMessageResponse -from .schemas import Message +from .client import AsyncClient, AsyncSession, AsyncGetSessionPage, AsyncGetMessagePage +from .sync_client import Client, Session, GetSessionPage, GetMessagePage +from .schemas import Message, Metamessage from .cache import LRUCache diff --git a/sdk/honcho/client.py b/sdk/honcho/client.py index 19f2951..c75e993 100644 --- a/sdk/honcho/client.py +++ b/sdk/honcho/client.py @@ -1,16 +1,29 @@ import json +import uuid from typing import Dict, Optional import httpx -from .schemas import Message +from .schemas import Message, Metamessage -class AsyncGetSessionResponse: - def __init__(self, client, response: Dict): - self.client = client +class AsyncGetPage: + def __init__(self, response: Dict) -> None: self.total = response["total"] self.page = response["page"] self.page_size = response["size"] self.pages = response["pages"] - self.sessions = [ + self.items =[] + + async def next(self): + pass + +class AsyncGetSessionPage(AsyncGetPage): + def __init__(self, client, response: Dict): + super().__init__(response) + self.client = client + # self.total = response["total"] + # self.page = response["page"] + # self.page_size = response["size"] + # self.pages = response["pages"] + self.items = [ AsyncSession( client=client, id=session["id"], @@ -25,23 +38,25 @@ def __init__(self, client, response: Dict): async def next(self): if self.page >= self.pages: return None - user_id = self.sessions[0].user_id - location_id = self.sessions[0].location_id + user_id = self.items[0].user_id + location_id = self.items[0].location_id return await self.client.get_sessions(user_id, location_id, self.page + 1, self.page_size) -class AsyncGetMessageResponse: +class AsyncGetMessagePage(AsyncGetPage): def __init__(self, session, response: Dict): + super().__init__(response) self.session = session - self.total = response["total"] - self.page = response["page"] 
- self.page_size = response["size"] - self.pages = response["pages"] - self.messages = [ + # self.total = response["total"] + # self.page = response["page"] + # self.page_size = response["size"] + # self.pages = response["pages"] + self.items = [ Message( session_id=session.id, id=message["id"], is_user=message["is_user"], content=message["content"], + created_at=message["created_at"], ) for message in response["items"] ] @@ -51,6 +66,27 @@ async def next(self): return None return await self.session.get_messages((self.page + 1), self.page_size) +class AsyncGetMetamessagePage(AsyncGetPage): + def __init__(self, session, response: Dict) -> None: + super().__init__(response) + self.session = session + self.items = [ + Metamessage( + id=metamessage["id"], + message_id=metamessage["message_id"], + metamessage_type=metamessage["metamessage_type"], + content=metamessage["content"], + created_at=metamessage["created_at"], + ) + for metamessage in response["items"] + ] + + async def next(self): + if self.page >= self.pages: + return None + return await self.session.get_metamessages((self.page + 1), self.page_size) + + class AsyncClient: def __init__(self, app_id: str, base_url: str = "https://demo.honcho.dev"): @@ -63,7 +99,7 @@ def __init__(self, app_id: str, base_url: str = "https://demo.honcho.dev"): def common_prefix(self): return f"{self.base_url}/apps/{self.app_id}" - async def get_session(self, user_id: str, session_id: int): + async def get_session(self, user_id: str, session_id: uuid.UUID): """Get a specific session for a user by ID Args: @@ -76,6 +112,7 @@ async def get_session(self, user_id: str, session_id: int): """ url = f"{self.common_prefix}/users/{user_id}/sessions/{session_id}" response = await self.client.get(url) + response.raise_for_status() data = response.json() return AsyncSession( client=self, @@ -103,7 +140,7 @@ async def get_sessions(self, user_id: str, location_id: Optional[str] = None, pa response = await self.client.get(url) response.raise_for_status() data = response.json() - return AsyncGetSessionResponse(self, data) + return AsyncGetSessionPage(self, data) async def get_sessions_generator(self, user_id: str, location_id: Optional[str] = None): page = 1 @@ -111,7 +148,7 @@ async def get_sessions_generator(self, user_id: str, location_id: Optional[str] get_session_response = await self.get_sessions(user_id, location_id, page, page_size) while True: # get_session_response = self.get_sessions(user_id, location_id, page, page_size) - for session in get_session_response.sessions: + for session in get_session_response.items: yield session new_sessions = await get_session_response.next() @@ -137,6 +174,7 @@ async def create_session( data = {"location_id": location_id, "session_data": session_data} url = f"{self.common_prefix}/users/{user_id}/sessions" response = await self.client.post(url, json=data) + response.raise_for_status() data = response.json() return AsyncSession( self, @@ -152,7 +190,7 @@ class AsyncSession: def __init__( self, client: AsyncClient, - id: int, + id: uuid.UUID, user_id: str, location_id: str, session_data: dict | str, @@ -197,10 +235,27 @@ async def create_message(self, is_user: bool, content: str): data = {"is_user": is_user, "content": content} url = f"{self.common_prefix}/users/{self.user_id}/sessions/{self.id}/messages" response = await self.client.post(url, json=data) + response.raise_for_status() + data = response.json() + return Message(session_id=self.id, id=data["id"], is_user=is_user, content=content, created_at=data["created_at"]) + + 
async def get_message(self, message_id: uuid.UUID) -> Message: + """Get a specific message for a session based on ID + + Args: + message_id (uuid.UUID): The ID of the Message to retrieve + + Returns: + Message: The Message object + + """ + url = f"{self.common_prefix}/users/{self.user_id}/sessions/{self.id}/messages/{message_id}" + response = await self.client.get(url) + response.raise_for_status() data = response.json() - return Message(session_id=self.id, id=data["id"], is_user=is_user, content=content) + return Message(session_id=self.id, id=data["id"], is_user=data["is_user"], content=data["content"], created_at=data["created_at"]) - async def get_messages(self, page: int = 1, page_size: int = 50) -> AsyncGetMessageResponse: + async def get_messages(self, page: int = 1, page_size: int = 50) -> AsyncGetMessagePage: """Get all messages for a session Args: @@ -215,23 +270,63 @@ async def get_messages(self, page: int = 1, page_size: int = 50) -> AsyncGetMess response = await self.client.get(url) response.raise_for_status() data = response.json() - return AsyncGetMessageResponse(self, data) - # return [ - # Message( - # self, - # id=message["id"], - # is_user=message["is_user"], - # content=message["content"], - # ) - # for message in data - # ] + return AsyncGetMessagePage(self, data) + async def get_messages_generator(self): page = 1 page_size = 50 - get_messages_response = await self.get_messages(page, page_size) + get_messages_page= await self.get_messages(page, page_size) while True: # get_session_response = self.get_sessions(user_id, location_id, page, page_size) - for message in get_messages_response.messages: + for message in get_messages_page.items: + yield message + + new_messages = await get_messages_page.next() + if not new_messages: + break + + get_messages_page = new_messages + + async def get_metamessage(self, metamessage_id: uuid.UUID) -> Metamessage: + """Get a specific message for a session based on ID + + Args: + message_id (uuid.UUID): The ID of the Message to retrieve + + Returns: + Message: The Message object + + """ + url = f"{self.common_prefix}/users/{self.user_id}/sessions/{self.id}/metamessages/{metamessage_id}" + response = await self.client.get(url) + response.raise_for_status() + data = response.json() + return Metamessage(id=data["id"], message_id=data["message_id"], metamessage_type=data["metamessage_type"], content=data["content"], created_at=data["created_at"]) + + async def get_metamessages(self, page: int = 1, page_size: int = 50) -> AsyncGetMetamessagePage: + """Get all messages for a session + + Args: + user_id (str): The User ID representing the user, managed by the user + session_id (int): The ID of the Session to retrieve + + Returns: + list[Dict]: List of Message objects + + """ + url = f"{self.common_prefix}/users/{self.user_id}/sessions/{self.id}/messages?page={page}&size={page_size}" + response = await self.client.get(url) + response.raise_for_status() + data = response.json() + return AsyncGetMetamessagePage(self, data) + + async def get_metamessages_generator(self): + page = 1 + page_size = 50 + get_messages_response = await self.get_metamessages(page, page_size) + while True: + # get_session_response = self.get_sessions(user_id, location_id, page, page_size) + for message in get_messages_response.items: yield message new_messages = await get_messages_response.next() @@ -239,12 +334,13 @@ async def get_messages_generator(self): break get_messages_response = new_messages + async def update(self, session_data: Dict): - """Update the metadata of a 
session + """Update the session_data of a session Args: - session_data (Dict): The Session object containing any new metadata + session_data (Dict): The Session object containing any new session_data Returns: @@ -257,20 +353,9 @@ async def update(self, session_data: Dict): self.session_data = session_data return success - async def delete(self): - """Delete a session by marking it as inactive""" + async def close(self): + """Closes a session by marking it as inactive""" url = f"{self.common_prefix}/users/{self.user_id}/sessions/{self.id}" response = await self.client.delete(url) self._is_active = False - -# class AsyncMessage: -# def __init__(self, session: AsyncSession, id: int, is_user: bool, content: str): -# """Constructor for Message""" -# self.session = session -# self.id = id -# self.is_user = is_user -# self.content = content - -# def __str__(self): -# return f"Message(id={self.id}, is_user={self.is_user}, content={self.content})" diff --git a/sdk/honcho/schemas.py b/sdk/honcho/schemas.py index a35e668..d2970c9 100644 --- a/sdk/honcho/schemas.py +++ b/sdk/honcho/schemas.py @@ -1,12 +1,26 @@ - +import uuid +import datetime class Message: - def __init__(self, session_id: int, id: int, is_user: bool, content: str): + def __init__(self, session_id: uuid.UUID, id: uuid.UUID, is_user: bool, content: str, created_at: datetime.datetime): """Constructor for Message""" self.session_id = session_id self.id = id self.is_user = is_user self.content = content + self.created_at = created_at def __str__(self): return f"Message(id={self.id}, is_user={self.is_user}, content={self.content})" + +class Metamessage: + def __init__(self, id: uuid.UUID, message_id: uuid.UUID, metamessage_type: str, content: str, created_at: datetime.datetime): + """Constructor for Metamessage""" + self.id = id + self.message_id = message_id + self.metamessage_type = metamessage_type + self.content = content + self.created_at = created_at + + def __str__(self): + return f"Metamessage(id={self.id}, message_id={self.message_id}, metamessage_type={self.metamessage_type}, content={self.content})" diff --git a/sdk/honcho/sync_client.py b/sdk/honcho/sync_client.py index b317700..dcf3146 100644 --- a/sdk/honcho/sync_client.py +++ b/sdk/honcho/sync_client.py @@ -1,16 +1,29 @@ import json +import uuid from typing import Dict, Optional import httpx -from .schemas import Message +from .schemas import Message, Metamessage -class GetSessionResponse: - def __init__(self, client, response: Dict): - self.client = client +class GetPage: + def __init__(self, response: Dict) -> None: self.total = response["total"] self.page = response["page"] self.page_size = response["size"] self.pages = response["pages"] - self.sessions = [ + self.items =[] + + def next(self): + pass + +class GetSessionPage(GetPage): + def __init__(self, client, response: Dict): + super().__init__(response) + self.client = client + # self.total = response["total"] + # self.page = response["page"] + # self.page_size = response["size"] + # self.pages = response["pages"] + self.items = [ Session( client=client, id=session["id"], @@ -25,23 +38,25 @@ def __init__(self, client, response: Dict): def next(self): if self.page >= self.pages: return None - user_id = self.sessions[0].user_id - location_id = self.sessions[0].location_id + user_id = self.items[0].user_id + location_id = self.items[0].location_id return self.client.get_sessions(user_id, location_id, self.page + 1, self.page_size) -class GetMessageResponse: +class GetMessagePage(GetPage): def __init__(self, session, 
response: Dict): + super().__init__(response) self.session = session - self.total = response["total"] - self.page = response["page"] - self.page_size = response["size"] - self.pages = response["pages"] - self.messages = [ + # self.total = response["total"] + # self.page = response["page"] + # self.page_size = response["size"] + # self.pages = response["pages"] + self.items = [ Message( session_id=session.id, id=message["id"], is_user=message["is_user"], content=message["content"], + created_at=message["created_at"], ) for message in response["items"] ] @@ -51,6 +66,27 @@ def next(self): return None return self.session.get_messages((self.page + 1), self.page_size) +class GetMetamessagePage(GetPage): + def __init__(self, session, response: Dict) -> None: + super().__init__(response) + self.session = session + self.items = [ + Metamessage( + id=metamessage["id"], + message_id=metamessage["message_id"], + metamessage_type=metamessage["metamessage_type"], + content=metamessage["content"], + created_at=metamessage["created_at"], + ) + for metamessage in response["items"] + ] + + def next(self): + if self.page >= self.pages: + return None + return self.session.get_metamessages((self.page + 1), self.page_size) + + class Client: def __init__(self, app_id: str, base_url: str = "https://demo.honcho.dev"): @@ -63,7 +99,7 @@ def __init__(self, app_id: str, base_url: str = "https://demo.honcho.dev"): def common_prefix(self): return f"{self.base_url}/apps/{self.app_id}" - def get_session(self, user_id: str, session_id: int): + def get_session(self, user_id: str, session_id: uuid.UUID): """Get a specific session for a user by ID Args: @@ -76,6 +112,7 @@ def get_session(self, user_id: str, session_id: int): """ url = f"{self.common_prefix}/users/{user_id}/sessions/{session_id}" response = self.client.get(url) + response.raise_for_status() data = response.json() return Session( client=self, @@ -103,7 +140,7 @@ def get_sessions(self, user_id: str, location_id: Optional[str] = None, page: in response = self.client.get(url) response.raise_for_status() data = response.json() - return GetSessionResponse(self, data) + return GetSessionPage(self, data) def get_sessions_generator(self, user_id: str, location_id: Optional[str] = None): page = 1 @@ -111,7 +148,7 @@ def get_sessions_generator(self, user_id: str, location_id: Optional[str] = None get_session_response = self.get_sessions(user_id, location_id, page, page_size) while True: # get_session_response = self.get_sessions(user_id, location_id, page, page_size) - for session in get_session_response.sessions: + for session in get_session_response.items: yield session new_sessions = get_session_response.next() @@ -137,6 +174,7 @@ def create_session( data = {"location_id": location_id, "session_data": session_data} url = f"{self.common_prefix}/users/{user_id}/sessions" response = self.client.post(url, json=data) + response.raise_for_status() data = response.json() return Session( self, @@ -152,7 +190,7 @@ class Session: def __init__( self, client: Client, - id: int, + id: uuid.UUID, user_id: str, location_id: str, session_data: dict | str, @@ -197,10 +235,27 @@ def create_message(self, is_user: bool, content: str): data = {"is_user": is_user, "content": content} url = f"{self.common_prefix}/users/{self.user_id}/sessions/{self.id}/messages" response = self.client.post(url, json=data) + response.raise_for_status() + data = response.json() + return Message(session_id=self.id, id=data["id"], is_user=is_user, content=content, created_at=data["created_at"]) + + def 
get_message(self, message_id: uuid.UUID) -> Message: + """Get a specific message for a session based on ID + + Args: + message_id (uuid.UUID): The ID of the Message to retrieve + + Returns: + Message: The Message object + + """ + url = f"{self.common_prefix}/users/{self.user_id}/sessions/{self.id}/messages/{message_id}" + response = self.client.get(url) + response.raise_for_status() data = response.json() - return Message(session_id=self.id, id=data["id"], is_user=is_user, content=content) + return Message(session_id=self.id, id=data["id"], is_user=data["is_user"], content=data["content"], created_at=data["created_at"]) - def get_messages(self, page: int = 1, page_size: int = 50) -> GetMessageResponse: + def get_messages(self, page: int = 1, page_size: int = 50) -> GetMessagePage: """Get all messages for a session Args: @@ -215,23 +270,63 @@ def get_messages(self, page: int = 1, page_size: int = 50) -> GetMessageResponse response = self.client.get(url) response.raise_for_status() data = response.json() - return GetMessageResponse(self, data) - # return [ - # Message( - # self, - # id=message["id"], - # is_user=message["is_user"], - # content=message["content"], - # ) - # for message in data - # ] + return GetMessagePage(self, data) + def get_messages_generator(self): page = 1 page_size = 50 - get_messages_response = self.get_messages(page, page_size) + get_messages_page= self.get_messages(page, page_size) while True: # get_session_response = self.get_sessions(user_id, location_id, page, page_size) - for message in get_messages_response.messages: + for message in get_messages_page.items: + yield message + + new_messages = get_messages_page.next() + if not new_messages: + break + + get_messages_page = new_messages + + def get_metamessage(self, metamessage_id: uuid.UUID) -> Metamessage: + """Get a specific message for a session based on ID + + Args: + message_id (uuid.UUID): The ID of the Message to retrieve + + Returns: + Message: The Message object + + """ + url = f"{self.common_prefix}/users/{self.user_id}/sessions/{self.id}/metamessages/{metamessage_id}" + response = self.client.get(url) + response.raise_for_status() + data = response.json() + return Metamessage(id=data["id"], message_id=data["message_id"], metamessage_type=data["metamessage_type"], content=data["content"], created_at=data["created_at"]) + + def get_metamessages(self, page: int = 1, page_size: int = 50) -> GetMetamessagePage: + """Get all messages for a session + + Args: + user_id (str): The User ID representing the user, managed by the user + session_id (int): The ID of the Session to retrieve + + Returns: + list[Dict]: List of Message objects + + """ + url = f"{self.common_prefix}/users/{self.user_id}/sessions/{self.id}/messages?page={page}&size={page_size}" + response = self.client.get(url) + response.raise_for_status() + data = response.json() + return GetMetamessagePage(self, data) + + def get_metamessages_generator(self): + page = 1 + page_size = 50 + get_messages_response = self.get_metamessages(page, page_size) + while True: + # get_session_response = self.get_sessions(user_id, location_id, page, page_size) + for message in get_messages_response.items: yield message new_messages = get_messages_response.next() @@ -239,12 +334,13 @@ def get_messages_generator(self): break get_messages_response = new_messages + def update(self, session_data: Dict): - """Update the metadata of a session + """Update the session_data of a session Args: - session_data (Dict): The Session object containing any new metadata + session_data 
(Dict): The Session object containing any new session_data Returns: @@ -257,20 +353,9 @@ def update(self, session_data: Dict): self.session_data = session_data return success - def delete(self): - """Delete a session by marking it as inactive""" + def close(self): + """Closes a session by marking it as inactive""" url = f"{self.common_prefix}/users/{self.user_id}/sessions/{self.id}" response = self.client.delete(url) self._is_active = False - -# class Message: -# def __init__(self, session: Session, id: int, is_user: bool, content: str): -# """Constructor for Message""" -# self.session = session -# self.id = id -# self.is_user = is_user -# self.content = content - -# def __str__(self): -# return f"Message(id={self.id}, is_user={self.is_user}, content={self.content})" diff --git a/sdk/tests/test_async.py b/sdk/tests/test_async.py index d724b12..097911f 100644 --- a/sdk/tests/test_async.py +++ b/sdk/tests/test_async.py @@ -1,5 +1,5 @@ import pytest -from honcho import AsyncGetSessionResponse, AsyncGetMessageResponse, AsyncSession, Message +from honcho import AsyncGetSessionPage, AsyncGetMessagePage, AsyncSession, Message from honcho import AsyncClient as Honcho from uuid import uuid1 @@ -25,7 +25,7 @@ async def test_session_multiple_retrieval(): created_session_1 = await client.create_session(user_id) created_session_2 = await client.create_session(user_id) response = await client.get_sessions(user_id) - retrieved_sessions = response.sessions + retrieved_sessions = response.items assert len(retrieved_sessions) == 2 assert retrieved_sessions[0].id == created_session_1.id @@ -50,7 +50,7 @@ async def test_session_deletion(): client = Honcho(app_id, "http://localhost:8000") created_session = await client.create_session(user_id) assert created_session.is_active is True - await created_session.delete() + await created_session.close() assert created_session.is_active is False retrieved_session = await client.get_session(user_id, created_session.id) assert retrieved_session.is_active is False @@ -67,7 +67,7 @@ async def test_messages(): await created_session.create_message(is_user=False, content="Hi") retrieved_session = await client.get_session(user_id, created_session.id) response = await retrieved_session.get_messages() - messages = response.messages + messages = response.items assert len(messages) == 2 user_message, ai_message = messages assert user_message.content == "Hello" @@ -111,18 +111,18 @@ async def test_paginated_sessions(): page = 1 page_size = 2 get_session_response = await client.get_sessions(user_id, page=page, page_size=page_size) - assert len(get_session_response.sessions) == page_size + assert len(get_session_response.items) == page_size assert get_session_response.pages == 5 new_session_response = await get_session_response.next() assert new_session_response is not None - assert isinstance(new_session_response, AsyncGetSessionResponse) - assert len(new_session_response.sessions) == page_size + assert isinstance(new_session_response, AsyncGetSessionPage) + assert len(new_session_response.items) == page_size final_page = await client.get_sessions(user_id, page=5, page_size=page_size) - assert len(final_page.sessions) == 2 + assert len(final_page.items) == 2 next_page = await final_page.next() assert next_page is None @@ -161,7 +161,7 @@ async def test_paginated_out_of_bounds(): assert get_session_response.page == 2 assert get_session_response.page_size == 50 assert get_session_response.total == 3 - assert len(get_session_response.sessions) == 0 + assert 
len(get_session_response.items) == 0 @pytest.mark.asyncio @@ -178,18 +178,18 @@ async def test_paginated_messages(): get_message_response = await created_session.get_messages(page=1, page_size=page_size) assert get_message_response is not None - assert isinstance(get_message_response, AsyncGetMessageResponse) - assert len(get_message_response.messages) == page_size + assert isinstance(get_message_response, AsyncGetMessagePage) + assert len(get_message_response.items) == page_size new_message_response = await get_message_response.next() assert new_message_response is not None - assert isinstance(new_message_response, AsyncGetMessageResponse) - assert len(new_message_response.messages) == page_size + assert isinstance(new_message_response, AsyncGetMessagePage) + assert len(new_message_response.items) == page_size final_page = await created_session.get_messages(page=3, page_size=page_size) - assert len(final_page.messages) == 20 - ((3-1) * 7) + assert len(final_page.items) == 20 - ((3-1) * 7) next_page = await final_page.next() diff --git a/sdk/tests/test_sync.py b/sdk/tests/test_sync.py index 06fbf3c..8daedec 100644 --- a/sdk/tests/test_sync.py +++ b/sdk/tests/test_sync.py @@ -1,4 +1,4 @@ -from honcho import GetSessionResponse, GetMessageResponse, Session, Message +from honcho import GetSessionPage, GetMessagePage, Session, Message from honcho import Client as Honcho from uuid import uuid1 import pytest @@ -22,7 +22,7 @@ def test_session_multiple_retrieval(): created_session_1 = client.create_session(user_id) created_session_2 = client.create_session(user_id) response = client.get_sessions(user_id) - retrieved_sessions = response.sessions + retrieved_sessions = response.items assert len(retrieved_sessions) == 2 assert retrieved_sessions[0].id == created_session_1.id @@ -45,7 +45,7 @@ def test_session_deletion(): client = Honcho(app_id, "http://localhost:8000") created_session = client.create_session(user_id) assert created_session.is_active is True - created_session.delete() + created_session.close() assert created_session.is_active is False retrieved_session = client.get_session(user_id, created_session.id) assert retrieved_session.is_active is False @@ -61,7 +61,7 @@ def test_messages(): created_session.create_message(is_user=False, content="Hi") retrieved_session = client.get_session(user_id, created_session.id) response = retrieved_session.get_messages() - messages = response.messages + messages = response.items assert len(messages) == 2 user_message, ai_message = messages assert user_message.content == "Hello" @@ -102,18 +102,18 @@ def test_paginated_sessions(): page = 1 page_size = 2 get_session_response = client.get_sessions(user_id, page=page, page_size=page_size) - assert len(get_session_response.sessions) == page_size + assert len(get_session_response.items) == page_size assert get_session_response.pages == 5 new_session_response = get_session_response.next() assert new_session_response is not None - assert isinstance(new_session_response, GetSessionResponse) - assert len(new_session_response.sessions) == page_size + assert isinstance(new_session_response, GetSessionPage) + assert len(new_session_response.items) == page_size final_page = client.get_sessions(user_id, page=5, page_size=page_size) - assert len(final_page.sessions) == 2 + assert len(final_page.items) == 2 next_page = final_page.next() assert next_page is None @@ -150,7 +150,7 @@ def test_paginated_out_of_bounds(): assert get_session_response.page == 2 assert get_session_response.page_size == 50 assert 
get_session_response.total == 3 - assert len(get_session_response.sessions) == 0 + assert len(get_session_response.items) == 0 def test_paginated_messages(): @@ -166,18 +166,18 @@ def test_paginated_messages(): get_message_response = created_session.get_messages(page=1, page_size=page_size) assert get_message_response is not None - assert isinstance(get_message_response, GetMessageResponse) - assert len(get_message_response.messages) == page_size + assert isinstance(get_message_response, GetMessagePage) + assert len(get_message_response.items) == page_size new_message_response = get_message_response.next() assert new_message_response is not None - assert isinstance(new_message_response, GetMessageResponse) - assert len(new_message_response.messages) == page_size + assert isinstance(new_message_response, GetMessagePage) + assert len(new_message_response.items) == page_size final_page = created_session.get_messages(page=3, page_size=page_size) - assert len(final_page.messages) == 20 - ((3-1) * 7) + assert len(final_page.items) == 20 - ((3-1) * 7) next_page = final_page.next() From 3926aaf8716dc4f50471e31e5139a8fb2b25c750 Mon Sep 17 00:00:00 2001 From: Vineeth Voruganti <13438633+VVoruganti@users.noreply.github.com> Date: Thu, 8 Feb 2024 08:52:35 -0800 Subject: [PATCH 11/46] Work with unit tests --- api/src/crud.py | 5 ++- api/src/main.py | 12 +++---- api/src/schemas.py | 2 +- sdk/honcho/__init__.py | 4 +-- sdk/honcho/client.py | 69 ++++++++++++++++++++++++++++++--------- sdk/honcho/sync_client.py | 69 ++++++++++++++++++++++++++++++--------- sdk/tests/test_async.py | 59 +++++++++++++++++++++++++++++++-- sdk/tests/test_sync.py | 57 +++++++++++++++++++++++++++++++- 8 files changed, 228 insertions(+), 49 deletions(-) diff --git a/api/src/crud.py b/api/src/crud.py index c0dbb24..a130892 100644 --- a/api/src/crud.py +++ b/api/src/crud.py @@ -170,14 +170,13 @@ def create_metamessage( app_id: str, user_id: str, session_id: uuid.UUID, - message_id: uuid.UUID, ): - message = get_message(db, app_id=app_id, session_id=session_id, user_id=user_id, message_id=message_id) + message = get_message(db, app_id=app_id, session_id=session_id, user_id=user_id, message_id=metamessage.message_id) if message is None: raise ValueError("Session not found or does not belong to user") honcho_metamessage = models.Metamessage( - message_id=message_id, + message_id=metamessage.message_id, metamessage_type=metamessage.metamessage_type, content=metamessage.content, ) diff --git a/api/src/main.py b/api/src/main.py index 747a11e..c3ef0d8 100644 --- a/api/src/main.py +++ b/api/src/main.py @@ -78,13 +78,7 @@ def create_session( schemas.Session: The Session object of the new Session """ - print("===============================") - print(request) - print("===============================") value = crud.create_session(db, app_id=app_id, user_id=user_id, session=session) - print("===============================") - print(value) - print("===============================") return value @router.put("/sessions/{session_id}", response_model=schemas.Session) @@ -264,7 +258,6 @@ def create_metamessage( app_id: str, user_id: str, session_id: uuid.UUID, - message_id: uuid.UUID, metamessage: schemas.MetamessageCreate, db: Session = Depends(get_db), ): @@ -283,8 +276,11 @@ def create_metamessage( HTTPException: If the session is not found """ + print("=======================") + print(request) + print("=======================") try: - return crud.create_metamessage(db, metamessage=metamessage, app_id=app_id, user_id=user_id, 
session_id=session_id, message_id=message_id) + return crud.create_metamessage(db, metamessage=metamessage, app_id=app_id, user_id=user_id, session_id=session_id) except ValueError: raise HTTPException(status_code=404, detail="Session not found") diff --git a/api/src/schemas.py b/api/src/schemas.py index 7c89975..b6bff90 100644 --- a/api/src/schemas.py +++ b/api/src/schemas.py @@ -54,7 +54,7 @@ class MetamessageBase(BaseModel): class MetamessageCreate(MetamessageBase): - pass + message_id: uuid.UUID class Metamessage(MetamessageBase): diff --git a/sdk/honcho/__init__.py b/sdk/honcho/__init__.py index 74b7660..e87b439 100644 --- a/sdk/honcho/__init__.py +++ b/sdk/honcho/__init__.py @@ -1,4 +1,4 @@ -from .client import AsyncClient, AsyncSession, AsyncGetSessionPage, AsyncGetMessagePage -from .sync_client import Client, Session, GetSessionPage, GetMessagePage +from .client import AsyncClient, AsyncSession, AsyncGetSessionPage, AsyncGetMessagePage, AsyncGetMetamessagePage +from .sync_client import Client, Session, GetSessionPage, GetMessagePage, GetMetamessagePage from .schemas import Message, Metamessage from .cache import LRUCache diff --git a/sdk/honcho/client.py b/sdk/honcho/client.py index c75e993..afb3c8e 100644 --- a/sdk/honcho/client.py +++ b/sdk/honcho/client.py @@ -16,9 +16,11 @@ async def next(self): pass class AsyncGetSessionPage(AsyncGetPage): - def __init__(self, client, response: Dict): + def __init__(self, client, options: Dict, response: Dict): super().__init__(response) self.client = client + self.user_id = options["user_id"] + self.location_id = options["location_id"] # self.total = response["total"] # self.page = response["page"] # self.page_size = response["size"] @@ -38,9 +40,9 @@ def __init__(self, client, response: Dict): async def next(self): if self.page >= self.pages: return None - user_id = self.items[0].user_id - location_id = self.items[0].location_id - return await self.client.get_sessions(user_id, location_id, self.page + 1, self.page_size) + # user_id = self.items[0].user_id + # location_id = self.items[0].location_id + return await self.client.get_sessions(self.user_id, self.location_id, self.page + 1, self.page_size) class AsyncGetMessagePage(AsyncGetPage): def __init__(self, session, response: Dict): @@ -67,9 +69,11 @@ async def next(self): return await self.session.get_messages((self.page + 1), self.page_size) class AsyncGetMetamessagePage(AsyncGetPage): - def __init__(self, session, response: Dict) -> None: + def __init__(self, session, options: Dict, response: Dict) -> None: super().__init__(response) self.session = session + self.message_id = options["message_id"] + self.metamessage_type = options["metamessage_type"] self.items = [ Metamessage( id=metamessage["id"], @@ -84,7 +88,7 @@ def __init__(self, session, response: Dict) -> None: async def next(self): if self.page >= self.pages: return None - return await self.session.get_metamessages((self.page + 1), self.page_size) + return await self.session.get_metamessages(metamessage_type=self.metamessage_type, message=self.message_id, page=(self.page + 1), page_size=self.page_size) @@ -140,7 +144,11 @@ async def get_sessions(self, user_id: str, location_id: Optional[str] = None, pa response = await self.client.get(url) response.raise_for_status() data = response.json() - return AsyncGetSessionPage(self, data) + options = { + "location_id": location_id, + "user_id": user_id + } + return AsyncGetSessionPage(self, options, data) async def get_sessions_generator(self, user_id: str, location_id: Optional[str] = 
None): page = 1 @@ -287,6 +295,27 @@ async def get_messages_generator(self): get_messages_page = new_messages + async def create_metamessage(self, message: Message, metamessage_type: str, content: str): + """Adds a metamessage to the session + + Args: + is_user (bool): Whether the message is from the user + content (str): The content of the message + + Returns: + Dict: The Message object of the added message + + """ + if not self.is_active: + raise Exception("Session is inactive") + data = {"metamessage_type": metamessage_type, "content": content, "message_id": message.id} + url = f"{self.common_prefix}/users/{self.user_id}/sessions/{self.id}/metamessages" + response = await self.client.post(url, json=data) + response.raise_for_status() + data = response.json() + return Metamessage(id=data["id"], message_id=message.id, metamessage_type=metamessage_type, content=content, created_at=data["created_at"]) + + async def get_metamessage(self, metamessage_id: uuid.UUID) -> Metamessage: """Get a specific message for a session based on ID @@ -303,7 +332,7 @@ async def get_metamessage(self, metamessage_id: uuid.UUID) -> Metamessage: data = response.json() return Metamessage(id=data["id"], message_id=data["message_id"], metamessage_type=data["metamessage_type"], content=data["content"], created_at=data["created_at"]) - async def get_metamessages(self, page: int = 1, page_size: int = 50) -> AsyncGetMetamessagePage: + async def get_metamessages(self, metamessage_type: Optional[str] = None, message: Optional[Message] = None, page: int = 1, page_size: int = 50) -> AsyncGetMetamessagePage: """Get all messages for a session Args: @@ -314,26 +343,34 @@ async def get_metamessages(self, page: int = 1, page_size: int = 50) -> AsyncGet list[Dict]: List of Message objects """ - url = f"{self.common_prefix}/users/{self.user_id}/sessions/{self.id}/messages?page={page}&size={page_size}" + url = f"{self.common_prefix}/users/{self.user_id}/sessions/{self.id}/metamessages?page={page}&size={page_size}" + if metamessage_type: + url += f"&metamessage_type={metamessage_type}" + if message: + url += f"&message_id={message.id}" response = await self.client.get(url) response.raise_for_status() data = response.json() - return AsyncGetMetamessagePage(self, data) + options = { + "metamessage_type": metamessage_type, + "message_id": message.id if message else None + } + return AsyncGetMetamessagePage(self, options, data) - async def get_metamessages_generator(self): + async def get_metamessages_generator(self, metamessage_type: Optional[str] = None, message: Optional[Message] = None): page = 1 page_size = 50 - get_messages_response = await self.get_metamessages(page, page_size) + get_metamessages_page = await self.get_metamessages(metamessage_type=metamessage_type, message=message, page=page, page_size=page_size) while True: # get_session_response = self.get_sessions(user_id, location_id, page, page_size) - for message in get_messages_response.items: - yield message + for metamessage in get_metamessages_page.items: + yield metamessage - new_messages = await get_messages_response.next() + new_messages = await get_metamessages_page.next() if not new_messages: break - get_messages_response = new_messages + get_metamessages_page = new_messages async def update(self, session_data: Dict): diff --git a/sdk/honcho/sync_client.py b/sdk/honcho/sync_client.py index dcf3146..8c8da1e 100644 --- a/sdk/honcho/sync_client.py +++ b/sdk/honcho/sync_client.py @@ -16,9 +16,11 @@ def next(self): pass class GetSessionPage(GetPage): - def 
__init__(self, client, response: Dict): + def __init__(self, client, options: Dict, response: Dict): super().__init__(response) self.client = client + self.user_id = options["user_id"] + self.location_id = options["location_id"] # self.total = response["total"] # self.page = response["page"] # self.page_size = response["size"] @@ -38,9 +40,9 @@ def __init__(self, client, response: Dict): def next(self): if self.page >= self.pages: return None - user_id = self.items[0].user_id - location_id = self.items[0].location_id - return self.client.get_sessions(user_id, location_id, self.page + 1, self.page_size) + # user_id = self.items[0].user_id + # location_id = self.items[0].location_id + return self.client.get_sessions(self.user_id, self.location_id, self.page + 1, self.page_size) class GetMessagePage(GetPage): def __init__(self, session, response: Dict): @@ -67,9 +69,11 @@ def next(self): return self.session.get_messages((self.page + 1), self.page_size) class GetMetamessagePage(GetPage): - def __init__(self, session, response: Dict) -> None: + def __init__(self, session, options: Dict, response: Dict) -> None: super().__init__(response) self.session = session + self.message_id = options["message_id"] + self.metamessage_type = options["metamessage_type"] self.items = [ Metamessage( id=metamessage["id"], @@ -84,7 +88,7 @@ def __init__(self, session, response: Dict) -> None: def next(self): if self.page >= self.pages: return None - return self.session.get_metamessages((self.page + 1), self.page_size) + return self.session.get_metamessages(metamessage_type=self.metamessage_type, message=self.message_id, page=(self.page + 1), page_size=self.page_size) @@ -140,7 +144,11 @@ def get_sessions(self, user_id: str, location_id: Optional[str] = None, page: in response = self.client.get(url) response.raise_for_status() data = response.json() - return GetSessionPage(self, data) + options = { + "location_id": location_id, + "user_id": user_id + } + return GetSessionPage(self, options, data) def get_sessions_generator(self, user_id: str, location_id: Optional[str] = None): page = 1 @@ -287,6 +295,27 @@ def get_messages_generator(self): get_messages_page = new_messages + def create_metamessage(self, message: Message, metamessage_type: str, content: str): + """Adds a metamessage to the session + + Args: + is_user (bool): Whether the message is from the user + content (str): The content of the message + + Returns: + Dict: The Message object of the added message + + """ + if not self.is_active: + raise Exception("Session is inactive") + data = {"metamessage_type": metamessage_type, "content": content, "message_id": message.id} + url = f"{self.common_prefix}/users/{self.user_id}/sessions/{self.id}/metamessages" + response = self.client.post(url, json=data) + response.raise_for_status() + data = response.json() + return Metamessage(id=data["id"], message_id=message.id, metamessage_type=metamessage_type, content=content, created_at=data["created_at"]) + + def get_metamessage(self, metamessage_id: uuid.UUID) -> Metamessage: """Get a specific message for a session based on ID @@ -303,7 +332,7 @@ def get_metamessage(self, metamessage_id: uuid.UUID) -> Metamessage: data = response.json() return Metamessage(id=data["id"], message_id=data["message_id"], metamessage_type=data["metamessage_type"], content=data["content"], created_at=data["created_at"]) - def get_metamessages(self, page: int = 1, page_size: int = 50) -> GetMetamessagePage: + def get_metamessages(self, metamessage_type: Optional[str] = None, message: 
Optional[Message] = None, page: int = 1, page_size: int = 50) -> GetMetamessagePage: """Get all messages for a session Args: @@ -314,26 +343,34 @@ def get_metamessages(self, page: int = 1, page_size: int = 50) -> GetMetamessage list[Dict]: List of Message objects """ - url = f"{self.common_prefix}/users/{self.user_id}/sessions/{self.id}/messages?page={page}&size={page_size}" + url = f"{self.common_prefix}/users/{self.user_id}/sessions/{self.id}/metamessages?page={page}&size={page_size}" + if metamessage_type: + url += f"&metamessage_type={metamessage_type}" + if message: + url += f"&message_id={message.id}" response = self.client.get(url) response.raise_for_status() data = response.json() - return GetMetamessagePage(self, data) + options = { + "metamessage_type": metamessage_type, + "message_id": message.id if message else None + } + return GetMetamessagePage(self, options, data) - def get_metamessages_generator(self): + def get_metamessages_generator(self, metamessage_type: Optional[str] = None, message: Optional[Message] = None): page = 1 page_size = 50 - get_messages_response = self.get_metamessages(page, page_size) + get_metamessages_page = self.get_metamessages(metamessage_type=metamessage_type, message=message, page=page, page_size=page_size) while True: # get_session_response = self.get_sessions(user_id, location_id, page, page_size) - for message in get_messages_response.items: - yield message + for metamessage in get_metamessages_page.items: + yield metamessage - new_messages = get_messages_response.next() + new_messages = get_metamessages_page.next() if not new_messages: break - get_messages_response = new_messages + get_metamessages_page = new_messages def update(self, session_data: Dict): diff --git a/sdk/tests/test_async.py b/sdk/tests/test_async.py index 097911f..8e5904d 100644 --- a/sdk/tests/test_async.py +++ b/sdk/tests/test_async.py @@ -1,5 +1,5 @@ import pytest -from honcho import AsyncGetSessionPage, AsyncGetMessagePage, AsyncSession, Message +from honcho import AsyncGetSessionPage, AsyncGetMessagePage, AsyncGetMetamessagePage, AsyncSession, Message, Metamessage from honcho import AsyncClient as Honcho from uuid import uuid1 @@ -195,7 +195,6 @@ async def test_paginated_messages(): assert next_page is None - @pytest.mark.asyncio async def test_paginated_messages_generator(): app_id = str(uuid1()) @@ -217,3 +216,59 @@ async def test_paginated_messages_generator(): with pytest.raises(StopAsyncIteration): await gen.__anext__() +@pytest.mark.asyncio +async def test_paginated_metamessages(): + app_id = str(uuid1()) + user_id = str(uuid1()) + client = Honcho(app_id, "http://localhost:8000") + created_session = await client.create_session(user_id) + message = await created_session.create_message(is_user=True, content="Hello") + for i in range(10): + await created_session.create_metamessage(message=message, metamessage_type="thought", content=f"Test {i}") + await created_session.create_metamessage(message=message, metamessage_type="reflect", content=f"Test {i}") + + page_size = 7 + page = await created_session.get_metamessages(page=1, page_size=page_size) + + assert page is not None + assert isinstance(page, AsyncGetMetamessagePage) + assert len(page.items) == page_size + + new_page = await page.next() + + assert new_page is not None + assert isinstance(new_page, AsyncGetMetamessagePage) + assert len(new_page.items) == page_size + + final_page = await created_session.get_metamessages(page=3, page_size=page_size) + + assert len(final_page.items) == 20 - ((3-1) * 7) + + next_page 
= await final_page.next() + + assert next_page is None + +@pytest.mark.asyncio +async def test_paginated_metamessages_generator(): + app_id = str(uuid1()) + user_id = str(uuid1()) + client = Honcho(app_id, "http://localhost:8000") + created_session = await client.create_session(user_id) + message = await created_session.create_message(is_user=True, content="Hello") + await created_session.create_metamessage(message=message, metamessage_type="thought", content="Test 1") + await created_session.create_metamessage(message=message, metamessage_type="thought", content="Test 2") + gen = created_session.get_metamessages_generator() + + item = await gen.__anext__() + assert isinstance(item, Metamessage) + assert item.content == "Test 1" + assert item.metamessage_type == "thought" + item2 = await gen.__anext__() + assert item2 is not None + assert item2.content == "Test 2" + assert item2.metamessage_type == "thought" + with pytest.raises(StopAsyncIteration): + await gen.__anext__() + + + diff --git a/sdk/tests/test_sync.py b/sdk/tests/test_sync.py index 8daedec..135cc61 100644 --- a/sdk/tests/test_sync.py +++ b/sdk/tests/test_sync.py @@ -1,4 +1,4 @@ -from honcho import GetSessionPage, GetMessagePage, Session, Message +from honcho import GetSessionPage, GetMessagePage, GetMetamessagePage, Session, Message, Metamessage from honcho import Client as Honcho from uuid import uuid1 import pytest @@ -204,3 +204,58 @@ def test_paginated_messages_generator(): with pytest.raises(StopIteration): next(gen) + +def test_paginated_metamessages(): + app_id = str(uuid1()) + user_id = str(uuid1()) + client = Honcho(app_id, "http://localhost:8000") + created_session = client.create_session(user_id) + message = created_session.create_message(is_user=True, content="Hello") + for i in range(10): + created_session.create_metamessage(message=message, metamessage_type="thought", content=f"Test {i}") + created_session.create_metamessage(message=message, metamessage_type="reflect", content=f"Test {i}") + + page_size = 7 + page = created_session.get_metamessages(page=1, page_size=page_size) + + assert page is not None + assert isinstance(page, GetMetamessagePage) + assert len(page.items) == page_size + + new_page = page.next() + + assert new_page is not None + assert isinstance(new_page, GetMetamessagePage) + assert len(new_page.items) == page_size + + final_page = created_session.get_metamessages(page=3, page_size=page_size) + + assert len(final_page.items) == 20 - ((3-1) * 7) + + next_page = final_page.next() + + assert next_page is None + +def test_paginated_metamessages_generator(): + app_id = str(uuid1()) + user_id = str(uuid1()) + client = Honcho(app_id, "http://localhost:8000") + created_session = client.create_session(user_id) + message = created_session.create_message(is_user=True, content="Hello") + created_session.create_metamessage(message=message, metamessage_type="thought", content="Test 1") + created_session.create_metamessage(message=message, metamessage_type="thought", content="Test 2") + gen = created_session.get_metamessages_generator() + + item = next(gen) + assert isinstance(item, Metamessage) + assert item.content == "Test 1" + assert item.metamessage_type == "thought" + item2 = next(gen) + assert item2 is not None + assert item2.content == "Test 2" + assert item2.metamessage_type == "thought" + with pytest.raises(StopIteration): + next(gen) + + + From ddaa3114ce7712a96e17f5463f1d25bbeaff5f0d Mon Sep 17 00:00:00 2001 From: Vineeth Voruganti <13438633+VVoruganti@users.noreply.github.com> Date: Thu, 8 Feb 
2024 09:04:43 -0800 Subject: [PATCH 12/46] Fix Examples --- example/cli/main.py | 2 +- example/discord/fake-llm/main.py | 2 +- example/discord/fake-llm/poetry.lock | 276 ++++++++++++++++------- example/discord/simple-roast-bot/main.py | 2 +- 4 files changed, 196 insertions(+), 86 deletions(-) diff --git a/example/cli/main.py b/example/cli/main.py index e1aa3fd..12e01aa 100644 --- a/example/cli/main.py +++ b/example/cli/main.py @@ -36,7 +36,7 @@ def chat(): while True: user_input = input("User: ") if user_input == "exit": - session.delete() + session.close() break user_message = HumanMessage(content=user_input) history = list(session.get_messages_generator()) diff --git a/example/discord/fake-llm/main.py b/example/discord/fake-llm/main.py index fd81260..6ae6a35 100644 --- a/example/discord/fake-llm/main.py +++ b/example/discord/fake-llm/main.py @@ -52,7 +52,7 @@ async def restart(ctx): user_id = f"discord_{str(ctx.author.id)}" location_id = str(ctx.channel_id) sessions = list(honcho.get_sessions_generator(user_id, location_id)) - sessions[0].delete() if len(sessions) > 0 else None + sessions[0].close() if len(sessions) > 0 else None await ctx.respond( "Great! The conversation has been restarted. What would you like to talk about?" diff --git a/example/discord/fake-llm/poetry.lock b/example/discord/fake-llm/poetry.lock index fbabb69..d66136d 100644 --- a/example/discord/fake-llm/poetry.lock +++ b/example/discord/fake-llm/poetry.lock @@ -124,6 +124,27 @@ files = [ [package.dependencies] frozenlist = ">=1.1.0" +[[package]] +name = "anyio" +version = "4.2.0" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +category = "main" +optional = false +python-versions = ">=3.8" +files = [ + {file = "anyio-4.2.0-py3-none-any.whl", hash = "sha256:745843b39e829e108e518c489b31dc757de7d2131d53fac32bd8df268227bfee"}, + {file = "anyio-4.2.0.tar.gz", hash = "sha256:e1875bb4b4e2de1669f4bc7869b6d3f54231cdced71605e6e64c9be77e3be50f"}, +] + +[package.dependencies] +idna = ">=2.8" +sniffio = ">=1.1" + +[package.extras] +doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] +trio = ["trio (>=0.23)"] + [[package]] name = "async-timeout" version = "4.0.3" @@ -158,14 +179,14 @@ tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "p [[package]] name = "certifi" -version = "2023.11.17" +version = "2024.2.2" description = "Python package for providing Mozilla's CA Bundle." 
category = "main" optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2023.11.17-py3-none-any.whl", hash = "sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474"}, - {file = "certifi-2023.11.17.tar.gz", hash = "sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1"}, + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, ] [[package]] @@ -355,6 +376,18 @@ files = [ {file = "frozenlist-1.4.1.tar.gz", hash = "sha256:c037a86e8513059a2613aaba4d817bb90b9d9b6b69aace3ce9c877e8c8ed402b"}, ] +[[package]] +name = "h11" +version = "0.14.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, +] + [[package]] name = "honcho-ai" version = "0.0.2" @@ -366,11 +399,59 @@ files = [] develop = true [package.dependencies] +httpx = "^0.26.0" requests = "^2.31.0" [package.source] type = "directory" -url = "../../sdk" +url = "../../../sdk" + +[[package]] +name = "httpcore" +version = "1.0.2" +description = "A minimal low-level HTTP client." +category = "main" +optional = false +python-versions = ">=3.8" +files = [ + {file = "httpcore-1.0.2-py3-none-any.whl", hash = "sha256:096cc05bca73b8e459a1fc3dcf585148f63e534eae4339559c9b8a8d6399acc7"}, + {file = "httpcore-1.0.2.tar.gz", hash = "sha256:9fc092e4799b26174648e54b74ed5f683132a464e95643b226e00c2ed2fa6535"}, +] + +[package.dependencies] +certifi = "*" +h11 = ">=0.13,<0.15" + +[package.extras] +asyncio = ["anyio (>=4.0,<5.0)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (>=1.0.0,<2.0.0)"] +trio = ["trio (>=0.22.0,<0.23.0)"] + +[[package]] +name = "httpx" +version = "0.26.0" +description = "The next generation HTTP client." 
+category = "main" +optional = false +python-versions = ">=3.8" +files = [ + {file = "httpx-0.26.0-py3-none-any.whl", hash = "sha256:8915f5a3627c4d47b73e8202457cb28f1266982d1159bd5779d86a80c0eab1cd"}, + {file = "httpx-0.26.0.tar.gz", hash = "sha256:451b55c30d5185ea6b23c2c793abf9bb237d2a7dfb901ced6ff69ad37ec1dfaf"}, +] + +[package.dependencies] +anyio = "*" +certifi = "*" +httpcore = ">=1.0.0,<2.0.0" +idna = "*" +sniffio = "*" + +[package.extras] +brotli = ["brotli", "brotlicffi"] +cli = ["click (>=8.0.0,<9.0.0)", "pygments (>=2.0.0,<3.0.0)", "rich (>=10,<14)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (>=1.0.0,<2.0.0)"] [[package]] name = "idna" @@ -386,86 +467,102 @@ files = [ [[package]] name = "multidict" -version = "6.0.4" +version = "6.0.5" description = "multidict implementation" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b1a97283e0c85772d613878028fec909f003993e1007eafa715b24b377cb9b8"}, - {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eeb6dcc05e911516ae3d1f207d4b0520d07f54484c49dfc294d6e7d63b734171"}, - {file = "multidict-6.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d6d635d5209b82a3492508cf5b365f3446afb65ae7ebd755e70e18f287b0adf7"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c048099e4c9e9d615545e2001d3d8a4380bd403e1a0578734e0d31703d1b0c0b"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea20853c6dbbb53ed34cb4d080382169b6f4554d394015f1bef35e881bf83547"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16d232d4e5396c2efbbf4f6d4df89bfa905eb0d4dc5b3549d872ab898451f569"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36c63aaa167f6c6b04ef2c85704e93af16c11d20de1d133e39de6a0e84582a93"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:64bdf1086b6043bf519869678f5f2757f473dee970d7abf6da91ec00acb9cb98"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:43644e38f42e3af682690876cff722d301ac585c5b9e1eacc013b7a3f7b696a0"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7582a1d1030e15422262de9f58711774e02fa80df0d1578995c76214f6954988"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ddff9c4e225a63a5afab9dd15590432c22e8057e1a9a13d28ed128ecf047bbdc"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ee2a1ece51b9b9e7752e742cfb661d2a29e7bcdba2d27e66e28a99f1890e4fa0"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a2e4369eb3d47d2034032a26c7a80fcb21a2cb22e1173d761a162f11e562caa5"}, - {file = "multidict-6.0.4-cp310-cp310-win32.whl", hash = "sha256:574b7eae1ab267e5f8285f0fe881f17efe4b98c39a40858247720935b893bba8"}, - {file = "multidict-6.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:4dcbb0906e38440fa3e325df2359ac6cb043df8e58c965bb45f4e406ecb162cc"}, - {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0dfad7a5a1e39c53ed00d2dd0c2e36aed4650936dc18fd9a1826a5ae1cad6f03"}, - {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:64da238a09d6039e3bd39bb3aee9c21a5e34f28bfa5aa22518581f910ff94af3"}, - {file = 
"multidict-6.0.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ff959bee35038c4624250473988b24f846cbeb2c6639de3602c073f10410ceba"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01a3a55bd90018c9c080fbb0b9f4891db37d148a0a18722b42f94694f8b6d4c9"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c5cb09abb18c1ea940fb99360ea0396f34d46566f157122c92dfa069d3e0e982"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:666daae833559deb2d609afa4490b85830ab0dfca811a98b70a205621a6109fe"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11bdf3f5e1518b24530b8241529d2050014c884cf18b6fc69c0c2b30ca248710"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d18748f2d30f94f498e852c67d61261c643b349b9d2a581131725595c45ec6c"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:458f37be2d9e4c95e2d8866a851663cbc76e865b78395090786f6cd9b3bbf4f4"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b1a2eeedcead3a41694130495593a559a668f382eee0727352b9a41e1c45759a"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7d6ae9d593ef8641544d6263c7fa6408cc90370c8cb2bbb65f8d43e5b0351d9c"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5979b5632c3e3534e42ca6ff856bb24b2e3071b37861c2c727ce220d80eee9ed"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dcfe792765fab89c365123c81046ad4103fcabbc4f56d1c1997e6715e8015461"}, - {file = "multidict-6.0.4-cp311-cp311-win32.whl", hash = "sha256:3601a3cece3819534b11d4efc1eb76047488fddd0c85a3948099d5da4d504636"}, - {file = "multidict-6.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:81a4f0b34bd92df3da93315c6a59034df95866014ac08535fc819f043bfd51f0"}, - {file = "multidict-6.0.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:67040058f37a2a51ed8ea8f6b0e6ee5bd78ca67f169ce6122f3e2ec80dfe9b78"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:853888594621e6604c978ce2a0444a1e6e70c8d253ab65ba11657659dcc9100f"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:39ff62e7d0f26c248b15e364517a72932a611a9b75f35b45be078d81bdb86603"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af048912e045a2dc732847d33821a9d84ba553f5c5f028adbd364dd4765092ac"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e8b901e607795ec06c9e42530788c45ac21ef3aaa11dbd0c69de543bfb79a9"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62501642008a8b9871ddfccbf83e4222cf8ac0d5aeedf73da36153ef2ec222d2"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:99b76c052e9f1bc0721f7541e5e8c05db3941eb9ebe7b8553c625ef88d6eefde"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:509eac6cf09c794aa27bcacfd4d62c885cce62bef7b2c3e8b2e49d365b5003fe"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:21a12c4eb6ddc9952c415f24eef97e3e55ba3af61f67c7bc388dcdec1404a067"}, - {file = 
"multidict-6.0.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:5cad9430ab3e2e4fa4a2ef4450f548768400a2ac635841bc2a56a2052cdbeb87"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab55edc2e84460694295f401215f4a58597f8f7c9466faec545093045476327d"}, - {file = "multidict-6.0.4-cp37-cp37m-win32.whl", hash = "sha256:5a4dcf02b908c3b8b17a45fb0f15b695bf117a67b76b7ad18b73cf8e92608775"}, - {file = "multidict-6.0.4-cp37-cp37m-win_amd64.whl", hash = "sha256:6ed5f161328b7df384d71b07317f4d8656434e34591f20552c7bcef27b0ab88e"}, - {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5fc1b16f586f049820c5c5b17bb4ee7583092fa0d1c4e28b5239181ff9532e0c"}, - {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1502e24330eb681bdaa3eb70d6358e818e8e8f908a22a1851dfd4e15bc2f8161"}, - {file = "multidict-6.0.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b692f419760c0e65d060959df05f2a531945af31fda0c8a3b3195d4efd06de11"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45e1ecb0379bfaab5eef059f50115b54571acfbe422a14f668fc8c27ba410e7e"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddd3915998d93fbcd2566ddf9cf62cdb35c9e093075f862935573d265cf8f65d"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:59d43b61c59d82f2effb39a93c48b845efe23a3852d201ed2d24ba830d0b4cf2"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc8e1d0c705233c5dd0c5e6460fbad7827d5d36f310a0fadfd45cc3029762258"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6aa0418fcc838522256761b3415822626f866758ee0bc6632c9486b179d0b52"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6748717bb10339c4760c1e63da040f5f29f5ed6e59d76daee30305894069a660"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4d1a3d7ef5e96b1c9e92f973e43aa5e5b96c659c9bc3124acbbd81b0b9c8a951"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4372381634485bec7e46718edc71528024fcdc6f835baefe517b34a33c731d60"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:fc35cb4676846ef752816d5be2193a1e8367b4c1397b74a565a9d0389c433a1d"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b9d9e4e2b37daddb5c23ea33a3417901fa7c7b3dee2d855f63ee67a0b21e5b1"}, - {file = "multidict-6.0.4-cp38-cp38-win32.whl", hash = "sha256:e41b7e2b59679edfa309e8db64fdf22399eec4b0b24694e1b2104fb789207779"}, - {file = "multidict-6.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:d6c254ba6e45d8e72739281ebc46ea5eb5f101234f3ce171f0e9f5cc86991480"}, - {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:16ab77bbeb596e14212e7bab8429f24c1579234a3a462105cda4a66904998664"}, - {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc779e9e6f7fda81b3f9aa58e3a6091d49ad528b11ed19f6621408806204ad35"}, - {file = "multidict-6.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ceef517eca3e03c1cceb22030a3e39cb399ac86bff4e426d4fc6ae49052cc60"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:281af09f488903fde97923c7744bb001a9b23b039a909460d0f14edc7bf59706"}, - {file = 
"multidict-6.0.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52f2dffc8acaba9a2f27174c41c9e57f60b907bb9f096b36b1a1f3be71c6284d"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b41156839806aecb3641f3208c0dafd3ac7775b9c4c422d82ee2a45c34ba81ca"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3fc56f88cc98ef8139255cf8cd63eb2c586531e43310ff859d6bb3a6b51f1"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8316a77808c501004802f9beebde51c9f857054a0c871bd6da8280e718444449"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f70b98cd94886b49d91170ef23ec5c0e8ebb6f242d734ed7ed677b24d50c82cf"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bf6774e60d67a9efe02b3616fee22441d86fab4c6d335f9d2051d19d90a40063"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:e69924bfcdda39b722ef4d9aa762b2dd38e4632b3641b1d9a57ca9cd18f2f83a"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:6b181d8c23da913d4ff585afd1155a0e1194c0b50c54fcfe286f70cdaf2b7176"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:52509b5be062d9eafc8170e53026fbc54cf3b32759a23d07fd935fb04fc22d95"}, - {file = "multidict-6.0.4-cp39-cp39-win32.whl", hash = "sha256:27c523fbfbdfd19c6867af7346332b62b586eed663887392cff78d614f9ec313"}, - {file = "multidict-6.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:33029f5734336aa0d4c0384525da0387ef89148dc7191aae00ca5fb23d7aafc2"}, - {file = "multidict-6.0.4.tar.gz", hash = "sha256:3666906492efb76453c0e7b97f2cf459b0682e7402c0489a95484965dbc1da49"}, + {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b644ae063c10e7f324ab1ab6b548bdf6f8b47f3ec234fef1093bc2735e5f9"}, + {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:896ebdcf62683551312c30e20614305f53125750803b614e9e6ce74a96232604"}, + {file = "multidict-6.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:411bf8515f3be9813d06004cac41ccf7d1cd46dfe233705933dd163b60e37600"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d147090048129ce3c453f0292e7697d333db95e52616b3793922945804a433c"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:215ed703caf15f578dca76ee6f6b21b7603791ae090fbf1ef9d865571039ade5"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c6390cf87ff6234643428991b7359b5f59cc15155695deb4eda5c777d2b880f"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fd81c4ebdb4f214161be351eb5bcf385426bf023041da2fd9e60681f3cebae"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cc2ad10255f903656017363cd59436f2111443a76f996584d1077e43ee51182"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6939c95381e003f54cd4c5516740faba40cf5ad3eeff460c3ad1d3e0ea2549bf"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:220dd781e3f7af2c2c1053da9fa96d9cf3072ca58f057f4c5adaaa1cab8fc442"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = 
"sha256:766c8f7511df26d9f11cd3a8be623e59cca73d44643abab3f8c8c07620524e4a"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fe5d7785250541f7f5019ab9cba2c71169dc7d74d0f45253f8313f436458a4ef"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c1c1496e73051918fcd4f58ff2e0f2f3066d1c76a0c6aeffd9b45d53243702cc"}, + {file = "multidict-6.0.5-cp310-cp310-win32.whl", hash = "sha256:7afcdd1fc07befad18ec4523a782cde4e93e0a2bf71239894b8d61ee578c1319"}, + {file = "multidict-6.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:99f60d34c048c5c2fabc766108c103612344c46e35d4ed9ae0673d33c8fb26e8"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f285e862d2f153a70586579c15c44656f888806ed0e5b56b64489afe4a2dbfba"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:53689bb4e102200a4fafa9de9c7c3c212ab40a7ab2c8e474491914d2305f187e"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:612d1156111ae11d14afaf3a0669ebf6c170dbb735e510a7438ffe2369a847fd"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7be7047bd08accdb7487737631d25735c9a04327911de89ff1b26b81745bd4e3"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de170c7b4fe6859beb8926e84f7d7d6c693dfe8e27372ce3b76f01c46e489fcf"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04bde7a7b3de05732a4eb39c94574db1ec99abb56162d6c520ad26f83267de29"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85f67aed7bb647f93e7520633d8f51d3cbc6ab96957c71272b286b2f30dc70ed"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425bf820055005bfc8aa9a0b99ccb52cc2f4070153e34b701acc98d201693733"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d3eb1ceec286eba8220c26f3b0096cf189aea7057b6e7b7a2e60ed36b373b77f"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7901c05ead4b3fb75113fb1dd33eb1253c6d3ee37ce93305acd9d38e0b5f21a4"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e0e79d91e71b9867c73323a3444724d496c037e578a0e1755ae159ba14f4f3d1"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:29bfeb0dff5cb5fdab2023a7a9947b3b4af63e9c47cae2a10ad58394b517fddc"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e030047e85cbcedbfc073f71836d62dd5dadfbe7531cae27789ff66bc551bd5e"}, + {file = "multidict-6.0.5-cp311-cp311-win32.whl", hash = "sha256:2f4848aa3baa109e6ab81fe2006c77ed4d3cd1e0ac2c1fbddb7b1277c168788c"}, + {file = "multidict-6.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:2faa5ae9376faba05f630d7e5e6be05be22913782b927b19d12b8145968a85ea"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:51d035609b86722963404f711db441cf7134f1889107fb171a970c9701f92e1e"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cbebcd5bcaf1eaf302617c114aa67569dd3f090dd0ce8ba9e35e9985b41ac35b"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ffc42c922dbfddb4a4c3b438eb056828719f07608af27d163191cb3e3aa6cc5"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:ceb3b7e6a0135e092de86110c5a74e46bda4bd4fbfeeb3a3bcec79c0f861e450"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79660376075cfd4b2c80f295528aa6beb2058fd289f4c9252f986751a4cd0496"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4428b29611e989719874670fd152b6625500ad6c686d464e99f5aaeeaca175a"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d84a5c3a5f7ce6db1f999fb9438f686bc2e09d38143f2d93d8406ed2dd6b9226"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76c0de87358b192de7ea9649beb392f107dcad9ad27276324c24c91774ca5271"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:79a6d2ba910adb2cbafc95dad936f8b9386e77c84c35bc0add315b856d7c3abb"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:92d16a3e275e38293623ebf639c471d3e03bb20b8ebb845237e0d3664914caef"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:fb616be3538599e797a2017cccca78e354c767165e8858ab5116813146041a24"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:14c2976aa9038c2629efa2c148022ed5eb4cb939e15ec7aace7ca932f48f9ba6"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:435a0984199d81ca178b9ae2c26ec3d49692d20ee29bc4c11a2a8d4514c67eda"}, + {file = "multidict-6.0.5-cp312-cp312-win32.whl", hash = "sha256:9fe7b0653ba3d9d65cbe7698cca585bf0f8c83dbbcc710db9c90f478e175f2d5"}, + {file = "multidict-6.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:01265f5e40f5a17f8241d52656ed27192be03bfa8764d88e8220141d1e4b3556"}, + {file = "multidict-6.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:19fe01cea168585ba0f678cad6f58133db2aa14eccaf22f88e4a6dccadfad8b3"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6bf7a982604375a8d49b6cc1b781c1747f243d91b81035a9b43a2126c04766f5"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:107c0cdefe028703fb5dafe640a409cb146d44a6ae201e55b35a4af8e95457dd"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:403c0911cd5d5791605808b942c88a8155c2592e05332d2bf78f18697a5fa15e"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aeaf541ddbad8311a87dd695ed9642401131ea39ad7bc8cf3ef3967fd093b626"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4972624066095e52b569e02b5ca97dbd7a7ddd4294bf4e7247d52635630dd83"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d946b0a9eb8aaa590df1fe082cee553ceab173e6cb5b03239716338629c50c7a"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b55358304d7a73d7bdf5de62494aaf70bd33015831ffd98bc498b433dfe5b10c"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:a3145cb08d8625b2d3fee1b2d596a8766352979c9bffe5d7833e0503d0f0b5e5"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d65f25da8e248202bd47445cec78e0025c0fe7582b23ec69c3b27a640dd7a8e3"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:c9bf56195c6bbd293340ea82eafd0071cb3d450c703d2c93afb89f93b8386ccc"}, + {file = "multidict-6.0.5-cp37-cp37m-win32.whl", hash = "sha256:69db76c09796b313331bb7048229e3bee7928eb62bab5e071e9f7fcc4879caee"}, + {file = "multidict-6.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:fce28b3c8a81b6b36dfac9feb1de115bab619b3c13905b419ec71d03a3fc1423"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:76f067f5121dcecf0d63a67f29080b26c43c71a98b10c701b0677e4a065fbd54"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b82cc8ace10ab5bd93235dfaab2021c70637005e1ac787031f4d1da63d493c1d"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5cb241881eefd96b46f89b1a056187ea8e9ba14ab88ba632e68d7a2ecb7aadf7"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8e94e6912639a02ce173341ff62cc1201232ab86b8a8fcc05572741a5dc7d93"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09a892e4a9fb47331da06948690ae38eaa2426de97b4ccbfafbdcbe5c8f37ff8"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55205d03e8a598cfc688c71ca8ea5f66447164efff8869517f175ea632c7cb7b"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37b15024f864916b4951adb95d3a80c9431299080341ab9544ed148091b53f50"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2a1dee728b52b33eebff5072817176c172050d44d67befd681609b4746e1c2e"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:edd08e6f2f1a390bf137080507e44ccc086353c8e98c657e666c017718561b89"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:60d698e8179a42ec85172d12f50b1668254628425a6bd611aba022257cac1386"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3d25f19500588cbc47dc19081d78131c32637c25804df8414463ec908631e453"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4cc0ef8b962ac7a5e62b9e826bd0cd5040e7d401bc45a6835910ed699037a461"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:eca2e9d0cc5a889850e9bbd68e98314ada174ff6ccd1129500103df7a94a7a44"}, + {file = "multidict-6.0.5-cp38-cp38-win32.whl", hash = "sha256:4a6a4f196f08c58c59e0b8ef8ec441d12aee4125a7d4f4fef000ccb22f8d7241"}, + {file = "multidict-6.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:0275e35209c27a3f7951e1ce7aaf93ce0d163b28948444bec61dd7badc6d3f8c"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e7be68734bd8c9a513f2b0cfd508802d6609da068f40dc57d4e3494cefc92929"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1d9ea7a7e779d7a3561aade7d596649fbecfa5c08a7674b11b423783217933f9"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ea1456df2a27c73ce51120fa2f519f1bea2f4a03a917f4a43c8707cf4cbbae1a"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf590b134eb70629e350691ecca88eac3e3b8b3c86992042fb82e3cb1830d5e1"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5c0631926c4f58e9a5ccce555ad7747d9a9f8b10619621f22f9635f069f6233e"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:dce1c6912ab9ff5f179eaf6efe7365c1f425ed690b03341911bf4939ef2f3046"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0868d64af83169e4d4152ec612637a543f7a336e4a307b119e98042e852ad9c"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:141b43360bfd3bdd75f15ed811850763555a251e38b2405967f8e25fb43f7d40"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7df704ca8cf4a073334e0427ae2345323613e4df18cc224f647f251e5e75a527"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6214c5a5571802c33f80e6c84713b2c79e024995b9c5897f794b43e714daeec9"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:cd6c8fca38178e12c00418de737aef1261576bd1b6e8c6134d3e729a4e858b38"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e02021f87a5b6932fa6ce916ca004c4d441509d33bbdbeca70d05dff5e9d2479"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ebd8d160f91a764652d3e51ce0d2956b38efe37c9231cd82cfc0bed2e40b581c"}, + {file = "multidict-6.0.5-cp39-cp39-win32.whl", hash = "sha256:04da1bb8c8dbadf2a18a452639771951c662c5ad03aefe4884775454be322c9b"}, + {file = "multidict-6.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:d6f6d4f185481c9669b9447bf9d9cf3b95a0e9df9d169bbc17e363b7d5487755"}, + {file = "multidict-6.0.5-py3-none-any.whl", hash = "sha256:0d63c74e3d7ab26de115c49bffc92cc77ed23395303d496eae515d4204a625e7"}, + {file = "multidict-6.0.5.tar.gz", hash = "sha256:f7e301075edaf50500f0b341543c41194d8df3ae5caf4702f2095f3ca73dd8da"}, ] [[package]] @@ -525,20 +622,33 @@ urllib3 = ">=1.21.1,<3" socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] +[[package]] +name = "sniffio" +version = "1.3.0" +description = "Sniff out which async library your code is running under" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, + {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, +] + [[package]] name = "urllib3" -version = "2.1.0" +version = "2.2.0" description = "HTTP library with thread-safe connection pooling, file post, and more." 
category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.1.0-py3-none-any.whl", hash = "sha256:55901e917a5896a349ff771be919f8bd99aff50b79fe58fec595eb37bbc56bb3"}, - {file = "urllib3-2.1.0.tar.gz", hash = "sha256:df7aa8afb0148fa78488e7899b2c59b5f4ffcfa82e6c54ccb9dd37c1d7b52d54"}, + {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, + {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, ] [package.extras] brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] @@ -649,4 +759,4 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "56228d417333540e3191575720739ae6fff9490b68e9ddaae2cb6fe44b4bf611" +content-hash = "f31c071455001b66fe72eb743b16c64f108172c4314475b772e5cbd18942d0dc" diff --git a/example/discord/simple-roast-bot/main.py b/example/discord/simple-roast-bot/main.py index 0797607..9cb1e7f 100644 --- a/example/discord/simple-roast-bot/main.py +++ b/example/discord/simple-roast-bot/main.py @@ -82,7 +82,7 @@ async def restart(ctx): user_id=f"discord_{str(ctx.author.id)}" location_id=str(ctx.channel_id) sessions = list(honcho.get_sessions_generator(user_id, location_id)) - sessions[0].delete() if len(sessions) > 0 else None + sessions[0].close() if len(sessions) > 0 else None msg = "Great! The conversation has been restarted. What would you like to talk about?" await ctx.respond(msg) From a7373d19cb58528a1aae51b78c0eb1332e4614d2 Mon Sep 17 00:00:00 2001 From: Vineeth Voruganti <13438633+VVoruganti@users.noreply.github.com> Date: Thu, 8 Feb 2024 09:37:35 -0800 Subject: [PATCH 13/46] MEME-78 Update Changelogs --- README.md | 2 +- api/CHANGELOG.md | 20 ++++++ api/pyproject.toml | 4 +- sdk/CHANGELOG.md | 25 +++++++ sdk/poetry.lock | 164 ++++++--------------------------------------- sdk/pyproject.toml | 1 - 6 files changed, 70 insertions(+), 146 deletions(-) diff --git a/README.md b/README.md index 49a8c1d..47c9441 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,5 @@ # Honcho -![Static Badge](https://img.shields.io/badge/Version-0.0.1-blue) +![Static Badge](https://img.shields.io/badge/Version-0.0.2-blue) [![Discord](https://img.shields.io/discord/1016845111637839922?style=flat&logo=discord&logoColor=23ffffff&label=Plastic%20Labs&labelColor=235865F2)](https://discord.gg/plasticlabs) ![GitHub License](https://img.shields.io/github/license/plastic-labs/honcho) ![GitHub Repo stars](https://img.shields.io/github/stars/plastic-labs/honcho) diff --git a/api/CHANGELOG.md b/api/CHANGELOG.md index 722f7a7..56d7915 100644 --- a/api/CHANGELOG.md +++ b/api/CHANGELOG.md @@ -4,6 +4,26 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](http://keepachangelog.com/) and this project adheres to [Semantic Versioning](http://semver.org/). 
+## [0.0.2] — 2024-02-01 + +### Added + +* Pagination for requests via `fastapi_pagination` +* Metamessages +* `get_message` routes +* `created_at` field added to each Table +* Message size limits + +### Changed + +* IDs are now UUIDs +* Default rate limit is now 100 requests per minute + +### Removed + +* Removed messages from session response model + + ## [0.0.1] — 2024-02-01 ### Added diff --git a/api/pyproject.toml b/api/pyproject.toml index bcf746e..6777d84 100644 --- a/api/pyproject.toml +++ b/api/pyproject.toml @@ -1,7 +1,7 @@ [tool.poetry] name = "honcho" -version = "0.0.1" -description = "" +version = "0.0.2" +description = "Honcho Server" authors = ["Plastic Labs "] readme = "README.md" diff --git a/sdk/CHANGELOG.md b/sdk/CHANGELOG.md index 8a6da89..1d8ecd5 100644 --- a/sdk/CHANGELOG.md +++ b/sdk/CHANGELOG.md @@ -6,6 +6,31 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/) and this project adheres to [Semantic Versioning](http://semver.org/). +## [0.0.2] — 2024-02-08 + +### Added + +* Async client +* Metamessages introduced +* Paginated results for get requests +* `created_at` field added to Messages and Metamessages +* Added singular `get_message` method +* Size limits for messages and string fields + +### Changed + +* Default rate limit of 100 requests per minute +* Changed default ID type to use UUIDs +* `session.delete()` is now `session.close()` +* Replaced `requests` with `httpx` + + +### Removed + +* Removed messages from session response model + + + ## [0.0.1] — 2024-02-01 ### Added diff --git a/sdk/poetry.lock b/sdk/poetry.lock index 50f59b6..e450ca8 100644 --- a/sdk/poetry.lock +++ b/sdk/poetry.lock @@ -1,9 +1,10 @@ -# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.4.1 and should not be changed by hand. [[package]] name = "anyio" version = "4.2.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -26,6 +27,7 @@ trio = ["trio (>=0.23)"] name = "certifi" version = "2023.11.17" description = "Python package for providing Mozilla's CA Bundle." +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -33,109 +35,11 @@ files = [ {file = "certifi-2023.11.17.tar.gz", hash = "sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1"}, ] -[[package]] -name = "charset-normalizer" -version = "3.3.2" -description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
-optional = false -python-versions = ">=3.7.0" -files = [ - {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, - {file = 
"charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, - {file = 
"charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = 
"sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, - {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, -] - [[package]] name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." +category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ @@ -147,6 +51,7 @@ files = [ name = "exceptiongroup" version = "1.2.0" description = "Backport of PEP 654 (exception groups)" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -161,6 +66,7 @@ test = ["pytest (>=6)"] name = "h11" version = "0.14.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -172,6 +78,7 @@ files = [ name = "httpcore" version = "1.0.2" description = "A minimal low-level HTTP client." +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -186,13 +93,14 @@ h11 = ">=0.13,<0.15" [package.extras] asyncio = ["anyio (>=4.0,<5.0)"] http2 = ["h2 (>=3,<5)"] -socks = ["socksio (==1.*)"] +socks = ["socksio (>=1.0.0,<2.0.0)"] trio = ["trio (>=0.22.0,<0.23.0)"] [[package]] name = "httpx" version = "0.26.0" description = "The next generation HTTP client." 
+category = "main" optional = false python-versions = ">=3.8" files = [ @@ -203,20 +111,21 @@ files = [ [package.dependencies] anyio = "*" certifi = "*" -httpcore = "==1.*" +httpcore = ">=1.0.0,<2.0.0" idna = "*" sniffio = "*" [package.extras] brotli = ["brotli", "brotlicffi"] -cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] +cli = ["click (>=8.0.0,<9.0.0)", "pygments (>=2.0.0,<3.0.0)", "rich (>=10,<14)"] http2 = ["h2 (>=3,<5)"] -socks = ["socksio (==1.*)"] +socks = ["socksio (>=1.0.0,<2.0.0)"] [[package]] name = "idna" version = "3.6" description = "Internationalized Domain Names in Applications (IDNA)" +category = "main" optional = false python-versions = ">=3.5" files = [ @@ -228,6 +137,7 @@ files = [ name = "iniconfig" version = "2.0.0" description = "brain-dead simple config-ini parsing" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -239,6 +149,7 @@ files = [ name = "packaging" version = "23.2" description = "Core utilities for Python packages" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -250,6 +161,7 @@ files = [ name = "pluggy" version = "1.3.0" description = "plugin and hook calling mechanisms for python" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -265,6 +177,7 @@ testing = ["pytest", "pytest-benchmark"] name = "pytest" version = "7.4.4" description = "pytest: simple powerful testing with Python" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -287,6 +200,7 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no name = "pytest-asyncio" version = "0.23.4" description = "Pytest support for asyncio" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -301,31 +215,11 @@ pytest = ">=7.0.0,<8" docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] -[[package]] -name = "requests" -version = "2.31.0" -description = "Python HTTP for Humans." -optional = false -python-versions = ">=3.7" -files = [ - {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, - {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, -] - -[package.dependencies] -certifi = ">=2017.4.17" -charset-normalizer = ">=2,<4" -idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<3" - -[package.extras] -socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] - [[package]] name = "sniffio" version = "1.3.0" description = "Sniff out which async library your code is running under" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -337,6 +231,7 @@ files = [ name = "tomli" version = "2.0.1" description = "A lil' TOML parser" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -348,6 +243,7 @@ files = [ name = "typing-extensions" version = "4.9.0" description = "Backported and Experimental Type Hints for Python 3.8+" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -355,23 +251,7 @@ files = [ {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, ] -[[package]] -name = "urllib3" -version = "2.1.0" -description = "HTTP library with thread-safe connection pooling, file post, and more." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "urllib3-2.1.0-py3-none-any.whl", hash = "sha256:55901e917a5896a349ff771be919f8bd99aff50b79fe58fec595eb37bbc56bb3"}, - {file = "urllib3-2.1.0.tar.gz", hash = "sha256:df7aa8afb0148fa78488e7899b2c59b5f4ffcfa82e6c54ccb9dd37c1d7b52d54"}, -] - -[package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] -socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] -zstd = ["zstandard (>=0.18.0)"] - [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "d5dbd21023598b83062e7705f329579b9175f4da4db66559ea84399246cfcc25" +content-hash = "6ccea662fa5a5bae88618123d5d05e0d4955c234b7e1a688d2fae2f90cd9f7f8" diff --git a/sdk/pyproject.toml b/sdk/pyproject.toml index cb66980..ff695b5 100644 --- a/sdk/pyproject.toml +++ b/sdk/pyproject.toml @@ -9,7 +9,6 @@ packages = [{include = "honcho"}] [tool.poetry.dependencies] python = "^3.10" -requests = "^2.31.0" httpx = "^0.26.0" [tool.poetry.group.test.dependencies] From f99f1e8d0475c144ced57aebca01bc912f1ea52d Mon Sep 17 00:00:00 2001 From: Vineeth Voruganti <13438633+VVoruganti@users.noreply.github.com> Date: Thu, 8 Feb 2024 10:50:40 -0800 Subject: [PATCH 14/46] Docstrings to client --- sdk/honcho/client.py | 122 ++++++++++++++++++++++++++++++-------- sdk/honcho/sync_client.py | 120 +++++++++++++++++++++++++++++-------- 2 files changed, 193 insertions(+), 49 deletions(-) diff --git a/sdk/honcho/client.py b/sdk/honcho/client.py index afb3c8e..c5a1373 100644 --- a/sdk/honcho/client.py +++ b/sdk/honcho/client.py @@ -5,7 +5,13 @@ from .schemas import Message, Metamessage class AsyncGetPage: + """Base class for receiving Paginated API results""" def __init__(self, response: Dict) -> None: + """Constructor for Page with relevant information about the results and pages + + Args: + response (Dict): Response from API with pagination information + """ self.total = response["total"] self.page = response["page"] self.page_size = response["size"] @@ -13,18 +19,24 @@ def __init__(self, response: Dict) -> None: self.items =[] async def next(self): + """Shortcut method to Get the next page of results""" pass class AsyncGetSessionPage(AsyncGetPage): + """Paginated Results for Get Session Requests""" + def __init__(self, client, options: Dict, response: Dict): + """Constructor for Page Result from Session Get Request + + Args: + client (AsyncClient): Honcho Client + options (Dict): Options for the request used mainly for next() to filter queries. 
The two parameters available are user_id which is required and location_id which is optional + response (Dict): Response from API with pagination information + """ super().__init__(response) self.client = client self.user_id = options["user_id"] self.location_id = options["location_id"] - # self.total = response["total"] - # self.page = response["page"] - # self.page_size = response["size"] - # self.pages = response["pages"] self.items = [ AsyncSession( client=client, @@ -38,20 +50,26 @@ def __init__(self, client, options: Dict, response: Dict): ] async def next(self): + """Get the next page of results + Returns: + AsyncGetSessionPage | None: Next Page of Results or None if there are no more sessions to retrieve from a query + """ if self.page >= self.pages: return None - # user_id = self.items[0].user_id - # location_id = self.items[0].location_id return await self.client.get_sessions(self.user_id, self.location_id, self.page + 1, self.page_size) class AsyncGetMessagePage(AsyncGetPage): + """Paginated Results for Get Message Requests""" + def __init__(self, session, response: Dict): + """Constructor for Page Result from Message Get Request + + Args: + session (AsyncSession): Session the returned messages are associated with + response (Dict): Response from API with pagination information + """ super().__init__(response) self.session = session - # self.total = response["total"] - # self.page = response["page"] - # self.page_size = response["size"] - # self.pages = response["pages"] self.items = [ Message( session_id=session.id, @@ -64,12 +82,24 @@ def __init__(self, session, response: Dict): ] async def next(self): + """Get the next page of results + Returns: + AsyncGetMessagePage | None: Next Page of Results or None if there are no more messages to retrieve from a query + """ if self.page >= self.pages: return None return await self.session.get_messages((self.page + 1), self.page_size) class AsyncGetMetamessagePage(AsyncGetPage): + def __init__(self, session, options: Dict, response: Dict) -> None: + """Constructor for Page Result from Metamessage Get Request + + Args: + session (AsyncSession): Session the returned metamessages are associated with + options (Dict): Options for the request used mainly for next() to filter queries. The two parameters available are message_id and metamessage_type which are both required + response (Dict): Response from API with pagination information + """ super().__init__(response) self.session = session self.message_id = options["message_id"] @@ -86,6 +116,10 @@ def __init__(self, session, options: Dict, response: Dict) -> None: ] async def next(self): + """Get the next page of results + Returns: + AsyncGetMetamessagePage | None: Next Page of Results or None if there are no more metamessages to retrieve from a query + """ if self.page >= self.pages: return None return await self.session.get_metamessages(metamessage_type=self.metamessage_type, message=self.message_id, page=(self.page + 1), page_size=self.page_size) @@ -93,6 +127,8 @@ async def next(self): class AsyncClient: + """Honcho API Client Object""" + def __init__(self, app_id: str, base_url: str = "https://demo.honcho.dev"): """Constructor for Client""" self.base_url = base_url # Base URL for the instance of the Honcho API @@ -101,6 +137,7 @@ def __init__(self, app_id: str, base_url: str = "https://demo.honcho.dev"): @property def common_prefix(self): + """Shortcut for common API prefix. 
made a property to prevent tampering""" return f"{self.base_url}/apps/{self.app_id}" async def get_session(self, user_id: str, session_id: uuid.UUID): @@ -108,10 +145,10 @@ async def get_session(self, user_id: str, session_id: uuid.UUID): Args: user_id (str): The User ID representing the user, managed by the user - session_id (int): The ID of the Session to retrieve + session_id (uuid.UUID): The ID of the Session to retrieve Returns: - Dict: The Session object of the requested Session + AsyncSession: The Session object of the requested Session """ url = f"{self.common_prefix}/users/{user_id}/sessions/{session_id}" @@ -128,14 +165,16 @@ async def get_session(self, user_id: str, session_id: uuid.UUID): ) async def get_sessions(self, user_id: str, location_id: Optional[str] = None, page: int = 1, page_size: int = 50): - """Return sessions associated with a user + """Return paginated sessions associated with a user Args: user_id (str): The User ID representing the user, managed by the user location_id (str, optional): Optional Location ID representing the location of a session + page (int, optional): The page of results to return + page_size (int, optional): The number of results to return Returns: - list[Dict]: List of Session objects + AsyncGetSessionPage: Page of results for get_sessions query """ url = f"{self.common_prefix}/users/{user_id}/sessions?page={page}&size={page_size}" + ( @@ -151,6 +190,16 @@ async def get_sessions(self, user_id: str, location_id: Optional[str] = None, pa return AsyncGetSessionPage(self, options, data) async def get_sessions_generator(self, user_id: str, location_id: Optional[str] = None): + """Shortcut Generator for get_sessions. Generator to iterate through all sessions for a user in an app + + Args: + user_id (str): The User ID representing the user, managed by the user + location_id (str, optional): Optional Location ID representing the location of a session + + Yields: + AsyncSession: The Session object of the requested Session + + """ page = 1 page_size = 50 get_session_response = await self.get_sessions(user_id, location_id, page, page_size) @@ -176,7 +225,7 @@ async def create_session( session_data (Dict, optional): Optional session metadata Returns: - Dict: The Session object of the new Session` + AsyncSession: The Session object of the new Session """ data = {"location_id": location_id, "session_data": session_data} @@ -195,6 +244,8 @@ async def create_session( class AsyncSession: + """Represents a single session for a user in an app""" + def __init__( self, client: AsyncClient, @@ -218,13 +269,16 @@ def __init__( @property def common_prefix(self): + """Shortcut for common API prefix. 
made a property to prevent tampering""" return f"{self.base_url}/apps/{self.app_id}" def __str__(self): - return f"Session(id={self.id}, app_id={self.app_id}, user_id={self.user_id}, location_id={self.location_id}, session_data={self.session_data}, is_active={self.is_active})" + """String representation of Session""" + return f"AsyncSession(id={self.id}, app_id={self.app_id}, user_id={self.user_id}, location_id={self.location_id}, session_data={self.session_data}, is_active={self.is_active})" @property def is_active(self): + """Returns whether the session is active - made property to prevent tampering""" return self._is_active async def create_message(self, is_user: bool, content: str): @@ -235,7 +289,7 @@ def __init__( is_user (bool): Whether the message is from the user content (str): The content of the message Returns: - Dict: The Message object of the added message + Message: The Message object of the added message """ if not self.is_active: @@ -267,11 +321,11 @@ async def create_message(self, is_user: bool, content: str): async def get_messages(self, page: int = 1, page_size: int = 50) -> AsyncGetMess """Get all messages for a session Args: - user_id (str): The User ID representing the user, managed by the user - session_id (int): The ID of the Session to retrieve + page (int, optional): The page of results to return + page_size (int, optional): The number of results to return per page Returns: - list[Dict]: List of Message objects + AsyncGetMessagePage: Page of Message objects """ url = f"{self.common_prefix}/users/{self.user_id}/sessions/{self.id}/messages?page={page}&size={page_size}" @@ -281,6 +335,12 @@ async def get_messages(self, page: int = 1, page_size: int = 50) -> AsyncGetMess return AsyncGetMessagePage(self, data) async def get_messages_generator(self): + """Shortcut Generator for get_messages. Generator to iterate through all messages for a session in an app + + Yields: + Message: The Message object of the next Message + + """ page = 1 page_size = 50 get_messages_page= await self.get_messages(page, page_size) @@ -296,14 +356,15 @@ async def get_messages_generator(self): get_messages_page = new_messages async def create_metamessage(self, message: Message, metamessage_type: str, content: str): - """Adds a metamessage to the session + """Adds a metamessage to a session and links it to a specific message Args: - is_user (bool): Whether the message is from the user - content (str): The content of the message + message (Message): A message to associate the metamessage with + metamessage_type (str): The type of the metamessage, an arbitrary identifier + content (str): The content of the metamessage Returns: - Dict: The Message object of the added message + Metamessage: The Metamessage object of the added metamessage """ if not self.is_active: @@ -317,7 +378,7 @@ async def create_metamessage(self, message: Message, metamessage_type: str, cont async def get_metamessage(self, metamessage_id: uuid.UUID) -> Metamessage: - """Get a specific message for a session based on ID + """Get a specific metamessage Args: message_id (uuid.UUID): The ID of the Message to retrieve @@ -358,6 +419,16 @@ async def get_metamessages(self, metamessage_type: Optional[str] = None, message return AsyncGetMetamessagePage(self, options, data) async def get_metamessages_generator(self, metamessage_type: Optional[str] = None, message: Optional[Message] = None): + """Shortcut Generator for get_metamessages. 
Generator to iterate through all metamessages for a session in an app + + Args: + metamessage_type (str, optional): Optional Metamessage type to filter by + message (Message, optional): Optional Message to filter by + + Yields: + Metamessage: The next Metamessage object of the requested query + + """ page = 1 page_size = 50 get_metamessages_page = await self.get_metamessages(metamessage_type=metamessage_type, message=message, page=page, page_size=page_size) @@ -394,5 +465,6 @@ async def close(self): """Closes a session by marking it as inactive""" url = f"{self.common_prefix}/users/{self.user_id}/sessions/{self.id}" response = await self.client.delete(url) + response.raise_for_status() self._is_active = False diff --git a/sdk/honcho/sync_client.py b/sdk/honcho/sync_client.py index 8c8da1e..2adc4e4 100644 --- a/sdk/honcho/sync_client.py +++ b/sdk/honcho/sync_client.py @@ -5,7 +5,13 @@ from .schemas import Message, Metamessage class GetPage: + """Base class for receiving Paginated API results""" def __init__(self, response: Dict) -> None: + """Constructor for Page with relevant information about the results and pages + + Args: + response (Dict): Response from API with pagination information + """ self.total = response["total"] self.page = response["page"] self.page_size = response["size"] @@ -13,18 +19,24 @@ def __init__(self, response: Dict) -> None: self.items =[] def next(self): + """Shortcut method to Get the next page of results""" pass class GetSessionPage(GetPage): + """Paginated Results for Get Session Requests""" + def __init__(self, client, options: Dict, response: Dict): + """Constructor for Page Result from Session Get Request + + Args: + client (Client): Honcho Client + options (Dict): Options for the request used mainly for next() to filter queries. 
The two parameters available are user_id which is required and location_id which is optional + response (Dict): Response from API with pagination information + """ super().__init__(response) self.client = client self.user_id = options["user_id"] self.location_id = options["location_id"] - # self.total = response["total"] - # self.page = response["page"] - # self.page_size = response["size"] - # self.pages = response["pages"] self.items = [ Session( client=client, @@ -38,20 +50,26 @@ def __init__(self, client, options: Dict, response: Dict): ] def next(self): + """Get the next page of results + Returns: + GetSessionPage | None: Next Page of Results or None if there are no more sessions to retrieve from a query + """ if self.page >= self.pages: return None - # user_id = self.items[0].user_id - # location_id = self.items[0].location_id return self.client.get_sessions(self.user_id, self.location_id, self.page + 1, self.page_size) class GetMessagePage(GetPage): + """Paginated Results for Get Message Requests""" + def __init__(self, session, response: Dict): + """Constructor for Page Result from Message Get Request + + Args: + session (Session): Session the returned messages are associated with + response (Dict): Response from API with pagination information + """ super().__init__(response) self.session = session - # self.total = response["total"] - # self.page = response["page"] - # self.page_size = response["size"] - # self.pages = response["pages"] self.items = [ Message( session_id=session.id, @@ -64,12 +82,24 @@ def __init__(self, session, response: Dict): ] def next(self): + """Get the next page of results + Returns: + GetMessagePage | None: Next Page of Results or None if there are no more messages to retrieve from a query + """ if self.page >= self.pages: return None return self.session.get_messages((self.page + 1), self.page_size) class GetMetamessagePage(GetPage): + def __init__(self, session, options: Dict, response: Dict) -> None: + """Constructor for Page Result from Metamessage Get Request + + Args: + session (Session): Session the returned metamessages are associated with + options (Dict): Options for the request used mainly for next() to filter queries. The two parameters available are message_id and metamessage_type which are both required + response (Dict): Response from API with pagination information + """ super().__init__(response) self.session = session self.message_id = options["message_id"] @@ -86,6 +116,10 @@ def __init__(self, session, options: Dict, response: Dict) -> None: ] def next(self): + """Get the next page of results + Returns: + GetMetamessagePage | None: Next Page of Results or None if there are no more metamessages to retrieve from a query + """ if self.page >= self.pages: return None return self.session.get_metamessages(metamessage_type=self.metamessage_type, message=self.message_id, page=(self.page + 1), page_size=self.page_size) @@ -93,6 +127,8 @@ def next(self): class Client: + """Honcho API Client Object""" + def __init__(self, app_id: str, base_url: str = "https://demo.honcho.dev"): """Constructor for Client""" self.base_url = base_url # Base URL for the instance of the Honcho API @@ -101,6 +137,7 @@ def __init__(self, app_id: str, base_url: str = "https://demo.honcho.dev"): @property def common_prefix(self): + """Shortcut for common API prefix. 
made a property to prevent tampering""" return f"{self.base_url}/apps/{self.app_id}" def get_session(self, user_id: str, session_id: uuid.UUID): @@ -108,10 +145,10 @@ def get_session(self, user_id: str, session_id: uuid.UUID): Args: user_id (str): The User ID representing the user, managed by the user - session_id (int): The ID of the Session to retrieve + session_id (uuid.UUID): The ID of the Session to retrieve Returns: - Dict: The Session object of the requested Session + Session: The Session object of the requested Session """ url = f"{self.common_prefix}/users/{user_id}/sessions/{session_id}" @@ -128,14 +165,16 @@ def get_session(self, user_id: str, session_id: uuid.UUID): ) def get_sessions(self, user_id: str, location_id: Optional[str] = None, page: int = 1, page_size: int = 50): - """Return sessions associated with a user + """Return paginated sessions associated with a user Args: user_id (str): The User ID representing the user, managed by the user location_id (str, optional): Optional Location ID representing the location of a session + page (int, optional): The page of results to return + page_size (int, optional): The number of results to return Returns: - list[Dict]: List of Session objects + GetSessionPage: Page of results for get_sessions query """ url = f"{self.common_prefix}/users/{user_id}/sessions?page={page}&size={page_size}" + ( @@ -151,6 +190,16 @@ def get_sessions(self, user_id: str, location_id: Optional[str] = None, page: in return GetSessionPage(self, options, data) def get_sessions_generator(self, user_id: str, location_id: Optional[str] = None): + """Shortcut Generator for get_sessions. Generator to iterate through all sessions for a user in an app + + Args: + user_id (str): The User ID representing the user, managed by the user + location_id (str, optional): Optional Location ID representing the location of a session + + Yields: + Session: The Session object of the requested Session + + """ page = 1 page_size = 50 get_session_response = self.get_sessions(user_id, location_id, page, page_size) @@ -176,7 +225,7 @@ def create_session( session_data (Dict, optional): Optional session metadata Returns: - Dict: The Session object of the new Session` + Session: The Session object of the new Session """ data = {"location_id": location_id, "session_data": session_data} @@ -195,6 +244,8 @@ def create_session( class Session: + """Represents a single session for a user in an app""" + def __init__( self, client: Client, @@ -218,13 +269,16 @@ def __init__( @property def common_prefix(self): + """Shortcut for common API prefix. 
made a property to prevent tampering""" return f"{self.base_url}/apps/{self.app_id}" def __str__(self): + """String representation of Session""" return f"Session(id={self.id}, app_id={self.app_id}, user_id={self.user_id}, location_id={self.location_id}, session_data={self.session_data}, is_active={self.is_active})" @property def is_active(self): + """Returns whether the session is active - made property to prevent tampering""" return self._is_active def create_message(self, is_user: bool, content: str): @@ -235,7 +289,7 @@ def __init__( is_user (bool): Whether the message is from the user content (str): The content of the message Returns: - Dict: The Message object of the added message + Message: The Message object of the added message """ if not self.is_active: @@ -267,11 +321,11 @@ def create_message(self, is_user: bool, content: str): def get_messages(self, page: int = 1, page_size: int = 50) -> GetMessagePage: """Get all messages for a session Args: - user_id (str): The User ID representing the user, managed by the user - session_id (int): The ID of the Session to retrieve + page (int, optional): The page of results to return + page_size (int, optional): The number of results to return per page Returns: - list[Dict]: List of Message objects + GetMessagePage: Page of Message objects """ url = f"{self.common_prefix}/users/{self.user_id}/sessions/{self.id}/messages?page={page}&size={page_size}" @@ -281,6 +335,12 @@ def get_messages(self, page: int = 1, page_size: int = 50) -> GetMessagePage: return GetMessagePage(self, data) def get_messages_generator(self): + """Shortcut Generator for get_messages. Generator to iterate through all messages for a session in an app + + Yields: + Message: The Message object of the next Message + + """ page = 1 page_size = 50 get_messages_page= self.get_messages(page, page_size) @@ -296,14 +356,15 @@ def get_messages_generator(self): get_messages_page = new_messages def create_metamessage(self, message: Message, metamessage_type: str, content: str): - """Adds a metamessage to the session + """Adds a metamessage to a session and links it to a specific message Args: - is_user (bool): Whether the message is from the user - content (str): The content of the message + message (Message): A message to associate the metamessage with + metamessage_type (str): The type of the metamessage, an arbitrary identifier + content (str): The content of the metamessage Returns: - Dict: The Message object of the added message + Metamessage: The Metamessage object of the added metamessage """ if not self.is_active: @@ -317,7 +378,7 @@ def create_metamessage(self, message: Message, metamessage_type: str, content: s def get_metamessage(self, metamessage_id: uuid.UUID) -> Metamessage: - """Get a specific message for a session based on ID + """Get a specific metamessage Args: message_id (uuid.UUID): The ID of the Message to retrieve @@ -358,6 +419,16 @@ def get_metamessages(self, metamessage_type: Optional[str] = None, message: Opti return GetMetamessagePage(self, options, data) def get_metamessages_generator(self, metamessage_type: Optional[str] = None, message: Optional[Message] = None): + """Shortcut Generator for get_metamessages. 
Generator to iterate through all metamessages for a session in an app + + Args: + metamessage_type (str, optional): Optional Metamessage type to filter by + message (Message, optional): Optional Message to filter by + + Yields: + Metamessage: The next Metamessage object of the requested query + + """ page = 1 page_size = 50 get_metamessages_page = self.get_metamessages(metamessage_type=metamessage_type, message=message, page=page, page_size=page_size) @@ -394,5 +465,6 @@ def close(self): """Closes a session by marking it as inactive""" url = f"{self.common_prefix}/users/{self.user_id}/sessions/{self.id}" response = self.client.delete(url) + response.raise_for_status() self._is_active = False From 716e8ca39a3c360901bab4c1edc82dcd6e6ee279 Mon Sep 17 00:00:00 2001 From: hyusap Date: Wed, 14 Feb 2024 12:56:26 -0500 Subject: [PATCH 15/46] =?UTF-8?q?=F0=9F=A7=AA=20autogenerate=20sync=20test?= =?UTF-8?q?s?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- scripts/syncronizer.py | 20 ++++++++++++++++++++ sdk/tests/test_sync.py | 31 +++++++++++++++---------------- 2 files changed, 35 insertions(+), 16 deletions(-) diff --git a/scripts/syncronizer.py b/scripts/syncronizer.py index 758f3eb..0630b8f 100644 --- a/scripts/syncronizer.py +++ b/scripts/syncronizer.py @@ -16,3 +16,23 @@ destination_file_path = os.path.join(this_dir, "../sdk/honcho/sync_client.py") with open(destination_file_path, "w") as destination_file: destination_file.write(sync_code) + + +# tests + +# Open the source file +source_file_path = os.path.join(this_dir, "../sdk/tests/test_async.py") +with open(source_file_path, "r") as source_file: + source_code = source_file.read() + +# Use regex to remove async mentions +sync_code = re.sub(r"@pytest.mark.asyncio\n", "", source_code) +sync_code = re.sub(r"async\s", "", sync_code) +sync_code = re.sub(r"await\s", "", sync_code) +sync_code = re.sub(r"__anext__", "__next__", sync_code) +sync_code = re.sub(r"Async", "", sync_code) + +# Write the modified code to the destination file +destination_file_path = os.path.join(this_dir, "../sdk/tests/test_sync.py") +with open(destination_file_path, "w") as destination_file: + destination_file.write(sync_code) diff --git a/sdk/tests/test_sync.py b/sdk/tests/test_sync.py index 135cc61..aba875f 100644 --- a/sdk/tests/test_sync.py +++ b/sdk/tests/test_sync.py @@ -1,7 +1,8 @@ +import pytest from honcho import GetSessionPage, GetMessagePage, GetMetamessagePage, Session, Message, Metamessage from honcho import Client as Honcho from uuid import uuid1 -import pytest + def test_session_creation_retrieval(): app_id = str(uuid1()) @@ -30,8 +31,8 @@ def test_session_multiple_retrieval(): def test_session_update(): - app_id = str(uuid1()) user_id = str(uuid1()) + app_id = str(uuid1()) client = Honcho(app_id, "http://localhost:8000") created_session = client.create_session(user_id) assert created_session.update({"foo": "bar"}) @@ -40,8 +41,8 @@ def test_session_update(): def test_session_deletion(): - app_id = str(uuid1()) user_id = str(uuid1()) + app_id = str(uuid1()) client = Honcho(app_id, "http://localhost:8000") created_session = client.create_session(user_id) assert created_session.is_active is True @@ -53,8 +54,8 @@ def test_session_deletion(): def test_messages(): - app_id = str(uuid1()) user_id = str(uuid1()) + app_id = str(uuid1()) client = Honcho(app_id, "http://localhost:8000") created_session = client.create_session(user_id) created_session.create_message(is_user=True, content="Hello") @@ -128,13 +129,13 @@ 
def test_paginated_sessions_generator(): gen = client.get_sessions_generator(user_id) # print(type(gen)) - item = next(gen) + item = gen.__next__() assert item.user_id == user_id assert isinstance(item, Session) - assert next(gen) is not None - assert next(gen) is not None + assert gen.__next__() is not None + assert gen.__next__() is not None with pytest.raises(StopIteration): - next(gen) + gen.__next__() def test_paginated_out_of_bounds(): app_id = str(uuid1()) @@ -183,7 +184,6 @@ def test_paginated_messages(): assert next_page is None - def test_paginated_messages_generator(): app_id = str(uuid1()) user_id = str(uuid1()) @@ -193,17 +193,16 @@ def test_paginated_messages_generator(): created_session.create_message(is_user=False, content="Hi") gen = created_session.get_messages_generator() - item = next(gen) + item = gen.__next__() assert isinstance(item, Message) assert item.content == "Hello" assert item.is_user is True - item2 = next(gen) + item2 = gen.__next__() assert item2 is not None assert item2.content == "Hi" assert item2.is_user is False with pytest.raises(StopIteration): - next(gen) - + gen.__next__() def test_paginated_metamessages(): app_id = str(uuid1()) @@ -246,16 +245,16 @@ def test_paginated_metamessages_generator(): created_session.create_metamessage(message=message, metamessage_type="thought", content="Test 2") gen = created_session.get_metamessages_generator() - item = next(gen) + item = gen.__next__() assert isinstance(item, Metamessage) assert item.content == "Test 1" assert item.metamessage_type == "thought" - item2 = next(gen) + item2 = gen.__next__() assert item2 is not None assert item2.content == "Test 2" assert item2.metamessage_type == "thought" with pytest.raises(StopIteration): - next(gen) + gen.__next__() From 735d780a256675ced80aca98357029bb958e8329 Mon Sep 17 00:00:00 2001 From: hyusap Date: Wed, 14 Feb 2024 14:50:10 -0500 Subject: [PATCH 16/46] test one --- .github/workflows/api_testing.yml | 32 +++++++++++++++++++++++++++++++ 1 file changed, 32 insertions(+) create mode 100644 .github/workflows/api_testing.yml diff --git a/.github/workflows/api_testing.yml b/.github/workflows/api_testing.yml new file mode 100644 index 0000000..0ae177b --- /dev/null +++ b/.github/workflows/api_testing.yml @@ -0,0 +1,32 @@ +name: Run Tests +on: [push, pull_request] +jobs: + test: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - name: Set up Python 3.10 + uses: actions/setup-python@v3 + with: + python-version: "3.10" + - name: Install poetry + run: | + pip install poetry + - name: Start Server + run: | + cd api + poetry install --no-root + poetry run uvicorn src.main:app & + sleep 5 + cd .. + - name: Run Tests + run: | + cd sdk + poetry install + poetry run pytest + - name: Stop Server + run: | + kill $(jobs -p) || true + + + From 783156f61f381b6a742ffaaaa0ab225a2c9f3cde Mon Sep 17 00:00:00 2001 From: hyusap Date: Wed, 14 Feb 2024 14:52:31 -0500 Subject: [PATCH 17/46] add db type --- .github/workflows/api_testing.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.github/workflows/api_testing.yml b/.github/workflows/api_testing.yml index 0ae177b..cf6514e 100644 --- a/.github/workflows/api_testing.yml +++ b/.github/workflows/api_testing.yml @@ -19,6 +19,9 @@ jobs: poetry run uvicorn src.main:app & sleep 5 cd .. 
+ env: + DATABASE_TYPE: sqlite + CONNECTION_URI: sqlite:///api.db - name: Run Tests run: | cd sdk From 287db71751c7d22716a0434e19393481695c7561 Mon Sep 17 00:00:00 2001 From: hyusap Date: Wed, 14 Feb 2024 15:34:39 -0500 Subject: [PATCH 18/46] sync client --- .github/workflows/api_testing.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.github/workflows/api_testing.yml b/.github/workflows/api_testing.yml index cf6514e..f5e6780 100644 --- a/.github/workflows/api_testing.yml +++ b/.github/workflows/api_testing.yml @@ -12,6 +12,9 @@ jobs: - name: Install poetry run: | pip install poetry + - name: Syncify Client + run: | + python scripts/syncronizer.py - name: Start Server run: | cd api From 811c7268a839ea5b0d26a3a122f93b539995138b Mon Sep 17 00:00:00 2001 From: hyusap Date: Wed, 14 Feb 2024 15:38:22 -0500 Subject: [PATCH 19/46] add status badge --- README.md | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index 47c9441..0aa1a9a 100644 --- a/README.md +++ b/README.md @@ -1,10 +1,13 @@ # Honcho + ![Static Badge](https://img.shields.io/badge/Version-0.0.2-blue) [![Discord](https://img.shields.io/discord/1016845111637839922?style=flat&logo=discord&logoColor=23ffffff&label=Plastic%20Labs&labelColor=235865F2)](https://discord.gg/plasticlabs) ![GitHub License](https://img.shields.io/github/license/plastic-labs/honcho) ![GitHub Repo stars](https://img.shields.io/github/stars/plastic-labs/honcho) [![X (formerly Twitter) URL](https://img.shields.io/twitter/url?url=https%3A%2F%2Ftwitter.com%2Fplastic_labs)](https://twitter.com/plastic_labs) +[![Run Tests](https://github.com/plastic-labs/honcho/actions/workflows/api_testing.yml/badge.svg?branch=staging)](https://github.com/plastic-labs/honcho/actions/workflows/api_testing.yml) + A User context management solution for building AI Agents and LLM powered applications. @@ -48,7 +51,7 @@ poetry install # install dependencies 2. Copy the `.env.template` file and specify the type of database and connection_uri. For testing sqlite is fine. The below example uses an - in-memory sqlite database. + in-memory sqlite database. > Honcho has been tested with Postgresql and SQLite @@ -83,8 +86,7 @@ docker run --env-file .env -p 8000:8000 honcho-api:latest The API can also be deployed on fly.io. Follow the [Fly.io Docs](https://fly.io/docs/getting-started/) to setup your environment and the -`flyctl`. - +`flyctl`. Once `flyctl` is set up use the the following commands to launch the application: @@ -127,12 +129,12 @@ See more information [here](https://python-poetry.org/docs/cli/#add) This project is completely open source and welcomes any and all open source contributions. The workflow for contributing is to make a fork of the repository. You can claim an issue in the issues tab or start a new thread to -indicate a feature or bug fix you are working on. +indicate a feature or bug fix you are working on. Once you have finished your contribution make a PR pointed at the `staging` branch, and it will be reviewed by a project manager. Feel free to join us in our [discord](http://discord.gg/plasticlabs) to discuss your changes or get -help. +help. 
Once your changes are accepted and merged into staging they will undergo a period of live testing before entering the upstream into `main` From cb42724f721b6e13c7f578182c4c95fb3c368748 Mon Sep 17 00:00:00 2001 From: hyusap Date: Wed, 14 Feb 2024 15:52:02 -0500 Subject: [PATCH 20/46] add coverage --- .github/workflows/api_testing.yml | 17 +++++- sdk/poetry.lock | 92 +++++++++++++++++++++++-------- sdk/pyproject.toml | 1 + 3 files changed, 87 insertions(+), 23 deletions(-) diff --git a/.github/workflows/api_testing.yml b/.github/workflows/api_testing.yml index f5e6780..52eed8c 100644 --- a/.github/workflows/api_testing.yml +++ b/.github/workflows/api_testing.yml @@ -29,7 +29,22 @@ jobs: run: | cd sdk poetry install - poetry run pytest + poetry run coverage run -m pytest + poetry run coverage xml coverage.xml + cd .. + - name: Code Coverage + uses: irongut/CodeCoverageSummary@v1.3.0 + with: + filename: sdk/coverage.xml + badge: true + format: markdown + + - name: Add Coverage PR Comment + uses: marocchino/sticky-pull-request-comment@v2 + if: github.event_name == 'pull_request' + with: + recreate: true + path: code-coverage-results.md - name: Stop Server run: | kill $(jobs -p) || true diff --git a/sdk/poetry.lock b/sdk/poetry.lock index e450ca8..965b0ac 100644 --- a/sdk/poetry.lock +++ b/sdk/poetry.lock @@ -1,10 +1,9 @@ -# This file is automatically @generated by Poetry 1.4.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. [[package]] name = "anyio" version = "4.2.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -27,7 +26,6 @@ trio = ["trio (>=0.23)"] name = "certifi" version = "2023.11.17" description = "Python package for providing Mozilla's CA Bundle." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -39,7 +37,6 @@ files = [ name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." 
-category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ @@ -47,11 +44,74 @@ files = [ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] +[[package]] +name = "coverage" +version = "7.4.1" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "coverage-7.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:077d366e724f24fc02dbfe9d946534357fda71af9764ff99d73c3c596001bbd7"}, + {file = "coverage-7.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0193657651f5399d433c92f8ae264aff31fc1d066deee4b831549526433f3f61"}, + {file = "coverage-7.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d17bbc946f52ca67adf72a5ee783cd7cd3477f8f8796f59b4974a9b59cacc9ee"}, + {file = "coverage-7.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3277f5fa7483c927fe3a7b017b39351610265308f5267ac6d4c2b64cc1d8d25"}, + {file = "coverage-7.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dceb61d40cbfcf45f51e59933c784a50846dc03211054bd76b421a713dcdf19"}, + {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6008adeca04a445ea6ef31b2cbaf1d01d02986047606f7da266629afee982630"}, + {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c61f66d93d712f6e03369b6a7769233bfda880b12f417eefdd4f16d1deb2fc4c"}, + {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b9bb62fac84d5f2ff523304e59e5c439955fb3b7f44e3d7b2085184db74d733b"}, + {file = "coverage-7.4.1-cp310-cp310-win32.whl", hash = "sha256:f86f368e1c7ce897bf2457b9eb61169a44e2ef797099fb5728482b8d69f3f016"}, + {file = "coverage-7.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:869b5046d41abfea3e381dd143407b0d29b8282a904a19cb908fa24d090cc018"}, + {file = "coverage-7.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b8ffb498a83d7e0305968289441914154fb0ef5d8b3157df02a90c6695978295"}, + {file = "coverage-7.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3cacfaefe6089d477264001f90f55b7881ba615953414999c46cc9713ff93c8c"}, + {file = "coverage-7.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d6850e6e36e332d5511a48a251790ddc545e16e8beaf046c03985c69ccb2676"}, + {file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18e961aa13b6d47f758cc5879383d27b5b3f3dcd9ce8cdbfdc2571fe86feb4dd"}, + {file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfd1e1b9f0898817babf840b77ce9fe655ecbe8b1b327983df485b30df8cc011"}, + {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6b00e21f86598b6330f0019b40fb397e705135040dbedc2ca9a93c7441178e74"}, + {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:536d609c6963c50055bab766d9951b6c394759190d03311f3e9fcf194ca909e1"}, + {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7ac8f8eb153724f84885a1374999b7e45734bf93a87d8df1e7ce2146860edef6"}, + {file = "coverage-7.4.1-cp311-cp311-win32.whl", hash = "sha256:f3771b23bb3675a06f5d885c3630b1d01ea6cac9e84a01aaf5508706dba546c5"}, + {file = "coverage-7.4.1-cp311-cp311-win_amd64.whl", hash = 
"sha256:9d2f9d4cc2a53b38cabc2d6d80f7f9b7e3da26b2f53d48f05876fef7956b6968"}, + {file = "coverage-7.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f68ef3660677e6624c8cace943e4765545f8191313a07288a53d3da188bd8581"}, + {file = "coverage-7.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:23b27b8a698e749b61809fb637eb98ebf0e505710ec46a8aa6f1be7dc0dc43a6"}, + {file = "coverage-7.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e3424c554391dc9ef4a92ad28665756566a28fecf47308f91841f6c49288e66"}, + {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0860a348bf7004c812c8368d1fc7f77fe8e4c095d661a579196a9533778e156"}, + {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe558371c1bdf3b8fa03e097c523fb9645b8730399c14fe7721ee9c9e2a545d3"}, + {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3468cc8720402af37b6c6e7e2a9cdb9f6c16c728638a2ebc768ba1ef6f26c3a1"}, + {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:02f2edb575d62172aa28fe00efe821ae31f25dc3d589055b3fb64d51e52e4ab1"}, + {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ca6e61dc52f601d1d224526360cdeab0d0712ec104a2ce6cc5ccef6ed9a233bc"}, + {file = "coverage-7.4.1-cp312-cp312-win32.whl", hash = "sha256:ca7b26a5e456a843b9b6683eada193fc1f65c761b3a473941efe5a291f604c74"}, + {file = "coverage-7.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:85ccc5fa54c2ed64bd91ed3b4a627b9cce04646a659512a051fa82a92c04a448"}, + {file = "coverage-7.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8bdb0285a0202888d19ec6b6d23d5990410decb932b709f2b0dfe216d031d218"}, + {file = "coverage-7.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:918440dea04521f499721c039863ef95433314b1db00ff826a02580c1f503e45"}, + {file = "coverage-7.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:379d4c7abad5afbe9d88cc31ea8ca262296480a86af945b08214eb1a556a3e4d"}, + {file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b094116f0b6155e36a304ff912f89bbb5067157aff5f94060ff20bbabdc8da06"}, + {file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2f5968608b1fe2a1d00d01ad1017ee27efd99b3437e08b83ded9b7af3f6f766"}, + {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:10e88e7f41e6197ea0429ae18f21ff521d4f4490aa33048f6c6f94c6045a6a75"}, + {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a4a3907011d39dbc3e37bdc5df0a8c93853c369039b59efa33a7b6669de04c60"}, + {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6d224f0c4c9c98290a6990259073f496fcec1b5cc613eecbd22786d398ded3ad"}, + {file = "coverage-7.4.1-cp38-cp38-win32.whl", hash = "sha256:23f5881362dcb0e1a92b84b3c2809bdc90db892332daab81ad8f642d8ed55042"}, + {file = "coverage-7.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:a07f61fc452c43cd5328b392e52555f7d1952400a1ad09086c4a8addccbd138d"}, + {file = "coverage-7.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8e738a492b6221f8dcf281b67129510835461132b03024830ac0e554311a5c54"}, + {file = "coverage-7.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:46342fed0fff72efcda77040b14728049200cbba1279e0bf1188f1f2078c1d70"}, + {file = 
"coverage-7.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9641e21670c68c7e57d2053ddf6c443e4f0a6e18e547e86af3fad0795414a628"}, + {file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aeb2c2688ed93b027eb0d26aa188ada34acb22dceea256d76390eea135083950"}, + {file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d12c923757de24e4e2110cf8832d83a886a4cf215c6e61ed506006872b43a6d1"}, + {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0491275c3b9971cdbd28a4595c2cb5838f08036bca31765bad5e17edf900b2c7"}, + {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8dfc5e195bbef80aabd81596ef52a1277ee7143fe419efc3c4d8ba2754671756"}, + {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1a78b656a4d12b0490ca72651fe4d9f5e07e3c6461063a9b6265ee45eb2bdd35"}, + {file = "coverage-7.4.1-cp39-cp39-win32.whl", hash = "sha256:f90515974b39f4dea2f27c0959688621b46d96d5a626cf9c53dbc653a895c05c"}, + {file = "coverage-7.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:64e723ca82a84053dd7bfcc986bdb34af8d9da83c521c19d6b472bc6880e191a"}, + {file = "coverage-7.4.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:32a8d985462e37cfdab611a6f95b09d7c091d07668fdc26e47a725ee575fe166"}, + {file = "coverage-7.4.1.tar.gz", hash = "sha256:1ed4b95480952b1a26d863e546fa5094564aa0065e1e5f0d4d0041f293251d04"}, +] + +[package.extras] +toml = ["tomli"] + [[package]] name = "exceptiongroup" version = "1.2.0" description = "Backport of PEP 654 (exception groups)" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -66,7 +126,6 @@ test = ["pytest (>=6)"] name = "h11" version = "0.14.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -78,7 +137,6 @@ files = [ name = "httpcore" version = "1.0.2" description = "A minimal low-level HTTP client." -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -93,14 +151,13 @@ h11 = ">=0.13,<0.15" [package.extras] asyncio = ["anyio (>=4.0,<5.0)"] http2 = ["h2 (>=3,<5)"] -socks = ["socksio (>=1.0.0,<2.0.0)"] +socks = ["socksio (==1.*)"] trio = ["trio (>=0.22.0,<0.23.0)"] [[package]] name = "httpx" version = "0.26.0" description = "The next generation HTTP client." 
-category = "main" optional = false python-versions = ">=3.8" files = [ @@ -111,21 +168,20 @@ files = [ [package.dependencies] anyio = "*" certifi = "*" -httpcore = ">=1.0.0,<2.0.0" +httpcore = "==1.*" idna = "*" sniffio = "*" [package.extras] brotli = ["brotli", "brotlicffi"] -cli = ["click (>=8.0.0,<9.0.0)", "pygments (>=2.0.0,<3.0.0)", "rich (>=10,<14)"] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] http2 = ["h2 (>=3,<5)"] -socks = ["socksio (>=1.0.0,<2.0.0)"] +socks = ["socksio (==1.*)"] [[package]] name = "idna" version = "3.6" description = "Internationalized Domain Names in Applications (IDNA)" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -137,7 +193,6 @@ files = [ name = "iniconfig" version = "2.0.0" description = "brain-dead simple config-ini parsing" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -149,7 +204,6 @@ files = [ name = "packaging" version = "23.2" description = "Core utilities for Python packages" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -161,7 +215,6 @@ files = [ name = "pluggy" version = "1.3.0" description = "plugin and hook calling mechanisms for python" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -177,7 +230,6 @@ testing = ["pytest", "pytest-benchmark"] name = "pytest" version = "7.4.4" description = "pytest: simple powerful testing with Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -200,7 +252,6 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no name = "pytest-asyncio" version = "0.23.4" description = "Pytest support for asyncio" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -219,7 +270,6 @@ testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] name = "sniffio" version = "1.3.0" description = "Sniff out which async library your code is running under" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -231,7 +281,6 @@ files = [ name = "tomli" version = "2.0.1" description = "A lil' TOML parser" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -243,7 +292,6 @@ files = [ name = "typing-extensions" version = "4.9.0" description = "Backported and Experimental Type Hints for Python 3.8+" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -254,4 +302,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "6ccea662fa5a5bae88618123d5d05e0d4955c234b7e1a688d2fae2f90cd9f7f8" +content-hash = "cfdd3c0dc8dba3a70135da5b63a3b027968bb935d4846491c7bba2f30ac20a32" diff --git a/sdk/pyproject.toml b/sdk/pyproject.toml index ff695b5..ac25ab2 100644 --- a/sdk/pyproject.toml +++ b/sdk/pyproject.toml @@ -14,6 +14,7 @@ httpx = "^0.26.0" [tool.poetry.group.test.dependencies] pytest = "^7.4.4" pytest-asyncio = "^0.23.4" +coverage = "^7.4.1" [build-system] requires = ["poetry-core"] From 77f180ae497c02c3d858c2ce3464dbdf0ad8aa9f Mon Sep 17 00:00:00 2001 From: hyusap Date: Wed, 14 Feb 2024 16:04:56 -0500 Subject: [PATCH 21/46] add file --- .github/workflows/api_testing.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/api_testing.yml b/.github/workflows/api_testing.yml index 52eed8c..31341ec 100644 --- a/.github/workflows/api_testing.yml +++ b/.github/workflows/api_testing.yml @@ -37,8 +37,8 @@ jobs: with: filename: sdk/coverage.xml badge: true + output: file format: markdown - - name: Add Coverage PR Comment uses: 
marocchino/sticky-pull-request-comment@v2 if: github.event_name == 'pull_request' From f359be4dcabf5ed96fb479c3f07fe725838a9f51 Mon Sep 17 00:00:00 2001 From: hyusap Date: Wed, 14 Feb 2024 16:10:10 -0500 Subject: [PATCH 22/46] give perms --- .github/workflows/api_testing.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/api_testing.yml b/.github/workflows/api_testing.yml index 31341ec..ea03bad 100644 --- a/.github/workflows/api_testing.yml +++ b/.github/workflows/api_testing.yml @@ -2,6 +2,8 @@ name: Run Tests on: [push, pull_request] jobs: test: + permissions: + pull-requests: write runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 From 178b4889c30fa73e4a27b5f4e77a6a687a1dc3b5 Mon Sep 17 00:00:00 2001 From: hyusap Date: Wed, 14 Feb 2024 16:15:15 -0500 Subject: [PATCH 23/46] properly output coverage --- .github/workflows/api_testing.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/api_testing.yml b/.github/workflows/api_testing.yml index ea03bad..e7c40a6 100644 --- a/.github/workflows/api_testing.yml +++ b/.github/workflows/api_testing.yml @@ -32,7 +32,7 @@ jobs: cd sdk poetry install poetry run coverage run -m pytest - poetry run coverage xml coverage.xml + poetry run coverage xml -o coverage.xml cd .. - name: Code Coverage uses: irongut/CodeCoverageSummary@v1.3.0 From 18241c911ee2260d3cf5389028b1e79ce5e5c819 Mon Sep 17 00:00:00 2001 From: hyusap Date: Wed, 14 Feb 2024 16:44:27 -0500 Subject: [PATCH 24/46] split test and coverage --- .../{api_testing.yml => run_coverage.yml} | 8 +--- .github/workflows/run_tests.yml | 38 +++++++++++++++++++ 2 files changed, 40 insertions(+), 6 deletions(-) rename .github/workflows/{api_testing.yml => run_coverage.yml} (91%) create mode 100644 .github/workflows/run_tests.yml diff --git a/.github/workflows/api_testing.yml b/.github/workflows/run_coverage.yml similarity index 91% rename from .github/workflows/api_testing.yml rename to .github/workflows/run_coverage.yml index e7c40a6..4b52640 100644 --- a/.github/workflows/api_testing.yml +++ b/.github/workflows/run_coverage.yml @@ -1,5 +1,5 @@ name: Run Tests -on: [push, pull_request] +on: [pull_request] jobs: test: permissions: @@ -43,13 +43,9 @@ jobs: format: markdown - name: Add Coverage PR Comment uses: marocchino/sticky-pull-request-comment@v2 - if: github.event_name == 'pull_request' with: recreate: true path: code-coverage-results.md - name: Stop Server run: | - kill $(jobs -p) || true - - - + kill $(jobs -p) || true \ No newline at end of file diff --git a/.github/workflows/run_tests.yml b/.github/workflows/run_tests.yml new file mode 100644 index 0000000..05e3aa8 --- /dev/null +++ b/.github/workflows/run_tests.yml @@ -0,0 +1,38 @@ +name: Run Tests +on: [push, pull_request] +jobs: + test: + permissions: + pull-requests: write + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - name: Set up Python 3.10 + uses: actions/setup-python@v3 + with: + python-version: "3.10" + - name: Install poetry + run: | + pip install poetry + - name: Syncify Client + run: | + python scripts/syncronizer.py + - name: Start Server + run: | + cd api + poetry install --no-root + poetry run uvicorn src.main:app & + sleep 5 + cd .. + env: + DATABASE_TYPE: sqlite + CONNECTION_URI: sqlite:///api.db + - name: Run Tests + run: | + cd sdk + poetry install + poetry run pytest + cd .. 
+ - name: Stop Server + run: | + kill $(jobs -p) || true \ No newline at end of file From 555c84829453023bd42e5d3d983f73f277649053 Mon Sep 17 00:00:00 2001 From: hyusap Date: Wed, 14 Feb 2024 16:45:21 -0500 Subject: [PATCH 25/46] rename action --- .github/workflows/run_coverage.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/run_coverage.yml b/.github/workflows/run_coverage.yml index 4b52640..bc2d780 100644 --- a/.github/workflows/run_coverage.yml +++ b/.github/workflows/run_coverage.yml @@ -1,4 +1,4 @@ -name: Run Tests +name: Run Coverage on: [pull_request] jobs: test: From fcde94ec67d9273ae22a02ea702ee379c2e5c73d Mon Sep 17 00:00:00 2001 From: Ayush Paul Date: Thu, 15 Feb 2024 09:01:28 -0500 Subject: [PATCH 26/46] =?UTF-8?q?=F0=9F=A7=AA=20autogenerate=20sync=20test?= =?UTF-8?q?s=20(#16)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- scripts/syncronizer.py | 20 ++++++++++++++++++++ sdk/tests/test_sync.py | 31 +++++++++++++++---------------- 2 files changed, 35 insertions(+), 16 deletions(-) diff --git a/scripts/syncronizer.py b/scripts/syncronizer.py index 758f3eb..0630b8f 100644 --- a/scripts/syncronizer.py +++ b/scripts/syncronizer.py @@ -16,3 +16,23 @@ destination_file_path = os.path.join(this_dir, "../sdk/honcho/sync_client.py") with open(destination_file_path, "w") as destination_file: destination_file.write(sync_code) + + +# tests + +# Open the source file +source_file_path = os.path.join(this_dir, "../sdk/tests/test_async.py") +with open(source_file_path, "r") as source_file: + source_code = source_file.read() + +# Use regex to remove async mentions +sync_code = re.sub(r"@pytest.mark.asyncio\n", "", source_code) +sync_code = re.sub(r"async\s", "", sync_code) +sync_code = re.sub(r"await\s", "", sync_code) +sync_code = re.sub(r"__anext__", "__next__", sync_code) +sync_code = re.sub(r"Async", "", sync_code) + +# Write the modified code to the destination file +destination_file_path = os.path.join(this_dir, "../sdk/tests/test_sync.py") +with open(destination_file_path, "w") as destination_file: + destination_file.write(sync_code) diff --git a/sdk/tests/test_sync.py b/sdk/tests/test_sync.py index 135cc61..aba875f 100644 --- a/sdk/tests/test_sync.py +++ b/sdk/tests/test_sync.py @@ -1,7 +1,8 @@ +import pytest from honcho import GetSessionPage, GetMessagePage, GetMetamessagePage, Session, Message, Metamessage from honcho import Client as Honcho from uuid import uuid1 -import pytest + def test_session_creation_retrieval(): app_id = str(uuid1()) @@ -30,8 +31,8 @@ def test_session_multiple_retrieval(): def test_session_update(): - app_id = str(uuid1()) user_id = str(uuid1()) + app_id = str(uuid1()) client = Honcho(app_id, "http://localhost:8000") created_session = client.create_session(user_id) assert created_session.update({"foo": "bar"}) @@ -40,8 +41,8 @@ def test_session_update(): def test_session_deletion(): - app_id = str(uuid1()) user_id = str(uuid1()) + app_id = str(uuid1()) client = Honcho(app_id, "http://localhost:8000") created_session = client.create_session(user_id) assert created_session.is_active is True @@ -53,8 +54,8 @@ def test_session_deletion(): def test_messages(): - app_id = str(uuid1()) user_id = str(uuid1()) + app_id = str(uuid1()) client = Honcho(app_id, "http://localhost:8000") created_session = client.create_session(user_id) created_session.create_message(is_user=True, content="Hello") @@ -128,13 +129,13 @@ def test_paginated_sessions_generator(): gen = 
client.get_sessions_generator(user_id) # print(type(gen)) - item = next(gen) + item = gen.__next__() assert item.user_id == user_id assert isinstance(item, Session) - assert next(gen) is not None - assert next(gen) is not None + assert gen.__next__() is not None + assert gen.__next__() is not None with pytest.raises(StopIteration): - next(gen) + gen.__next__() def test_paginated_out_of_bounds(): app_id = str(uuid1()) @@ -183,7 +184,6 @@ def test_paginated_messages(): assert next_page is None - def test_paginated_messages_generator(): app_id = str(uuid1()) user_id = str(uuid1()) @@ -193,17 +193,16 @@ def test_paginated_messages_generator(): created_session.create_message(is_user=False, content="Hi") gen = created_session.get_messages_generator() - item = next(gen) + item = gen.__next__() assert isinstance(item, Message) assert item.content == "Hello" assert item.is_user is True - item2 = next(gen) + item2 = gen.__next__() assert item2 is not None assert item2.content == "Hi" assert item2.is_user is False with pytest.raises(StopIteration): - next(gen) - + gen.__next__() def test_paginated_metamessages(): app_id = str(uuid1()) @@ -246,16 +245,16 @@ def test_paginated_metamessages_generator(): created_session.create_metamessage(message=message, metamessage_type="thought", content="Test 2") gen = created_session.get_metamessages_generator() - item = next(gen) + item = gen.__next__() assert isinstance(item, Metamessage) assert item.content == "Test 1" assert item.metamessage_type == "thought" - item2 = next(gen) + item2 = gen.__next__() assert item2 is not None assert item2.content == "Test 2" assert item2.metamessage_type == "thought" with pytest.raises(StopIteration): - next(gen) + gen.__next__() From 8421fda71c7c9696e69f80e4492c1582a41b150c Mon Sep 17 00:00:00 2001 From: Vineeth Voruganti <13438633+VVoruganti@users.noreply.github.com> Date: Thu, 15 Feb 2024 09:09:43 -0800 Subject: [PATCH 27/46] Vector Support (#18) * Scaffold for PGVector support * Buggy crud with logic skeleton on api * Crud logic and schema definition for pgvector * Populate all routes and refactor to name Collection * vince's progress * AsyncCollection progress * Local PGVector Docker Container * client methods for sdk except document delete and update * Vector Support Passing All Test Cases * Docs Updates --------- Co-authored-by: vintro --- README.md | 19 +- api/.env.template | 5 + api/CHANGELOG.md | 16 ++ api/local/docker-compose.yml | 14 ++ api/local/init.sql | 1 + api/poetry.lock | 170 +++++++++++++- api/pyproject.toml | 4 +- api/src/crud.py | 249 +++++++++++++++++++-- api/src/main.py | 199 +++++++++++++++-- api/src/models.py | 46 +++- api/src/schemas.py | 77 ++++++- sdk/CHANGELOG.md | 15 ++ sdk/honcho/__init__.py | 6 +- sdk/honcho/client.py | 417 ++++++++++++++++++++++++++++++++--- sdk/honcho/schemas.py | 12 + sdk/honcho/sync_client.py | 409 +++++++++++++++++++++++++++++++--- sdk/pyproject.toml | 2 +- sdk/tests/test_async.py | 99 ++++++++- sdk/tests/test_sync.py | 96 +++++++- 19 files changed, 1725 insertions(+), 131 deletions(-) create mode 100644 api/local/docker-compose.yml create mode 100644 api/local/init.sql diff --git a/README.md b/README.md index 47c9441..e67c360 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,5 @@ # Honcho -![Static Badge](https://img.shields.io/badge/Version-0.0.2-blue) +![Static Badge](https://img.shields.io/badge/Version-0.0.3-blue) 
[![Discord](https://img.shields.io/discord/1016845111637839922?style=flat&logo=discord&logoColor=23ffffff&label=Plastic%20Labs&labelColor=235865F2)](https://discord.gg/plasticlabs) ![GitHub License](https://img.shields.io/github/license/plastic-labs/honcho) ![GitHub Repo stars](https://img.shields.io/github/stars/plastic-labs/honcho) [![X (formerly Twitter) URL](https://img.shields.io/twitter/url?url=https%3A%2F%2Ftwitter.com%2Fplastic_labs)](https://twitter.com/plastic_labs) @@ -50,14 +50,21 @@ poetry install # install dependencies connection_uri. For testing sqlite is fine. The below example uses an in-memory sqlite database. -> Honcho has been tested with Postgresql and SQLite +> Honcho has been tested with Postgresql and PGVector ```env -DATABASE_TYPE=sqlite -CONNECTION_URI=sqlite:// +DATABASE_TYPE=postgres +CONNECTION_URI=postgresql://testuser:testpwd@localhost:5432/honcho ``` -3. Run the API via uvicorn +3. Launch a postgres database with pgvector enabled using docker-compose + +```bash +cd honcho/api/local +docker-compose up -d +``` + +4. Run the API via uvicorn ```bash cd honcho/api # change to the api directory @@ -86,7 +93,7 @@ Docs](https://fly.io/docs/getting-started/) to setup your environment and the `flyctl`. -Once `flyctl` is set up use the the following commands to launch the application: +Once `flyctl` is set up use the following commands to launch the application: ```bash cd honcho/api diff --git a/api/.env.template b/api/.env.template index 3e73476..04ee33b 100644 --- a/api/.env.template +++ b/api/.env.template @@ -1,2 +1,7 @@ DATABASE_TYPE=sqlite CONNECTION_URI=sqlite:// + +DATABASE_TYPE=postgres +CONNECTION_URI=postgresql://testuser:testpwd@localhost:5432/honcho + +OPENAI_API_KEY= diff --git a/api/CHANGELOG.md b/api/CHANGELOG.md index 56d7915..84cd89e 100644 --- a/api/CHANGELOG.md +++ b/api/CHANGELOG.md @@ -4,6 +4,22 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](http://keepachangelog.com/) and this project adheres to [Semantic Versioning](http://semver.org/).
+## [0.0.3] — 2024-02-15 + +### Added + +* Collections table to reference a collection of embedding documents +* Documents table to hold vector embeddings for RAG workflows +* Local scripts for running a postgres database with pgvector installed +* OpenAI Dependency for embedding models +* PGvector dependency for vector db support + +### Changed + +* session_data is now metadata +* session_data is a JSON field used python `dict` for compatability + + ## [0.0.2] — 2024-02-01 ### Added diff --git a/api/local/docker-compose.yml b/api/local/docker-compose.yml new file mode 100644 index 0000000..ea0f376 --- /dev/null +++ b/api/local/docker-compose.yml @@ -0,0 +1,14 @@ +services: + db: + hostname: db + image: ankane/pgvector + ports: + - 5432:5432 + restart: always + environment: + - POSTGRES_DB=honcho + - POSTGRES_USER=testuser + - POSTGRES_PASSWORD=testpwd + - POSTGRES_HOST_AUTH_METHOD=trust + volumes: + - ./init.sql:/docker-entrypoint-initdb.d/init.sql diff --git a/api/local/init.sql b/api/local/init.sql new file mode 100644 index 0000000..0aa0fc2 --- /dev/null +++ b/api/local/init.sql @@ -0,0 +1 @@ +CREATE EXTENSION IF NOT EXISTS vector; diff --git a/api/poetry.lock b/api/poetry.lock index 252389c..3b8e3cd 100644 --- a/api/poetry.lock +++ b/api/poetry.lock @@ -37,6 +37,18 @@ doc = ["Sphinx", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd- test = ["anyio[trio]", "coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "mock (>=4)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] trio = ["trio (<0.22)"] +[[package]] +name = "certifi" +version = "2024.2.2" +description = "Python package for providing Mozilla's CA Bundle." +category = "main" +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, +] + [[package]] name = "click" version = "8.1.7" @@ -82,6 +94,18 @@ wrapt = ">=1.10,<2" [package.extras] dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] +[[package]] +name = "distro" +version = "1.9.0" +description = "Distro - an OS platform information API" +category = "main" +optional = false +python-versions = ">=3.6" +files = [ + {file = "distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2"}, + {file = "distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed"}, +] + [[package]] name = "exceptiongroup" version = "1.2.0" @@ -236,6 +260,53 @@ files = [ {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, ] +[[package]] +name = "httpcore" +version = "1.0.3" +description = "A minimal low-level HTTP client." 
+category = "main" +optional = false +python-versions = ">=3.8" +files = [ + {file = "httpcore-1.0.3-py3-none-any.whl", hash = "sha256:9a6a501c3099307d9fd76ac244e08503427679b1e81ceb1d922485e2f2462ad2"}, + {file = "httpcore-1.0.3.tar.gz", hash = "sha256:5c0f9546ad17dac4d0772b0808856eb616eb8b48ce94f49ed819fd6982a8a544"}, +] + +[package.dependencies] +certifi = "*" +h11 = ">=0.13,<0.15" + +[package.extras] +asyncio = ["anyio (>=4.0,<5.0)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (>=1.0.0,<2.0.0)"] +trio = ["trio (>=0.22.0,<0.24.0)"] + +[[package]] +name = "httpx" +version = "0.26.0" +description = "The next generation HTTP client." +category = "main" +optional = false +python-versions = ">=3.8" +files = [ + {file = "httpx-0.26.0-py3-none-any.whl", hash = "sha256:8915f5a3627c4d47b73e8202457cb28f1266982d1159bd5779d86a80c0eab1cd"}, + {file = "httpx-0.26.0.tar.gz", hash = "sha256:451b55c30d5185ea6b23c2c793abf9bb237d2a7dfb901ced6ff69ad37ec1dfaf"}, +] + +[package.dependencies] +anyio = "*" +certifi = "*" +httpcore = ">=1.0.0,<2.0.0" +idna = "*" +sniffio = "*" + +[package.extras] +brotli = ["brotli", "brotlicffi"] +cli = ["click (>=8.0.0,<9.0.0)", "pygments (>=2.0.0,<3.0.0)", "rich (>=10,<14)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (>=1.0.0,<2.0.0)"] + [[package]] name = "idna" version = "3.6" @@ -297,6 +368,68 @@ mongodb = ["pymongo (>4.1,<5)"] redis = ["redis (>3,!=4.5.2,!=4.5.3,<6.0.0)"] rediscluster = ["redis (>=4.2.0,!=4.5.2,!=4.5.3)"] +[[package]] +name = "numpy" +version = "1.24.4" +description = "Fundamental package for array computing in Python" +category = "main" +optional = false +python-versions = ">=3.8" +files = [ + {file = "numpy-1.24.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c0bfb52d2169d58c1cdb8cc1f16989101639b34c7d3ce60ed70b19c63eba0b64"}, + {file = "numpy-1.24.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ed094d4f0c177b1b8e7aa9cba7d6ceed51c0e569a5318ac0ca9a090680a6a1b1"}, + {file = "numpy-1.24.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79fc682a374c4a8ed08b331bef9c5f582585d1048fa6d80bc6c35bc384eee9b4"}, + {file = "numpy-1.24.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ffe43c74893dbf38c2b0a1f5428760a1a9c98285553c89e12d70a96a7f3a4d6"}, + {file = "numpy-1.24.4-cp310-cp310-win32.whl", hash = "sha256:4c21decb6ea94057331e111a5bed9a79d335658c27ce2adb580fb4d54f2ad9bc"}, + {file = "numpy-1.24.4-cp310-cp310-win_amd64.whl", hash = "sha256:b4bea75e47d9586d31e892a7401f76e909712a0fd510f58f5337bea9572c571e"}, + {file = "numpy-1.24.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f136bab9c2cfd8da131132c2cf6cc27331dd6fae65f95f69dcd4ae3c3639c810"}, + {file = "numpy-1.24.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e2926dac25b313635e4d6cf4dc4e51c8c0ebfed60b801c799ffc4c32bf3d1254"}, + {file = "numpy-1.24.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:222e40d0e2548690405b0b3c7b21d1169117391c2e82c378467ef9ab4c8f0da7"}, + {file = "numpy-1.24.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7215847ce88a85ce39baf9e89070cb860c98fdddacbaa6c0da3ffb31b3350bd5"}, + {file = "numpy-1.24.4-cp311-cp311-win32.whl", hash = "sha256:4979217d7de511a8d57f4b4b5b2b965f707768440c17cb70fbf254c4b225238d"}, + {file = "numpy-1.24.4-cp311-cp311-win_amd64.whl", hash = "sha256:b7b1fc9864d7d39e28f41d089bfd6353cb5f27ecd9905348c24187a768c79694"}, + {file = "numpy-1.24.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:1452241c290f3e2a312c137a9999cdbf63f78864d63c79039bda65ee86943f61"}, + {file = "numpy-1.24.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:04640dab83f7c6c85abf9cd729c5b65f1ebd0ccf9de90b270cd61935eef0197f"}, + {file = "numpy-1.24.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5425b114831d1e77e4b5d812b69d11d962e104095a5b9c3b641a218abcc050e"}, + {file = "numpy-1.24.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd80e219fd4c71fc3699fc1dadac5dcf4fd882bfc6f7ec53d30fa197b8ee22dc"}, + {file = "numpy-1.24.4-cp38-cp38-win32.whl", hash = "sha256:4602244f345453db537be5314d3983dbf5834a9701b7723ec28923e2889e0bb2"}, + {file = "numpy-1.24.4-cp38-cp38-win_amd64.whl", hash = "sha256:692f2e0f55794943c5bfff12b3f56f99af76f902fc47487bdfe97856de51a706"}, + {file = "numpy-1.24.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2541312fbf09977f3b3ad449c4e5f4bb55d0dbf79226d7724211acc905049400"}, + {file = "numpy-1.24.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9667575fb6d13c95f1b36aca12c5ee3356bf001b714fc354eb5465ce1609e62f"}, + {file = "numpy-1.24.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3a86ed21e4f87050382c7bc96571755193c4c1392490744ac73d660e8f564a9"}, + {file = "numpy-1.24.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d11efb4dbecbdf22508d55e48d9c8384db795e1b7b51ea735289ff96613ff74d"}, + {file = "numpy-1.24.4-cp39-cp39-win32.whl", hash = "sha256:6620c0acd41dbcb368610bb2f4d83145674040025e5536954782467100aa8835"}, + {file = "numpy-1.24.4-cp39-cp39-win_amd64.whl", hash = "sha256:befe2bf740fd8373cf56149a5c23a0f601e82869598d41f8e188a0e9869926f8"}, + {file = "numpy-1.24.4-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:31f13e25b4e304632a4619d0e0777662c2ffea99fcae2029556b17d8ff958aef"}, + {file = "numpy-1.24.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95f7ac6540e95bc440ad77f56e520da5bf877f87dca58bd095288dce8940532a"}, + {file = "numpy-1.24.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e98f220aa76ca2a977fe435f5b04d7b3470c0a2e6312907b37ba6068f26787f2"}, + {file = "numpy-1.24.4.tar.gz", hash = "sha256:80f5e3a4e498641401868df4208b74581206afbee7cf7b8329daae82676d9463"}, +] + +[[package]] +name = "openai" +version = "1.12.0" +description = "The official Python library for the openai API" +category = "main" +optional = false +python-versions = ">=3.7.1" +files = [ + {file = "openai-1.12.0-py3-none-any.whl", hash = "sha256:a54002c814e05222e413664f651b5916714e4700d041d5cf5724d3ae1a3e3481"}, + {file = "openai-1.12.0.tar.gz", hash = "sha256:99c5d257d09ea6533d689d1cc77caa0ac679fa21efef8893d8b0832a86877f1b"}, +] + +[package.dependencies] +anyio = ">=3.5.0,<5" +distro = ">=1.7.0,<2" +httpx = ">=0.23.0,<1" +pydantic = ">=1.9.0,<3" +sniffio = "*" +tqdm = ">4" +typing-extensions = ">=4.7,<5" + +[package.extras] +datalib = ["numpy (>=1)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"] + [[package]] name = "packaging" version = "23.2" @@ -309,6 +442,20 @@ files = [ {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, ] +[[package]] +name = "pgvector" +version = "0.2.5" +description = "pgvector support for Python" +category = "main" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pgvector-0.2.5-py2.py3-none-any.whl", hash = "sha256:5e5e93ec4d3c45ab1fa388729d56c602f6966296e19deee8878928c6d567e41b"}, +] + +[package.dependencies] +numpy = "*" + [[package]] name = 
"psycopg2-binary" version = "2.9.9" @@ -681,6 +828,27 @@ typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\"" [package.extras] full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart", "pyyaml"] +[[package]] +name = "tqdm" +version = "4.66.2" +description = "Fast, Extensible Progress Meter" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tqdm-4.66.2-py3-none-any.whl", hash = "sha256:1ee4f8a893eb9bef51c6e35730cebf234d5d0b6bd112b0271e10ed7c24a02bd9"}, + {file = "tqdm-4.66.2.tar.gz", hash = "sha256:6cd52cdf0fef0e0f543299cfc96fec90d7b8a7e88745f411ec33eb44d5ed3531"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"] +notebook = ["ipywidgets (>=6)"] +slack = ["slack-sdk"] +telegram = ["requests"] + [[package]] name = "typing-extensions" version = "4.9.0" @@ -812,4 +980,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "115cde0c7dc1de7906b4f17bbdaced3f98a969c33c3b85976a1dc5b0aeece3e2" +content-hash = "90a0874f29e706994647a141418ed4eca5bd621518396d525d27039ad586e4bc" diff --git a/api/pyproject.toml b/api/pyproject.toml index 6777d84..6034c3f 100644 --- a/api/pyproject.toml +++ b/api/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "honcho" -version = "0.0.2" +version = "0.0.3" description = "Honcho Server" authors = ["Plastic Labs "] readme = "README.md" @@ -14,6 +14,8 @@ sqlalchemy = "^2.0.25" psycopg2-binary = "^2.9.9" slowapi = "^0.1.8" fastapi-pagination = "^0.12.14" +pgvector = "^0.2.5" +openai = "^1.12.0" [build-system] diff --git a/api/src/crud.py b/api/src/crud.py index a130892..335ce10 100644 --- a/api/src/crud.py +++ b/api/src/crud.py @@ -1,12 +1,16 @@ -import json import uuid -from typing import Optional +import datetime +from typing import Optional, Sequence + +from openai import OpenAI from sqlalchemy import select, Select from sqlalchemy.orm import Session +from sqlalchemy.exc import IntegrityError from . 
import models, schemas +openai_client = OpenAI() def get_session(db: Session, app_id: str, session_id: uuid.UUID, user_id: Optional[str] = None) -> Optional[models.Session]: stmt = select(models.Session).where(models.Session.app_id == app_id).where(models.Session.id == session_id) @@ -14,8 +18,6 @@ def get_session(db: Session, app_id: str, session_id: uuid.UUID, user_id: Option stmt = stmt.where(models.Session.user_id == user_id) session = db.scalars(stmt).one_or_none() return session - # return db.query(models.Session).filter(models.Session.id == session_id).first() - def get_sessions( db: Session, app_id: str, user_id: str, location_id: str | None = None @@ -32,16 +34,15 @@ def get_sessions( stmt = stmt.where(models.Session.location_id == location_id) return stmt - # return db.scalars(stmt).all() def create_session( - db: Session, app_id: str, user_id: str, session: schemas.SessionCreate + db: Session, session: schemas.SessionCreate, app_id: str, user_id: str ) -> models.Session: honcho_session = models.Session( app_id=app_id, user_id=user_id, location_id=session.location_id, - session_data=json.dumps(session.session_data), + h_metadata=session.metadata, ) db.add(honcho_session) db.commit() @@ -49,16 +50,19 @@ def create_session( return honcho_session -def update_session(db: Session, app_id: str, user_id: str, session_id: uuid.UUID, session_data: dict) -> bool: +def update_session( + db: Session, session: schemas.SessionUpdate, app_id: str, user_id: str, session_id: uuid.UUID +) -> bool: honcho_session = get_session(db, app_id=app_id, session_id=session_id, user_id=user_id) if honcho_session is None: raise ValueError("Session not found or does not belong to user") - honcho_session.session_data = json.dumps(session_data) + if session.metadata is not None: # Need to explicitly be there won't make it empty by default + honcho_session.h_metadata = session.metadata db.commit() db.refresh(honcho_session) + copy = get_session(db, app_id=app_id, session_id=session_id, user_id=user_id) return honcho_session - def delete_session(db: Session, app_id: str, user_id: str, session_id: uuid.UUID) -> bool: stmt = ( select(models.Session) @@ -73,7 +77,6 @@ def delete_session(db: Session, app_id: str, user_id: str, session_id: uuid.UUID db.commit() return True - def create_message( db: Session, message: schemas.MessageCreate, app_id: str, user_id: str, session_id: uuid.UUID ) -> models.Message: @@ -91,13 +94,9 @@ def create_message( db.refresh(honcho_message) return honcho_message - def get_messages( db: Session, app_id: str, user_id: str, session_id: uuid.UUID ) -> Select: - # session = get_session(db, app_id=app_id, session_id=session_id, user_id=user_id) - # if session is None: - # raise ValueError("Session not found or does not belong to user") stmt = ( select(models.Message) .join(models.Session, models.Session.id == models.Message.session_id) @@ -107,19 +106,10 @@ def get_messages( .order_by(models.Message.created_at) ) return stmt - # return db.scalars(stmt).all() - # return ( - # db.query(models.Message) - # .filter(models.Message.session_id == session_id) - # .all() - # ) def get_message( db: Session, app_id: str, user_id: str, session_id: uuid.UUID, message_id: uuid.UUID ) -> Optional[models.Message]: - # session = get_session(db, app_id=app_id, session_id=session_id, user_id=user_id) - # if session is None: - # raise ValueError("Session not found or does not belong to user") stmt = ( select(models.Message) .join(models.Session, models.Session.id == models.Message.session_id) @@ -131,6 +121,9 @@ 
def get_message( ) return db.scalars(stmt).one_or_none() +######################################################## +# metamessage methods +######################################################## def get_metamessages(db: Session, app_id: str, user_id: str, session_id: uuid.UUID, message_id: Optional[uuid.UUID], metamessage_type: Optional[str] = None) -> Select: stmt = ( @@ -185,3 +178,211 @@ def create_metamessage( db.commit() db.refresh(honcho_metamessage) return honcho_metamessage + +######################################################## +# collection methods +######################################################## + +# Should be very similar to the session methods + +def get_collections(db: Session, app_id: str, user_id: str) -> Select: + """Get a distinct list of the names of collections associated with a user""" + stmt = ( + select(models.Collection) + .where(models.Collection.app_id == app_id) + .where(models.Collection.user_id == user_id) + .order_by(models.Collection.created_at) + ) + return stmt + +def get_collection_by_id(db: Session, app_id: str, user_id: str, collection_id: uuid.UUID) -> Optional[models.Collection]: + stmt = ( + select(models.Collection) + .where(models.Collection.app_id == app_id) + .where(models.Collection.user_id == user_id) + .where(models.Collection.id == collection_id) + ) + collection = db.scalars(stmt).one_or_none() + return collection + +def get_collection_by_name(db: Session, app_id: str, user_id: str, name: str) -> Optional[models.Collection]: + stmt = ( + select(models.Collection) + .where(models.Collection.app_id == app_id) + .where(models.Collection.user_id == user_id) + .where(models.Collection.name == name) + ) + collection = db.scalars(stmt).one_or_none() + return collection + +def create_collection( + db: Session, collection: schemas.CollectionCreate, app_id: str, user_id: str +) -> models.Collection: + honcho_collection = models.Collection( + app_id=app_id, + user_id=user_id, + name=collection.name, + ) + try: + db.add(honcho_collection) + db.commit() + except IntegrityError: + db.rollback() + raise ValueError("Collection already exists") + db.refresh(honcho_collection) + return honcho_collection + +def update_collection( + db: Session, collection: schemas.CollectionUpdate, app_id: str, user_id: str, collection_id: uuid.UUID +) -> models.Collection: + honcho_collection = get_collection_by_id(db, app_id=app_id, user_id=user_id, collection_id=collection_id) + if honcho_collection is None: + raise ValueError("collection not found or does not belong to user") + try: + honcho_collection.name = collection.name + db.commit() + except IntegrityError: + db.rollback() + raise ValueError("Collection already exists") + db.refresh(honcho_collection) + return honcho_collection + +def delete_collection( + db: Session, app_id: str, user_id: str, collection_id: uuid.UUID +) -> bool: + """ + Delete a Collection and all documents associated with it. 
Takes advantage of + the orm cascade feature + """ + stmt = ( + select(models.Collection) + .where(models.Collection.id == collection_id) + .where(models.Collection.app_id == app_id) + .where(models.Collection.user_id == user_id) + ) + honcho_collection = db.scalars(stmt).one_or_none() + if honcho_collection is None: + return False + db.delete(honcho_collection) + db.commit() + return True + +######################################################## +# document methods +######################################################## + +# Should be similar to the messages methods outside of query + +def get_documents( + db: Session, app_id: str, user_id: str, collection_id: uuid.UUID +) -> Select: + stmt = ( + select(models.Document) + .join(models.Collection, models.Collection.id == models.Document.collection_id) + .where(models.Collection.app_id == app_id) + .where(models.Collection.user_id == user_id) + .where(models.Document.collection_id == collection_id) + .order_by(models.Document.created_at) + ) + return stmt + +def get_document( + db: Session, app_id: str, user_id: str, collection_id: uuid.UUID, document_id: uuid.UUID +) -> Optional[models.Document]: + stmt = ( + select(models.Document) + .join(models.Collection, models.Collection.id == models.Document.collection_id) + .where(models.Collection.app_id == app_id) + .where(models.Collection.user_id == user_id) + .where(models.Document.collection_id == collection_id) + .where(models.Document.id == document_id) + ) + + document = db.scalars(stmt).one_or_none() + return document + + +def query_documents(db: Session, app_id: str, user_id: str, collection_id: uuid.UUID, query: str, top_k: int = 5) -> Sequence[models.Document]: + response = openai_client.embeddings.create( + input=query, + model="text-embedding-3-small" + ) + embedding_query = response.data[0].embedding + stmt = ( + select(models.Document) + .join(models.Collection, models.Collection.id == models.Document.collection_id) + .where(models.Collection.app_id == app_id) + .where(models.Collection.user_id == user_id) + .where(models.Document.collection_id == collection_id) + .order_by(models.Document.embedding.cosine_distance(embedding_query)) + .limit(top_k) + ) + # if metadata is not None: + # stmt = stmt.where(models.Document.h_metadata.contains(metadata)) + return db.scalars(stmt).all() + +def create_document( + db: Session, document: schemas.DocumentCreate, app_id: str, user_id: str, collection_id: uuid.UUID +) -> models.Document: + """Embed a message as a vector and create a document""" + collection = get_collection_by_id(db, app_id=app_id, collection_id=collection_id, user_id=user_id) + if collection is None: + raise ValueError("Session not found or does not belong to user") + + response = openai_client.embeddings.create( + input=document.content, + model="text-embedding-3-small" + ) + + embedding = response.data[0].embedding + + honcho_document = models.Document( + collection_id=collection_id, + content=document.content, + h_metadata=document.metadata, + embedding=embedding + ) + db.add(honcho_document) + db.commit() + db.refresh(honcho_document) + return honcho_document + +def update_document( + db: Session, document: schemas.DocumentUpdate, app_id: str, user_id: str, collection_id: uuid.UUID, document_id: uuid.UUID +) -> bool: + honcho_document = get_document(db, app_id=app_id, collection_id=collection_id, user_id=user_id, document_id=document_id) + if honcho_document is None: + raise ValueError("Session not found or does not belong to user") + if document.content is not None: + 
honcho_document.content = document.content + response = openai_client.embeddings.create( + input=document.content, + model="text-embedding-3-small" + ) + embedding = response.data[0].embedding + honcho_document.embedding = embedding + honcho_document.created_at = datetime.datetime.now() + + if document.metadata is not None: + honcho_document.h_metadata = document.metadata + db.commit() + db.refresh(honcho_document) + return honcho_document + +def delete_document(db: Session, app_id: str, user_id: str, collection_id: uuid.UUID, document_id: uuid.UUID) -> bool: + stmt = ( + select(models.Document) + .join(models.Collection, models.Collection.id == models.Document.collection_id) + .where(models.Collection.app_id == app_id) + .where(models.Collection.user_id == user_id) + .where(models.Document.collection_id == collection_id) + .where(models.Document.id == document_id) + ) + document = db.scalars(stmt).one_or_none() + if document is None: + return False + db.delete(document) + db.commit() + return True + + diff --git a/api/src/main.py b/api/src/main.py index c3ef0d8..7a16e9c 100644 --- a/api/src/main.py +++ b/api/src/main.py @@ -1,6 +1,6 @@ import uuid from fastapi import Depends, FastAPI, HTTPException, APIRouter, Request -from typing import Optional +from typing import Optional, Sequence from sqlalchemy.orm import Session from slowapi import Limiter, _rate_limit_exceeded_handler from slowapi.middleware import SlowAPIMiddleware @@ -9,7 +9,6 @@ from fastapi_pagination import Page, add_pagination from fastapi_pagination.ext.sqlalchemy import paginate -# import uvicorn from . import crud, models, schemas from .db import SessionLocal, engine @@ -44,7 +43,13 @@ def get_db(): ######################################################## @router.get("/sessions", response_model=Page[schemas.Session]) -def get_sessions(request: Request, app_id: str, user_id: str, location_id: Optional[str] = None, db: Session = Depends(get_db)): +def get_sessions( + request: Request, + app_id: str, + user_id: str, + location_id: Optional[str] = None, + db: Session = Depends(get_db) +): """Get All Sessions for a User Args: @@ -56,11 +61,7 @@ def get_sessions(request: Request, app_id: str, user_id: str, location_id: Optio list[schemas.Session]: List of Session objects """ - # if location_id is not None: - # return paginate(db, crud.get_sessions(db, app_id=app_id, user_id=user_id, location_id=location_id)) - # return crud.get_sessions(db, app_id=app_id, user_id=user_id, location_id=location_id) return paginate(db, crud.get_sessions(db, app_id=app_id, user_id=user_id, location_id=location_id)) - # return crud.get_sessions(db, app_id=app_id, user_id=user_id) @router.post("/sessions", response_model=schemas.Session) @@ -102,10 +103,10 @@ def update_session( schemas.Session: The Session object of the updated Session """ - if session.session_data is None: - raise HTTPException(status_code=400, detail="Session data cannot be empty") # TODO TEST if I can set the metadata to be blank with this + if session.metadata is None: + raise HTTPException(status_code=400, detail="Session metadata cannot be empty") # TODO TEST if I can set the metadata to be blank with this try: - return crud.update_session(db, app_id=app_id, user_id=user_id, session_id=session_id, session_data=session.session_data) + return crud.update_session(db, app_id=app_id, user_id=user_id, session_id=session_id, session=session) except ValueError: raise HTTPException(status_code=404, detail="Session not found") @@ -243,10 +244,8 @@ def get_message( raise 
HTTPException(status_code=404, detail="Session not found") return honcho_message - - ######################################################## -# Metacognition Routes +# metamessage routes ######################################################## @router.post( @@ -276,9 +275,6 @@ def create_metamessage( HTTPException: If the session is not found """ - print("=======================") - print(request) - print("=======================") try: return crud.create_metamessage(db, metamessage=metamessage, app_id=app_id, user_id=user_id, session_id=session_id) except ValueError: @@ -336,5 +332,176 @@ def get_metamessage(request: Request, app_id: str, user_id: str, session_id: uui raise HTTPException(status_code=404, detail="Session not found") return honcho_metamessage +######################################################## +# collection routes +######################################################## + +@router.get("/collections/all", response_model=Page[schemas.Collection]) +def get_collections( + request: Request, + app_id: str, + user_id: str, + db: Session = Depends(get_db), +): + return paginate(db, crud.get_collections(db, app_id=app_id, user_id=user_id)) + +@router.get("/collections/id/{collection_id}", response_model=schemas.Collection) +def get_collection_by_id( + request: Request, + app_id: str, + user_id: str, + collection_id: uuid.UUID, + db: Session = Depends(get_db) +) -> schemas.Collection: + honcho_collection = crud.get_collection_by_id(db, app_id=app_id, user_id=user_id, collection_id=collection_id) + if honcho_collection is None: + raise HTTPException(status_code=404, detail="collection not found or does not belong to user") + return honcho_collection + +@router.get("/collections/name/{name}", response_model=schemas.Collection) +def get_collection_by_name( + request: Request, + app_id: str, + user_id: str, + name: str, + db: Session = Depends(get_db) +) -> schemas.Collection: + honcho_collection = crud.get_collection_by_name(db, app_id=app_id, user_id=user_id, name=name) + if honcho_collection is None: + raise HTTPException(status_code=404, detail="collection not found or does not belong to user") + return honcho_collection + +@router.post("/collections", response_model=schemas.Collection) +def create_collection( + request: Request, + app_id: str, + user_id: str, + collection: schemas.CollectionCreate, + db: Session = Depends(get_db) +): + try: + return crud.create_collection(db, collection=collection, app_id=app_id, user_id=user_id) + except ValueError: + raise HTTPException(status_code=406, detail="Error invalid collection configuration - name may already exist") + +@router.put("/collections/{collection_id}", response_model=schemas.Collection) +def update_collection( + request: Request, + app_id: str, + user_id: str, + collection_id: uuid.UUID, + collection: schemas.CollectionUpdate, + db: Session = Depends(get_db) +): + if collection.name is None: + raise HTTPException(status_code=400, detail="invalid request - name cannot be None") + try: + honcho_collection = crud.update_collection(db, collection=collection, app_id=app_id, user_id=user_id, collection_id=collection_id) + except ValueError: + raise HTTPException(status_code=406, detail="Error invalid collection configuration - name may already exist") + return honcho_collection + +@router.delete("/collections/{collection_id}") +def delete_collection( + request: Request, + app_id: str, + user_id: str, + collection_id: uuid.UUID, + db: Session = Depends(get_db) +): + response = crud.delete_collection(db, app_id=app_id, 
user_id=user_id, collection_id=collection_id) + if response: + return {"message": "Collection deleted successfully"} + else: + raise HTTPException(status_code=404, detail="collection not found or does not belong to user") + +######################################################## +# Document routes +######################################################## + +@router.get("/collections/{collection_id}/documents", response_model=Page[schemas.Document]) +def get_documents( + request: Request, + app_id: str, + user_id: str, + collection_id: uuid.UUID, + db: Session = Depends(get_db) +): + try: + return paginate(db, crud.get_documents(db, app_id=app_id, user_id=user_id, collection_id=collection_id)) + except ValueError: # TODO can probably remove this exception ok to return empty here + raise HTTPException(status_code=404, detail="collection not found or does not belong to user") + +router.get("/collections/{collection_id}/documents/{document_id}", response_model=schemas.Document) +def get_document( + request: Request, + app_id: str, + user_id: str, + collection_id: uuid.UUID, + document_id: uuid.UUID, + db: Session = Depends(get_db) +): + honcho_document = crud.get_document(db, app_id=app_id, user_id=user_id, collection_id=collection_id, document_id=document_id) + if honcho_document is None: + raise HTTPException(status_code=404, detail="document not found or does not belong to user") + return honcho_document + + +@router.get("/collections/{collection_id}/query", response_model=Sequence[schemas.Document]) +def query_documents( + request: Request, + app_id: str, + user_id: str, + collection_id: uuid.UUID, + query: str, + top_k: int = 5, + db: Session = Depends(get_db) +): + if top_k is not None and top_k > 50: + top_k = 50 # TODO see if we need to paginate this + return crud.query_documents(db=db, app_id=app_id, user_id=user_id, collection_id=collection_id, query=query, top_k=top_k) + +@router.post("/collections/{collection_id}/documents", response_model=schemas.Document) +def create_document( + request: Request, + app_id: str, + user_id: str, + collection_id: uuid.UUID, + document: schemas.DocumentCreate, + db: Session = Depends(get_db) +): + try: + return crud.create_document(db, document=document, app_id=app_id, user_id=user_id, collection_id=collection_id) + except ValueError: + raise HTTPException(status_code=404, detail="collection not found or does not belong to user") + +@router.put("/collections/{collection_id}/documents/{document_id}", response_model=schemas.Document) +def update_document( + request: Request, + app_id: str, + user_id: str, + collection_id: uuid.UUID, + document_id: uuid.UUID, + document: schemas.DocumentUpdate, + db: Session = Depends(get_db) +): + if document.content is None and document.metadata is None: + raise HTTPException(status_code=400, detail="content and metadata cannot both be None") + return crud.update_document(db, document=document, app_id=app_id, user_id=user_id, collection_id=collection_id, document_id=document_id) + +@router.delete("/collections/{collection_id}/documents/{document_id}") +def delete_document( + request: Request, + app_id: str, + user_id: str, + collection_id: uuid.UUID, + document_id: uuid.UUID, + db: Session = Depends(get_db) +): + response = crud.delete_document(db, app_id=app_id, user_id=user_id, collection_id=collection_id, document_id=document_id) + if response: + return {"message": "Document deleted successfully"} + else: + raise HTTPException(status_code=404, detail="document not found or does not belong to user") 
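The block below is an illustrative sketch, not part of this patch, showing how the new collection and document routes can be exercised directly over HTTP once the API is running. The base URL, app and user identifiers are placeholders, and the path prefix simply mirrors the /apps/{app_id}/users/{user_id} layout the SDK builds its requests against.

import httpx

# placeholder host, app_id and user_id for illustration only
base = "http://localhost:8000/apps/example-app/users/example-user"

with httpx.Client() as http:
    # create a collection, then attach a document to it
    collection = http.post(f"{base}/collections", json={"name": "facts"}).json()
    http.post(
        f"{base}/collections/{collection['id']}/documents",
        json={"content": "The user owns a dog", "metadata": {}},
    )
    # cosine-distance query over the collection; the route caps top_k at 50
    matches = http.get(
        f"{base}/collections/{collection['id']}/query",
        params={"query": "does the user own pets", "top_k": 2},
    ).json()
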
app.include_router(router) diff --git a/api/src/models.py b/api/src/models.py index 4371229..ea4b86b 100644 --- a/api/src/models.py +++ b/api/src/models.py @@ -1,10 +1,20 @@ -from sqlalchemy import Boolean, Column, ForeignKey, Integer, String, DateTime, Uuid -import uuid import datetime -from sqlalchemy.orm import relationship, Mapped, mapped_column +import os +import uuid + +from dotenv import load_dotenv +from pgvector.sqlalchemy import Vector +from sqlalchemy import JSON, Column, ForeignKey, String, UniqueConstraint, Uuid +from sqlalchemy.dialects.postgresql import JSONB +from sqlalchemy.orm import Mapped, mapped_column, relationship from .db import Base +load_dotenv() + +DATABASE_TYPE = os.getenv("DATABASE_TYPE", 'postgres') + +ColumnType = JSONB if DATABASE_TYPE == 'postgres' else JSON class Session(Base): __tablename__ = "sessions" @@ -13,13 +23,12 @@ class Session(Base): user_id: Mapped[str] = mapped_column(String(512), index=True) location_id: Mapped[str] = mapped_column(String(512), index=True) is_active: Mapped[bool] = mapped_column(default=True) - session_data: Mapped[str] + h_metadata: Mapped[dict] = mapped_column("metadata", ColumnType, default={}) created_at: Mapped[datetime.datetime] = mapped_column(default=datetime.datetime.utcnow) messages = relationship("Message", back_populates="session") def __repr__(self) -> str: - return f"Session(id={self.id}, app_id={self.app_id}, user_id={self.user_id}, location_id={self.location_id}, is_active={self.is_active}, created_at={self.created_at})" - + return f"Session(id={self.id}, app_id={self.app_id}, user_id={self.user_id}, location_id={self.location_id}, is_active={self.is_active}, created_at={self.created_at}, h_metadata={self.h_metadata})" class Message(Base): __tablename__ = "messages" @@ -34,7 +43,6 @@ class Message(Base): def __repr__(self) -> str: return f"Message(id={self.id}, session_id={self.session_id}, is_user={self.is_user}, content={self.content[10:]})" - class Metamessage(Base): __tablename__ = "metamessages" id: Mapped[uuid.UUID] = mapped_column(primary_key=True, index=True, default=uuid.uuid4) @@ -47,3 +55,27 @@ class Metamessage(Base): def __repr__(self) -> str: return f"Metamessages(id={self.id}, message_id={self.message_id}, metamessage_type={self.metamessage_type}, content={self.content[10:]})" + +class Collection(Base): + __tablename__ = "collections" + id: Mapped[uuid.UUID] = mapped_column(primary_key=True, index=True, default=uuid.uuid4) + name: Mapped[str] = mapped_column(String(512), index=True) + app_id: Mapped[str] = mapped_column(String(512), index=True) + user_id: Mapped[str] = mapped_column(String(512), index=True) + created_at: Mapped[datetime.datetime] = mapped_column(default=datetime.datetime.utcnow) + documents = relationship("Document", back_populates="collection", cascade="all, delete, delete-orphan") + + __table_args__ = ( + UniqueConstraint('name', 'app_id', 'user_id', name="unique_name_app_user"), + ) + +class Document(Base): + __tablename__ = "documents" + id: Mapped[uuid.UUID] = mapped_column(primary_key=True, index=True, default=uuid.uuid4) + h_metadata: Mapped[dict] = mapped_column("metadata", ColumnType, default={}) + content: Mapped[str] = mapped_column(String(65535)) + embedding = mapped_column(Vector(1536)) + created_at: Mapped[datetime.datetime] = mapped_column(default=datetime.datetime.utcnow) + + collection_id = Column(Uuid, ForeignKey("collections.id")) + collection = relationship("Collection", back_populates="documents") diff --git a/api/src/schemas.py b/api/src/schemas.py index 
b6bff90..fe164aa 100644 --- a/api/src/schemas.py +++ b/api/src/schemas.py @@ -1,4 +1,4 @@ -from pydantic import BaseModel +from pydantic import BaseModel, validator import datetime import uuid @@ -18,8 +18,7 @@ class Message(MessageBase): created_at: datetime.datetime class Config: - orm_mode = True - + from_attributes = True class SessionBase(BaseModel): pass @@ -27,12 +26,10 @@ class SessionBase(BaseModel): class SessionCreate(SessionBase): location_id: str - session_data: dict | None = None - - + metadata: dict | None = {} + class SessionUpdate(SessionBase): - session_data: dict | None = None - + metadata: dict | None = None class Session(SessionBase): id: uuid.UUID @@ -41,11 +38,21 @@ class Session(SessionBase): user_id: str location_id: str app_id: str - session_data: str + h_metadata: dict + metadata: dict created_at: datetime.datetime + @validator('metadata', pre=True, allow_reuse=True) + def fetch_h_metadata(cls, value, values): + if 'h_metadata' in values: + return values['h_metadata'] + return {} + class Config: - orm_mode = True + from_attributes = True + schema_extra = { + "exclude": ["h_metadata"] + } class MetamessageBase(BaseModel): @@ -64,3 +71,53 @@ class Metamessage(MetamessageBase): class Config: orm_mode = True + +class CollectionBase(BaseModel): + pass + +class CollectionCreate(CollectionBase): + name: str + +class CollectionUpdate(CollectionBase): + name: str + +class Collection(CollectionBase): + id: uuid.UUID + name: str + app_id: str + user_id: str + created_at: datetime.datetime + + class Config: + orm_mode = True + +class DocumentBase(BaseModel): + content: str + +class DocumentCreate(DocumentBase): + metadata: dict | None = {} + +class DocumentUpdate(DocumentBase): + metadata: dict | None = None + content: str | None = None + +class Document(DocumentBase): + id: uuid.UUID + content: str + h_metadata: dict + metadata: dict + created_at: datetime.datetime + collection_id: uuid.UUID + + @validator('metadata', pre=True, allow_reuse=True) + def fetch_h_metadata(cls, value, values): + if 'h_metadata' in values: + return values['h_metadata'] + return {} + + class Config: + from_attributes = True + schema_extra = { + "exclude": ["h_metadata"] + } + diff --git a/sdk/CHANGELOG.md b/sdk/CHANGELOG.md index 1d8ecd5..54965e4 100644 --- a/sdk/CHANGELOG.md +++ b/sdk/CHANGELOG.md @@ -6,6 +6,21 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/) and this project adheres to [Semantic Versioning](http://semver.org/). 
+## [0.0.3] — 2024-02-15 + +### Added + +* Collections table to reference a collection of embedding documents +* Documents table to hold vector embeddings for RAG workflows +* Local scripts for running a postgres database with pgvector installed +* OpenAI Dependency for embedding models +* PGvector dependency for vector db support + +### Changed + +* session_data is now metadata +* session_data is a JSON field used python `dict` for compatability + ## [0.0.2] — 2024-02-08 ### Added diff --git a/sdk/honcho/__init__.py b/sdk/honcho/__init__.py index e87b439..eda9003 100644 --- a/sdk/honcho/__init__.py +++ b/sdk/honcho/__init__.py @@ -1,4 +1,4 @@ -from .client import AsyncClient, AsyncSession, AsyncGetSessionPage, AsyncGetMessagePage, AsyncGetMetamessagePage -from .sync_client import Client, Session, GetSessionPage, GetMessagePage, GetMetamessagePage -from .schemas import Message, Metamessage +from .client import AsyncClient, AsyncSession, AsyncCollection, AsyncGetSessionPage, AsyncGetMessagePage, AsyncGetMetamessagePage, AsyncGetDocumentPage, AsyncGetCollectionPage +from .sync_client import Client, Session, Collection, GetSessionPage, GetMessagePage, GetMetamessagePage, GetDocumentPage, GetCollectionPage +from .schemas import Message, Metamessage, Document from .cache import LRUCache diff --git a/sdk/honcho/client.py b/sdk/honcho/client.py index c5a1373..507bfba 100644 --- a/sdk/honcho/client.py +++ b/sdk/honcho/client.py @@ -1,8 +1,8 @@ -import json import uuid -from typing import Dict, Optional +import datetime +from typing import Dict, Optional, List import httpx -from .schemas import Message, Metamessage +from .schemas import Message, Metamessage, Document class AsyncGetPage: """Base class for receiving Paginated API results""" @@ -44,7 +44,8 @@ def __init__(self, client, options: Dict, response: Dict): user_id=session["user_id"], location_id=session["location_id"], is_active=session["is_active"], - session_data=session["session_data"], + metadata=session["metadata"], + created_at=session["created_at"], ) for session in response["items"] ] @@ -97,13 +98,13 @@ def __init__(self, session, options: Dict, response: Dict) -> None: Args: session (AsyncSession): Session the returned messages are associated with - options (Dict): Options for the request used mainly for next() to filter queries. The two parameters available are message_id and metamessage_type which are both required + options (Dict): Options for the request used mainly for next() to filter queries. 
The two parameters available are message_id and metamessage_type which are both optional response (Dict): Response from API with pagination information """ super().__init__(response) self.session = session - self.message_id = options["message_id"] - self.metamessage_type = options["metamessage_type"] + self.message_id = options["message_id"] if "message_id" in options else None + self.metamessage_type = options["metamessage_type"] if "metamessage_type" in options else None self.items = [ Metamessage( id=metamessage["id"], @@ -124,7 +125,70 @@ async def next(self): return None return await self.session.get_metamessages(metamessage_type=self.metamessage_type, message=self.message_id, page=(self.page + 1), page_size=self.page_size) +class AsyncGetDocumentPage(AsyncGetPage): + """Paginated results for Get Document requests""" + def __init__(self, collection, response: Dict) -> None: + """Constructor for Page Result from Document Get Request + + Args: + collection (AsyncCollection): Collection the returned documents are associated with + response (Dict): Response from API with pagination information + """ + super().__init__(response) + self.collection = collection + self.items = [ + Document( + id=document["id"], + collection_id=collection.id, + content=document["content"], + metadata=document["metadata"], + created_at=document["created_at"], + ) + for document in response["items"] + ] + async def next(self): + """Get the next page of results + Returns: + AsyncGetDocumentPage | None: Next Page of Results or None if there are no more sessions to retreive from a query + """ + if self.page >= self.pages: + return None + return await self.collection.get_documents(page=self.page + 1, page_size=self.page_size) + +class AsyncGetCollectionPage(AsyncGetPage): + """Paginated results for Get Collection requests""" + + def __init__(self, client, options: Dict, response: Dict): + """Constructor for page result from Get Collection Request + + Args: + client (Async Client): Honcho Client + options (Dict): Options for the request used mainly for next() to filter queries. 
The only parameter available is user_id which is required + response (Dict): Response from API with pagination information + """ + super().__init__(response) + self.client = client + self.user_id = options["user_id"] + self.items = [ + AsyncCollection( + client=client, + id=collection["id"], + user_id=collection["user_id"], + name=collection["name"], + created_at=collection["created_at"], + ) + for collection in response["items"] + ] + + async def next(self): + """Get the next page of results + Returns: + AsyncGetCollectionPage | None: Next Page of Results or None if there are no more sessions to retreive from a query + """ + if self.page >= self.pages: + return None + return await self.client.get_collections(user_id=self.user_id, page=self.page + 1, page_size=self.page_size) class AsyncClient: """Honcho API Client Object""" @@ -161,7 +225,8 @@ async def get_session(self, user_id: str, session_id: uuid.UUID): user_id=data["user_id"], location_id=data["location_id"], is_active=data["is_active"], - session_data=data["session_data"], + metadata=data["metadata"], + created_at=data["created_at"] ) async def get_sessions(self, user_id: str, location_id: Optional[str] = None, page: int = 1, page_size: int = 50): @@ -215,20 +280,20 @@ async def get_sessions_generator(self, user_id: str, location_id: Optional[str] get_session_response = new_sessions async def create_session( - self, user_id: str, location_id: str = "default", session_data: Dict = {} + self, user_id: str, location_id: str = "default", metadata: Dict = {} ): """Create a session for a user Args: user_id (str): The User ID representing the user, managed by the user location_id (str, optional): Optional Location ID representing the location of a session - session_data (Dict, optional): Optional session metadata + metadata (Dict, optional): Optional session metadata Returns: AsyncSession: The Session object of the new Session """ - data = {"location_id": location_id, "session_data": session_data} + data = {"location_id": location_id, "metadata": metadata} url = f"{self.common_prefix}/users/{user_id}/sessions" response = await self.client.post(url, json=data) response.raise_for_status() @@ -238,10 +303,103 @@ async def create_session( id=data["id"], user_id=user_id, location_id=location_id, - session_data=session_data, + metadata=metadata, is_active=data["is_active"], + created_at=data["created_at"], ) + async def create_collection( + self, user_id: str, name: str, + ): + """Create a collection for a user + + Args: + user_id (str): The User ID representing the user, managed by the user + name (str): unique name for the collection for the user + + Returns: + AsyncCollection: The Collection object of the new Collection + + """ + data = {"name": name} + url = f"{self.common_prefix}/users/{user_id}/collections" + response = await self.client.post(url, json=data) + response.raise_for_status() + data = response.json() + return AsyncCollection( + self, + id=data["id"], + user_id=user_id, + name=name, + created_at=data["created_at"], + ) + + async def get_collection(self, user_id: str, name: str): + """Get a specific collection for a user by name + + Args: + user_id (str): The User ID representing the user, managed by the user + name (str): The name of the collection to get + + Returns: + AsyncCollection: The Session object of the requested Session + + """ + url = f"{self.common_prefix}/users/{user_id}/collections/name/{name}" + response = await self.client.get(url) + response.raise_for_status() + data = response.json() + return AsyncCollection( 
+ client=self, + id=data["id"], + user_id=data["user_id"], + name=data["name"], + created_at=data["created_at"] + ) + + async def get_collections(self, user_id: str, page: int = 1, page_size: int = 50): + """Return collections associated with a user paginated + + Args: + user_id (str): The User ID representing the user to get the collection for + page (int, optional): The page of results to return + page_size (int, optional): The number of results to return + + Returns: + AsyncGetCollectionPage: Page or results for get_collections query + + """ + url = f"{self.common_prefix}/users/{user_id}/collections/all?page={page}&size={page_size}" + response = await self.client.get(url) + response.raise_for_status() + data = response.json() + options = {"user_id": user_id} + return AsyncGetCollectionPage(self, options, data) + + async def get_collections_generator(self, user_id: str): + """Shortcut Generator for get_sessions. Generator to iterate through all sessions for a user in an app + + Args: + user_id (str): The User ID representing the user, managed by the user + + Yields: + AsyncCollection: The Session object of the requested Session + + """ + page = 1 + page_size = 50 + get_collection_response = await self.get_collections(user_id, page, page_size) + while True: + # get_collection_response = self.get_collections(user_id, location_id, page, page_size) + for collection in get_collection_response.items: + yield collection + + new_collections = await get_collection_response.next() + if not new_collections: + break + + get_collection_response = new_collections + class AsyncSession: """Represents a single session for a user in an app""" @@ -252,20 +410,20 @@ def __init__( id: uuid.UUID, user_id: str, location_id: str, - session_data: dict | str, + metadata: dict, is_active: bool, + created_at: datetime.datetime ): """Constructor for Session""" - self.base_url = client.base_url - self.client = client.client - self.app_id = client.app_id - self.id = id - self.user_id = user_id - self.location_id = location_id - self.session_data = ( - session_data if isinstance(session_data, dict) else json.loads(session_data) - ) - self._is_active = is_active + self.base_url: str = client.base_url + self.client: httpx.AsyncClient = client.client + self.app_id: str = client.app_id + self.id: uuid.UUID = id + self.user_id: str = user_id + self.location_id: str = location_id + self.metadata: dict = metadata + self._is_active: bool = is_active + self.created_at: datetime.datetime = created_at @property def common_prefix(self): @@ -274,7 +432,7 @@ def common_prefix(self): def __str__(self): """String representation of Session""" - return f"AsyncSession(id={self.id}, app_id={self.app_id}, user_id={self.user_id}, location_id={self.location_id}, session_data={self.session_data}, is_active={self.is_active})" + return f"AsyncSession(id={self.id}, app_id={self.app_id}, user_id={self.user_id}, location_id={self.location_id}, metadata={self.metadata}, is_active={self.is_active})" @property def is_active(self): @@ -360,7 +518,7 @@ async def create_metamessage(self, message: Message, metamessage_type: str, cont Args: message (Message): A message to associate the metamessage with - metamessage_type (str): The type of the metamessage arbitrary itentifier + metamessage_type (str): The type of the metamessage arbitrary identifier content (str): The content of the metamessage Returns: @@ -444,21 +602,21 @@ async def get_metamessages_generator(self, metamessage_type: Optional[str] = Non get_metamessages_page = new_messages - async def 
update(self, session_data: Dict): - """Update the session_data of a session + async def update(self, metadata: Dict): + """Update the metadata of a session Args: - session_data (Dict): The Session object containing any new session_data + metadata (Dict): The Session object containing any new metadata Returns: boolean: Whether the session was successfully updated """ - info = {"session_data": session_data} + info = {"metadata": metadata} url = f"{self.common_prefix}/users/{self.user_id}/sessions/{self.id}" response = await self.client.put(url, json=info) success = response.status_code < 400 - self.session_data = session_data + self.metadata = metadata return success async def close(self): @@ -468,3 +626,202 @@ async def close(self): response.raise_for_status() self._is_active = False +class AsyncCollection: + """Represents a single collection for a user in an app""" + + def __init__( + self, + client: AsyncClient, + id: uuid.UUID, + user_id: str, + name: str, + created_at: datetime.datetime, + ): + """Constructor for Collection""" + self.base_url: str = client.base_url + self.client: httpx.AsyncClient = client.client + self.app_id: str = client.app_id + self.id: uuid.UUID = id + self.user_id: str = user_id + self.name: str = name + self.created_at: datetime.datetime = created_at + + @property + def common_prefix(self): + """Shortcut for common API prefix. made a property to prevent tampering""" + return f"{self.base_url}/apps/{self.app_id}" + + def __str__(self): + """String representation of Collection""" + return f"AsyncCollection(id={self.id}, app_id={self.app_id}, user_id={self.user_id}, name={self.name}, created_at={self.created_at})" + + async def update(self, name: str): + """Update the name of the collection + + Args: + name (str): The new name of the document + + Returns: + boolean: Whether the session was successfully updated + """ + info = {"name": name} + url = f"{self.common_prefix}/users/{self.user_id}/collections/{self.id}" + response = await self.client.put(url, json=info) + response.raise_for_status() + success = response.status_code < 400 + self.name = name + return success + + async def delete(self): + """Delete a collection and all associated documents""" + url = f"{self.common_prefix}/users/{self.user_id}/collections/{self.id}" + response = await self.client.delete(url) + response.raise_for_status() + + async def create_document(self, content: str, metadata: Dict = {}): + """Adds a document to the collection + + Args: + content (str): The content of the document + metadata (Dict): The metadata of the document + + Returns: + Document: The Document object of the added document + + """ + data = {"metadata": metadata, "content": content} + url = f"{self.common_prefix}/users/{self.user_id}/collections/{self.id}/documents" + response = await self.client.post(url, json=data) + response.raise_for_status() + data = response.json() + return Document( + collection_id=self.id, + id=data["id"], + metadata=metadata, + content=content, + created_at=data["created_at"] + ) + + async def get_document(self, document_id: uuid.UUID) -> Document: + """Get a specific document for a collection based on ID + + Args: + document_id (uuid.UUID): The ID of the Document to retrieve + + Returns: + Document: The Document object + + """ + url = f"{self.common_prefix}/users/{self.user_id}/collections/{self.id}/documents/{document_id}" + response = await self.client.get(url) + response.raise_for_status() + data = response.json() + return Document( + collection_id=self.id, + id=data["id"], + 
metadata=data["metadata"], + content=data["content"], + created_at=data["created_at"] + ) + + async def get_documents(self, page: int = 1, page_size: int = 50) -> AsyncGetDocumentPage: + """Get all documents for a collection + + Args: + page (int, optional): The page of results to return + page_size (int, optional): The number of results to return per page + + Returns: + AsyncGetDocumentPage: Page of Document objects + + """ + url = f"{self.common_prefix}/users/{self.user_id}/collections/{self.id}/documents?page={page}&size={page_size}" + response = await self.client.get(url) + response.raise_for_status() + data = response.json() + return AsyncGetDocumentPage(self, data) + + async def get_documents_generator(self): + """Shortcut Generator for get_documents. Generator to iterate through all documents for a collection in an app + + Yields: + Document: The Document object of the next Document + + """ + page = 1 + page_size = 50 + get_documents_page= await self.get_documents(page, page_size) + while True: + for document in get_documents_page.items: + yield document + + new_documents = await get_documents_page.next() + if not new_documents: + break + + get_documents_page = new_documents + + async def query(self, query: str, top_k: int = 5) -> List[Document]: + """query the documents by cosine distance + Args: + query (str): The query string to compare other embeddings too + top_k (int, optional): The number of results to return. Defaults to 5 max 50 + + Returns: + List[Document]: The response from the query with matching documents + """ + url = f"{self.common_prefix}/users/{self.user_id}/collections/{self.id}/query?query={query}&top_k={top_k}" + response = await self.client.get(url) + response.raise_for_status() + data = [ + Document( + collection_id=self.id, + content=document["content"], + id=document["id"], + created_at=document["created_at"], + metadata=document["metadata"] + ) + for document in response.json() + ] + return data + + async def update_document(self, document: Document, content: Optional[str], metadata: Optional[Dict]) -> Document: + """Update a document in the collection + + Args: + document (Document): The Document to update + metadata (Dict): The metadata of the document + content (str): The content of the document + + Returns: + Document: The newly updated Document + """ + if metadata is None and content is None: + raise ValueError("metadata and content cannot both be None") + data = {"metadata": metadata, "content": content} + url = f"{self.common_prefix}/users/{self.user_id}/collections/{self.id}/documents/{document.id}" + response = await self.client.put(url, json=data) + response.raise_for_status() + data = response.json() + return Document( + data["id"], + metadata=data["metadata"], + content=data["content"], + created_at=data["created_at"], + collection_id=data["collection_id"], + ) + + async def delete_document(self, document: Document) -> bool: + """Delete a document from the collection + + Args: + document (Document): The Document to delete + + Returns: + boolean: Whether the document was successfully deleted + """ + url = f"{self.common_prefix}/users/{self.user_id}/collections/{self.id}/documents/{document.id}" + response = await self.client.delete(url) + response.raise_for_status() + success = response.status_code < 400 + return success diff --git a/sdk/honcho/schemas.py b/sdk/honcho/schemas.py index d2970c9..ee743cb 100644 --- a/sdk/honcho/schemas.py +++ b/sdk/honcho/schemas.py @@ -24,3 +24,15 @@ def __init__(self, id: uuid.UUID, message_id: uuid.UUID, 
metamessage_type: str, def __str__(self): return f"Metamessage(id={self.id}, message_id={self.message_id}, metamessage_type={self.metamessage_type}, content={self.content})" + +class Document: + def __init__(self, id: uuid.UUID, collection_id: uuid.UUID, content: str, metadata: dict, created_at: datetime.datetime): + """Constructor for Document""" + self.collection_id = collection_id + self.id = id + self.content = content + self.metadata = metadata + self.created_at = created_at + + def __str__(self) -> str: + return f"Document(id={self.id}, metadata={self.metadata}, content={self.content}, created_at={self.created_at})" diff --git a/sdk/honcho/sync_client.py b/sdk/honcho/sync_client.py index 2adc4e4..b68b95d 100644 --- a/sdk/honcho/sync_client.py +++ b/sdk/honcho/sync_client.py @@ -1,8 +1,8 @@ -import json import uuid -from typing import Dict, Optional +import datetime +from typing import Dict, Optional, List import httpx -from .schemas import Message, Metamessage +from .schemas import Message, Metamessage, Document class GetPage: """Base class for receiving Paginated API results""" @@ -44,7 +44,8 @@ def __init__(self, client, options: Dict, response: Dict): user_id=session["user_id"], location_id=session["location_id"], is_active=session["is_active"], - session_data=session["session_data"], + metadata=session["metadata"], + created_at=session["created_at"], ) for session in response["items"] ] @@ -124,7 +125,70 @@ def next(self): return None return self.session.get_metamessages(metamessage_type=self.metamessage_type, message=self.message_id, page=(self.page + 1), page_size=self.page_size) +class GetDocumentPage(GetPage): + """Paginated results for Get Document requests""" + def __init__(self, collection, response: Dict) -> None: + """Constructor for Page Result from Document Get Request + + Args: + collection (Collection): Collection the returned documents are associated with + response (Dict): Response from API with pagination information + """ + super().__init__(response) + self.collection = collection + self.items = [ + Document( + id=document["id"], + collection_id=collection.id, + content=document["content"], + metadata=document["metadata"], + created_at=document["created_at"], + ) + for document in response["items"] + ] + def next(self): + """Get the next page of results + Returns: + GetSessionPage | None: Next Page of Results or None if there are no more sessions to retreive from a query + """ + if self.page >= self.pages: + return None + return self.collection.get_documents(page=self.page + 1, page_size=self.page_size) + +class GetCollectionPage(GetPage): + """Paginated results for Get Collection requests""" + + def __init__(self, client, options: Dict, response: Dict): + """Constructor for page result from Get Collection Request + + Args: + client ( Client): Honcho Client + options (Dict): Options for the request used mainly for next() to filter queries. 
The only parameter available is user_id which is required + response (Dict): Response from API with pagination information + """ + super().__init__(response) + self.client = client + self.user_id = options["user_id"] + self.items = [ + Collection( + client=client, + id=collection["id"], + user_id=collection["user_id"], + name=collection["name"], + created_at=collection["created_at"], + ) + for collection in response["items"] + ] + + def next(self): + """Get the next page of results + Returns: + GetSessionPage | None: Next Page of Results or None if there are no more sessions to retreive from a query + """ + if self.page >= self.pages: + return None + return self.client.get_collections(user_id=self.user_id, page=self.page + 1, page_size=self.page_size) class Client: """Honcho API Client Object""" @@ -161,7 +225,8 @@ def get_session(self, user_id: str, session_id: uuid.UUID): user_id=data["user_id"], location_id=data["location_id"], is_active=data["is_active"], - session_data=data["session_data"], + metadata=data["metadata"], + created_at=data["created_at"] ) def get_sessions(self, user_id: str, location_id: Optional[str] = None, page: int = 1, page_size: int = 50): @@ -215,20 +280,20 @@ def get_sessions_generator(self, user_id: str, location_id: Optional[str] = None get_session_response = new_sessions def create_session( - self, user_id: str, location_id: str = "default", session_data: Dict = {} + self, user_id: str, location_id: str = "default", metadata: Dict = {} ): """Create a session for a user Args: user_id (str): The User ID representing the user, managed by the user location_id (str, optional): Optional Location ID representing the location of a session - session_data (Dict, optional): Optional session metadata + metadata (Dict, optional): Optional session metadata Returns: Session: The Session object of the new Session """ - data = {"location_id": location_id, "session_data": session_data} + data = {"location_id": location_id, "metadata": metadata} url = f"{self.common_prefix}/users/{user_id}/sessions" response = self.client.post(url, json=data) response.raise_for_status() @@ -238,10 +303,103 @@ def create_session( id=data["id"], user_id=user_id, location_id=location_id, - session_data=session_data, + metadata=metadata, is_active=data["is_active"], + created_at=data["created_at"], + ) + + def create_collection( + self, user_id, name: str, + ): + """Create a collection for a user + + Args: + user_id (str): The User ID representing the user, managed by the user + name (str): unique name for the collection for the user + + Returns: + Collection: The Collection object of the new Collection + + """ + data = {"name": name} + url = f"{self.common_prefix}/users/{user_id}/collections" + response = self.client.post(url, json=data) + response.raise_for_status() + data = response.json() + return Collection( + self, + id=data["id"], + user_id=user_id, + name=name, + created_at=data["created_at"], ) + def get_collection(self, user_id: str, name: str): + """Get a specific collection for a user by ID + + Args: + user_id (str): The User ID representing the user, managed by the user + name (str): The name of the collection to get + + Returns: + Collection: The Session object of the requested Session + + """ + url = f"{self.common_prefix}/users/{user_id}/collections/name/{name}" + response = self.client.get(url) + response.raise_for_status() + data = response.json() + return Collection( + client=self, + id=data["id"], + user_id=data["user_id"], + name=data["name"], + created_at=data["created_at"] + ) 
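As a usage note (illustrative only, assuming a locally running Honcho server with pgvector and OpenAI embedding credentials configured, and placeholder app/user names), the synchronous collection workflow introduced above composes as follows:

from honcho import Client as Honcho

client = Honcho("example-app", "http://localhost:8000")
collection = client.create_collection("example-user", "facts")

# documents are embedded server-side when they are created
collection.create_document(content="The user owns a dog", metadata={})
collection.create_document(content="The user loves puppies", metadata={})

# retrieve the closest documents by cosine distance
for doc in collection.query(query="does the user own pets", top_k=2):
    print(doc.content, doc.metadata)
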
+ + def get_collections(self, user_id: str, page: int = 1, page_size: int = 50): + """Return collections associated with a user paginated + + Args: + user_id (str): The User ID representing the user + page (int, optional): The page of results to return + page_size (int, optional): The number of results to return + + Returns: + GetCollectionPage: Page or results for get_collections query + + """ + url = f"{self.common_prefix}/users/{user_id}/collections/all?page={page}&size={page_size}" + response = self.client.get(url) + response.raise_for_status() + data = response.json() + options = {"user_id": user_id} + return GetCollectionPage(self, options, data) + + def get_collections_generator(self, user_id: str): + """Shortcut Generator for get_sessions. Generator to iterate through all sessions for a user in an app + + Args: + user_id (str): The User ID representing the user, managed by the user + + Yields: + Collection: The Session object of the requested Session + + """ + page = 1 + page_size = 50 + get_collection_response = self.get_collections(user_id, page, page_size) + while True: + # get_collection_response = self.get_collections(user_id, location_id, page, page_size) + for collection in get_collection_response.items: + yield collection + + new_collections = get_collection_response.next() + if not new_collections: + break + + get_collection_response = new_collections + class Session: """Represents a single session for a user in an app""" @@ -252,20 +410,20 @@ def __init__( id: uuid.UUID, user_id: str, location_id: str, - session_data: dict | str, + metadata: dict, is_active: bool, + created_at ): """Constructor for Session""" - self.base_url = client.base_url - self.client = client.client - self.app_id = client.app_id - self.id = id - self.user_id = user_id - self.location_id = location_id - self.session_data = ( - session_data if isinstance(session_data, dict) else json.loads(session_data) - ) - self._is_active = is_active + self.base_url: str = client.base_url + self.client: httpx.Client = client.client + self.app_id: str = client.app_id + self.id: uuid.UUID = id + self.user_id: str = user_id + self.location_id: str = location_id + self.metadata: dict = metadata + self._is_active: bool = is_active + self.created_at: datetime.datetime = created_at @property def common_prefix(self): @@ -274,7 +432,7 @@ def common_prefix(self): def __str__(self): """String representation of Session""" - return f"Session(id={self.id}, app_id={self.app_id}, user_id={self.user_id}, location_id={self.location_id}, session_data={self.session_data}, is_active={self.is_active})" + return f"Session(id={self.id}, app_id={self.app_id}, user_id={self.user_id}, location_id={self.location_id}, metadata={self.metadata}, is_active={self.is_active})" @property def is_active(self): @@ -444,21 +602,21 @@ def get_metamessages_generator(self, metamessage_type: Optional[str] = None, mes get_metamessages_page = new_messages - def update(self, session_data: Dict): - """Update the session_data of a session + def update(self, metadata: Dict): + """Update the metadata of a session Args: - session_data (Dict): The Session object containing any new session_data + metadata (Dict): The Session object containing any new metadata Returns: boolean: Whether the session was successfully updated """ - info = {"session_data": session_data} + info = {"metadata": metadata} url = f"{self.common_prefix}/users/{self.user_id}/sessions/{self.id}" response = self.client.put(url, json=info) success = response.status_code < 400 - self.session_data = 
session_data + self.metadata = metadata return success def close(self): @@ -468,3 +626,202 @@ def close(self): response.raise_for_status() self._is_active = False +class Collection: + """Represents a single collection for a user in an app""" + + def __init__( + self, + client: Client, + id: uuid.UUID, + user_id: str, + name: str, + created_at: datetime.datetime, + ): + """Constructor for Collection""" + self.base_url: str = client.base_url + self.client: httpx.Client = client.client + self.app_id: str = client.app_id + self.id: uuid.UUID = id + self.user_id: str = user_id + self.name: str = name + self.created_at: datetime.datetime = created_at + + @property + def common_prefix(self): + """Shortcut for common API prefix. made a property to prevent tampering""" + return f"{self.base_url}/apps/{self.app_id}" + + def __str__(self): + """String representation of Collection""" + return f"Collection(id={self.id}, app_id={self.app_id}, user_id={self.user_id}, name={self.name}, created_at={self.created_at})" + + def update(self, name: str): + """Update the name of the collection + + Args: + name (str): The new name of the document + + Returns: + boolean: Whether the session was successfully updated + """ + info = {"name": name} + url = f"{self.common_prefix}/users/{self.user_id}/collections/{self.id}" + response = self.client.put(url, json=info) + response.raise_for_status() + success = response.status_code < 400 + self.name = name + return success + + def delete(self): + """Delete a collection and all associated documents""" + url = f"{self.common_prefix}/users/{self.user_id}/collections/{self.id}" + response = self.client.delete(url) + response.raise_for_status() + + def create_document(self, content: str, metadata: Dict = {}): + """Adds a document to the collection + + Args: + metadata (Dict): The metadata of the document + content (str): The content of the document + + Returns: + Document: The Document object of the added document + + """ + data = {"metadata": metadata, "content": content} + url = f"{self.common_prefix}/users/{self.user_id}/collections/{self.id}/documents" + response = self.client.post(url, json=data) + response.raise_for_status() + data = response.json() + return Document( + collection_id=self.id, + id=data["id"], + metadata=metadata, + content=content, + created_at=data["created_at"] + ) + + def get_document(self, document_id: uuid.UUID) -> Document: + """Get a specific document for a collection based on ID + + Args: + document_id (uuid.UUID): The ID of the Document to retrieve + + Returns: + Document: The Document object + + """ + url = f"{self.common_prefix}/users/{self.user_id}/collections/{self.id}/documents/{document_id}" + response = self.client.get(url) + response.raise_for_status() + data = response.json() + return Document( + collection_id=self.id, + id=data["id"], + metadata=data["metadata"], + content=data["content"], + created_at=data["created_at"] + ) + + def get_documents(self, page: int = 1, page_size: int = 50) -> GetDocumentPage: + """Get all documents for a collection + + Args: + page (int, optional): The page of results to return + page_size (int, optional): The number of results to return per page + + Returns: + GetDocumentPage: Page of Document objects + + """ + url = f"{self.common_prefix}/users/{self.user_id}/collections/{self.id}/documents?page={page}&size={page_size}" + response = self.client.get(url) + response.raise_for_status() + data = response.json() + return GetDocumentPage(self, data) + + def get_documents_generator(self): + """Shortcut 
Generator for get_documents. Generator to iterate through all documents for a collection in an app + + Yields: + Document: The Document object of the next Document + + """ + page = 1 + page_size = 50 + get_documents_page= self.get_documents(page, page_size) + while True: + for document in get_documents_page.items: + yield document + + new_documents = get_documents_page.next() + if not new_documents: + break + + get_documents_page = new_documents + + def query(self, query: str, top_k: int = 5) -> List[Document]: + """query the documents by cosine distance + Args: + query (str): The query to run + top_k (int, optional): The number of results to return. Defaults to 5. + + Returns: + List[Document]: The response from the query with matching documents + """ + url = f"{self.common_prefix}/users/{self.user_id}/collections/{self.id}/query?query={query}&top_k={top_k}" + response = self.client.get(url) + response.raise_for_status() + data = [ + Document( + collection_id=self.id, + content=document["content"], + id=document["id"], + created_at=document["created_at"], + metadata=document["metadata"] + ) + for document in response.json() + ] + return data + + def update_document(self, document: Document, metadata: Optional[Dict], content: Optional[str]) -> Document: + """Update a document in the collection + + Args: + document (Document): The Document to update + metadata (Dict): The metadata of the document + content (str): The content of the document + + Returns: + Document: The newly updated Document + """ + if metadata is None and content is None: + raise ValueError("metadata and content cannot both be None") + data = {"metadata": metadata, "content": content} + url = f"{self.common_prefix}/users/{self.user_id}/collections/{self.id}/documents/{document.id}" + response = self.client.put(url, json=data) + response.raise_for_status() + data = response.json() + return Document( + data["id"], + metadata=data["metadata"], + content=data["content"], + created_at=data["created_at"], + collection_id=data["collection_id"], + ) + + def delete_document(self, document: Document) -> bool: + """Delete a document from the collection + + Args: + document (Document): The Document to delete + + Returns: + boolean: Whether the document was successfully deleted + """ + url = f"{self.common_prefix}/users/{self.user_id}/collections/{self.id}/documents/{document.id}" + response = self.client.delete(url) + response.raise_for_status() + success = response.status_code < 400 + return success diff --git a/sdk/pyproject.toml b/sdk/pyproject.toml index ff695b5..1455bca 100644 --- a/sdk/pyproject.toml +++ b/sdk/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "honcho-ai" -version = "0.0.2" +version = "0.0.3" description = "Python Client SDK for Honcho" authors = ["Plastic Labs "] license = "AGPL-3.0" diff --git a/sdk/tests/test_async.py b/sdk/tests/test_async.py index 8e5904d..edd4191 100644 --- a/sdk/tests/test_async.py +++ b/sdk/tests/test_async.py @@ -1,5 +1,5 @@ import pytest -from honcho import AsyncGetSessionPage, AsyncGetMessagePage, AsyncGetMetamessagePage, AsyncSession, Message, Metamessage +from honcho import AsyncGetSessionPage, AsyncGetMessagePage, AsyncGetMetamessagePage, AsyncGetDocumentPage, AsyncSession, Message, Metamessage, Document from honcho import AsyncClient as Honcho from uuid import uuid1 @@ -14,7 +14,7 @@ async def test_session_creation_retrieval(): assert retrieved_session.id == created_session.id assert retrieved_session.is_active is True assert retrieved_session.location_id == "default" - assert 
retrieved_session.session_data == {} + assert retrieved_session.metadata == {} @pytest.mark.asyncio @@ -40,7 +40,7 @@ async def test_session_update(): created_session = await client.create_session(user_id) assert await created_session.update({"foo": "bar"}) retrieved_session = await client.get_session(user_id, created_session.id) - assert retrieved_session.session_data == {"foo": "bar"} + assert retrieved_session.metadata == {"foo": "bar"} @pytest.mark.asyncio @@ -271,4 +271,97 @@ async def test_paginated_metamessages_generator(): await gen.__anext__() +@pytest.mark.asyncio +async def test_collections(): + col_name = str(uuid1()) + app_id = str(uuid1()) + user_id = str(uuid1()) + client = Honcho(app_id, "http://localhost:8000") + # Make a collection + collection = await client.create_collection(user_id, col_name) + + # Add documents + doc1 = await collection.create_document(content="This is a test of documents - 1", metadata={"foo": "bar"}) + doc2 = await collection.create_document(content="This is a test of documents - 2", metadata={}) + doc3 = await collection.create_document(content="This is a test of documents - 3", metadata={}) + + # Get all documents + page = await collection.get_documents(page=1, page_size=3) + # Verify size + assert page is not None + assert isinstance(page, AsyncGetDocumentPage) + assert len(page.items) == 3 + # delete a doc + result = await collection.delete_document(doc1) + assert result is True + # Get all documents with a generator this time + gen = collection.get_documents_generator() + # Verfy size + item = await gen.__anext__() + item2 = await gen.__anext__() + with pytest.raises(StopAsyncIteration): + await gen.__anext__() + # delete the collection + result = await collection.delete() + # confirm documents are gone + with pytest.raises(Exception): + new_col = await client.get_collection(user_id, "test") + +@pytest.mark.asyncio +async def test_collection_name_collision(): + col_name = str(uuid1()) + new_col_name = str(uuid1()) + app_id = str(uuid1()) + user_id = str(uuid1()) + client = Honcho(app_id, "http://localhost:8000") + # Make a collection + collection = await client.create_collection(user_id, col_name) + # Make another collection + with pytest.raises(Exception): + await client.create_collection(user_id, col_name) + + # Change the name of original collection + result = await collection.update(new_col_name) + assert result is True + + # Try again to add another collection + collection2 = await client.create_collection(user_id, col_name) + assert collection2 is not None + assert collection2.name == col_name + assert collection.name == new_col_name + + # Get all collections + page = await client.get_collections(user_id) + assert page is not None + assert len(page.items) == 2 + +@pytest.mark.asyncio +async def test_collection_query(): + col_name = str(uuid1()) + app_id = str(uuid1()) + user_id = str(uuid1()) + client = Honcho(app_id, "http://localhost:8000") + # Make a collection + collection = await client.create_collection(user_id, col_name) + + # Add documents + doc1 = await collection.create_document(content="The user loves puppies", metadata={}) + doc2 = await collection.create_document(content="The user owns a dog", metadata={}) + doc3 = await collection.create_document(content="The user is a doctor", metadata={}) + + result = await collection.query(query="does the user own pets", top_k=2) + + assert result is not None + assert len(result) == 2 + assert isinstance(result[0], Document) + + doc3 = await collection.update_document(doc3, 
metadata={"test": "test"}, content="the user has owned pets in the past") + assert doc3 is not None + assert doc3.metadata == {"test": "test"} + assert doc3.content == "the user has owned pets in the past" + + result = await collection.query(query="does the user own pets", top_k=2) + assert result is not None + assert len(result) == 2 + assert isinstance(result[0], Document) diff --git a/sdk/tests/test_sync.py b/sdk/tests/test_sync.py index aba875f..f9b32fb 100644 --- a/sdk/tests/test_sync.py +++ b/sdk/tests/test_sync.py @@ -1,5 +1,5 @@ import pytest -from honcho import GetSessionPage, GetMessagePage, GetMetamessagePage, Session, Message, Metamessage +from honcho import GetSessionPage, GetMessagePage, GetMetamessagePage, Session, Message, Metamessage, GetDocumentPage, Document from honcho import Client as Honcho from uuid import uuid1 @@ -13,7 +13,7 @@ def test_session_creation_retrieval(): assert retrieved_session.id == created_session.id assert retrieved_session.is_active is True assert retrieved_session.location_id == "default" - assert retrieved_session.session_data == {} + assert retrieved_session.metadata == {} def test_session_multiple_retrieval(): @@ -37,7 +37,7 @@ def test_session_update(): created_session = client.create_session(user_id) assert created_session.update({"foo": "bar"}) retrieved_session = client.get_session(user_id, created_session.id) - assert retrieved_session.session_data == {"foo": "bar"} + assert retrieved_session.metadata == {"foo": "bar"} def test_session_deletion(): @@ -257,4 +257,94 @@ def test_paginated_metamessages_generator(): gen.__next__() +def test_collections(): + col_name = str(uuid1()) + app_id = str(uuid1()) + user_id = str(uuid1()) + client = Honcho(app_id, "http://localhost:8000") + # Make a collection + collection = client.create_collection(user_id, col_name) + + # Add documents + doc1 = collection.create_document(content="This is a test of documents - 1", metadata={"foo": "bar"}) + doc2 = collection.create_document(content="This is a test of documents - 2", metadata={}) + doc3 = collection.create_document(content="This is a test of documents - 3", metadata={}) + + # Get all documents + page = collection.get_documents(page=1, page_size=3) + # Verify size + assert page is not None + assert isinstance(page, GetDocumentPage) + assert len(page.items) == 3 + # delete a doc + result = collection.delete_document(doc1) + assert result is True + # Get all documents with a generator this time + gen = collection.get_documents_generator() + # Verfy size + item = gen.__next__() + item2 = gen.__next__() + with pytest.raises(StopIteration): + gen.__next__() + # delete the collection + result = collection.delete() + # confirm documents are gone + with pytest.raises(Exception): + new_col = client.get_collection(user_id, "test") + +def test_collection_name_collision(): + col_name = str(uuid1()) + new_col_name = str(uuid1()) + app_id = str(uuid1()) + user_id = str(uuid1()) + client = Honcho(app_id, "http://localhost:8000") + # Make a collection + collection = client.create_collection(user_id, col_name) + # Make another collection + with pytest.raises(Exception): + client.create_collection(user_id, col_name) + + # Change the name of original collection + result = collection.update(new_col_name) + assert result is True + + # Try again to add another collection + collection2 = client.create_collection(user_id, col_name) + assert collection2 is not None + assert collection2.name == col_name + assert collection.name == new_col_name + + # Get all collections + page = 
client.get_collections(user_id) + assert page is not None + assert len(page.items) == 2 + +def test_collection_query(): + col_name = str(uuid1()) + app_id = str(uuid1()) + user_id = str(uuid1()) + client = Honcho(app_id, "http://localhost:8000") + # Make a collection + collection = client.create_collection(user_id, col_name) + + # Add documents + doc1 = collection.create_document(content="The user loves puppies", metadata={}) + doc2 = collection.create_document(content="The user owns a dog", metadata={}) + doc3 = collection.create_document(content="The user is a doctor", metadata={}) + + result = collection.query(query="does the user own pets", top_k=2) + + assert result is not None + assert len(result) == 2 + assert isinstance(result[0], Document) + + doc3 = collection.update_document(doc3, metadata={"test": "test"}, content="the user has owned pets in the past") + assert doc3 is not None + assert doc3.metadata == {"test": "test"} + assert doc3.content == "the user has owned pets in the past" + + result = collection.query(query="does the user own pets", top_k=2) + assert result is not None + assert len(result) == 2 + assert isinstance(result[0], Document) From 614242b2e969002a7ecfa0e7e0d21c065186d0c2 Mon Sep 17 00:00:00 2001 From: Vineeth Voruganti <13438633+VVoruganti@users.noreply.github.com> Date: Tue, 20 Feb 2024 22:52:21 -0800 Subject: [PATCH 28/46] Add reverse parameters for paginated routes --- api/src/crud.py | 257 +++++++++++++++++++------ api/src/main.py | 382 ++++++++++++++++++++++++++++---------- sdk/honcho/client.py | 328 +++++++++++++++++++++----------- sdk/honcho/sync_client.py | 328 +++++++++++++++++++++----------- 4 files changed, 927 insertions(+), 368 deletions(-) diff --git a/api/src/crud.py b/api/src/crud.py index 9b8fa0c..e18c0ff 100644 --- a/api/src/crud.py +++ b/api/src/crud.py @@ -12,29 +12,46 @@ openai_client = OpenAI() -def get_session(db: Session, app_id: str, session_id: uuid.UUID, user_id: Optional[str] = None) -> Optional[models.Session]: - stmt = select(models.Session).where(models.Session.app_id == app_id).where(models.Session.id == session_id) + +def get_session( + db: Session, app_id: str, session_id: uuid.UUID, user_id: Optional[str] = None +) -> Optional[models.Session]: + stmt = ( + select(models.Session) + .where(models.Session.app_id == app_id) + .where(models.Session.id == session_id) + ) if user_id is not None: stmt = stmt.where(models.Session.user_id == user_id) session = db.scalars(stmt).one_or_none() return session + def get_sessions( - db: Session, app_id: str, user_id: str, location_id: str | None = None + db: Session, + app_id: str, + user_id: str, + location_id: Optional[str] = None, + reverse: Optional[bool] = False, ) -> Select: stmt = ( select(models.Session) .where(models.Session.app_id == app_id) .where(models.Session.user_id == user_id) .where(models.Session.is_active.is_(True)) - .order_by(models.Session.created_at) ) + if reverse: + stmt = stmt.order_by(models.Session.created_at.desc()) + else: + stmt = stmt.order_by(models.Session.created_at) + if location_id is not None: stmt = stmt.where(models.Session.location_id == location_id) return stmt + def create_session( db: Session, session: schemas.SessionCreate, app_id: str, user_id: str ) -> models.Session: @@ -51,18 +68,29 @@ def create_session( def update_session( - db: Session, session: schemas.SessionUpdate, app_id: str, user_id: str, session_id: uuid.UUID + db: Session, + session: schemas.SessionUpdate, + app_id: str, + user_id: str, + session_id: uuid.UUID, ) -> bool: - 
honcho_session = get_session(db, app_id=app_id, session_id=session_id, user_id=user_id) + honcho_session = get_session( + db, app_id=app_id, session_id=session_id, user_id=user_id + ) if honcho_session is None: raise ValueError("Session not found or does not belong to user") - if session.metadata is not None: # Need to explicitly be there won't make it empty by default + if ( + session.metadata is not None + ): # Need to explicitly be there won't make it empty by default honcho_session.h_metadata = session.metadata db.commit() db.refresh(honcho_session) return honcho_session -def delete_session(db: Session, app_id: str, user_id: str, session_id: uuid.UUID) -> bool: + +def delete_session( + db: Session, app_id: str, user_id: str, session_id: uuid.UUID +) -> bool: stmt = ( select(models.Session) .where(models.Session.id == session_id) @@ -76,10 +104,17 @@ def delete_session(db: Session, app_id: str, user_id: str, session_id: uuid.UUID db.commit() return True + def create_message( - db: Session, message: schemas.MessageCreate, app_id: str, user_id: str, session_id: uuid.UUID + db: Session, + message: schemas.MessageCreate, + app_id: str, + user_id: str, + session_id: uuid.UUID, ) -> models.Message: - honcho_session = get_session(db, app_id=app_id, session_id=session_id, user_id=user_id) + honcho_session = get_session( + db, app_id=app_id, session_id=session_id, user_id=user_id + ) if honcho_session is None: raise ValueError("Session not found or does not belong to user") @@ -93,8 +128,13 @@ def create_message( db.refresh(honcho_message) return honcho_message + def get_messages( - db: Session, app_id: str, user_id: str, session_id: uuid.UUID + db: Session, + app_id: str, + user_id: str, + session_id: uuid.UUID, + reverse: Optional[bool] = False, ) -> Select: stmt = ( select(models.Message) @@ -102,12 +142,18 @@ def get_messages( .where(models.Session.app_id == app_id) .where(models.Session.user_id == user_id) .where(models.Message.session_id == session_id) - .order_by(models.Message.created_at) ) + + if reverse: + stmt = stmt.order_by(models.Message.created_at.desc()) + else: + stmt = stmt.order_by(models.Message.created_at) + return stmt + def get_message( - db: Session, app_id: str, user_id: str, session_id: uuid.UUID, message_id: uuid.UUID + db: Session, app_id: str, user_id: str, session_id: uuid.UUID, message_id: uuid.UUID ) -> Optional[models.Message]: stmt = ( select(models.Message) @@ -116,15 +162,24 @@ def get_message( .where(models.Session.user_id == user_id) .where(models.Message.session_id == session_id) .where(models.Message.id == message_id) - ) return db.scalars(stmt).one_or_none() + ######################################################## # metamessage methods ######################################################## -def get_metamessages(db: Session, app_id: str, user_id: str, session_id: uuid.UUID, message_id: Optional[uuid.UUID], metamessage_type: Optional[str] = None) -> Select: + +def get_metamessages( + db: Session, + app_id: str, + user_id: str, + session_id: uuid.UUID, + message_id: Optional[uuid.UUID], + metamessage_type: Optional[str] = None, + reverse: Optional[bool] = False, +) -> Select: stmt = ( select(models.Metamessage) .join(models.Message, models.Message.id == models.Metamessage.message_id) @@ -132,19 +187,32 @@ def get_metamessages(db: Session, app_id: str, user_id: str, session_id: uuid.UU .where(models.Session.app_id == app_id) .where(models.Session.user_id == user_id) .where(models.Message.session_id == session_id) - .order_by(models.Metamessage.created_at) 
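
The hunks above and below repeat one pattern: every list-building query in crud.py drops its hard-coded `.order_by(...)` and instead orders ascending or descending based on the new `reverse` flag, leaving pagination itself to the caller. A minimal, self-contained sketch of that pattern follows; the `Item` model and `get_items` helper are illustrative names, not part of the patch, and assume SQLAlchemy 2.0-style declarative mapping.

    import datetime
    from typing import Optional

    from sqlalchemy import DateTime, Select, select
    from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column


    class Base(DeclarativeBase):
        pass


    class Item(Base):  # stand-in for models.Session / models.Message / models.Document
        __tablename__ = "items"
        id: Mapped[int] = mapped_column(primary_key=True)
        created_at: Mapped[datetime.datetime] = mapped_column(DateTime)


    def get_items(reverse: Optional[bool] = False) -> Select:
        """Build a paginate-able Select whose ordering depends on the reverse flag."""
        stmt = select(Item)
        if reverse:
            stmt = stmt.order_by(Item.created_at.desc())  # newest first
        else:
            stmt = stmt.order_by(Item.created_at)  # oldest first, the previous default
        return stmt

Returning the un-executed Select (rather than a list) is what lets the route layer hand it straight to fastapi-pagination, as the main.py changes later in this patch do.
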
) + if message_id is not None: stmt = stmt.where(models.Metamessage.message_id == message_id) + if metamessage_type is not None: stmt = stmt.where(models.Metamessage.metamessage_type == metamessage_type) + + if reverse: + stmt = stmt.order_by(models.Metamessage.created_at.desc()) + else: + stmt = stmt.order_by(models.Metamessage.created_at) + return stmt + def get_metamessage( - db: Session, app_id: str, user_id: str, session_id: uuid.UUID, message_id: uuid.UUID, metamessage_id: uuid.UUID -) -> Optional[models.Metamessage]: + db: Session, + app_id: str, + user_id: str, + session_id: uuid.UUID, + message_id: uuid.UUID, + metamessage_id: uuid.UUID, +) -> Optional[models.Metamessage]: stmt = ( - select(models.Metamessage) + select(models.Metamessage) .join(models.Message, models.Message.id == models.Metamessage.message_id) .join(models.Session, models.Message.session_id == models.Session.id) .where(models.Session.app_id == app_id) @@ -152,10 +220,10 @@ def get_metamessage( .where(models.Message.session_id == session_id) .where(models.Metamessage.message_id == message_id) .where(models.Metamessage.id == metamessage_id) - ) return db.scalars(stmt).one_or_none() + def create_metamessage( db: Session, metamessage: schemas.MetamessageCreate, @@ -163,7 +231,13 @@ def create_metamessage( user_id: str, session_id: uuid.UUID, ): - message = get_message(db, app_id=app_id, session_id=session_id, user_id=user_id, message_id=metamessage.message_id) + message = get_message( + db, + app_id=app_id, + session_id=session_id, + user_id=user_id, + message_id=metamessage.message_id, + ) if message is None: raise ValueError("Session not found or does not belong to user") @@ -178,24 +252,36 @@ def create_metamessage( db.refresh(honcho_metamessage) return honcho_metamessage + ######################################################## # collection methods ######################################################## # Should be very similar to the session methods -def get_collections(db: Session, app_id: str, user_id: str) -> Select: + +def get_collections( + db: Session, app_id: str, user_id: str, reverse: Optional[bool] = False +) -> Select: """Get a distinct list of the names of collections associated with a user""" stmt = ( select(models.Collection) .where(models.Collection.app_id == app_id) .where(models.Collection.user_id == user_id) - .order_by(models.Collection.created_at) ) + + if reverse: + stmt = stmt.order_by(models.Collection.created_at.desc()) + else: + stmt = stmt.order_by(models.Collection.created_at) + return stmt -def get_collection_by_id(db: Session, app_id: str, user_id: str, collection_id: uuid.UUID) -> Optional[models.Collection]: - stmt = ( + +def get_collection_by_id( + db: Session, app_id: str, user_id: str, collection_id: uuid.UUID +) -> Optional[models.Collection]: + stmt = ( select(models.Collection) .where(models.Collection.app_id == app_id) .where(models.Collection.user_id == user_id) @@ -204,8 +290,11 @@ def get_collection_by_id(db: Session, app_id: str, user_id: str, collection_id: collection = db.scalars(stmt).one_or_none() return collection -def get_collection_by_name(db: Session, app_id: str, user_id: str, name: str) -> Optional[models.Collection]: - stmt = ( + +def get_collection_by_name( + db: Session, app_id: str, user_id: str, name: str +) -> Optional[models.Collection]: + stmt = ( select(models.Collection) .where(models.Collection.app_id == app_id) .where(models.Collection.user_id == user_id) @@ -214,6 +303,7 @@ def get_collection_by_name(db: Session, app_id: str, user_id: str, name: 
str) -> collection = db.scalars(stmt).one_or_none() return collection + def create_collection( db: Session, collection: schemas.CollectionCreate, app_id: str, user_id: str ) -> models.Collection: @@ -231,10 +321,17 @@ def create_collection( db.refresh(honcho_collection) return honcho_collection + def update_collection( - db: Session, collection: schemas.CollectionUpdate, app_id: str, user_id: str, collection_id: uuid.UUID + db: Session, + collection: schemas.CollectionUpdate, + app_id: str, + user_id: str, + collection_id: uuid.UUID, ) -> models.Collection: - honcho_collection = get_collection_by_id(db, app_id=app_id, user_id=user_id, collection_id=collection_id) + honcho_collection = get_collection_by_id( + db, app_id=app_id, user_id=user_id, collection_id=collection_id + ) if honcho_collection is None: raise ValueError("collection not found or does not belong to user") try: @@ -246,6 +343,7 @@ def update_collection( db.refresh(honcho_collection) return honcho_collection + def delete_collection( db: Session, app_id: str, user_id: str, collection_id: uuid.UUID ) -> bool: @@ -266,14 +364,20 @@ def delete_collection( db.commit() return True + ######################################################## # document methods ######################################################## # Should be similar to the messages methods outside of query + def get_documents( - db: Session, app_id: str, user_id: str, collection_id: uuid.UUID + db: Session, + app_id: str, + user_id: str, + collection_id: uuid.UUID, + reverse: Optional[bool] = False, ) -> Select: stmt = ( select(models.Document) @@ -281,14 +385,24 @@ def get_documents( .where(models.Collection.app_id == app_id) .where(models.Collection.user_id == user_id) .where(models.Document.collection_id == collection_id) - .order_by(models.Document.created_at) ) + + if reverse: + stmt = stmt.order_by(models.Document.created_at.desc()) + else: + stmt = stmt.order_by(models.Document.created_at) + return stmt + def get_document( - db: Session, app_id: str, user_id: str, collection_id: uuid.UUID, document_id: uuid.UUID + db: Session, + app_id: str, + user_id: str, + collection_id: uuid.UUID, + document_id: uuid.UUID, ) -> Optional[models.Document]: - stmt = ( + stmt = ( select(models.Document) .join(models.Collection, models.Collection.id == models.Document.collection_id) .where(models.Collection.app_id == app_id) @@ -301,36 +415,48 @@ def get_document( return document -def query_documents(db: Session, app_id: str, user_id: str, collection_id: uuid.UUID, query: str, top_k: int = 5) -> Sequence[models.Document]: +def query_documents( + db: Session, + app_id: str, + user_id: str, + collection_id: uuid.UUID, + query: str, + top_k: int = 5, +) -> Sequence[models.Document]: response = openai_client.embeddings.create( - input=query, - model="text-embedding-3-small" + input=query, model="text-embedding-3-small" ) embedding_query = response.data[0].embedding stmt = ( - select(models.Document) - .join(models.Collection, models.Collection.id == models.Document.collection_id) - .where(models.Collection.app_id == app_id) - .where(models.Collection.user_id == user_id) - .where(models.Document.collection_id == collection_id) - .order_by(models.Document.embedding.cosine_distance(embedding_query)) - .limit(top_k) - ) + select(models.Document) + .join(models.Collection, models.Collection.id == models.Document.collection_id) + .where(models.Collection.app_id == app_id) + .where(models.Collection.user_id == user_id) + .where(models.Document.collection_id == collection_id) + 
.order_by(models.Document.embedding.cosine_distance(embedding_query)) + .limit(top_k) + ) # if metadata is not None: - # stmt = stmt.where(models.Document.h_metadata.contains(metadata)) + # stmt = stmt.where(models.Document.h_metadata.contains(metadata)) return db.scalars(stmt).all() + def create_document( - db: Session, document: schemas.DocumentCreate, app_id: str, user_id: str, collection_id: uuid.UUID + db: Session, + document: schemas.DocumentCreate, + app_id: str, + user_id: str, + collection_id: uuid.UUID, ) -> models.Document: """Embed a message as a vector and create a document""" - collection = get_collection_by_id(db, app_id=app_id, collection_id=collection_id, user_id=user_id) + collection = get_collection_by_id( + db, app_id=app_id, collection_id=collection_id, user_id=user_id + ) if collection is None: raise ValueError("Session not found or does not belong to user") response = openai_client.embeddings.create( - input=document.content, - model="text-embedding-3-small" + input=document.content, model="text-embedding-3-small" ) embedding = response.data[0].embedding @@ -339,25 +465,36 @@ def create_document( collection_id=collection_id, content=document.content, h_metadata=document.metadata, - embedding=embedding + embedding=embedding, ) db.add(honcho_document) db.commit() db.refresh(honcho_document) return honcho_document + def update_document( - db: Session, document: schemas.DocumentUpdate, app_id: str, user_id: str, collection_id: uuid.UUID, document_id: uuid.UUID + db: Session, + document: schemas.DocumentUpdate, + app_id: str, + user_id: str, + collection_id: uuid.UUID, + document_id: uuid.UUID, ) -> bool: - honcho_document = get_document(db, app_id=app_id, collection_id=collection_id, user_id=user_id, document_id=document_id) + honcho_document = get_document( + db, + app_id=app_id, + collection_id=collection_id, + user_id=user_id, + document_id=document_id, + ) if honcho_document is None: raise ValueError("Session not found or does not belong to user") if document.content is not None: honcho_document.content = document.content response = openai_client.embeddings.create( - input=document.content, - model="text-embedding-3-small" - ) + input=document.content, model="text-embedding-3-small" + ) embedding = response.data[0].embedding honcho_document.embedding = embedding honcho_document.created_at = datetime.datetime.now() @@ -368,7 +505,14 @@ def update_document( db.refresh(honcho_document) return honcho_document -def delete_document(db: Session, app_id: str, user_id: str, collection_id: uuid.UUID, document_id: uuid.UUID) -> bool: + +def delete_document( + db: Session, + app_id: str, + user_id: str, + collection_id: uuid.UUID, + document_id: uuid.UUID, +) -> bool: stmt = ( select(models.Document) .join(models.Collection, models.Collection.id == models.Document.collection_id) @@ -383,4 +527,3 @@ def delete_document(db: Session, app_id: str, user_id: str, collection_id: uuid. db.delete(document) db.commit() return True - diff --git a/api/src/main.py b/api/src/main.py index 7a16e9c..0c658e9 100644 --- a/api/src/main.py +++ b/api/src/main.py @@ -13,7 +13,7 @@ from . 
import crud, models, schemas from .db import SessionLocal, engine -models.Base.metadata.create_all(bind=engine) # Scaffold Database if not already done +models.Base.metadata.create_all(bind=engine) # Scaffold Database if not already done app = FastAPI() @@ -30,6 +30,7 @@ add_pagination(app) + def get_db(): """FastAPI Dependency Generator for Database""" db = SessionLocal() @@ -38,17 +39,20 @@ def get_db(): finally: db.close() + ######################################################## # Session Routes ######################################################## + @router.get("/sessions", response_model=Page[schemas.Session]) def get_sessions( request: Request, app_id: str, user_id: str, location_id: Optional[str] = None, - db: Session = Depends(get_db) + reverse: Optional[bool] = False, + db: Session = Depends(get_db), ): """Get All Sessions for a User @@ -58,18 +62,27 @@ def get_sessions( location_id (str, optional): Optional Location ID representing the location of a session Returns: - list[schemas.Session]: List of Session objects + list[schemas.Session]: List of Session objects """ - return paginate(db, crud.get_sessions(db, app_id=app_id, user_id=user_id, location_id=location_id)) + return paginate( + db, + crud.get_sessions( + db, app_id=app_id, user_id=user_id, location_id=location_id, reverse=reverse + ), + ) @router.post("/sessions", response_model=schemas.Session) def create_session( - request: Request, app_id: str, user_id: str, session: schemas.SessionCreate, db: Session = Depends(get_db) + request: Request, + app_id: str, + user_id: str, + session: schemas.SessionCreate, + db: Session = Depends(get_db), ): """Create a Session for a User - + Args: app_id (str): The ID of the app representing the client application using honcho user_id (str): The User ID representing the user, managed by the user @@ -77,22 +90,23 @@ def create_session( Returns: schemas.Session: The Session object of the new Session - + """ value = crud.create_session(db, app_id=app_id, user_id=user_id, session=session) return value + @router.put("/sessions/{session_id}", response_model=schemas.Session) def update_session( - request: Request, + request: Request, app_id: str, user_id: str, session_id: uuid.UUID, session: schemas.SessionUpdate, db: Session = Depends(get_db), - ): +): """Update the metadata of a Session - + Args: app_id (str): The ID of the app representing the client application using honcho user_id (str): The User ID representing the user, managed by the user @@ -104,20 +118,25 @@ def update_session( """ if session.metadata is None: - raise HTTPException(status_code=400, detail="Session metadata cannot be empty") # TODO TEST if I can set the metadata to be blank with this + raise HTTPException( + status_code=400, detail="Session metadata cannot be empty" + ) # TODO TEST if I can set the metadata to be blank with this try: - return crud.update_session(db, app_id=app_id, user_id=user_id, session_id=session_id, session=session) + return crud.update_session( + db, app_id=app_id, user_id=user_id, session_id=session_id, session=session + ) except ValueError: raise HTTPException(status_code=404, detail="Session not found") + @router.delete("/sessions/{session_id}") def delete_session( - request: Request, + request: Request, app_id: str, user_id: str, session_id: uuid.UUID, db: Session = Depends(get_db), - ): +): """Delete a session by marking it as inactive Args: @@ -132,14 +151,23 @@ def delete_session( HTTPException: If the session is not found """ - response = crud.delete_session(db, app_id=app_id, 
user_id=user_id, session_id=session_id) + response = crud.delete_session( + db, app_id=app_id, user_id=user_id, session_id=session_id + ) if response: return {"message": "Session deleted successfully"} else: raise HTTPException(status_code=404, detail="Session not found") + @router.get("/sessions/{session_id}", response_model=schemas.Session) -def get_session(request: Request, app_id: str, user_id: str, session_id: uuid.UUID, db: Session = Depends(get_db)): +def get_session( + request: Request, + app_id: str, + user_id: str, + session_id: uuid.UUID, + db: Session = Depends(get_db), +): """Get a specific session for a user by ID Args: @@ -147,27 +175,28 @@ def get_session(request: Request, app_id: str, user_id: str, session_id: uuid.UU user_id (str): The User ID representing the user, managed by the user session_id (int): The ID of the Session to retrieve - Returns: + Returns: schemas.Session: The Session object of the requested Session Raises: HTTPException: If the session is not found """ - honcho_session = crud.get_session(db, app_id=app_id, session_id=session_id, user_id=user_id) + honcho_session = crud.get_session( + db, app_id=app_id, session_id=session_id, user_id=user_id + ) if honcho_session is None: raise HTTPException(status_code=404, detail="Session not found") return honcho_session + ######################################################## # Message Routes ######################################################## -@router.post( - "/sessions/{session_id}/messages", - response_model=schemas.Message -) + +@router.post("/sessions/{session_id}/messages", response_model=schemas.Message) def create_message_for_session( - request: Request, + request: Request, app_id: str, user_id: str, session_id: uuid.UUID, @@ -190,27 +219,29 @@ def create_message_for_session( """ try: - return crud.create_message(db, message=message, app_id=app_id, user_id=user_id, session_id=session_id) + return crud.create_message( + db, message=message, app_id=app_id, user_id=user_id, session_id=session_id + ) except ValueError: raise HTTPException(status_code=404, detail="Session not found") -@router.get( - "/sessions/{session_id}/messages", - response_model=Page[schemas.Message] -) -def get_messages_for_session( - request: Request, + +@router.get("/sessions/{session_id}/messages", response_model=Page[schemas.Message]) +def get_messages( + request: Request, app_id: str, user_id: str, session_id: uuid.UUID, + reverse: Optional[bool] = False, db: Session = Depends(get_db), ): """Get all messages for a session - + Args: app_id (str): The ID of the app representing the client application using honcho user_id (str): The User ID representing the user, managed by the user session_id (int): The ID of the Session to retrieve + reverse (bool): Whether to reverse the order of the messages Returns: list[schemas.Message]: List of Message objects @@ -219,14 +250,23 @@ def get_messages_for_session( HTTPException: If the session is not found """ - try: - return paginate(db, crud.get_messages(db, app_id=app_id, user_id=user_id, session_id=session_id)) + try: + return paginate( + db, + crud.get_messages( + db, + app_id=app_id, + user_id=user_id, + session_id=session_id, + reverse=reverse, + ), + ) except ValueError: raise HTTPException(status_code=404, detail="Session not found") + @router.get( - "sessions/{session_id}/messages/{message_id}", - response_model=schemas.Message + "sessions/{session_id}/messages/{message_id}", response_model=schemas.Message ) def get_message( request: Request, @@ -236,24 +276,23 @@ def get_message( 
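
Every paginated route in main.py follows the same recipe: accept an optional `reverse` query parameter, pass it through to the crud builder, and hand the resulting Select to fastapi-pagination's `paginate()`. A stripped-down sketch of that recipe, reusing `Base`, `Item`, and `get_items` from the sketch above; the in-memory SQLite engine, `/items` path, and `ItemSchema` are illustrative only, and the schema assumes Pydantic v2.

    import datetime
    from typing import Optional

    from fastapi import Depends, FastAPI
    from fastapi_pagination import Page, add_pagination
    from fastapi_pagination.ext.sqlalchemy import paginate
    from pydantic import BaseModel, ConfigDict
    from sqlalchemy import create_engine
    from sqlalchemy.orm import Session, sessionmaker

    engine = create_engine("sqlite://")  # throwaway in-memory DB for the sketch
    SessionLocal = sessionmaker(bind=engine)
    Base.metadata.create_all(bind=engine)  # Base/Item/get_items from the sketch above


    def get_db():
        """Session-per-request dependency, mirroring the shape used in main.py."""
        db = SessionLocal()
        try:
            yield db
        finally:
            db.close()


    class ItemSchema(BaseModel):
        model_config = ConfigDict(from_attributes=True)
        id: int
        created_at: datetime.datetime


    app = FastAPI()


    @app.get("/items", response_model=Page[ItemSchema])
    def list_items(reverse: Optional[bool] = False, db: Session = Depends(get_db)):
        # reverse is a plain query parameter; ?reverse=true returns newest-first pages,
        # while page and size are handled by fastapi-pagination.
        return paginate(db, get_items(reverse=reverse))


    add_pagination(app)
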
message_id: uuid.UUID, db: Session = Depends(get_db), ): - """ - - """ - honcho_message = crud.get_message(db, app_id=app_id, session_id=session_id, user_id=user_id, message_id=message_id) + """ """ + honcho_message = crud.get_message( + db, app_id=app_id, session_id=session_id, user_id=user_id, message_id=message_id + ) if honcho_message is None: raise HTTPException(status_code=404, detail="Session not found") return honcho_message + ######################################################## # metamessage routes ######################################################## -@router.post( - "/sessions/{session_id}/metamessages", - response_model=schemas.Metamessage -) + +@router.post("/sessions/{session_id}/metamessages", response_model=schemas.Metamessage) def create_metamessage( - request: Request, + request: Request, app_id: str, user_id: str, session_id: uuid.UUID, @@ -276,29 +315,37 @@ def create_metamessage( """ try: - return crud.create_metamessage(db, metamessage=metamessage, app_id=app_id, user_id=user_id, session_id=session_id) + return crud.create_metamessage( + db, + metamessage=metamessage, + app_id=app_id, + user_id=user_id, + session_id=session_id, + ) except ValueError: raise HTTPException(status_code=404, detail="Session not found") + @router.get( - "/sessions/{session_id}/metamessages", - response_model=Page[schemas.Metamessage] + "/sessions/{session_id}/metamessages", response_model=Page[schemas.Metamessage] ) def get_metamessages( - request: Request, + request: Request, app_id: str, user_id: str, session_id: uuid.UUID, - message_id: Optional[uuid.UUID] = None, + message_id: Optional[uuid.UUID] = None, metamessage_type: Optional[str] = None, + reverse: Optional[bool] = False, db: Session = Depends(get_db), ): """Get all messages for a session - + Args: app_id (str): The ID of the app representing the client application using honcho user_id (str): The User ID representing the user, managed by the user session_id (int): The ID of the Session to retrieve + reverse (bool): Whether to reverse the order of the metamessages Returns: list[schemas.Message]: List of Message objects @@ -307,13 +354,36 @@ def get_metamessages( HTTPException: If the session is not found """ - try: - return paginate(db, crud.get_metamessages(db, app_id=app_id, user_id=user_id, session_id=session_id, message_id=message_id, metamessage_type=metamessage_type)) + try: + return paginate( + db, + crud.get_metamessages( + db, + app_id=app_id, + user_id=user_id, + session_id=session_id, + message_id=message_id, + metamessage_type=metamessage_type, + reverse=reverse, + ), + ) except ValueError: raise HTTPException(status_code=404, detail="Session not found") -@router.get("/sessions/{session_id}/metamessages/{metamessage_id}", response_model=schemas.Metamessage) -def get_metamessage(request: Request, app_id: str, user_id: str, session_id: uuid.UUID, message_id: uuid.UUID, metamessage_id: uuid.UUID, db: Session = Depends(get_db)): + +@router.get( + "/sessions/{session_id}/metamessages/{metamessage_id}", + response_model=schemas.Metamessage, +) +def get_metamessage( + request: Request, + app_id: str, + user_id: str, + session_id: uuid.UUID, + message_id: uuid.UUID, + metamessage_id: uuid.UUID, + db: Session = Depends(get_db), +): """Get a specific session for a user by ID Args: @@ -321,29 +391,42 @@ def get_metamessage(request: Request, app_id: str, user_id: str, session_id: uui user_id (str): The User ID representing the user, managed by the user session_id (int): The ID of the Session to retrieve - Returns: + Returns: 
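
The document-query path touched later in this patch (crud.query_documents and the /collections/{collection_id}/query route) embeds the query string with OpenAI's text-embedding-3-small model and ranks stored documents by pgvector cosine distance. A sketch of that lookup, assuming a Postgres database with the pgvector extension and an OPENAI_API_KEY in the environment; the `Doc` model and `query_docs` helper are illustrative stand-ins for models.Document and crud.query_documents, and `Base` is reused from the earlier sketch.

    from openai import OpenAI
    from pgvector.sqlalchemy import Vector
    from sqlalchemy import Text, select
    from sqlalchemy.orm import Mapped, Session, mapped_column


    class Doc(Base):  # toy stand-in for models.Document
        __tablename__ = "docs"
        id: Mapped[int] = mapped_column(primary_key=True)
        content: Mapped[str] = mapped_column(Text)
        embedding = mapped_column(Vector(1536))  # text-embedding-3-small is 1536-dimensional

    openai_client = OpenAI()  # reads OPENAI_API_KEY from the environment


    def query_docs(db: Session, query: str, top_k: int = 5):
        # Embed the query with the same model the patch uses for stored documents.
        embedding = openai_client.embeddings.create(
            input=query, model="text-embedding-3-small"
        ).data[0].embedding
        # pgvector's cosine_distance comparator: smaller distance means more similar.
        stmt = (
            select(Doc)
            .order_by(Doc.embedding.cosine_distance(embedding))
            .limit(top_k)
        )
        return db.scalars(stmt).all()
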
schemas.Session: The Session object of the requested Session Raises: HTTPException: If the session is not found """ - honcho_metamessage = crud.get_metamessage(db, app_id=app_id, session_id=session_id, user_id=user_id, message_id=message_id, metamessage_id=metamessage_id) + honcho_metamessage = crud.get_metamessage( + db, + app_id=app_id, + session_id=session_id, + user_id=user_id, + message_id=message_id, + metamessage_id=metamessage_id, + ) if honcho_metamessage is None: raise HTTPException(status_code=404, detail="Session not found") return honcho_metamessage + ######################################################## # collection routes ######################################################## + @router.get("/collections/all", response_model=Page[schemas.Collection]) def get_collections( request: Request, app_id: str, user_id: str, + reverse: Optional[bool] = False, db: Session = Depends(get_db), ): - return paginate(db, crud.get_collections(db, app_id=app_id, user_id=user_id)) + return paginate( + db, crud.get_collections(db, app_id=app_id, user_id=user_id, reverse=reverse) + ) + @router.get("/collections/id/{collection_id}", response_model=schemas.Collection) def get_collection_by_id( @@ -351,38 +434,54 @@ def get_collection_by_id( app_id: str, user_id: str, collection_id: uuid.UUID, - db: Session = Depends(get_db) + db: Session = Depends(get_db), ) -> schemas.Collection: - honcho_collection = crud.get_collection_by_id(db, app_id=app_id, user_id=user_id, collection_id=collection_id) + honcho_collection = crud.get_collection_by_id( + db, app_id=app_id, user_id=user_id, collection_id=collection_id + ) if honcho_collection is None: - raise HTTPException(status_code=404, detail="collection not found or does not belong to user") + raise HTTPException( + status_code=404, detail="collection not found or does not belong to user" + ) return honcho_collection + @router.get("/collections/name/{name}", response_model=schemas.Collection) def get_collection_by_name( request: Request, app_id: str, user_id: str, name: str, - db: Session = Depends(get_db) + db: Session = Depends(get_db), ) -> schemas.Collection: - honcho_collection = crud.get_collection_by_name(db, app_id=app_id, user_id=user_id, name=name) + honcho_collection = crud.get_collection_by_name( + db, app_id=app_id, user_id=user_id, name=name + ) if honcho_collection is None: - raise HTTPException(status_code=404, detail="collection not found or does not belong to user") + raise HTTPException( + status_code=404, detail="collection not found or does not belong to user" + ) return honcho_collection + @router.post("/collections", response_model=schemas.Collection) def create_collection( request: Request, app_id: str, user_id: str, collection: schemas.CollectionCreate, - db: Session = Depends(get_db) + db: Session = Depends(get_db), ): try: - return crud.create_collection(db, collection=collection, app_id=app_id, user_id=user_id) + return crud.create_collection( + db, collection=collection, app_id=app_id, user_id=user_id + ) except ValueError: - raise HTTPException(status_code=406, detail="Error invalid collection configuration - name may already exist") + raise HTTPException( + status_code=406, + detail="Error invalid collection configuration - name may already exist", + ) + @router.put("/collections/{collection_id}", response_model=schemas.Collection) def update_collection( @@ -391,63 +490,113 @@ def update_collection( user_id: str, collection_id: uuid.UUID, collection: schemas.CollectionUpdate, - db: Session = Depends(get_db) + db: Session = 
Depends(get_db), ): if collection.name is None: - raise HTTPException(status_code=400, detail="invalid request - name cannot be None") + raise HTTPException( + status_code=400, detail="invalid request - name cannot be None" + ) try: - honcho_collection = crud.update_collection(db, collection=collection, app_id=app_id, user_id=user_id, collection_id=collection_id) + honcho_collection = crud.update_collection( + db, + collection=collection, + app_id=app_id, + user_id=user_id, + collection_id=collection_id, + ) except ValueError: - raise HTTPException(status_code=406, detail="Error invalid collection configuration - name may already exist") + raise HTTPException( + status_code=406, + detail="Error invalid collection configuration - name may already exist", + ) return honcho_collection + @router.delete("/collections/{collection_id}") def delete_collection( request: Request, app_id: str, user_id: str, collection_id: uuid.UUID, - db: Session = Depends(get_db) + db: Session = Depends(get_db), ): - response = crud.delete_collection(db, app_id=app_id, user_id=user_id, collection_id=collection_id) + response = crud.delete_collection( + db, app_id=app_id, user_id=user_id, collection_id=collection_id + ) if response: return {"message": "Collection deleted successfully"} else: - raise HTTPException(status_code=404, detail="collection not found or does not belong to user") + raise HTTPException( + status_code=404, detail="collection not found or does not belong to user" + ) + ######################################################## # Document routes ######################################################## -@router.get("/collections/{collection_id}/documents", response_model=Page[schemas.Document]) + +@router.get( + "/collections/{collection_id}/documents", response_model=Page[schemas.Document] +) def get_documents( request: Request, app_id: str, user_id: str, collection_id: uuid.UUID, - db: Session = Depends(get_db) + reverse: Optional[bool] = False, + db: Session = Depends(get_db), ): try: - return paginate(db, crud.get_documents(db, app_id=app_id, user_id=user_id, collection_id=collection_id)) - except ValueError: # TODO can probably remove this exception ok to return empty here - raise HTTPException(status_code=404, detail="collection not found or does not belong to user") + return paginate( + db, + crud.get_documents( + db, + app_id=app_id, + user_id=user_id, + collection_id=collection_id, + reverse=reverse, + ), + ) + except ( + ValueError + ): # TODO can probably remove this exception ok to return empty here + raise HTTPException( + status_code=404, detail="collection not found or does not belong to user" + ) + + +router.get( + "/collections/{collection_id}/documents/{document_id}", + response_model=schemas.Document, +) + -router.get("/collections/{collection_id}/documents/{document_id}", response_model=schemas.Document) def get_document( request: Request, app_id: str, user_id: str, collection_id: uuid.UUID, document_id: uuid.UUID, - db: Session = Depends(get_db) + db: Session = Depends(get_db), ): - honcho_document = crud.get_document(db, app_id=app_id, user_id=user_id, collection_id=collection_id, document_id=document_id) + honcho_document = crud.get_document( + db, + app_id=app_id, + user_id=user_id, + collection_id=collection_id, + document_id=document_id, + ) if honcho_document is None: - raise HTTPException(status_code=404, detail="document not found or does not belong to user") + raise HTTPException( + status_code=404, detail="document not found or does not belong to user" + ) return 
honcho_document -@router.get("/collections/{collection_id}/query", response_model=Sequence[schemas.Document]) +@router.get( + "/collections/{collection_id}/query", response_model=Sequence[schemas.Document] +) def query_documents( request: Request, app_id: str, @@ -455,11 +604,19 @@ def query_documents( collection_id: uuid.UUID, query: str, top_k: int = 5, - db: Session = Depends(get_db) + db: Session = Depends(get_db), ): if top_k is not None and top_k > 50: - top_k = 50 # TODO see if we need to paginate this - return crud.query_documents(db=db, app_id=app_id, user_id=user_id, collection_id=collection_id, query=query, top_k=top_k) + top_k = 50 # TODO see if we need to paginate this + return crud.query_documents( + db=db, + app_id=app_id, + user_id=user_id, + collection_id=collection_id, + query=query, + top_k=top_k, + ) + @router.post("/collections/{collection_id}/documents", response_model=schemas.Document) def create_document( @@ -468,14 +625,26 @@ def create_document( user_id: str, collection_id: uuid.UUID, document: schemas.DocumentCreate, - db: Session = Depends(get_db) + db: Session = Depends(get_db), ): try: - return crud.create_document(db, document=document, app_id=app_id, user_id=user_id, collection_id=collection_id) + return crud.create_document( + db, + document=document, + app_id=app_id, + user_id=user_id, + collection_id=collection_id, + ) except ValueError: - raise HTTPException(status_code=404, detail="collection not found or does not belong to user") + raise HTTPException( + status_code=404, detail="collection not found or does not belong to user" + ) + -@router.put("/collections/{collection_id}/documents/{document_id}", response_model=schemas.Document) +@router.put( + "/collections/{collection_id}/documents/{document_id}", + response_model=schemas.Document, +) def update_document( request: Request, app_id: str, @@ -483,11 +652,21 @@ def update_document( collection_id: uuid.UUID, document_id: uuid.UUID, document: schemas.DocumentUpdate, - db: Session = Depends(get_db) + db: Session = Depends(get_db), ): - if document.content is None and document.metadata is None: - raise HTTPException(status_code=400, detail="content and metadata cannot both be None") - return crud.update_document(db, document=document, app_id=app_id, user_id=user_id, collection_id=collection_id, document_id=document_id) + if document.content is None and document.metadata is None: + raise HTTPException( + status_code=400, detail="content and metadata cannot both be None" + ) + return crud.update_document( + db, + document=document, + app_id=app_id, + user_id=user_id, + collection_id=collection_id, + document_id=document_id, + ) + @router.delete("/collections/{collection_id}/documents/{document_id}") def delete_document( @@ -496,12 +675,21 @@ def delete_document( user_id: str, collection_id: uuid.UUID, document_id: uuid.UUID, - db: Session = Depends(get_db) + db: Session = Depends(get_db), ): - response = crud.delete_document(db, app_id=app_id, user_id=user_id, collection_id=collection_id, document_id=document_id) + response = crud.delete_document( + db, + app_id=app_id, + user_id=user_id, + collection_id=collection_id, + document_id=document_id, + ) if response: return {"message": "Document deleted successfully"} else: - raise HTTPException(status_code=404, detail="document not found or does not belong to user") + raise HTTPException( + status_code=404, detail="document not found or does not belong to user" + ) + app.include_router(router) diff --git a/sdk/honcho/client.py b/sdk/honcho/client.py index 
a21532b..6c8b17d 100644 --- a/sdk/honcho/client.py +++ b/sdk/honcho/client.py @@ -4,8 +4,10 @@ import httpx from .schemas import Message, Metamessage, Document + class AsyncGetPage: """Base class for receiving Paginated API results""" + def __init__(self, response: Dict) -> None: """Constructor for Page with relevant information about the results and pages @@ -16,18 +18,19 @@ def __init__(self, response: Dict) -> None: self.page = response["page"] self.page_size = response["size"] self.pages = response["pages"] - self.items =[] + self.items = [] async def next(self): """Shortcut method to Get the next page of results""" pass + class AsyncGetSessionPage(AsyncGetPage): """Paginated Results for Get Session Requests""" def __init__(self, client, options: Dict, response: Dict): """Constructor for Page Result from Session Get Request - + Args: client (AsyncClient): Honcho Client options (Dict): Options for the request used mainly for next() to filter queries. The two parameters available are user_id which is required and location_id which is optional @@ -37,6 +40,7 @@ def __init__(self, client, options: Dict, response: Dict): self.client = client self.user_id = options["user_id"] self.location_id = options["location_id"] + self.reverse = options["reverse"] self.items = [ AsyncSession( client=client, @@ -49,7 +53,7 @@ def __init__(self, client, options: Dict, response: Dict): ) for session in response["items"] ] - + async def next(self): """Get the next page of results Returns: @@ -57,22 +61,30 @@ async def next(self): """ if self.page >= self.pages: return None - return await self.client.get_sessions(self.user_id, self.location_id, self.page + 1, self.page_size) + return await self.client.get_sessions( + user_id=self.user_id, + location_id=self.location_id, + page=(self.page + 1), + page_size=self.page_size, + reverse=self.reverse, + ) + class AsyncGetMessagePage(AsyncGetPage): """Paginated Results for Get Session Requests""" - def __init__(self, session, response: Dict): + def __init__(self, session, options, response: Dict): """Constructor for Page Result from Session Get Request - + Args: session (AsyncSession): Session the returned messages are associated with response (Dict): Response from API with pagination information """ super().__init__(response) self.session = session + self.reverse = options["reverse"] self.items = [ - Message( + Message( session_id=session.id, id=message["id"], is_user=message["is_user"], @@ -89,13 +101,15 @@ async def next(self): """ if self.page >= self.pages: return None - return await self.session.get_messages((self.page + 1), self.page_size) + return await self.session.get_messages( + (self.page + 1), self.page_size, self.reverse + ) + class AsyncGetMetamessagePage(AsyncGetPage): - def __init__(self, session, options: Dict, response: Dict) -> None: """Constructor for Page Result from Metamessage Get Request - + Args: session (AsyncSession): Session the returned messages are associated with options (Dict): Options for the request used mainly for next() to filter queries. 
The two parameters available are message_id and metamessage_type which are both optional @@ -104,16 +118,19 @@ def __init__(self, session, options: Dict, response: Dict) -> None: super().__init__(response) self.session = session self.message_id = options["message_id"] if "message_id" in options else None - self.metamessage_type = options["metamessage_type"] if "metamessage_type" in options else None + self.metamessage_type = ( + options["metamessage_type"] if "metamessage_type" in options else None + ) + self.reverse = options["reverse"] self.items = [ - Metamessage( - id=metamessage["id"], - message_id=metamessage["message_id"], - metamessage_type=metamessage["metamessage_type"], - content=metamessage["content"], - created_at=metamessage["created_at"], - ) - for metamessage in response["items"] + Metamessage( + id=metamessage["id"], + message_id=metamessage["message_id"], + metamessage_type=metamessage["metamessage_type"], + content=metamessage["content"], + created_at=metamessage["created_at"], + ) + for metamessage in response["items"] ] async def next(self): @@ -123,19 +140,28 @@ async def next(self): """ if self.page >= self.pages: return None - return await self.session.get_metamessages(metamessage_type=self.metamessage_type, message=self.message_id, page=(self.page + 1), page_size=self.page_size) + return await self.session.get_metamessages( + metamessage_type=self.metamessage_type, + message=self.message_id, + page=(self.page + 1), + page_size=self.page_size, + reverse=self.reverse, + ) + class AsyncGetDocumentPage(AsyncGetPage): """Paginated results for Get Document requests""" - def __init__(self, collection, response: Dict) -> None: + + def __init__(self, collection, options, response: Dict) -> None: """Constructor for Page Result from Document Get Request - + Args: collection (AsyncCollection): Collection the returned documents are associated with response (Dict): Response from API with pagination information """ super().__init__(response) self.collection = collection + self.reverse = options["reverse"] self.items = [ Document( id=document["id"], @@ -143,7 +169,7 @@ def __init__(self, collection, response: Dict) -> None: content=document["content"], metadata=document["metadata"], created_at=document["created_at"], - ) + ) for document in response["items"] ] @@ -154,14 +180,17 @@ async def next(self): """ if self.page >= self.pages: return None - return await self.collection.get_documents(page=self.page + 1, page_size=self.page_size) + return await self.collection.get_documents( + page=self.page + 1, page_size=self.page_size, reverse=self.reverse + ) + class AsyncGetCollectionPage(AsyncGetPage): """Paginated results for Get Collection requests""" def __init__(self, client, options: Dict, response: Dict): """Constructor for page result from Get Collection Request - + Args: client (Async Client): Honcho Client options (Dict): Options for the request used mainly for next() to filter queries. 
The only parameter available is user_id which is required @@ -170,6 +199,7 @@ def __init__(self, client, options: Dict, response: Dict): super().__init__(response) self.client = client self.user_id = options["user_id"] + self.reverse = options["reverse"] self.items = [ AsyncCollection( client=client, @@ -180,7 +210,7 @@ def __init__(self, client, options: Dict, response: Dict): ) for collection in response["items"] ] - + async def next(self): """Get the next page of results Returns: @@ -188,7 +218,13 @@ async def next(self): """ if self.page >= self.pages: return None - return await self.client.get_collections(user_id=self.user_id, page=self.page + 1, page_size=self.page_size) + return await self.client.get_collections( + user_id=self.user_id, + page=self.page + 1, + page_size=self.page_size, + reverse=self.reverse, + ) + class AsyncClient: """Honcho API Client Object""" @@ -196,7 +232,7 @@ class AsyncClient: def __init__(self, app_id: str, base_url: str = "https://demo.honcho.dev"): """Constructor for Client""" self.base_url = base_url # Base URL for the instance of the Honcho API - self.app_id = app_id # Representing ID of the client application + self.app_id = app_id # Representing ID of the client application self.client = httpx.AsyncClient() @property @@ -226,10 +262,17 @@ async def get_session(self, user_id: str, session_id: uuid.UUID): location_id=data["location_id"], is_active=data["is_active"], metadata=data["metadata"], - created_at=data["created_at"] + created_at=data["created_at"], ) - async def get_sessions(self, user_id: str, location_id: Optional[str] = None, page: int = 1, page_size: int = 50): + async def get_sessions( + self, + user_id: str, + location_id: Optional[str] = None, + page: int = 1, + page_size: int = 50, + reverse: bool = False, + ): """Return sessions associated with a user paginated Args: @@ -242,19 +285,22 @@ async def get_sessions(self, user_id: str, location_id: Optional[str] = None, pa AsyncGetSessionPage: Page or results for get_sessions query """ - url = f"{self.common_prefix}/users/{user_id}/sessions?page={page}&size={page_size}" + ( - f"&location_id={location_id}" if location_id else "" + url = ( + f"{self.common_prefix}/users/{user_id}/sessions?page={page}&size={page_size}&reverse={reverse}" + + (f"&location_id={location_id}" if location_id else "") ) response = await self.client.get(url) response.raise_for_status() data = response.json() - options = { - "location_id": location_id, - "user_id": user_id - } + options = {"location_id": location_id, "user_id": user_id, "reverse": reverse} return AsyncGetSessionPage(self, options, data) - async def get_sessions_generator(self, user_id: str, location_id: Optional[str] = None): + async def get_sessions_generator( + self, + user_id: str, + location_id: Optional[str] = None, + reverse: bool = False, + ): """Shortcut Generator for get_sessions. 
Generator to iterate through all sessions for a user in an app Args: @@ -267,7 +313,9 @@ async def get_sessions_generator(self, user_id: str, location_id: Optional[str] """ page = 1 page_size = 50 - get_session_response = await self.get_sessions(user_id, location_id, page, page_size) + get_session_response = await self.get_sessions( + user_id, location_id, page, page_size, reverse + ) while True: # get_session_response = self.get_sessions(user_id, location_id, page, page_size) for session in get_session_response.items: @@ -276,7 +324,7 @@ async def get_sessions_generator(self, user_id: str, location_id: Optional[str] new_sessions = await get_session_response.next() if not new_sessions: break - + get_session_response = new_sessions async def create_session( @@ -309,7 +357,9 @@ async def create_session( ) async def create_collection( - self, user_id: str, name: str, + self, + user_id: str, + name: str, ): """Create a collection for a user @@ -354,29 +404,32 @@ async def get_collection(self, user_id: str, name: str): id=data["id"], user_id=data["user_id"], name=data["name"], - created_at=data["created_at"] + created_at=data["created_at"], ) - async def get_collections(self, user_id: str, page: int = 1, page_size: int = 50): + async def get_collections( + self, user_id: str, page: int = 1, page_size: int = 50, reverse: bool = False + ): """Return collections associated with a user paginated Args: user_id (str): The User ID representing the user to get the collection for page (int, optional): The page of results to return page_size (int, optional): The number of results to return + reverse (bool): Whether to reverse the order of the results Returns: AsyncGetCollectionPage: Page or results for get_collections query """ - url = f"{self.common_prefix}/users/{user_id}/collections/all?page={page}&size={page_size}" + url = f"{self.common_prefix}/users/{user_id}/collections/all?page={page}&size={page_size}&reverse={reverse}" response = await self.client.get(url) response.raise_for_status() data = response.json() - options = {"user_id": user_id} + options = {"user_id": user_id, "reverse": reverse} return AsyncGetCollectionPage(self, options, data) - async def get_collections_generator(self, user_id: str): + async def get_collections_generator(self, user_id: str, reverse: bool = False): """Shortcut Generator for get_sessions. 
Generator to iterate through all sessions for a user in an app Args: @@ -388,7 +441,9 @@ async def get_collections_generator(self, user_id: str): """ page = 1 page_size = 50 - get_collection_response = await self.get_collections(user_id, page, page_size) + get_collection_response = await self.get_collections( + user_id, page, page_size, reverse + ) while True: # get_collection_response = self.get_collections(user_id, location_id, page, page_size) for collection in get_collection_response.items: @@ -397,7 +452,7 @@ async def get_collections_generator(self, user_id: str): new_collections = await get_collection_response.next() if not new_collections: break - + get_collection_response = new_collections @@ -412,7 +467,7 @@ def __init__( location_id: str, metadata: dict, is_active: bool, - created_at: datetime.datetime + created_at: datetime.datetime, ): """Constructor for Session""" self.base_url: str = client.base_url @@ -434,7 +489,6 @@ def __str__(self): """String representation of Session""" return f"AsyncSession(id={self.id}, app_id={self.app_id}, user_id={self.user_id}, location_id={self.location_id}, metadata={self.metadata}, is_active={self.is_active})" - @property def is_active(self): """Returns whether the session is active - made property to prevent tampering""" @@ -458,7 +512,13 @@ async def create_message(self, is_user: bool, content: str): response = await self.client.post(url, json=data) response.raise_for_status() data = response.json() - return Message(session_id=self.id, id=data["id"], is_user=is_user, content=content, created_at=data["created_at"]) + return Message( + session_id=self.id, + id=data["id"], + is_user=is_user, + content=content, + created_at=data["created_at"], + ) async def get_message(self, message_id: uuid.UUID) -> Message: """Get a specific message for a session based on ID @@ -474,26 +534,36 @@ async def get_message(self, message_id: uuid.UUID) -> Message: response = await self.client.get(url) response.raise_for_status() data = response.json() - return Message(session_id=self.id, id=data["id"], is_user=data["is_user"], content=data["content"], created_at=data["created_at"]) + return Message( + session_id=self.id, + id=data["id"], + is_user=data["is_user"], + content=data["content"], + created_at=data["created_at"], + ) - async def get_messages(self, page: int = 1, page_size: int = 50) -> AsyncGetMessagePage: + async def get_messages( + self, page: int = 1, page_size: int = 50, reverse: bool = False + ) -> AsyncGetMessagePage: """Get all messages for a session Args: page (int, optional): The page of results to return page_size (int, optional): The number of results to return per page + reverse (bool): Whether to reverse the order of the results Returns: AsyncGetMessagePage: Page of Message objects """ - url = f"{self.common_prefix}/users/{self.user_id}/sessions/{self.id}/messages?page={page}&size={page_size}" + url = f"{self.common_prefix}/users/{self.user_id}/sessions/{self.id}/messages?page={page}&size={page_size}&reverse={reverse}" response = await self.client.get(url) response.raise_for_status() data = response.json() - return AsyncGetMessagePage(self, data) - - async def get_messages_generator(self): + options = {"reverse": reverse} + return AsyncGetMessagePage(self, options, data) + + async def get_messages_generator(self, reverse: bool = False): """Shortcut Generator for get_messages. 
Generator to iterate through all messages for a session in an app Yields: @@ -502,7 +572,7 @@ async def get_messages_generator(self): """ page = 1 page_size = 50 - get_messages_page= await self.get_messages(page, page_size) + get_messages_page = await self.get_messages(page, page_size, reverse) while True: # get_session_response = self.get_sessions(user_id, location_id, page, page_size) for message in get_messages_page.items: @@ -511,10 +581,12 @@ async def get_messages_generator(self): new_messages = await get_messages_page.next() if not new_messages: break - + get_messages_page = new_messages - async def create_metamessage(self, message: Message, metamessage_type: str, content: str): + async def create_metamessage( + self, message: Message, metamessage_type: str, content: str + ): """Adds a metamessage to a session and links it to a specific message Args: @@ -528,13 +600,24 @@ async def create_metamessage(self, message: Message, metamessage_type: str, cont """ if not self.is_active: raise Exception("Session is inactive") - data = {"metamessage_type": metamessage_type, "content": content, "message_id": message.id} - url = f"{self.common_prefix}/users/{self.user_id}/sessions/{self.id}/metamessages" + data = { + "metamessage_type": metamessage_type, + "content": content, + "message_id": message.id, + } + url = ( + f"{self.common_prefix}/users/{self.user_id}/sessions/{self.id}/metamessages" + ) response = await self.client.post(url, json=data) response.raise_for_status() data = response.json() - return Metamessage(id=data["id"], message_id=message.id, metamessage_type=metamessage_type, content=content, created_at=data["created_at"]) - + return Metamessage( + id=data["id"], + message_id=message.id, + metamessage_type=metamessage_type, + content=content, + created_at=data["created_at"], + ) async def get_metamessage(self, metamessage_id: uuid.UUID) -> Metamessage: """Get a specific metamessage @@ -550,9 +633,22 @@ async def get_metamessage(self, metamessage_id: uuid.UUID) -> Metamessage: response = await self.client.get(url) response.raise_for_status() data = response.json() - return Metamessage(id=data["id"], message_id=data["message_id"], metamessage_type=data["metamessage_type"], content=data["content"], created_at=data["created_at"]) + return Metamessage( + id=data["id"], + message_id=data["message_id"], + metamessage_type=data["metamessage_type"], + content=data["content"], + created_at=data["created_at"], + ) - async def get_metamessages(self, metamessage_type: Optional[str] = None, message: Optional[Message] = None, page: int = 1, page_size: int = 50) -> AsyncGetMetamessagePage: + async def get_metamessages( + self, + metamessage_type: Optional[str] = None, + message: Optional[Message] = None, + page: int = 1, + page_size: int = 50, + reverse: bool = False, + ) -> AsyncGetMetamessagePage: """Get all messages for a session Args: @@ -563,7 +659,7 @@ async def get_metamessages(self, metamessage_type: Optional[str] = None, message list[Dict]: List of Message objects """ - url = f"{self.common_prefix}/users/{self.user_id}/sessions/{self.id}/metamessages?page={page}&size={page_size}" + url = f"{self.common_prefix}/users/{self.user_id}/sessions/{self.id}/metamessages?page={page}&size={page_size}&reverse={reverse}" if metamessage_type: url += f"&metamessage_type={metamessage_type}" if message: @@ -572,12 +668,18 @@ async def get_metamessages(self, metamessage_type: Optional[str] = None, message response.raise_for_status() data = response.json() options = { - "metamessage_type": 
metamessage_type, - "message_id": message.id if message else None - } + "metamessage_type": metamessage_type, + "message_id": message.id if message else None, + "reverse": reverse, + } return AsyncGetMetamessagePage(self, options, data) - - async def get_metamessages_generator(self, metamessage_type: Optional[str] = None, message: Optional[Message] = None): + + async def get_metamessages_generator( + self, + metamessage_type: Optional[str] = None, + message: Optional[Message] = None, + reverse: bool = False, + ): """Shortcut Generator for get_metamessages. Generator to iterate through all metamessages for a session in an app Args: @@ -590,19 +692,23 @@ async def get_metamessages_generator(self, metamessage_type: Optional[str] = Non """ page = 1 page_size = 50 - get_metamessages_page = await self.get_metamessages(metamessage_type=metamessage_type, message=message, page=page, page_size=page_size) + get_metamessages_page = await self.get_metamessages( + metamessage_type=metamessage_type, + message=message, + page=page, + page_size=page_size, + reverse=reverse, + ) while True: - # get_session_response = self.get_sessions(user_id, location_id, page, page_size) for metamessage in get_metamessages_page.items: yield metamessage new_messages = await get_metamessages_page.next() if not new_messages: break - + get_metamessages_page = new_messages - async def update(self, metadata: Dict): """Update the metadata of a session @@ -626,6 +732,7 @@ async def close(self): response.raise_for_status() self._is_active = False + class AsyncCollection: """Represents a single collection for a user in an app""" @@ -634,8 +741,8 @@ def __init__( client: AsyncClient, id: uuid.UUID, user_id: str, - name: str, - created_at: datetime.datetime, + name: str, + created_at: datetime.datetime, ): """Constructor for Collection""" self.base_url: str = client.base_url @@ -690,17 +797,19 @@ async def create_document(self, content: str, metadata: Dict = {}): """ data = {"metadata": metadata, "content": content} - url = f"{self.common_prefix}/users/{self.user_id}/collections/{self.id}/documents" + url = ( + f"{self.common_prefix}/users/{self.user_id}/collections/{self.id}/documents" + ) response = await self.client.post(url, json=data) response.raise_for_status() data = response.json() return Document( - collection_id=self.id, - id=data["id"], - metadata=metadata, - content=content, - created_at=data["created_at"] - ) + collection_id=self.id, + id=data["id"], + metadata=metadata, + content=content, + created_at=data["created_at"], + ) async def get_document(self, document_id: uuid.UUID) -> Document: """Get a specific document for a collection based on ID @@ -717,14 +826,16 @@ async def get_document(self, document_id: uuid.UUID) -> Document: response.raise_for_status() data = response.json() return Document( - collection_id=self.id, - id=data["id"], - metadata=data["metadata"], - content=data["content"], - created_at=data["created_at"] - ) + collection_id=self.id, + id=data["id"], + metadata=data["metadata"], + content=data["content"], + created_at=data["created_at"], + ) - async def get_documents(self, page: int = 1, page_size: int = 50) -> AsyncGetDocumentPage: + async def get_documents( + self, page: int = 1, page_size: int = 50, reverse: bool = False + ) -> AsyncGetDocumentPage: """Get all documents for a collection Args: @@ -735,13 +846,14 @@ async def get_documents(self, page: int = 1, page_size: int = 50) -> AsyncGetDoc AsyncGetDocumentPage: Page of Document objects """ - url = 
f"{self.common_prefix}/users/{self.user_id}/collections/{self.id}/documents?page={page}&size={page_size}" + url = f"{self.common_prefix}/users/{self.user_id}/collections/{self.id}/documents?page={page}&size={page_size}&reverse={reverse}" response = await self.client.get(url) response.raise_for_status() data = response.json() - return AsyncGetDocumentPage(self, data) - - async def get_documents_generator(self): + options = {"reverse": reverse} + return AsyncGetDocumentPage(self, options, data) + + async def get_documents_generator(self, reverse: bool = False): """Shortcut Generator for get_documents. Generator to iterate through all documents for a collection in an app Yields: @@ -750,7 +862,7 @@ async def get_documents_generator(self): """ page = 1 page_size = 50 - get_documents_page= await self.get_documents(page, page_size) + get_documents_page = await self.get_documents(page, page_size, reverse) while True: for document in get_documents_page.items: yield document @@ -758,11 +870,11 @@ async def get_documents_generator(self): new_documents = await get_documents_page.next() if not new_documents: break - + get_documents_page = new_documents async def query(self, query: str, top_k: int = 5) -> List[Document]: - """query the documents by cosine distance + """query the documents by cosine distance Args: query (str): The query string to compare other embeddings too top_k (int, optional): The number of results to return. Defaults to 5 max 50 @@ -774,18 +886,20 @@ async def query(self, query: str, top_k: int = 5) -> List[Document]: response = await self.client.get(url) response.raise_for_status() data = [ - Document( - collection_id=self.id, - content=document["content"], - id=document["id"], - created_at=document["created_at"], - metadata=document["metadata"] - ) - for document in response.json() + Document( + collection_id=self.id, + content=document["content"], + id=document["id"], + created_at=document["created_at"], + metadata=document["metadata"], + ) + for document in response.json() ] return data - async def update_document(self, document: Document, content: Optional[str], metadata: Optional[Dict]) -> Document: + async def update_document( + self, document: Document, content: Optional[str], metadata: Optional[Dict] + ) -> Document: """Update a document in the collection Args: diff --git a/sdk/honcho/sync_client.py b/sdk/honcho/sync_client.py index 72c5261..5606bb6 100644 --- a/sdk/honcho/sync_client.py +++ b/sdk/honcho/sync_client.py @@ -4,8 +4,10 @@ import httpx from .schemas import Message, Metamessage, Document + class GetPage: """Base class for receiving Paginated API results""" + def __init__(self, response: Dict) -> None: """Constructor for Page with relevant information about the results and pages @@ -16,18 +18,19 @@ def __init__(self, response: Dict) -> None: self.page = response["page"] self.page_size = response["size"] self.pages = response["pages"] - self.items =[] + self.items = [] def next(self): """Shortcut method to Get the next page of results""" pass + class GetSessionPage(GetPage): """Paginated Results for Get Session Requests""" def __init__(self, client, options: Dict, response: Dict): """Constructor for Page Result from Session Get Request - + Args: client (Client): Honcho Client options (Dict): Options for the request used mainly for next() to filter queries. 
The two parameters available are user_id which is required and location_id which is optional @@ -37,6 +40,7 @@ def __init__(self, client, options: Dict, response: Dict): self.client = client self.user_id = options["user_id"] self.location_id = options["location_id"] + self.reverse = options["reverse"] self.items = [ Session( client=client, @@ -49,7 +53,7 @@ def __init__(self, client, options: Dict, response: Dict): ) for session in response["items"] ] - + def next(self): """Get the next page of results Returns: @@ -57,22 +61,30 @@ def next(self): """ if self.page >= self.pages: return None - return self.client.get_sessions(self.user_id, self.location_id, self.page + 1, self.page_size) + return self.client.get_sessions( + user_id=self.user_id, + location_id=self.location_id, + page=(self.page + 1), + page_size=self.page_size, + reverse=self.reverse, + ) + class GetMessagePage(GetPage): """Paginated Results for Get Session Requests""" - def __init__(self, session, response: Dict): + def __init__(self, session, options, response: Dict): """Constructor for Page Result from Session Get Request - + Args: session (Session): Session the returned messages are associated with response (Dict): Response from API with pagination information """ super().__init__(response) self.session = session + self.reverse = options["reverse"] self.items = [ - Message( + Message( session_id=session.id, id=message["id"], is_user=message["is_user"], @@ -89,13 +101,15 @@ def next(self): """ if self.page >= self.pages: return None - return self.session.get_messages((self.page + 1), self.page_size) + return self.session.get_messages( + (self.page + 1), self.page_size, self.reverse + ) + class GetMetamessagePage(GetPage): - def __init__(self, session, options: Dict, response: Dict) -> None: """Constructor for Page Result from Metamessage Get Request - + Args: session (Session): Session the returned messages are associated with options (Dict): Options for the request used mainly for next() to filter queries. 
The two parameters available are message_id and metamessage_type which are both optional @@ -104,16 +118,19 @@ def __init__(self, session, options: Dict, response: Dict) -> None: super().__init__(response) self.session = session self.message_id = options["message_id"] if "message_id" in options else None - self.metamessage_type = options["metamessage_type"] if "metamessage_type" in options else None + self.metamessage_type = ( + options["metamessage_type"] if "metamessage_type" in options else None + ) + self.reverse = options["reverse"] self.items = [ - Metamessage( - id=metamessage["id"], - message_id=metamessage["message_id"], - metamessage_type=metamessage["metamessage_type"], - content=metamessage["content"], - created_at=metamessage["created_at"], - ) - for metamessage in response["items"] + Metamessage( + id=metamessage["id"], + message_id=metamessage["message_id"], + metamessage_type=metamessage["metamessage_type"], + content=metamessage["content"], + created_at=metamessage["created_at"], + ) + for metamessage in response["items"] ] def next(self): @@ -123,19 +140,28 @@ def next(self): """ if self.page >= self.pages: return None - return self.session.get_metamessages(metamessage_type=self.metamessage_type, message=self.message_id, page=(self.page + 1), page_size=self.page_size) + return self.session.get_metamessages( + metamessage_type=self.metamessage_type, + message=self.message_id, + page=(self.page + 1), + page_size=self.page_size, + reverse=self.reverse, + ) + class GetDocumentPage(GetPage): """Paginated results for Get Document requests""" - def __init__(self, collection, response: Dict) -> None: + + def __init__(self, collection, options, response: Dict) -> None: """Constructor for Page Result from Document Get Request - + Args: collection (Collection): Collection the returned documents are associated with response (Dict): Response from API with pagination information """ super().__init__(response) self.collection = collection + self.reverse = options["reverse"] self.items = [ Document( id=document["id"], @@ -143,7 +169,7 @@ def __init__(self, collection, response: Dict) -> None: content=document["content"], metadata=document["metadata"], created_at=document["created_at"], - ) + ) for document in response["items"] ] @@ -154,14 +180,17 @@ def next(self): """ if self.page >= self.pages: return None - return self.collection.get_documents(page=self.page + 1, page_size=self.page_size) + return self.collection.get_documents( + page=self.page + 1, page_size=self.page_size, reverse=self.reverse + ) + class GetCollectionPage(GetPage): """Paginated results for Get Collection requests""" def __init__(self, client, options: Dict, response: Dict): """Constructor for page result from Get Collection Request - + Args: client ( Client): Honcho Client options (Dict): Options for the request used mainly for next() to filter queries. 
The only parameter available is user_id which is required @@ -170,6 +199,7 @@ def __init__(self, client, options: Dict, response: Dict): super().__init__(response) self.client = client self.user_id = options["user_id"] + self.reverse = options["reverse"] self.items = [ Collection( client=client, @@ -180,7 +210,7 @@ def __init__(self, client, options: Dict, response: Dict): ) for collection in response["items"] ] - + def next(self): """Get the next page of results Returns: @@ -188,7 +218,13 @@ def next(self): """ if self.page >= self.pages: return None - return self.client.get_collections(user_id=self.user_id, page=self.page + 1, page_size=self.page_size) + return self.client.get_collections( + user_id=self.user_id, + page=self.page + 1, + page_size=self.page_size, + reverse=self.reverse, + ) + class Client: """Honcho API Client Object""" @@ -196,7 +232,7 @@ class Client: def __init__(self, app_id: str, base_url: str = "https://demo.honcho.dev"): """Constructor for Client""" self.base_url = base_url # Base URL for the instance of the Honcho API - self.app_id = app_id # Representing ID of the client application + self.app_id = app_id # Representing ID of the client application self.client = httpx.Client() @property @@ -226,10 +262,17 @@ def get_session(self, user_id: str, session_id: uuid.UUID): location_id=data["location_id"], is_active=data["is_active"], metadata=data["metadata"], - created_at=data["created_at"] + created_at=data["created_at"], ) - def get_sessions(self, user_id: str, location_id: Optional[str] = None, page: int = 1, page_size: int = 50): + def get_sessions( + self, + user_id: str, + location_id: Optional[str] = None, + page: int = 1, + page_size: int = 50, + reverse: bool = False, + ): """Return sessions associated with a user paginated Args: @@ -242,19 +285,22 @@ def get_sessions(self, user_id: str, location_id: Optional[str] = None, page: in GetSessionPage: Page or results for get_sessions query """ - url = f"{self.common_prefix}/users/{user_id}/sessions?page={page}&size={page_size}" + ( - f"&location_id={location_id}" if location_id else "" + url = ( + f"{self.common_prefix}/users/{user_id}/sessions?page={page}&size={page_size}&reverse={reverse}" + + (f"&location_id={location_id}" if location_id else "") ) response = self.client.get(url) response.raise_for_status() data = response.json() - options = { - "location_id": location_id, - "user_id": user_id - } + options = {"location_id": location_id, "user_id": user_id, "reverse": reverse} return GetSessionPage(self, options, data) - def get_sessions_generator(self, user_id: str, location_id: Optional[str] = None): + def get_sessions_generator( + self, + user_id: str, + location_id: Optional[str] = None, + reverse: bool = False, + ): """Shortcut Generator for get_sessions. 
Generator to iterate through all sessions for a user in an app Args: @@ -267,7 +313,9 @@ def get_sessions_generator(self, user_id: str, location_id: Optional[str] = None """ page = 1 page_size = 50 - get_session_response = self.get_sessions(user_id, location_id, page, page_size) + get_session_response = self.get_sessions( + user_id, location_id, page, page_size, reverse + ) while True: # get_session_response = self.get_sessions(user_id, location_id, page, page_size) for session in get_session_response.items: @@ -276,7 +324,7 @@ def get_sessions_generator(self, user_id: str, location_id: Optional[str] = None new_sessions = get_session_response.next() if not new_sessions: break - + get_session_response = new_sessions def create_session( @@ -309,7 +357,9 @@ def create_session( ) def create_collection( - self, user_id: str, name: str, + self, + user_id: str, + name: str, ): """Create a collection for a user @@ -354,29 +404,32 @@ def get_collection(self, user_id: str, name: str): id=data["id"], user_id=data["user_id"], name=data["name"], - created_at=data["created_at"] + created_at=data["created_at"], ) - def get_collections(self, user_id: str, page: int = 1, page_size: int = 50): + def get_collections( + self, user_id: str, page: int = 1, page_size: int = 50, reverse: bool = False + ): """Return collections associated with a user paginated Args: user_id (str): The User ID representing the user to get the collection for page (int, optional): The page of results to return page_size (int, optional): The number of results to return + reverse (bool): Whether to reverse the order of the results Returns: GetCollectionPage: Page or results for get_collections query """ - url = f"{self.common_prefix}/users/{user_id}/collections/all?page={page}&size={page_size}" + url = f"{self.common_prefix}/users/{user_id}/collections/all?page={page}&size={page_size}&reverse={reverse}" response = self.client.get(url) response.raise_for_status() data = response.json() - options = {"user_id": user_id} + options = {"user_id": user_id, "reverse": reverse} return GetCollectionPage(self, options, data) - def get_collections_generator(self, user_id: str): + def get_collections_generator(self, user_id: str, reverse: bool = False): """Shortcut Generator for get_sessions. 
Generator to iterate through all sessions for a user in an app Args: @@ -388,7 +441,9 @@ def get_collections_generator(self, user_id: str): """ page = 1 page_size = 50 - get_collection_response = self.get_collections(user_id, page, page_size) + get_collection_response = self.get_collections( + user_id, page, page_size, reverse + ) while True: # get_collection_response = self.get_collections(user_id, location_id, page, page_size) for collection in get_collection_response.items: @@ -397,7 +452,7 @@ def get_collections_generator(self, user_id: str): new_collections = get_collection_response.next() if not new_collections: break - + get_collection_response = new_collections @@ -412,7 +467,7 @@ def __init__( location_id: str, metadata: dict, is_active: bool, - created_at: datetime.datetime + created_at: datetime.datetime, ): """Constructor for Session""" self.base_url: str = client.base_url @@ -434,7 +489,6 @@ def __str__(self): """String representation of Session""" return f"Session(id={self.id}, app_id={self.app_id}, user_id={self.user_id}, location_id={self.location_id}, metadata={self.metadata}, is_active={self.is_active})" - @property def is_active(self): """Returns whether the session is active - made property to prevent tampering""" @@ -458,7 +512,13 @@ def create_message(self, is_user: bool, content: str): response = self.client.post(url, json=data) response.raise_for_status() data = response.json() - return Message(session_id=self.id, id=data["id"], is_user=is_user, content=content, created_at=data["created_at"]) + return Message( + session_id=self.id, + id=data["id"], + is_user=is_user, + content=content, + created_at=data["created_at"], + ) def get_message(self, message_id: uuid.UUID) -> Message: """Get a specific message for a session based on ID @@ -474,26 +534,36 @@ def get_message(self, message_id: uuid.UUID) -> Message: response = self.client.get(url) response.raise_for_status() data = response.json() - return Message(session_id=self.id, id=data["id"], is_user=data["is_user"], content=data["content"], created_at=data["created_at"]) + return Message( + session_id=self.id, + id=data["id"], + is_user=data["is_user"], + content=data["content"], + created_at=data["created_at"], + ) - def get_messages(self, page: int = 1, page_size: int = 50) -> GetMessagePage: + def get_messages( + self, page: int = 1, page_size: int = 50, reverse: bool = False + ) -> GetMessagePage: """Get all messages for a session Args: page (int, optional): The page of results to return page_size (int, optional): The number of results to return per page + reverse (bool): Whether to reverse the order of the results Returns: GetMessagePage: Page of Message objects """ - url = f"{self.common_prefix}/users/{self.user_id}/sessions/{self.id}/messages?page={page}&size={page_size}" + url = f"{self.common_prefix}/users/{self.user_id}/sessions/{self.id}/messages?page={page}&size={page_size}&reverse={reverse}" response = self.client.get(url) response.raise_for_status() data = response.json() - return GetMessagePage(self, data) - - def get_messages_generator(self): + options = {"reverse": reverse} + return GetMessagePage(self, options, data) + + def get_messages_generator(self, reverse: bool = False): """Shortcut Generator for get_messages. 
Generator to iterate through all messages for a session in an app Yields: @@ -502,7 +572,7 @@ def get_messages_generator(self): """ page = 1 page_size = 50 - get_messages_page= self.get_messages(page, page_size) + get_messages_page = self.get_messages(page, page_size, reverse) while True: # get_session_response = self.get_sessions(user_id, location_id, page, page_size) for message in get_messages_page.items: @@ -511,10 +581,12 @@ def get_messages_generator(self): new_messages = get_messages_page.next() if not new_messages: break - + get_messages_page = new_messages - def create_metamessage(self, message: Message, metamessage_type: str, content: str): + def create_metamessage( + self, message: Message, metamessage_type: str, content: str + ): """Adds a metamessage to a session and links it to a specific message Args: @@ -528,13 +600,24 @@ def create_metamessage(self, message: Message, metamessage_type: str, content: s """ if not self.is_active: raise Exception("Session is inactive") - data = {"metamessage_type": metamessage_type, "content": content, "message_id": message.id} - url = f"{self.common_prefix}/users/{self.user_id}/sessions/{self.id}/metamessages" + data = { + "metamessage_type": metamessage_type, + "content": content, + "message_id": message.id, + } + url = ( + f"{self.common_prefix}/users/{self.user_id}/sessions/{self.id}/metamessages" + ) response = self.client.post(url, json=data) response.raise_for_status() data = response.json() - return Metamessage(id=data["id"], message_id=message.id, metamessage_type=metamessage_type, content=content, created_at=data["created_at"]) - + return Metamessage( + id=data["id"], + message_id=message.id, + metamessage_type=metamessage_type, + content=content, + created_at=data["created_at"], + ) def get_metamessage(self, metamessage_id: uuid.UUID) -> Metamessage: """Get a specific metamessage @@ -550,9 +633,22 @@ def get_metamessage(self, metamessage_id: uuid.UUID) -> Metamessage: response = self.client.get(url) response.raise_for_status() data = response.json() - return Metamessage(id=data["id"], message_id=data["message_id"], metamessage_type=data["metamessage_type"], content=data["content"], created_at=data["created_at"]) + return Metamessage( + id=data["id"], + message_id=data["message_id"], + metamessage_type=data["metamessage_type"], + content=data["content"], + created_at=data["created_at"], + ) - def get_metamessages(self, metamessage_type: Optional[str] = None, message: Optional[Message] = None, page: int = 1, page_size: int = 50) -> GetMetamessagePage: + def get_metamessages( + self, + metamessage_type: Optional[str] = None, + message: Optional[Message] = None, + page: int = 1, + page_size: int = 50, + reverse: bool = False, + ) -> GetMetamessagePage: """Get all messages for a session Args: @@ -563,7 +659,7 @@ def get_metamessages(self, metamessage_type: Optional[str] = None, message: Opti list[Dict]: List of Message objects """ - url = f"{self.common_prefix}/users/{self.user_id}/sessions/{self.id}/metamessages?page={page}&size={page_size}" + url = f"{self.common_prefix}/users/{self.user_id}/sessions/{self.id}/metamessages?page={page}&size={page_size}&reverse={reverse}" if metamessage_type: url += f"&metamessage_type={metamessage_type}" if message: @@ -572,12 +668,18 @@ def get_metamessages(self, metamessage_type: Optional[str] = None, message: Opti response.raise_for_status() data = response.json() options = { - "metamessage_type": metamessage_type, - "message_id": message.id if message else None - } + "metamessage_type": 
metamessage_type, + "message_id": message.id if message else None, + "reverse": reverse, + } return GetMetamessagePage(self, options, data) - - def get_metamessages_generator(self, metamessage_type: Optional[str] = None, message: Optional[Message] = None): + + def get_metamessages_generator( + self, + metamessage_type: Optional[str] = None, + message: Optional[Message] = None, + reverse: bool = False, + ): """Shortcut Generator for get_metamessages. Generator to iterate through all metamessages for a session in an app Args: @@ -590,19 +692,23 @@ def get_metamessages_generator(self, metamessage_type: Optional[str] = None, mes """ page = 1 page_size = 50 - get_metamessages_page = self.get_metamessages(metamessage_type=metamessage_type, message=message, page=page, page_size=page_size) + get_metamessages_page = self.get_metamessages( + metamessage_type=metamessage_type, + message=message, + page=page, + page_size=page_size, + reverse=reverse, + ) while True: - # get_session_response = self.get_sessions(user_id, location_id, page, page_size) for metamessage in get_metamessages_page.items: yield metamessage new_messages = get_metamessages_page.next() if not new_messages: break - + get_metamessages_page = new_messages - def update(self, metadata: Dict): """Update the metadata of a session @@ -626,6 +732,7 @@ def close(self): response.raise_for_status() self._is_active = False + class Collection: """Represents a single collection for a user in an app""" @@ -634,8 +741,8 @@ def __init__( client: Client, id: uuid.UUID, user_id: str, - name: str, - created_at: datetime.datetime, + name: str, + created_at: datetime.datetime, ): """Constructor for Collection""" self.base_url: str = client.base_url @@ -690,17 +797,19 @@ def create_document(self, content: str, metadata: Dict = {}): """ data = {"metadata": metadata, "content": content} - url = f"{self.common_prefix}/users/{self.user_id}/collections/{self.id}/documents" + url = ( + f"{self.common_prefix}/users/{self.user_id}/collections/{self.id}/documents" + ) response = self.client.post(url, json=data) response.raise_for_status() data = response.json() return Document( - collection_id=self.id, - id=data["id"], - metadata=metadata, - content=content, - created_at=data["created_at"] - ) + collection_id=self.id, + id=data["id"], + metadata=metadata, + content=content, + created_at=data["created_at"], + ) def get_document(self, document_id: uuid.UUID) -> Document: """Get a specific document for a collection based on ID @@ -717,14 +826,16 @@ def get_document(self, document_id: uuid.UUID) -> Document: response.raise_for_status() data = response.json() return Document( - collection_id=self.id, - id=data["id"], - metadata=data["metadata"], - content=data["content"], - created_at=data["created_at"] - ) + collection_id=self.id, + id=data["id"], + metadata=data["metadata"], + content=data["content"], + created_at=data["created_at"], + ) - def get_documents(self, page: int = 1, page_size: int = 50) -> GetDocumentPage: + def get_documents( + self, page: int = 1, page_size: int = 50, reverse: bool = False + ) -> GetDocumentPage: """Get all documents for a collection Args: @@ -735,13 +846,14 @@ def get_documents(self, page: int = 1, page_size: int = 50) -> GetDocumentPage: GetDocumentPage: Page of Document objects """ - url = f"{self.common_prefix}/users/{self.user_id}/collections/{self.id}/documents?page={page}&size={page_size}" + url = f"{self.common_prefix}/users/{self.user_id}/collections/{self.id}/documents?page={page}&size={page_size}&reverse={reverse}" response 
= self.client.get(url) response.raise_for_status() data = response.json() - return GetDocumentPage(self, data) - - def get_documents_generator(self): + options = {"reverse": reverse} + return GetDocumentPage(self, options, data) + + def get_documents_generator(self, reverse: bool = False): """Shortcut Generator for get_documents. Generator to iterate through all documents for a collection in an app Yields: @@ -750,7 +862,7 @@ def get_documents_generator(self): """ page = 1 page_size = 50 - get_documents_page= self.get_documents(page, page_size) + get_documents_page = self.get_documents(page, page_size, reverse) while True: for document in get_documents_page.items: yield document @@ -758,11 +870,11 @@ def get_documents_generator(self): new_documents = get_documents_page.next() if not new_documents: break - + get_documents_page = new_documents def query(self, query: str, top_k: int = 5) -> List[Document]: - """query the documents by cosine distance + """query the documents by cosine distance Args: query (str): The query string to compare other embeddings too top_k (int, optional): The number of results to return. Defaults to 5 max 50 @@ -774,18 +886,20 @@ def query(self, query: str, top_k: int = 5) -> List[Document]: response = self.client.get(url) response.raise_for_status() data = [ - Document( - collection_id=self.id, - content=document["content"], - id=document["id"], - created_at=document["created_at"], - metadata=document["metadata"] - ) - for document in response.json() + Document( + collection_id=self.id, + content=document["content"], + id=document["id"], + created_at=document["created_at"], + metadata=document["metadata"], + ) + for document in response.json() ] return data - def update_document(self, document: Document, content: Optional[str], metadata: Optional[Dict]) -> Document: + def update_document( + self, document: Document, content: Optional[str], metadata: Optional[Dict] + ) -> Document: """Update a document in the collection Args: From ef636ce217c648f9f89513389fc5069b4de9b784 Mon Sep 17 00:00:00 2001 From: Vineeth Voruganti <13438633+VVoruganti@users.noreply.github.com> Date: Tue, 20 Feb 2024 22:56:02 -0800 Subject: [PATCH 29/46] Address dependabot --- api/poetry.lock | 52 +++++++++++++++++++++++----------------------- api/pyproject.toml | 2 +- 2 files changed, 27 insertions(+), 27 deletions(-) diff --git a/api/poetry.lock b/api/poetry.lock index fe56dc9..9a68f07 100644 --- a/api/poetry.lock +++ b/api/poetry.lock @@ -17,25 +17,26 @@ typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.9\""} [[package]] name = "anyio" -version = "3.7.1" +version = "4.3.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "anyio-3.7.1-py3-none-any.whl", hash = "sha256:91dee416e570e92c64041bd18b900d1d6fa78dff7048769ce5ac5ddad004fbb5"}, - {file = "anyio-3.7.1.tar.gz", hash = "sha256:44a3c9aba0f5defa43261a8b3efb97891f2bd7d804e0e1f56419befa1adfc780"}, + {file = "anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8"}, + {file = "anyio-4.3.0.tar.gz", hash = "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6"}, ] [package.dependencies] -exceptiongroup = {version = "*", markers = "python_version < \"3.11\""} +exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} idna = ">=2.8" sniffio = ">=1.1" +typing-extensions = 
{version = ">=4.1", markers = "python_version < \"3.11\""} [package.extras] -doc = ["Sphinx", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme (>=1.2.2)", "sphinxcontrib-jquery"] -test = ["anyio[trio]", "coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "mock (>=4)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] -trio = ["trio (<0.22)"] +doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] +trio = ["trio (>=0.23)"] [[package]] name = "certifi" @@ -123,35 +124,34 @@ test = ["pytest (>=6)"] [[package]] name = "fastapi" -version = "0.105.0" +version = "0.109.2" description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "fastapi-0.105.0-py3-none-any.whl", hash = "sha256:f19ebf6fdc82a3281d10f2cb4774bdfa90238e3b40af3525a0c09fd08ad1c480"}, - {file = "fastapi-0.105.0.tar.gz", hash = "sha256:4d12838819aa52af244580675825e750ad67c9df4614f557a769606af902cf22"}, + {file = "fastapi-0.109.2-py3-none-any.whl", hash = "sha256:2c9bab24667293b501cad8dd388c05240c850b58ec5876ee3283c47d6e1e3a4d"}, + {file = "fastapi-0.109.2.tar.gz", hash = "sha256:f3817eac96fe4f65a2ebb4baa000f394e55f5fccdaf7f75250804bc58f354f73"}, ] [package.dependencies] -anyio = ">=3.7.1,<4.0.0" pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0" -starlette = ">=0.27.0,<0.28.0" +starlette = ">=0.36.3,<0.37.0" typing-extensions = ">=4.8.0" [package.extras] -all = ["email-validator (>=2.0.0)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.5)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"] +all = ["email-validator (>=2.0.0)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.7)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"] [[package]] name = "fastapi-pagination" -version = "0.12.15" +version = "0.12.16" description = "FastAPI pagination" category = "main" optional = false python-versions = ">=3.8,<4.0" files = [ - {file = "fastapi_pagination-0.12.15-py3-none-any.whl", hash = "sha256:bcfea8622b48135ef759b926d9d09fa8e16bc8adab26ec2b65d1647e72d39988"}, - {file = "fastapi_pagination-0.12.15.tar.gz", hash = "sha256:a7e5e48cd9d183f29532455a1689dfac575877b7ff10d112ddb56cb3d047a457"}, + {file = "fastapi_pagination-0.12.16-py3-none-any.whl", hash = "sha256:1179edea6c8d3b6b70d3f373047470b08a948bfef817ff8e722d46969f87998c"}, + {file = "fastapi_pagination-0.12.16.tar.gz", hash = "sha256:3c74d77d42451518e9d85aa1c3633b725f42d9746d68d1e9267f6c0493750497"}, ] [package.dependencies] @@ -340,14 +340,14 @@ testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", [[package]] name = "limits" -version = "3.8.0" +version = "3.9.0" description = "Rate limiting utilities" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "limits-3.8.0-py3-none-any.whl", 
hash = "sha256:6e3c75712359dfaea28bee23832bd814bbe66a42c92bbd848154dfba0d4c4503"}, - {file = "limits-3.8.0.tar.gz", hash = "sha256:7dd4955dec3c7a219be04e661251ae243a48050e84053bf68b31dd07890f28c2"}, + {file = "limits-3.9.0-py3-none-any.whl", hash = "sha256:6dce07d1a4d7bd3361d36f59f3f43c4f39675001daeeae2617c3be42d718daa8"}, + {file = "limits-3.9.0.tar.gz", hash = "sha256:7b44aa4d05c539276928372681190136914958cccbb99c30ecc5df72a179661a"}, ] [package.dependencies] @@ -785,14 +785,14 @@ sqlcipher = ["sqlcipher3_binary"] [[package]] name = "starlette" -version = "0.27.0" +version = "0.36.3" description = "The little ASGI library that shines." category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "starlette-0.27.0-py3-none-any.whl", hash = "sha256:918416370e846586541235ccd38a474c08b80443ed31c578a418e2209b3eef91"}, - {file = "starlette-0.27.0.tar.gz", hash = "sha256:6a6b0d042acb8d469a01eba54e9cda6cbd24ac602c4cd016723117d6a7e73b75"}, + {file = "starlette-0.36.3-py3-none-any.whl", hash = "sha256:13d429aa93a61dc40bf503e8c801db1f1bca3dc706b10ef2434a36123568f044"}, + {file = "starlette-0.36.3.tar.gz", hash = "sha256:90a671733cfb35771d8cc605e0b679d23b992f8dcfad48cc60b38cb29aeb7080"}, ] [package.dependencies] @@ -800,7 +800,7 @@ anyio = ">=3.4.0,<5" typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\""} [package.extras] -full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart", "pyyaml"] +full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.7)", "pyyaml"] [[package]] name = "tqdm" @@ -954,4 +954,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "90a0874f29e706994647a141418ed4eca5bd621518396d525d27039ad586e4bc" +content-hash = "49ec8fef5f21cb5bf2a8bbd007f016bd5bd88f8bdf604f3a820c59c07f984060" diff --git a/api/pyproject.toml b/api/pyproject.toml index 6034c3f..2c828a9 100644 --- a/api/pyproject.toml +++ b/api/pyproject.toml @@ -7,7 +7,7 @@ readme = "README.md" [tool.poetry.dependencies] python = "^3.8" -fastapi = "^0.105.0" +fastapi = "^0.109.0" uvicorn = "^0.24.0.post1" python-dotenv = "^1.0.0" sqlalchemy = "^2.0.25" From 10b65b2cdc04a2c521e496bdf9b8450f96f5551d Mon Sep 17 00:00:00 2001 From: Vineeth Voruganti <13438633+VVoruganti@users.noreply.github.com> Date: Wed, 21 Feb 2024 15:53:23 -0800 Subject: [PATCH 30/46] Formatting --- api/src/models.py | 68 +++++++++++++++++++++++++++++++++-------------- 1 file changed, 48 insertions(+), 20 deletions(-) diff --git a/api/src/models.py b/api/src/models.py index ea4b86b..ce20747 100644 --- a/api/src/models.py +++ b/api/src/models.py @@ -12,70 +12,98 @@ load_dotenv() -DATABASE_TYPE = os.getenv("DATABASE_TYPE", 'postgres') +DATABASE_TYPE = os.getenv("DATABASE_TYPE", "postgres") + +ColumnType = JSONB if DATABASE_TYPE == "postgres" else JSON -ColumnType = JSONB if DATABASE_TYPE == 'postgres' else JSON class Session(Base): __tablename__ = "sessions" - id: Mapped[uuid.UUID] = mapped_column(primary_key=True, index=True, default=uuid.uuid4) + id: Mapped[uuid.UUID] = mapped_column( + primary_key=True, index=True, default=uuid.uuid4 + ) app_id: Mapped[str] = mapped_column(String(512), index=True) user_id: Mapped[str] = mapped_column(String(512), index=True) - location_id: Mapped[str] = mapped_column(String(512), index=True) + location_id: Mapped[str] = mapped_column(String(512), index=True, default="default") is_active: Mapped[bool] = 
mapped_column(default=True) - h_metadata: Mapped[dict] = mapped_column("metadata", ColumnType, default={}) - created_at: Mapped[datetime.datetime] = mapped_column(default=datetime.datetime.utcnow) + h_metadata: Mapped[dict] = mapped_column("metadata", ColumnType, default={}) + created_at: Mapped[datetime.datetime] = mapped_column( + default=datetime.datetime.utcnow + ) messages = relationship("Message", back_populates="session") def __repr__(self) -> str: return f"Session(id={self.id}, app_id={self.app_id}, user_id={self.user_id}, location_id={self.location_id}, is_active={self.is_active}, created_at={self.created_at}, h_metadata={self.h_metadata})" + class Message(Base): __tablename__ = "messages" - id: Mapped[uuid.UUID] = mapped_column(primary_key=True, index=True, default=uuid.uuid4) + id: Mapped[uuid.UUID] = mapped_column( + primary_key=True, index=True, default=uuid.uuid4 + ) session_id: Mapped[uuid.UUID] = mapped_column(ForeignKey("sessions.id")) is_user: Mapped[bool] - content: Mapped[str] = mapped_column(String(65535)) + content: Mapped[str] = mapped_column(String(65535)) - created_at: Mapped[datetime.datetime] = mapped_column(default=datetime.datetime.utcnow) + created_at: Mapped[datetime.datetime] = mapped_column( + default=datetime.datetime.utcnow + ) session = relationship("Session", back_populates="messages") metamessages = relationship("Metamessage", back_populates="message") + def __repr__(self) -> str: return f"Message(id={self.id}, session_id={self.session_id}, is_user={self.is_user}, content={self.content[10:]})" + class Metamessage(Base): __tablename__ = "metamessages" - id: Mapped[uuid.UUID] = mapped_column(primary_key=True, index=True, default=uuid.uuid4) - metamessage_type: Mapped[str] = mapped_column(String(512), index=True) - content: Mapped[str] = mapped_column(String(65535)) + id: Mapped[uuid.UUID] = mapped_column( + primary_key=True, index=True, default=uuid.uuid4 + ) + metamessage_type: Mapped[str] = mapped_column(String(512), index=True) + content: Mapped[str] = mapped_column(String(65535)) message_id = Column(Uuid, ForeignKey("messages.id")) message = relationship("Message", back_populates="metamessages") - created_at: Mapped[datetime.datetime] = mapped_column(default=datetime.datetime.utcnow) + created_at: Mapped[datetime.datetime] = mapped_column( + default=datetime.datetime.utcnow + ) def __repr__(self) -> str: return f"Metamessages(id={self.id}, message_id={self.message_id}, metamessage_type={self.metamessage_type}, content={self.content[10:]})" + class Collection(Base): __tablename__ = "collections" - id: Mapped[uuid.UUID] = mapped_column(primary_key=True, index=True, default=uuid.uuid4) + id: Mapped[uuid.UUID] = mapped_column( + primary_key=True, index=True, default=uuid.uuid4 + ) name: Mapped[str] = mapped_column(String(512), index=True) app_id: Mapped[str] = mapped_column(String(512), index=True) user_id: Mapped[str] = mapped_column(String(512), index=True) - created_at: Mapped[datetime.datetime] = mapped_column(default=datetime.datetime.utcnow) - documents = relationship("Document", back_populates="collection", cascade="all, delete, delete-orphan") + created_at: Mapped[datetime.datetime] = mapped_column( + default=datetime.datetime.utcnow + ) + documents = relationship( + "Document", back_populates="collection", cascade="all, delete, delete-orphan" + ) __table_args__ = ( - UniqueConstraint('name', 'app_id', 'user_id', name="unique_name_app_user"), + UniqueConstraint("name", "app_id", "user_id", name="unique_name_app_user"), ) + class Document(Base): 
__tablename__ = "documents" - id: Mapped[uuid.UUID] = mapped_column(primary_key=True, index=True, default=uuid.uuid4) + id: Mapped[uuid.UUID] = mapped_column( + primary_key=True, index=True, default=uuid.uuid4 + ) h_metadata: Mapped[dict] = mapped_column("metadata", ColumnType, default={}) content: Mapped[str] = mapped_column(String(65535)) embedding = mapped_column(Vector(1536)) - created_at: Mapped[datetime.datetime] = mapped_column(default=datetime.datetime.utcnow) - + created_at: Mapped[datetime.datetime] = mapped_column( + default=datetime.datetime.utcnow + ) + collection_id = Column(Uuid, ForeignKey("collections.id")) collection = relationship("Collection", back_populates="documents") From f82bd7f3c57e990c9e46a5b73253e9e377befc63 Mon Sep 17 00:00:00 2001 From: vintro Date: Tue, 20 Feb 2024 12:54:38 -0500 Subject: [PATCH 31/46] initial commit on honcho dspy personas --- .../honcho-dspy-personas/.env.template | 2 + .../discord/honcho-dspy-personas/.gitignore | 5 + example/discord/honcho-dspy-personas/bot.py | 79 + example/discord/honcho-dspy-personas/chain.py | 114 + example/discord/honcho-dspy-personas/graph.py | 83 + .../langchain_prompts/state_check.yaml | 10 + .../langchain_prompts/state_commentary.yaml | 8 + .../langchain_prompts/state_labeling.yaml | 9 + .../discord/honcho-dspy-personas/metric.py | 23 + .../discord/honcho-dspy-personas/poetry.lock | 2191 +++++++++++++++++ .../honcho-dspy-personas/pyproject.toml | 20 + 11 files changed, 2544 insertions(+) create mode 100644 example/discord/honcho-dspy-personas/.env.template create mode 100644 example/discord/honcho-dspy-personas/.gitignore create mode 100644 example/discord/honcho-dspy-personas/bot.py create mode 100644 example/discord/honcho-dspy-personas/chain.py create mode 100644 example/discord/honcho-dspy-personas/graph.py create mode 100644 example/discord/honcho-dspy-personas/langchain_prompts/state_check.yaml create mode 100644 example/discord/honcho-dspy-personas/langchain_prompts/state_commentary.yaml create mode 100644 example/discord/honcho-dspy-personas/langchain_prompts/state_labeling.yaml create mode 100644 example/discord/honcho-dspy-personas/metric.py create mode 100644 example/discord/honcho-dspy-personas/poetry.lock create mode 100644 example/discord/honcho-dspy-personas/pyproject.toml diff --git a/example/discord/honcho-dspy-personas/.env.template b/example/discord/honcho-dspy-personas/.env.template new file mode 100644 index 0000000..0aafc86 --- /dev/null +++ b/example/discord/honcho-dspy-personas/.env.template @@ -0,0 +1,2 @@ +BOT_TOKEN= +OPENAI_API_KEY= \ No newline at end of file diff --git a/example/discord/honcho-dspy-personas/.gitignore b/example/discord/honcho-dspy-personas/.gitignore new file mode 100644 index 0000000..f133efa --- /dev/null +++ b/example/discord/honcho-dspy-personas/.gitignore @@ -0,0 +1,5 @@ +.env + +.venv + +.DS_Store \ No newline at end of file diff --git a/example/discord/honcho-dspy-personas/bot.py b/example/discord/honcho-dspy-personas/bot.py new file mode 100644 index 0000000..caab5ee --- /dev/null +++ b/example/discord/honcho-dspy-personas/bot.py @@ -0,0 +1,79 @@ +import os +from uuid import uuid1 +import discord +from honcho import Client as HonchoClient +from graph import langchain_message_converter, chat + + +intents = discord.Intents.default() +intents.messages = True +intents.message_content = True +intents.members = True + +app_id = "vince/dspy-personas" + +#honcho = HonchoClient(app_id=app_id, base_url="http://localhost:8000") # uncomment to use local +honcho = 
HonchoClient(app_id=app_id) # uses demo server at https://demo.honcho.dev + +bot = discord.Bot(intents=intents) + +@bot.event +async def on_ready(): + print(f'We have logged in as {bot.user}') + +@bot.event +async def on_member_join(member): + await member.send( + f"*Hello {member.name}, welcome to the server! This is a demo bot built with Honcho,* " + "*implementing a naive user modeling method.* " + "*To get started, just type a message in this channel and the bot will respond.* " + "*Over time, it will classify the \"state\" you're in and optimize conversations based on that state.* " + "*You can use the /restart command to restart the conversation at any time.* " + "*If you have any questions or feedback, feel free to ask in the #honcho channel.* " + "*Enjoy!*" + ) + + +@bot.event +async def on_message(message): + if message.author == bot.user or message.guild is not None: + return + + user_id = f"discord_{str(message.author.id)}" + location_id=str(message.channel.id) + + sessions = list(honcho.get_sessions_generator(user_id, location_id)) + + if len(sessions) > 0: + session = sessions[0] + else: + session = honcho.create_session(user_id, location_id) + + history = list(session.get_messages(page_size=10)) + chat_history = langchain_message_converter(history) + + inp = message.content + user_message = session.create_message(is_user=True, content=inp) + + async with message.channel.typing(): + response = await chat( + chat_history=chat_history, + user_message=user_message, + session=session, + input=inp + ) + await message.channel.send(response) + + session.create_message(is_user=False, content=response) + +@bot.slash_command(name = "restart", description = "Restart the Conversation") +async def restart(ctx): + user_id=f"discord_{str(ctx.author.id)}" + location_id=str(ctx.channel_id) + sessions = list(honcho.get_sessions_generator(user_id, location_id)) + sessions[0].close() if len(sessions) > 0 else None + + msg = "Great! The conversation has been restarted. What would you like to talk about?" 
+ await ctx.respond(msg) + +bot.run(os.environ["BOT_TOKEN"]) diff --git a/example/discord/honcho-dspy-personas/chain.py b/example/discord/honcho-dspy-personas/chain.py new file mode 100644 index 0000000..8d19707 --- /dev/null +++ b/example/discord/honcho-dspy-personas/chain.py @@ -0,0 +1,114 @@ +import os +from typing import List, Union +from langchain_openai import ChatOpenAI +from langchain_core.prompts import ChatPromptTemplate, SystemMessagePromptTemplate, load_prompt +from langchain_core.messages import AIMessage, HumanMessage + +from honcho import Message + +# langchain prompts +SYSTEM_STATE_COMMENTARY = load_prompt(os.path.join(os.path.dirname(__file__), 'langchain_prompts/state_commentary.yaml')) +SYSTEM_STATE_LABELING = load_prompt(os.path.join(os.path.dirname(__file__), 'langchain_prompts/state_labeling.yaml')) +SYSTEM_STATE_CHECK = load_prompt(os.path.join(os.path.dirname(__file__), 'langchain_prompts/state_check.yaml')) + +# quick utility function to convert messages from honcho to langchain +def langchain_message_converter(messages: List[Message]) -> List[Union[AIMessage, HumanMessage]]: + new_messages = [] + for message in messages: + if message.is_user: + new_messages.append(HumanMessage(content=message.content)) + else: + new_messages.append(AIMessage(content=message.content)) + return new_messages + + +# convert chat history and user input into a string +def format_chat_history(chat_history: List[Message], user_input=None): + messages = [("user: " + message.content if isinstance(message, HumanMessage) else "ai: " + message.content) for message in chat_history] + if user_input: + messages.append(f"user: {user_input}") + + return "\n".join(messages) + + + +class StateExtractor: + """Wrapper class for all the DSPy and LangChain code for user state labeling and pipeline optimization""" + lc_gpt_4: ChatOpenAI = ChatOpenAI(model_name = "gpt-4") + lc_gpt_turbo: ChatOpenAI = ChatOpenAI(model_name = "gpt-3.5-turbo") + system_state_commentary: SystemMessagePromptTemplate = SystemMessagePromptTemplate(SYSTEM_STATE_COMMENTARY) + system_state_labeling: SystemMessagePromptTemplate = SystemMessagePromptTemplate(SYSTEM_STATE_LABELING) + system_state_check: SystemMessagePromptTemplate = SystemMessagePromptTemplate(SYSTEM_STATE_CHECK) + + def __init__(self) -> None: + pass + + @classmethod + async def generate_state_commentary(cls, chat_history: List[Message], input: str) -> str: + """Generate a commentary on the current state of the user""" + # format prompt + state_commentary = ChatPromptTemplate.from_messages([ + cls.system_state_commentary + ]) + # LCEL + chain = state_commentary | cls.lc_gpt_4 + # inference + response = await chain.ainvoke({ + "chat_history": format_chat_history(chat_history, user_input=input), + "user_input": input + }) + # return output + return response.content + + @classmethod + async def generate_state_label(cls, state_commetary: str) -> str: + """Generate a state label from a commetary on the user's state""" + # format prompt + state_labeling = ChatPromptTemplate.from_messages([ + cls.system_state_labeling + ]) + # LCEL + chain = state_labeling | cls.lc_gpt_4 + # inference + response = await chain.ainvoke({ + "state_commetary": state_commetary + }) + # return output + return response.content + + @classmethod + async def check_state_exists(cls, existing_states: List[str], state: str): + """Check if a user state is new or already is stored""" + + # convert existing_states to a formatted string + existing_states = "\n".join(existing_states) + + # format prompt + 
state_check = ChatPromptTemplate.from_messages([ + cls.system_state_check + ]) + # LCEL + chain = state_check | cls.lc_gpt_turbo + # inference + response = await chain.ainvoke({ + "existing_states": existing_states, + "state": state, + }) + # return output + return response.output + + @classmethod + async def generate_state(cls, existing_states: List[str], chat_history: List[Message], input: str): + """"Determine the user's state from the current conversation state""" + + # Generate label + state_commetary = cls.generate_state_commentary(chat_history, input) + state_label = cls.generate_state_label(state_commetary) + + # Determine if state is new + existing_state = cls.check_state_exists(existing_states, state_label) + is_state_new = existing_state is None + + # return existing state if we found one + return is_state_new, existing_state or state_label + \ No newline at end of file diff --git a/example/discord/honcho-dspy-personas/graph.py b/example/discord/honcho-dspy-personas/graph.py new file mode 100644 index 0000000..6283a0e --- /dev/null +++ b/example/discord/honcho-dspy-personas/graph.py @@ -0,0 +1,83 @@ +import os +import dspy +from typing import List +from dspy.teleprompt import BootstrapFewShot +from dotenv import load_dotenv +from chain import StateExtractor, format_chat_history + +from honcho import Message, Session + +load_dotenv() + +# Configure DSPy +dspy_gpt4 = dspy.OpenAI(model="gpt-4") +dspy.settings.configure(lm=dspy_gpt4) + + + +# DSPy Signatures +class Thought(dspy.Signature): + """Generate a thought about the user's needs""" + user_input = dspy.InputField() + thought = dspy.OutputField(desc="a prediction about the user's mental state") + +class Response(dspy.Signature): + """Generate a response for the user based on the thought provided""" + user_input = dspy.InputField() + thought = dspy.InputField() + response = dspy.OutputField(desc="keep the conversation going, be engaging") + +# DSPy Module +class ChatWithThought(dspy.Module): + generate_thought = dspy.Predict(Thought) + generate_response = dspy.Predict(Response) + + def forward(self, user_message: Message, session: Session, chat_input: str): + session.create_message(is_user=True, content=chat_input) + + # call the thought predictor + thought = self.generate_thought(user_input=chat_input) + session.create_metamessage(user_message, metamessage_type="thought", content=thought.thought) + + # call the response predictor + response = self.generate_response(user_input=chat_input, thought=thought.thought) + session.create_message(is_user=False, content=response.response) + + return response.response + +user_state_storage = {} +async def chat(user_message: Message, session: Session, chat_history: List[Message], input: str, optimization_threshold=5): + # first we need to take the user input and determine the user's state/dimension/persona + is_state_new, user_state = await StateExtractor.generate_state(chat_history, input) + + # Save the user_state if it's new + if is_state_new: + user_state_storage[user_state] = { + "chat_module": ChatWithThought(), + "examples": [] + } + + # then, we need to select the pipeline for that derived state/dimension/persona + # way this would work is to define the optimizer and optimize a chain once examples in a certain dimension exceed a threshold + # need a way to store the optimized chain and call it given a state/dimension/persona + # this is the reward model for a user within a state/dimension/persona + user_state_data = user_state_storage[user_state] + + # Optimize the state's chat 
module if we've reached the optimization threshold + examples = user_state_data["examples"] + if len(examples) >= optimization_threshold: + metric = None # TODO: Define this + + # Optimize chat module + optimizer = BootstrapFewShot(metric=metric) + compiled_chat_module = optimizer.compile(trainset=examples) + + user_state_data["chat_module"] = compiled_chat_module + + # use that pipeline to generate a response + chat_module = user_state_data["chat_module"] + chat_input = format_chat_history(chat_history, user_input=input) + + response = chat_module(user_message=user_message, session=session, input=chat_input) + + return response diff --git a/example/discord/honcho-dspy-personas/langchain_prompts/state_check.yaml b/example/discord/honcho-dspy-personas/langchain_prompts/state_check.yaml new file mode 100644 index 0000000..ac6bdd1 --- /dev/null +++ b/example/discord/honcho-dspy-personas/langchain_prompts/state_check.yaml @@ -0,0 +1,10 @@ +_type: prompt +input_variables: + ["existing_states", "state"] +template: > + Given the list of existing states, determine whether or not the new state is represented in the list of existing states. + + existing states: ```{existing_states}``` + new state: ```{state}``` + + If the new state represented in the existing states, return the existing state value. If the new state is NOT represented in existing states, return "None". Output a single value only. \ No newline at end of file diff --git a/example/discord/honcho-dspy-personas/langchain_prompts/state_commentary.yaml b/example/discord/honcho-dspy-personas/langchain_prompts/state_commentary.yaml new file mode 100644 index 0000000..d0fbf2e --- /dev/null +++ b/example/discord/honcho-dspy-personas/langchain_prompts/state_commentary.yaml @@ -0,0 +1,8 @@ +_type: prompt +input_variables: + ["chat_history", "user_input"] +template: > + Your job is to make a prediction about the task the user might be engaging in. Some people might be researching, exploring curiosities, or just asking questions for general inquiry. Provide commentary that would shed light on the "mode" the user might be in. + + chat history: ```{chat_history}``` + user input: ```{user_input}``` \ No newline at end of file diff --git a/example/discord/honcho-dspy-personas/langchain_prompts/state_labeling.yaml b/example/discord/honcho-dspy-personas/langchain_prompts/state_labeling.yaml new file mode 100644 index 0000000..61e0353 --- /dev/null +++ b/example/discord/honcho-dspy-personas/langchain_prompts/state_labeling.yaml @@ -0,0 +1,9 @@ +_type: prompt +input_variables: + ["state_commentary"] +template: > + Your job is to label the task the user might be engaging in. Some people might be conducting research, exploring a interest, or just asking questions for general inquiry. + + commentary: ```{state_commentary}``` + + Output your prediction as a concise, single word label. 
\ No newline at end of file diff --git a/example/discord/honcho-dspy-personas/metric.py b/example/discord/honcho-dspy-personas/metric.py new file mode 100644 index 0000000..9ff3ce5 --- /dev/null +++ b/example/discord/honcho-dspy-personas/metric.py @@ -0,0 +1,23 @@ +import dspy + +gpt4T = dspy.OpenAI(model='gpt-4-1106-preview', max_tokens=1000, model_type='chat') + +class MessageResponseAssess(dspy.Signature): + """Assess the quality of a response along the specified dimension.""" + user_message = dspy.InputField() + ai_response = dspy.InputField() + assessment_dimension = dspy.InputField() + assessment_answer = dspy.OutputField(desc="Good or not") + + +def assess_response_quality(user_message, ai_response, assessment_dimension): + with dspy.context(lm=gpt4T): + assessment_result = dspy.Predict(MessageResponseAssess)( + user_message=user_message, + ai_response=ai_response, + assessment_dimension=assessment_dimension + ) + + is_positive = assessment_result.assessment_answer.lower() == 'good' + + return is_positive \ No newline at end of file diff --git a/example/discord/honcho-dspy-personas/poetry.lock b/example/discord/honcho-dspy-personas/poetry.lock new file mode 100644 index 0000000..3e03a95 --- /dev/null +++ b/example/discord/honcho-dspy-personas/poetry.lock @@ -0,0 +1,2191 @@ +# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. + +[[package]] +name = "aiohttp" +version = "3.8.6" +description = "Async http client/server framework (asyncio)" +optional = false +python-versions = ">=3.6" +files = [ + {file = "aiohttp-3.8.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:41d55fc043954cddbbd82503d9cc3f4814a40bcef30b3569bc7b5e34130718c1"}, + {file = "aiohttp-3.8.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1d84166673694841d8953f0a8d0c90e1087739d24632fe86b1a08819168b4566"}, + {file = "aiohttp-3.8.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:253bf92b744b3170eb4c4ca2fa58f9c4b87aeb1df42f71d4e78815e6e8b73c9e"}, + {file = "aiohttp-3.8.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3fd194939b1f764d6bb05490987bfe104287bbf51b8d862261ccf66f48fb4096"}, + {file = "aiohttp-3.8.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6c5f938d199a6fdbdc10bbb9447496561c3a9a565b43be564648d81e1102ac22"}, + {file = "aiohttp-3.8.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2817b2f66ca82ee699acd90e05c95e79bbf1dc986abb62b61ec8aaf851e81c93"}, + {file = "aiohttp-3.8.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0fa375b3d34e71ccccf172cab401cd94a72de7a8cc01847a7b3386204093bb47"}, + {file = "aiohttp-3.8.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9de50a199b7710fa2904be5a4a9b51af587ab24c8e540a7243ab737b45844543"}, + {file = "aiohttp-3.8.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e1d8cb0b56b3587c5c01de3bf2f600f186da7e7b5f7353d1bf26a8ddca57f965"}, + {file = "aiohttp-3.8.6-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8e31e9db1bee8b4f407b77fd2507337a0a80665ad7b6c749d08df595d88f1cf5"}, + {file = "aiohttp-3.8.6-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:7bc88fc494b1f0311d67f29fee6fd636606f4697e8cc793a2d912ac5b19aa38d"}, + {file = "aiohttp-3.8.6-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ec00c3305788e04bf6d29d42e504560e159ccaf0be30c09203b468a6c1ccd3b2"}, + {file = "aiohttp-3.8.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:ad1407db8f2f49329729564f71685557157bfa42b48f4b93e53721a16eb813ed"}, + {file = "aiohttp-3.8.6-cp310-cp310-win32.whl", hash = "sha256:ccc360e87341ad47c777f5723f68adbb52b37ab450c8bc3ca9ca1f3e849e5fe2"}, + {file = "aiohttp-3.8.6-cp310-cp310-win_amd64.whl", hash = "sha256:93c15c8e48e5e7b89d5cb4613479d144fda8344e2d886cf694fd36db4cc86865"}, + {file = "aiohttp-3.8.6-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6e2f9cc8e5328f829f6e1fb74a0a3a939b14e67e80832975e01929e320386b34"}, + {file = "aiohttp-3.8.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e6a00ffcc173e765e200ceefb06399ba09c06db97f401f920513a10c803604ca"}, + {file = "aiohttp-3.8.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:41bdc2ba359032e36c0e9de5a3bd00d6fb7ea558a6ce6b70acedf0da86458321"}, + {file = "aiohttp-3.8.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14cd52ccf40006c7a6cd34a0f8663734e5363fd981807173faf3a017e202fec9"}, + {file = "aiohttp-3.8.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2d5b785c792802e7b275c420d84f3397668e9d49ab1cb52bd916b3b3ffcf09ad"}, + {file = "aiohttp-3.8.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1bed815f3dc3d915c5c1e556c397c8667826fbc1b935d95b0ad680787896a358"}, + {file = "aiohttp-3.8.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96603a562b546632441926cd1293cfcb5b69f0b4159e6077f7c7dbdfb686af4d"}, + {file = "aiohttp-3.8.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d76e8b13161a202d14c9584590c4df4d068c9567c99506497bdd67eaedf36403"}, + {file = "aiohttp-3.8.6-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e3f1e3f1a1751bb62b4a1b7f4e435afcdade6c17a4fd9b9d43607cebd242924a"}, + {file = "aiohttp-3.8.6-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:76b36b3124f0223903609944a3c8bf28a599b2cc0ce0be60b45211c8e9be97f8"}, + {file = "aiohttp-3.8.6-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:a2ece4af1f3c967a4390c284797ab595a9f1bc1130ef8b01828915a05a6ae684"}, + {file = "aiohttp-3.8.6-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:16d330b3b9db87c3883e565340d292638a878236418b23cc8b9b11a054aaa887"}, + {file = "aiohttp-3.8.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:42c89579f82e49db436b69c938ab3e1559e5a4409eb8639eb4143989bc390f2f"}, + {file = "aiohttp-3.8.6-cp311-cp311-win32.whl", hash = "sha256:efd2fcf7e7b9d7ab16e6b7d54205beded0a9c8566cb30f09c1abe42b4e22bdcb"}, + {file = "aiohttp-3.8.6-cp311-cp311-win_amd64.whl", hash = "sha256:3b2ab182fc28e7a81f6c70bfbd829045d9480063f5ab06f6e601a3eddbbd49a0"}, + {file = "aiohttp-3.8.6-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:fdee8405931b0615220e5ddf8cd7edd8592c606a8e4ca2a00704883c396e4479"}, + {file = "aiohttp-3.8.6-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d25036d161c4fe2225d1abff2bd52c34ed0b1099f02c208cd34d8c05729882f0"}, + {file = "aiohttp-3.8.6-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5d791245a894be071d5ab04bbb4850534261a7d4fd363b094a7b9963e8cdbd31"}, + {file = "aiohttp-3.8.6-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0cccd1de239afa866e4ce5c789b3032442f19c261c7d8a01183fd956b1935349"}, + {file = "aiohttp-3.8.6-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f13f60d78224f0dace220d8ab4ef1dbc37115eeeab8c06804fec11bec2bbd07"}, + {file = 
"aiohttp-3.8.6-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8a9b5a0606faca4f6cc0d338359d6fa137104c337f489cd135bb7fbdbccb1e39"}, + {file = "aiohttp-3.8.6-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:13da35c9ceb847732bf5c6c5781dcf4780e14392e5d3b3c689f6d22f8e15ae31"}, + {file = "aiohttp-3.8.6-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:4d4cbe4ffa9d05f46a28252efc5941e0462792930caa370a6efaf491f412bc66"}, + {file = "aiohttp-3.8.6-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:229852e147f44da0241954fc6cb910ba074e597f06789c867cb7fb0621e0ba7a"}, + {file = "aiohttp-3.8.6-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:713103a8bdde61d13490adf47171a1039fd880113981e55401a0f7b42c37d071"}, + {file = "aiohttp-3.8.6-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:45ad816b2c8e3b60b510f30dbd37fe74fd4a772248a52bb021f6fd65dff809b6"}, + {file = "aiohttp-3.8.6-cp36-cp36m-win32.whl", hash = "sha256:2b8d4e166e600dcfbff51919c7a3789ff6ca8b3ecce16e1d9c96d95dd569eb4c"}, + {file = "aiohttp-3.8.6-cp36-cp36m-win_amd64.whl", hash = "sha256:0912ed87fee967940aacc5306d3aa8ba3a459fcd12add0b407081fbefc931e53"}, + {file = "aiohttp-3.8.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e2a988a0c673c2e12084f5e6ba3392d76c75ddb8ebc6c7e9ead68248101cd446"}, + {file = "aiohttp-3.8.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ebf3fd9f141700b510d4b190094db0ce37ac6361a6806c153c161dc6c041ccda"}, + {file = "aiohttp-3.8.6-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3161ce82ab85acd267c8f4b14aa226047a6bee1e4e6adb74b798bd42c6ae1f80"}, + {file = "aiohttp-3.8.6-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d95fc1bf33a9a81469aa760617b5971331cdd74370d1214f0b3109272c0e1e3c"}, + {file = "aiohttp-3.8.6-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c43ecfef7deaf0617cee936836518e7424ee12cb709883f2c9a1adda63cc460"}, + {file = "aiohttp-3.8.6-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca80e1b90a05a4f476547f904992ae81eda5c2c85c66ee4195bb8f9c5fb47f28"}, + {file = "aiohttp-3.8.6-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:90c72ebb7cb3a08a7f40061079817133f502a160561d0675b0a6adf231382c92"}, + {file = "aiohttp-3.8.6-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bb54c54510e47a8c7c8e63454a6acc817519337b2b78606c4e840871a3e15349"}, + {file = "aiohttp-3.8.6-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:de6a1c9f6803b90e20869e6b99c2c18cef5cc691363954c93cb9adeb26d9f3ae"}, + {file = "aiohttp-3.8.6-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:a3628b6c7b880b181a3ae0a0683698513874df63783fd89de99b7b7539e3e8a8"}, + {file = "aiohttp-3.8.6-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:fc37e9aef10a696a5a4474802930079ccfc14d9f9c10b4662169671ff034b7df"}, + {file = "aiohttp-3.8.6-cp37-cp37m-win32.whl", hash = "sha256:f8ef51e459eb2ad8e7a66c1d6440c808485840ad55ecc3cafefadea47d1b1ba2"}, + {file = "aiohttp-3.8.6-cp37-cp37m-win_amd64.whl", hash = "sha256:b2fe42e523be344124c6c8ef32a011444e869dc5f883c591ed87f84339de5976"}, + {file = "aiohttp-3.8.6-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:9e2ee0ac5a1f5c7dd3197de309adfb99ac4617ff02b0603fd1e65b07dc772e4b"}, + {file = "aiohttp-3.8.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:01770d8c04bd8db568abb636c1fdd4f7140b284b8b3e0b4584f070180c1e5c62"}, + {file = "aiohttp-3.8.6-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:3c68330a59506254b556b99a91857428cab98b2f84061260a67865f7f52899f5"}, + {file = "aiohttp-3.8.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89341b2c19fb5eac30c341133ae2cc3544d40d9b1892749cdd25892bbc6ac951"}, + {file = "aiohttp-3.8.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:71783b0b6455ac8f34b5ec99d83e686892c50498d5d00b8e56d47f41b38fbe04"}, + {file = "aiohttp-3.8.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f628dbf3c91e12f4d6c8b3f092069567d8eb17814aebba3d7d60c149391aee3a"}, + {file = "aiohttp-3.8.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b04691bc6601ef47c88f0255043df6f570ada1a9ebef99c34bd0b72866c217ae"}, + {file = "aiohttp-3.8.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ee912f7e78287516df155f69da575a0ba33b02dd7c1d6614dbc9463f43066e3"}, + {file = "aiohttp-3.8.6-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9c19b26acdd08dd239e0d3669a3dddafd600902e37881f13fbd8a53943079dbc"}, + {file = "aiohttp-3.8.6-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:99c5ac4ad492b4a19fc132306cd57075c28446ec2ed970973bbf036bcda1bcc6"}, + {file = "aiohttp-3.8.6-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:f0f03211fd14a6a0aed2997d4b1c013d49fb7b50eeb9ffdf5e51f23cfe2c77fa"}, + {file = "aiohttp-3.8.6-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:8d399dade330c53b4106160f75f55407e9ae7505263ea86f2ccca6bfcbdb4921"}, + {file = "aiohttp-3.8.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ec4fd86658c6a8964d75426517dc01cbf840bbf32d055ce64a9e63a40fd7b771"}, + {file = "aiohttp-3.8.6-cp38-cp38-win32.whl", hash = "sha256:33164093be11fcef3ce2571a0dccd9041c9a93fa3bde86569d7b03120d276c6f"}, + {file = "aiohttp-3.8.6-cp38-cp38-win_amd64.whl", hash = "sha256:bdf70bfe5a1414ba9afb9d49f0c912dc524cf60141102f3a11143ba3d291870f"}, + {file = "aiohttp-3.8.6-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d52d5dc7c6682b720280f9d9db41d36ebe4791622c842e258c9206232251ab2b"}, + {file = "aiohttp-3.8.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4ac39027011414dbd3d87f7edb31680e1f430834c8cef029f11c66dad0670aa5"}, + {file = "aiohttp-3.8.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3f5c7ce535a1d2429a634310e308fb7d718905487257060e5d4598e29dc17f0b"}, + {file = "aiohttp-3.8.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b30e963f9e0d52c28f284d554a9469af073030030cef8693106d918b2ca92f54"}, + {file = "aiohttp-3.8.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:918810ef188f84152af6b938254911055a72e0f935b5fbc4c1a4ed0b0584aed1"}, + {file = "aiohttp-3.8.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:002f23e6ea8d3dd8d149e569fd580c999232b5fbc601c48d55398fbc2e582e8c"}, + {file = "aiohttp-3.8.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4fcf3eabd3fd1a5e6092d1242295fa37d0354b2eb2077e6eb670accad78e40e1"}, + {file = "aiohttp-3.8.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:255ba9d6d5ff1a382bb9a578cd563605aa69bec845680e21c44afc2670607a95"}, + {file = "aiohttp-3.8.6-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d67f8baed00870aa390ea2590798766256f31dc5ed3ecc737debb6e97e2ede78"}, + {file = "aiohttp-3.8.6-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:86f20cee0f0a317c76573b627b954c412ea766d6ada1a9fcf1b805763ae7feeb"}, + {file = 
"aiohttp-3.8.6-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:39a312d0e991690ccc1a61f1e9e42daa519dcc34ad03eb6f826d94c1190190dd"}, + {file = "aiohttp-3.8.6-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e827d48cf802de06d9c935088c2924e3c7e7533377d66b6f31ed175c1620e05e"}, + {file = "aiohttp-3.8.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bd111d7fc5591ddf377a408ed9067045259ff2770f37e2d94e6478d0f3fc0c17"}, + {file = "aiohttp-3.8.6-cp39-cp39-win32.whl", hash = "sha256:caf486ac1e689dda3502567eb89ffe02876546599bbf915ec94b1fa424eeffd4"}, + {file = "aiohttp-3.8.6-cp39-cp39-win_amd64.whl", hash = "sha256:3f0e27e5b733803333bb2371249f41cf42bae8884863e8e8965ec69bebe53132"}, + {file = "aiohttp-3.8.6.tar.gz", hash = "sha256:b0cf2a4501bff9330a8a5248b4ce951851e415bdcce9dc158e76cfd55e15085c"}, +] + +[package.dependencies] +aiosignal = ">=1.1.2" +async-timeout = ">=4.0.0a3,<5.0" +attrs = ">=17.3.0" +charset-normalizer = ">=2.0,<4.0" +frozenlist = ">=1.1.1" +multidict = ">=4.5,<7.0" +yarl = ">=1.0,<2.0" + +[package.extras] +speedups = ["Brotli", "aiodns", "cchardet"] + +[[package]] +name = "aiosignal" +version = "1.3.1" +description = "aiosignal: a list of registered asynchronous callbacks" +optional = false +python-versions = ">=3.7" +files = [ + {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, + {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, +] + +[package.dependencies] +frozenlist = ">=1.1.0" + +[[package]] +name = "alembic" +version = "1.13.1" +description = "A database migration tool for SQLAlchemy." +optional = false +python-versions = ">=3.8" +files = [ + {file = "alembic-1.13.1-py3-none-any.whl", hash = "sha256:2edcc97bed0bd3272611ce3a98d98279e9c209e7186e43e75bbb1b2bdfdbcc43"}, + {file = "alembic-1.13.1.tar.gz", hash = "sha256:4932c8558bf68f2ee92b9bbcb8218671c627064d5b08939437af6d77dc05e595"}, +] + +[package.dependencies] +Mako = "*" +SQLAlchemy = ">=1.3.0" +typing-extensions = ">=4" + +[package.extras] +tz = ["backports.zoneinfo"] + +[[package]] +name = "annotated-types" +version = "0.6.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +files = [ + {file = "annotated_types-0.6.0-py3-none-any.whl", hash = "sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43"}, + {file = "annotated_types-0.6.0.tar.gz", hash = "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d"}, +] + +[[package]] +name = "anyio" +version = "4.2.0" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +optional = false +python-versions = ">=3.8" +files = [ + {file = "anyio-4.2.0-py3-none-any.whl", hash = "sha256:745843b39e829e108e518c489b31dc757de7d2131d53fac32bd8df268227bfee"}, + {file = "anyio-4.2.0.tar.gz", hash = "sha256:e1875bb4b4e2de1669f4bc7869b6d3f54231cdced71605e6e64c9be77e3be50f"}, +] + +[package.dependencies] +idna = ">=2.8" +sniffio = ">=1.1" + +[package.extras] +doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] +trio = ["trio (>=0.23)"] + +[[package]] +name = "async-timeout" +version = "4.0.3" +description = "Timeout context manager for asyncio programs" +optional = false 
+python-versions = ">=3.7" +files = [ + {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, + {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, +] + +[[package]] +name = "attrs" +version = "23.2.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] + +[[package]] +name = "backoff" +version = "2.2.1" +description = "Function decoration for backoff and retry" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, + {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, +] + +[[package]] +name = "certifi" +version = "2024.2.2" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = 
"charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = 
"charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = 
"sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "colorlog" +version = "6.8.2" +description = "Add colours to the output of Python's logging module." +optional = false +python-versions = ">=3.6" +files = [ + {file = "colorlog-6.8.2-py3-none-any.whl", hash = "sha256:4dcbb62368e2800cb3c5abd348da7e53f6c362dda502ec27c560b2e58a66bd33"}, + {file = "colorlog-6.8.2.tar.gz", hash = "sha256:3e3e079a41feb5a1b64f978b5ea4f46040a94f11f0e8bbb8261e3dbbeca64d44"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} + +[package.extras] +development = ["black", "flake8", "mypy", "pytest", "types-colorama"] + +[[package]] +name = "datasets" +version = "2.14.7" +description = "HuggingFace community-driven open-source library of datasets" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "datasets-2.14.7-py3-none-any.whl", hash = "sha256:1a64041a7da4f4130f736fc371c1f528b8ddd208cebe156400f65719bdbba79d"}, + {file = "datasets-2.14.7.tar.gz", hash = "sha256:394cf9b4ec0694b25945977b16ad5d18d5c15fb0e94141713eb8ead7452caf9e"}, +] + +[package.dependencies] +aiohttp = "*" +dill = ">=0.3.0,<0.3.8" +fsspec = {version = ">=2023.1.0,<=2023.10.0", extras = ["http"]} +huggingface-hub = ">=0.14.0,<1.0.0" +multiprocess = "*" +numpy = ">=1.17" +packaging = "*" +pandas = "*" +pyarrow = ">=8.0.0" +pyarrow-hotfix = "*" +pyyaml = ">=5.1" +requests = ">=2.19.0" +tqdm = ">=4.62.1" +xxhash = "*" + +[package.extras] +apache-beam = ["apache-beam (>=2.26.0,<2.44.0)"] +audio = ["librosa", "soundfile (>=0.12.1)"] +benchmarks = ["tensorflow (==2.12.0)", "torch (==2.0.1)", "transformers (==4.30.1)"] +dev = ["Pillow (>=6.2.1)", "absl-py", "apache-beam (>=2.26.0,<2.44.0)", "black (>=23.1,<24.0)", "elasticsearch (<8.0.0)", "faiss-cpu (>=1.6.4)", "joblib (<1.3.0)", "joblibspark", "librosa", "lz4", "py7zr", "pyspark (>=3.4)", "pytest", "pytest-datadir", "pytest-xdist", "pyyaml (>=5.3.1)", "rarfile (>=4.0)", "ruff (>=0.0.241)", "s3fs", "s3fs (>=2021.11.1)", "soundfile (>=0.12.1)", "sqlalchemy (<2.0.0)", "tensorflow (>=2.2.0,!=2.6.0,!=2.6.1)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "tensorflow-macos", "tiktoken", "torch", "transformers", "zstandard"] +docs = ["s3fs", "tensorflow (>=2.2.0,!=2.6.0,!=2.6.1)", "tensorflow-macos", "torch", "transformers"] 
+jax = ["jax (>=0.2.8,!=0.3.2,<=0.3.25)", "jaxlib (>=0.1.65,<=0.3.25)"] +metrics-tests = ["Werkzeug (>=1.0.1)", "accelerate", "bert-score (>=0.3.6)", "jiwer", "langdetect", "mauve-text", "nltk", "requests-file (>=1.5.1)", "rouge-score", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "six (>=1.15.0,<1.16.0)", "spacy (>=3.0.0)", "texttable (>=1.6.3)", "tldextract", "tldextract (>=3.1.0)", "toml (>=0.10.1)", "typer (<0.5.0)"] +quality = ["black (>=23.1,<24.0)", "pyyaml (>=5.3.1)", "ruff (>=0.0.241)"] +s3 = ["s3fs"] +tensorflow = ["tensorflow (>=2.2.0,!=2.6.0,!=2.6.1)", "tensorflow-macos"] +tensorflow-gpu = ["tensorflow-gpu (>=2.2.0,!=2.6.0,!=2.6.1)"] +tests = ["Pillow (>=6.2.1)", "absl-py", "apache-beam (>=2.26.0,<2.44.0)", "elasticsearch (<8.0.0)", "faiss-cpu (>=1.6.4)", "joblib (<1.3.0)", "joblibspark", "librosa", "lz4", "py7zr", "pyspark (>=3.4)", "pytest", "pytest-datadir", "pytest-xdist", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "soundfile (>=0.12.1)", "sqlalchemy (<2.0.0)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "tensorflow-macos", "tiktoken", "torch", "transformers", "zstandard"] +torch = ["torch"] +vision = ["Pillow (>=6.2.1)"] + +[[package]] +name = "dill" +version = "0.3.7" +description = "serialize all of Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "dill-0.3.7-py3-none-any.whl", hash = "sha256:76b122c08ef4ce2eedcd4d1abd8e641114bfc6c2867f49f3c41facf65bf19f5e"}, + {file = "dill-0.3.7.tar.gz", hash = "sha256:cc1c8b182eb3013e24bd475ff2e9295af86c1a38eb1aff128dac8962a9ce3c03"}, +] + +[package.extras] +graph = ["objgraph (>=1.7.2)"] + +[[package]] +name = "distro" +version = "1.9.0" +description = "Distro - an OS platform information API" +optional = false +python-versions = ">=3.6" +files = [ + {file = "distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2"}, + {file = "distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed"}, +] + +[[package]] +name = "dspy-ai" +version = "2.1.10" +description = "DSPy" +optional = false +python-versions = ">=3.9" +files = [ + {file = "dspy-ai-2.1.10.tar.gz", hash = "sha256:d2a344c073fc84d0f3d4ec4546de7093b3aa070275369a82d169dd18df674d43"}, + {file = "dspy_ai-2.1.10-py3-none-any.whl", hash = "sha256:8d00609fbccc4433dce9ad9a97664b21ca1e24dd341d91147257985a52e76de0"}, +] + +[package.dependencies] +backoff = ">=2.2.1,<2.3.0" +datasets = ">=2.14.6,<2.15.0" +joblib = ">=1.3.2,<1.4.0" +openai = ">=0.28.1,<2.0.0" +optuna = ">=3.4.0,<3.5.0" +pandas = ">=2.1.1,<2.2.0" +regex = ">=2023.10.3,<2023.11.0" +requests = ">=2.31.0,<2.32.0" +tqdm = ">=4.66.1,<4.67.0" +ujson = ">=5.8.0,<5.9.0" + +[package.extras] +chromadb = ["chromadb (>=0.4.14,<0.5.0)"] +docs = ["autodoc-pydantic", "docutils (<0.17)", "furo (>=2023.3.27)", "m2r2", "myst-nb", "myst-parser", "pydantic (<2.0.0)", "sphinx (>=4.3.0)", "sphinx-autobuild", "sphinx-automodapi (==0.16.0)", "sphinx-reredirects (>=0.1.2)", "sphinx-rtd-theme"] +marqo = ["marqo"] +mongodb = ["pymongo (>=3.12.0,<3.13.0)"] +pinecone = ["pinecone-client (>=2.2.4,<2.3.0)"] +qdrant = ["fastembed (>=0.1.0,<0.2.0)", "qdrant-client (>=1.6.2,<1.7.0)"] +weaviate = ["weaviate-client (>=3.26.1,<3.27.0)"] + +[[package]] +name = "filelock" +version = "3.13.1" +description = "A platform independent file lock." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "filelock-3.13.1-py3-none-any.whl", hash = "sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c"}, + {file = "filelock-3.13.1.tar.gz", hash = "sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.24)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] +typing = ["typing-extensions (>=4.8)"] + +[[package]] +name = "frozenlist" +version = "1.4.1" +description = "A list-like structure which implements collections.abc.MutableSequence" +optional = false +python-versions = ">=3.8" +files = [ + {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f9aa1878d1083b276b0196f2dfbe00c9b7e752475ed3b682025ff20c1c1f51ac"}, + {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29acab3f66f0f24674b7dc4736477bcd4bc3ad4b896f5f45379a67bce8b96868"}, + {file = "frozenlist-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:74fb4bee6880b529a0c6560885fce4dc95936920f9f20f53d99a213f7bf66776"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:590344787a90ae57d62511dd7c736ed56b428f04cd8c161fcc5e7232c130c69a"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:068b63f23b17df8569b7fdca5517edef76171cf3897eb68beb01341131fbd2ad"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c849d495bf5154cd8da18a9eb15db127d4dba2968d88831aff6f0331ea9bd4c"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9750cc7fe1ae3b1611bb8cfc3f9ec11d532244235d75901fb6b8e42ce9229dfe"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9b2de4cf0cdd5bd2dee4c4f63a653c61d2408055ab77b151c1957f221cabf2a"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0633c8d5337cb5c77acbccc6357ac49a1770b8c487e5b3505c57b949b4b82e98"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:27657df69e8801be6c3638054e202a135c7f299267f1a55ed3a598934f6c0d75"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:f9a3ea26252bd92f570600098783d1371354d89d5f6b7dfd87359d669f2109b5"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:4f57dab5fe3407b6c0c1cc907ac98e8a189f9e418f3b6e54d65a718aaafe3950"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e02a0e11cf6597299b9f3bbd3f93d79217cb90cfd1411aec33848b13f5c656cc"}, + {file = "frozenlist-1.4.1-cp310-cp310-win32.whl", hash = "sha256:a828c57f00f729620a442881cc60e57cfcec6842ba38e1b19fd3e47ac0ff8dc1"}, + {file = "frozenlist-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:f56e2333dda1fe0f909e7cc59f021eba0d2307bc6f012a1ccf2beca6ba362439"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a0cb6f11204443f27a1628b0e460f37fb30f624be6051d490fa7d7e26d4af3d0"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b46c8ae3a8f1f41a0d2ef350c0b6e65822d80772fe46b653ab6b6274f61d4a49"}, + {file = 
"frozenlist-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fde5bd59ab5357e3853313127f4d3565fc7dad314a74d7b5d43c22c6a5ed2ced"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:722e1124aec435320ae01ee3ac7bec11a5d47f25d0ed6328f2273d287bc3abb0"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2471c201b70d58a0f0c1f91261542a03d9a5e088ed3dc6c160d614c01649c106"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c757a9dd70d72b076d6f68efdbb9bc943665ae954dad2801b874c8c69e185068"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f146e0911cb2f1da549fc58fc7bcd2b836a44b79ef871980d605ec392ff6b0d2"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9c515e7914626b2a2e1e311794b4c35720a0be87af52b79ff8e1429fc25f19"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c302220494f5c1ebeb0912ea782bcd5e2f8308037b3c7553fad0e48ebad6ad82"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:442acde1e068288a4ba7acfe05f5f343e19fac87bfc96d89eb886b0363e977ec"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:1b280e6507ea8a4fa0c0a7150b4e526a8d113989e28eaaef946cc77ffd7efc0a"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:fe1a06da377e3a1062ae5fe0926e12b84eceb8a50b350ddca72dc85015873f74"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:db9e724bebd621d9beca794f2a4ff1d26eed5965b004a97f1f1685a173b869c2"}, + {file = "frozenlist-1.4.1-cp311-cp311-win32.whl", hash = "sha256:e774d53b1a477a67838a904131c4b0eef6b3d8a651f8b138b04f748fccfefe17"}, + {file = "frozenlist-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:fb3c2db03683b5767dedb5769b8a40ebb47d6f7f45b1b3e3b4b51ec8ad9d9825"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1979bc0aeb89b33b588c51c54ab0161791149f2461ea7c7c946d95d5f93b56ae"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cc7b01b3754ea68a62bd77ce6020afaffb44a590c2289089289363472d13aedb"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9c92be9fd329ac801cc420e08452b70e7aeab94ea4233a4804f0915c14eba9b"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c3894db91f5a489fc8fa6a9991820f368f0b3cbdb9cd8849547ccfab3392d86"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba60bb19387e13597fb059f32cd4d59445d7b18b69a745b8f8e5db0346f33480"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aefbba5f69d42246543407ed2461db31006b0f76c4e32dfd6f42215a2c41d09"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780d3a35680ced9ce682fbcf4cb9c2bad3136eeff760ab33707b71db84664e3a"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9acbb16f06fe7f52f441bb6f413ebae6c37baa6ef9edd49cdd567216da8600cd"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = 
"sha256:23b701e65c7b36e4bf15546a89279bd4d8675faabc287d06bbcfac7d3c33e1e6"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3e0153a805a98f5ada7e09826255ba99fb4f7524bb81bf6b47fb702666484ae1"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:dd9b1baec094d91bf36ec729445f7769d0d0cf6b64d04d86e45baf89e2b9059b"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:1a4471094e146b6790f61b98616ab8e44f72661879cc63fa1049d13ef711e71e"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5667ed53d68d91920defdf4035d1cdaa3c3121dc0b113255124bcfada1cfa1b8"}, + {file = "frozenlist-1.4.1-cp312-cp312-win32.whl", hash = "sha256:beee944ae828747fd7cb216a70f120767fc9f4f00bacae8543c14a6831673f89"}, + {file = "frozenlist-1.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:64536573d0a2cb6e625cf309984e2d873979709f2cf22839bf2d61790b448ad5"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:20b51fa3f588ff2fe658663db52a41a4f7aa6c04f6201449c6c7c476bd255c0d"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:410478a0c562d1a5bcc2f7ea448359fcb050ed48b3c6f6f4f18c313a9bdb1826"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c6321c9efe29975232da3bd0af0ad216800a47e93d763ce64f291917a381b8eb"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48f6a4533887e189dae092f1cf981f2e3885175f7a0f33c91fb5b7b682b6bab6"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6eb73fa5426ea69ee0e012fb59cdc76a15b1283d6e32e4f8dc4482ec67d1194d"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fbeb989b5cc29e8daf7f976b421c220f1b8c731cbf22b9130d8815418ea45887"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32453c1de775c889eb4e22f1197fe3bdfe457d16476ea407472b9442e6295f7a"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693945278a31f2086d9bf3df0fe8254bbeaef1fe71e1351c3bd730aa7d31c41b"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1d0ce09d36d53bbbe566fe296965b23b961764c0bcf3ce2fa45f463745c04701"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3a670dc61eb0d0eb7080890c13de3066790f9049b47b0de04007090807c776b0"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:dca69045298ce5c11fd539682cff879cc1e664c245d1c64da929813e54241d11"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a06339f38e9ed3a64e4c4e43aec7f59084033647f908e4259d279a52d3757d09"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b7f2f9f912dca3934c1baec2e4585a674ef16fe00218d833856408c48d5beee7"}, + {file = "frozenlist-1.4.1-cp38-cp38-win32.whl", hash = "sha256:e7004be74cbb7d9f34553a5ce5fb08be14fb33bc86f332fb71cbe5216362a497"}, + {file = "frozenlist-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:5a7d70357e7cee13f470c7883a063aae5fe209a493c57d86eb7f5a6f910fae09"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bfa4a17e17ce9abf47a74ae02f32d014c5e9404b6d9ac7f729e01562bbee601e"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:b7e3ed87d4138356775346e6845cccbe66cd9e207f3cd11d2f0b9fd13681359d"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c99169d4ff810155ca50b4da3b075cbde79752443117d89429595c2e8e37fed8"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edb678da49d9f72c9f6c609fbe41a5dfb9a9282f9e6a2253d5a91e0fc382d7c0"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6db4667b187a6742b33afbbaf05a7bc551ffcf1ced0000a571aedbb4aa42fc7b"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55fdc093b5a3cb41d420884cdaf37a1e74c3c37a31f46e66286d9145d2063bd0"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82e8211d69a4f4bc360ea22cd6555f8e61a1bd211d1d5d39d3d228b48c83a897"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89aa2c2eeb20957be2d950b85974b30a01a762f3308cd02bb15e1ad632e22dc7"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9d3e0c25a2350080e9319724dede4f31f43a6c9779be48021a7f4ebde8b2d742"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7268252af60904bf52c26173cbadc3a071cece75f873705419c8681f24d3edea"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:0c250a29735d4f15321007fb02865f0e6b6a41a6b88f1f523ca1596ab5f50bd5"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:96ec70beabbd3b10e8bfe52616a13561e58fe84c0101dd031dc78f250d5128b9"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:23b2d7679b73fe0e5a4560b672a39f98dfc6f60df63823b0a9970525325b95f6"}, + {file = "frozenlist-1.4.1-cp39-cp39-win32.whl", hash = "sha256:a7496bfe1da7fb1a4e1cc23bb67c58fab69311cc7d32b5a99c2007b4b2a0e932"}, + {file = "frozenlist-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e6a20a581f9ce92d389a8c7d7c3dd47c81fd5d6e655c8dddf341e14aa48659d0"}, + {file = "frozenlist-1.4.1-py3-none-any.whl", hash = "sha256:04ced3e6a46b4cfffe20f9ae482818e34eba9b5fb0ce4056e4cc9b6e212d09b7"}, + {file = "frozenlist-1.4.1.tar.gz", hash = "sha256:c037a86e8513059a2613aaba4d817bb90b9d9b6b69aace3ce9c877e8c8ed402b"}, +] + +[[package]] +name = "fsspec" +version = "2023.10.0" +description = "File-system specification" +optional = false +python-versions = ">=3.8" +files = [ + {file = "fsspec-2023.10.0-py3-none-any.whl", hash = "sha256:346a8f024efeb749d2a5fca7ba8854474b1ff9af7c3faaf636a4548781136529"}, + {file = "fsspec-2023.10.0.tar.gz", hash = "sha256:330c66757591df346ad3091a53bd907e15348c2ba17d63fd54f5c39c4457d2a5"}, +] + +[package.dependencies] +aiohttp = {version = "<4.0.0a0 || >4.0.0a0,<4.0.0a1 || >4.0.0a1", optional = true, markers = "extra == \"http\""} +requests = {version = "*", optional = true, markers = "extra == \"http\""} + +[package.extras] +abfs = ["adlfs"] +adl = ["adlfs"] +arrow = ["pyarrow (>=1)"] +dask = ["dask", "distributed"] +devel = ["pytest", "pytest-cov"] +dropbox = ["dropbox", "dropboxdrivefs", "requests"] +full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "dask", "distributed", "dropbox", "dropboxdrivefs", "fusepy", "gcsfs", "libarchive-c", "ocifs", "panel", "paramiko", "pyarrow (>=1)", "pygit2", "requests", "s3fs", "smbprotocol", "tqdm"] +fuse = ["fusepy"] +gcs = ["gcsfs"] +git = ["pygit2"] +github = ["requests"] +gs = 
["gcsfs"] +gui = ["panel"] +hdfs = ["pyarrow (>=1)"] +http = ["aiohttp (!=4.0.0a0,!=4.0.0a1)", "requests"] +libarchive = ["libarchive-c"] +oci = ["ocifs"] +s3 = ["s3fs"] +sftp = ["paramiko"] +smb = ["smbprotocol"] +ssh = ["paramiko"] +tqdm = ["tqdm"] + +[[package]] +name = "greenlet" +version = "3.0.3" +description = "Lightweight in-process concurrent programming" +optional = false +python-versions = ">=3.7" +files = [ + {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405"}, + {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f"}, + {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb"}, + {file = "greenlet-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9"}, + {file = "greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22"}, + {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3"}, + {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d"}, + {file = "greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728"}, + {file = "greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf"}, + {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305"}, + {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6"}, + {file = "greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2"}, + {file = "greenlet-3.0.3-cp37-cp37m-macosx_11_0_universal2.whl", hash = "sha256:5b51e85cb5ceda94e79d019ed36b35386e8c37d22f07d6a751cb659b180d5274"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daf3cb43b7cf2ba96d614252ce1684c1bccee6b2183a01328c98d36fcd7d5cb0"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99bf650dc5d69546e076f413a87481ee1d2d09aaaaaca058c9251b6d8c14783f"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dd6e660effd852586b6a8478a1d244b8dc90ab5b1321751d2ea15deb49ed414"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3391d1e16e2a5a1507d83e4a8b100f4ee626e8eca43cf2cadb543de69827c4c"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1f145462f1fa6e4a4ae3c0f782e580ce44d57c8f2c7aae1b6fa88c0b2efdb41"}, + {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1a7191e42732df52cb5f39d3527217e7ab73cae2cb3694d241e18f53d84ea9a7"}, + {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0448abc479fab28b00cb472d278828b3ccca164531daab4e970a0458786055d6"}, + {file = "greenlet-3.0.3-cp37-cp37m-win32.whl", hash = "sha256:b542be2440edc2d48547b5923c408cbe0fc94afb9f18741faa6ae970dbcb9b6d"}, + {file = "greenlet-3.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:01bc7ea167cf943b4c802068e178bbf70ae2e8c080467070d01bfa02f337ee67"}, + {file = "greenlet-3.0.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:1996cb9306c8595335bb157d133daf5cf9f693ef413e7673cb07e3e5871379ca"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ddc0f794e6ad661e321caa8d2f0a55ce01213c74722587256fb6566049a8b04"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9db1c18f0eaad2f804728c67d6c610778456e3e1cc4ab4bbd5eeb8e6053c6fc"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7170375bcc99f1a2fbd9c306f5be8764eaf3ac6b5cb968862cad4c7057756506"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:6b66c9c1e7ccabad3a7d037b2bcb740122a7b17a53734b7d72a344ce39882a1b"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:098d86f528c855ead3479afe84b49242e174ed262456c342d70fc7f972bc13c4"}, + {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:81bb9c6d52e8321f09c3d165b2a78c680506d9af285bfccbad9fb7ad5a5da3e5"}, + {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd096eb7ffef17c456cfa587523c5f92321ae02427ff955bebe9e3c63bc9f0da"}, + {file = "greenlet-3.0.3-cp38-cp38-win32.whl", hash = "sha256:d46677c85c5ba00a9cb6f7a00b2bfa6f812192d2c9f7d9c4f6a55b60216712f3"}, + {file = "greenlet-3.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:419b386f84949bf0e7c73e6032e3457b82a787c1ab4a0e43732898a761cc9dbf"}, + {file = "greenlet-3.0.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:da70d4d51c8b306bb7a031d5cff6cc25ad253affe89b70352af5f1cb68e74b53"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086152f8fbc5955df88382e8a75984e2bb1c892ad2e3c80a2508954e52295257"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d73a9fe764d77f87f8ec26a0c85144d6a951a6c438dfe50487df5595c6373eac"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7dcbe92cc99f08c8dd11f930de4d99ef756c3591a5377d1d9cd7dd5e896da71"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1551a8195c0d4a68fac7a4325efac0d541b48def35feb49d803674ac32582f61"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:64d7675ad83578e3fc149b617a444fab8efdafc9385471f868eb5ff83e446b8b"}, + {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b37eef18ea55f2ffd8f00ff8fe7c8d3818abd3e25fb73fae2ca3b672e333a7a6"}, + {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:77457465d89b8263bca14759d7c1684df840b6811b2499838cc5b040a8b5b113"}, + {file = "greenlet-3.0.3-cp39-cp39-win32.whl", hash = "sha256:57e8974f23e47dac22b83436bdcf23080ade568ce77df33159e019d161ce1d1e"}, + {file = "greenlet-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c5ee858cfe08f34712f548c3c363e807e7186f03ad7a5039ebadb29e8c6be067"}, + {file = "greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491"}, +] + +[package.extras] +docs = ["Sphinx", "furo"] +test = ["objgraph", "psutil"] + +[[package]] +name = "h11" +version = "0.14.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +optional = false +python-versions = ">=3.7" +files = [ + {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, +] + +[[package]] +name = "honcho-ai" +version = "0.0.3" +description = "Python Client SDK for Honcho" +optional = false +python-versions = ">=3.10,<4.0" +files = [ + {file = "honcho_ai-0.0.3-py3-none-any.whl", hash = "sha256:a817ec62c4fd8dad1d629927511ce98a3f626f4bc55474187b80010e208e61ba"}, + {file = "honcho_ai-0.0.3.tar.gz", hash = "sha256:ca52bb8c5036bfdbeee0c71ca754c580c672b28a4824240123b783f8679ca18e"}, +] + +[package.dependencies] +httpx = ">=0.26.0,<0.27.0" + +[[package]] +name = "httpcore" +version = "1.0.3" +description = "A minimal low-level HTTP client." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "httpcore-1.0.3-py3-none-any.whl", hash = "sha256:9a6a501c3099307d9fd76ac244e08503427679b1e81ceb1d922485e2f2462ad2"}, + {file = "httpcore-1.0.3.tar.gz", hash = "sha256:5c0f9546ad17dac4d0772b0808856eb616eb8b48ce94f49ed819fd6982a8a544"}, +] + +[package.dependencies] +certifi = "*" +h11 = ">=0.13,<0.15" + +[package.extras] +asyncio = ["anyio (>=4.0,<5.0)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +trio = ["trio (>=0.22.0,<0.24.0)"] + +[[package]] +name = "httpx" +version = "0.26.0" +description = "The next generation HTTP client." +optional = false +python-versions = ">=3.8" +files = [ + {file = "httpx-0.26.0-py3-none-any.whl", hash = "sha256:8915f5a3627c4d47b73e8202457cb28f1266982d1159bd5779d86a80c0eab1cd"}, + {file = "httpx-0.26.0.tar.gz", hash = "sha256:451b55c30d5185ea6b23c2c793abf9bb237d2a7dfb901ced6ff69ad37ec1dfaf"}, +] + +[package.dependencies] +anyio = "*" +certifi = "*" +httpcore = "==1.*" +idna = "*" +sniffio = "*" + +[package.extras] +brotli = ["brotli", "brotlicffi"] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] + +[[package]] +name = "huggingface-hub" +version = "0.20.3" +description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "huggingface_hub-0.20.3-py3-none-any.whl", hash = "sha256:d988ae4f00d3e307b0c80c6a05ca6dbb7edba8bba3079f74cda7d9c2e562a7b6"}, + {file = "huggingface_hub-0.20.3.tar.gz", hash = "sha256:94e7f8e074475fbc67d6a71957b678e1b4a74ff1b64a644fd6cbb83da962d05d"}, +] + +[package.dependencies] +filelock = "*" +fsspec = ">=2023.5.0" +packaging = ">=20.9" +pyyaml = ">=5.1" +requests = "*" +tqdm = ">=4.42.1" +typing-extensions = ">=3.7.4.3" + +[package.extras] +all = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "jedi", "mypy (==1.5.1)", "numpy", "pydantic (>1.1,<2.0)", "pydantic (>1.1,<3.0)", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.1.3)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] +cli = ["InquirerPy (==0.3.4)"] +dev = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "jedi", "mypy (==1.5.1)", "numpy", "pydantic (>1.1,<2.0)", "pydantic (>1.1,<3.0)", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.1.3)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] +fastai = ["fastai (>=2.4)", "fastcore (>=1.3.27)", "toml"] +inference = ["aiohttp", "pydantic (>1.1,<2.0)", "pydantic (>1.1,<3.0)"] +quality = ["mypy (==1.5.1)", "ruff (>=0.1.3)"] +tensorflow = ["graphviz", "pydot", "tensorflow"] +testing = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "jedi", "numpy", "pydantic (>1.1,<2.0)", "pydantic (>1.1,<3.0)", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "soundfile", "urllib3 (<2.0)"] +torch = ["torch"] +typing = ["types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)"] + +[[package]] +name = "idna" +version = "3.6" +description = 
"Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, + {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, +] + +[[package]] +name = "joblib" +version = "1.3.2" +description = "Lightweight pipelining with Python functions" +optional = false +python-versions = ">=3.7" +files = [ + {file = "joblib-1.3.2-py3-none-any.whl", hash = "sha256:ef4331c65f239985f3f2220ecc87db222f08fd22097a3dd5698f693875f8cbb9"}, + {file = "joblib-1.3.2.tar.gz", hash = "sha256:92f865e621e17784e7955080b6d042489e3b8e294949cc44c6eac304f59772b1"}, +] + +[[package]] +name = "jsonpatch" +version = "1.33" +description = "Apply JSON-Patches (RFC 6902)" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" +files = [ + {file = "jsonpatch-1.33-py2.py3-none-any.whl", hash = "sha256:0ae28c0cd062bbd8b8ecc26d7d164fbbea9652a1a3693f3b956c1eae5145dade"}, + {file = "jsonpatch-1.33.tar.gz", hash = "sha256:9fcd4009c41e6d12348b4a0ff2563ba56a2923a7dfee731d004e212e1ee5030c"}, +] + +[package.dependencies] +jsonpointer = ">=1.9" + +[[package]] +name = "jsonpointer" +version = "2.4" +description = "Identify specific nodes in a JSON document (RFC 6901)" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" +files = [ + {file = "jsonpointer-2.4-py2.py3-none-any.whl", hash = "sha256:15d51bba20eea3165644553647711d150376234112651b4f1811022aecad7d7a"}, + {file = "jsonpointer-2.4.tar.gz", hash = "sha256:585cee82b70211fa9e6043b7bb89db6e1aa49524340dde8ad6b63206ea689d88"}, +] + +[[package]] +name = "langchain-core" +version = "0.1.23" +description = "Building applications with LLMs through composability" +optional = false +python-versions = ">=3.8.1,<4.0" +files = [ + {file = "langchain_core-0.1.23-py3-none-any.whl", hash = "sha256:d42fac013c39a8b0bcd7e337a4cb6c17c16046c60d768f89df582ad73ec3c5cb"}, + {file = "langchain_core-0.1.23.tar.gz", hash = "sha256:34359cc8b6f8c3d45098c54a6a9b35c9f538ef58329cd943a2249d6d7b4e5806"}, +] + +[package.dependencies] +anyio = ">=3,<5" +jsonpatch = ">=1.33,<2.0" +langsmith = ">=0.0.87,<0.0.88" +packaging = ">=23.2,<24.0" +pydantic = ">=1,<3" +PyYAML = ">=5.3" +requests = ">=2,<3" +tenacity = ">=8.1.0,<9.0.0" + +[package.extras] +extended-testing = ["jinja2 (>=3,<4)"] + +[[package]] +name = "langchain-openai" +version = "0.0.6" +description = "An integration package connecting OpenAI and LangChain" +optional = false +python-versions = ">=3.8.1,<4.0" +files = [ + {file = "langchain_openai-0.0.6-py3-none-any.whl", hash = "sha256:2ef040e4447a26a9d3bd45dfac9cefa00797ea58555a3d91ab4f88699eb3a005"}, + {file = "langchain_openai-0.0.6.tar.gz", hash = "sha256:f5c4ebe46f2c8635c8f0c26cc8df27700aacafea025410e418d5a080039974dd"}, +] + +[package.dependencies] +langchain-core = ">=0.1.16,<0.2" +numpy = ">=1,<2" +openai = ">=1.10.0,<2.0.0" +tiktoken = ">=0.5.2,<1" + +[[package]] +name = "langsmith" +version = "0.0.87" +description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." 
+optional = false +python-versions = ">=3.8.1,<4.0" +files = [ + {file = "langsmith-0.0.87-py3-none-any.whl", hash = "sha256:8903d3811b9fc89eb18f5961c8e6935fbd2d0f119884fbf30dc70b8f8f4121fc"}, + {file = "langsmith-0.0.87.tar.gz", hash = "sha256:36c4cc47e5b54be57d038036a30fb19ce6e4c73048cd7a464b8f25b459694d34"}, +] + +[package.dependencies] +pydantic = ">=1,<3" +requests = ">=2,<3" + +[[package]] +name = "mako" +version = "1.3.2" +description = "A super-fast templating language that borrows the best ideas from the existing templating languages." +optional = false +python-versions = ">=3.8" +files = [ + {file = "Mako-1.3.2-py3-none-any.whl", hash = "sha256:32a99d70754dfce237019d17ffe4a282d2d3351b9c476e90d8a60e63f133b80c"}, + {file = "Mako-1.3.2.tar.gz", hash = "sha256:2a0c8ad7f6274271b3bb7467dd37cf9cc6dab4bc19cb69a4ef10669402de698e"}, +] + +[package.dependencies] +MarkupSafe = ">=0.9.2" + +[package.extras] +babel = ["Babel"] +lingua = ["lingua"] +testing = ["pytest"] + +[[package]] +name = "markupsafe" +version = "2.1.5" +description = "Safely add untrusted strings to HTML/XML markup." +optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = 
"MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, +] + +[[package]] +name = "multidict" +version = "6.0.5" 
+description = "multidict implementation" +optional = false +python-versions = ">=3.7" +files = [ + {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b644ae063c10e7f324ab1ab6b548bdf6f8b47f3ec234fef1093bc2735e5f9"}, + {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:896ebdcf62683551312c30e20614305f53125750803b614e9e6ce74a96232604"}, + {file = "multidict-6.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:411bf8515f3be9813d06004cac41ccf7d1cd46dfe233705933dd163b60e37600"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d147090048129ce3c453f0292e7697d333db95e52616b3793922945804a433c"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:215ed703caf15f578dca76ee6f6b21b7603791ae090fbf1ef9d865571039ade5"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c6390cf87ff6234643428991b7359b5f59cc15155695deb4eda5c777d2b880f"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fd81c4ebdb4f214161be351eb5bcf385426bf023041da2fd9e60681f3cebae"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cc2ad10255f903656017363cd59436f2111443a76f996584d1077e43ee51182"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6939c95381e003f54cd4c5516740faba40cf5ad3eeff460c3ad1d3e0ea2549bf"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:220dd781e3f7af2c2c1053da9fa96d9cf3072ca58f057f4c5adaaa1cab8fc442"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:766c8f7511df26d9f11cd3a8be623e59cca73d44643abab3f8c8c07620524e4a"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fe5d7785250541f7f5019ab9cba2c71169dc7d74d0f45253f8313f436458a4ef"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c1c1496e73051918fcd4f58ff2e0f2f3066d1c76a0c6aeffd9b45d53243702cc"}, + {file = "multidict-6.0.5-cp310-cp310-win32.whl", hash = "sha256:7afcdd1fc07befad18ec4523a782cde4e93e0a2bf71239894b8d61ee578c1319"}, + {file = "multidict-6.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:99f60d34c048c5c2fabc766108c103612344c46e35d4ed9ae0673d33c8fb26e8"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f285e862d2f153a70586579c15c44656f888806ed0e5b56b64489afe4a2dbfba"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:53689bb4e102200a4fafa9de9c7c3c212ab40a7ab2c8e474491914d2305f187e"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:612d1156111ae11d14afaf3a0669ebf6c170dbb735e510a7438ffe2369a847fd"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7be7047bd08accdb7487737631d25735c9a04327911de89ff1b26b81745bd4e3"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de170c7b4fe6859beb8926e84f7d7d6c693dfe8e27372ce3b76f01c46e489fcf"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04bde7a7b3de05732a4eb39c94574db1ec99abb56162d6c520ad26f83267de29"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:85f67aed7bb647f93e7520633d8f51d3cbc6ab96957c71272b286b2f30dc70ed"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425bf820055005bfc8aa9a0b99ccb52cc2f4070153e34b701acc98d201693733"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d3eb1ceec286eba8220c26f3b0096cf189aea7057b6e7b7a2e60ed36b373b77f"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7901c05ead4b3fb75113fb1dd33eb1253c6d3ee37ce93305acd9d38e0b5f21a4"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e0e79d91e71b9867c73323a3444724d496c037e578a0e1755ae159ba14f4f3d1"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:29bfeb0dff5cb5fdab2023a7a9947b3b4af63e9c47cae2a10ad58394b517fddc"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e030047e85cbcedbfc073f71836d62dd5dadfbe7531cae27789ff66bc551bd5e"}, + {file = "multidict-6.0.5-cp311-cp311-win32.whl", hash = "sha256:2f4848aa3baa109e6ab81fe2006c77ed4d3cd1e0ac2c1fbddb7b1277c168788c"}, + {file = "multidict-6.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:2faa5ae9376faba05f630d7e5e6be05be22913782b927b19d12b8145968a85ea"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:51d035609b86722963404f711db441cf7134f1889107fb171a970c9701f92e1e"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cbebcd5bcaf1eaf302617c114aa67569dd3f090dd0ce8ba9e35e9985b41ac35b"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ffc42c922dbfddb4a4c3b438eb056828719f07608af27d163191cb3e3aa6cc5"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ceb3b7e6a0135e092de86110c5a74e46bda4bd4fbfeeb3a3bcec79c0f861e450"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79660376075cfd4b2c80f295528aa6beb2058fd289f4c9252f986751a4cd0496"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4428b29611e989719874670fd152b6625500ad6c686d464e99f5aaeeaca175a"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d84a5c3a5f7ce6db1f999fb9438f686bc2e09d38143f2d93d8406ed2dd6b9226"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76c0de87358b192de7ea9649beb392f107dcad9ad27276324c24c91774ca5271"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:79a6d2ba910adb2cbafc95dad936f8b9386e77c84c35bc0add315b856d7c3abb"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:92d16a3e275e38293623ebf639c471d3e03bb20b8ebb845237e0d3664914caef"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:fb616be3538599e797a2017cccca78e354c767165e8858ab5116813146041a24"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:14c2976aa9038c2629efa2c148022ed5eb4cb939e15ec7aace7ca932f48f9ba6"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:435a0984199d81ca178b9ae2c26ec3d49692d20ee29bc4c11a2a8d4514c67eda"}, + {file = "multidict-6.0.5-cp312-cp312-win32.whl", hash = "sha256:9fe7b0653ba3d9d65cbe7698cca585bf0f8c83dbbcc710db9c90f478e175f2d5"}, + {file = 
"multidict-6.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:01265f5e40f5a17f8241d52656ed27192be03bfa8764d88e8220141d1e4b3556"}, + {file = "multidict-6.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:19fe01cea168585ba0f678cad6f58133db2aa14eccaf22f88e4a6dccadfad8b3"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6bf7a982604375a8d49b6cc1b781c1747f243d91b81035a9b43a2126c04766f5"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:107c0cdefe028703fb5dafe640a409cb146d44a6ae201e55b35a4af8e95457dd"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:403c0911cd5d5791605808b942c88a8155c2592e05332d2bf78f18697a5fa15e"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aeaf541ddbad8311a87dd695ed9642401131ea39ad7bc8cf3ef3967fd093b626"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4972624066095e52b569e02b5ca97dbd7a7ddd4294bf4e7247d52635630dd83"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d946b0a9eb8aaa590df1fe082cee553ceab173e6cb5b03239716338629c50c7a"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b55358304d7a73d7bdf5de62494aaf70bd33015831ffd98bc498b433dfe5b10c"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:a3145cb08d8625b2d3fee1b2d596a8766352979c9bffe5d7833e0503d0f0b5e5"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d65f25da8e248202bd47445cec78e0025c0fe7582b23ec69c3b27a640dd7a8e3"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c9bf56195c6bbd293340ea82eafd0071cb3d450c703d2c93afb89f93b8386ccc"}, + {file = "multidict-6.0.5-cp37-cp37m-win32.whl", hash = "sha256:69db76c09796b313331bb7048229e3bee7928eb62bab5e071e9f7fcc4879caee"}, + {file = "multidict-6.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:fce28b3c8a81b6b36dfac9feb1de115bab619b3c13905b419ec71d03a3fc1423"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:76f067f5121dcecf0d63a67f29080b26c43c71a98b10c701b0677e4a065fbd54"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b82cc8ace10ab5bd93235dfaab2021c70637005e1ac787031f4d1da63d493c1d"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5cb241881eefd96b46f89b1a056187ea8e9ba14ab88ba632e68d7a2ecb7aadf7"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8e94e6912639a02ce173341ff62cc1201232ab86b8a8fcc05572741a5dc7d93"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09a892e4a9fb47331da06948690ae38eaa2426de97b4ccbfafbdcbe5c8f37ff8"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55205d03e8a598cfc688c71ca8ea5f66447164efff8869517f175ea632c7cb7b"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37b15024f864916b4951adb95d3a80c9431299080341ab9544ed148091b53f50"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2a1dee728b52b33eebff5072817176c172050d44d67befd681609b4746e1c2e"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:edd08e6f2f1a390bf137080507e44ccc086353c8e98c657e666c017718561b89"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:60d698e8179a42ec85172d12f50b1668254628425a6bd611aba022257cac1386"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3d25f19500588cbc47dc19081d78131c32637c25804df8414463ec908631e453"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4cc0ef8b962ac7a5e62b9e826bd0cd5040e7d401bc45a6835910ed699037a461"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:eca2e9d0cc5a889850e9bbd68e98314ada174ff6ccd1129500103df7a94a7a44"}, + {file = "multidict-6.0.5-cp38-cp38-win32.whl", hash = "sha256:4a6a4f196f08c58c59e0b8ef8ec441d12aee4125a7d4f4fef000ccb22f8d7241"}, + {file = "multidict-6.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:0275e35209c27a3f7951e1ce7aaf93ce0d163b28948444bec61dd7badc6d3f8c"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e7be68734bd8c9a513f2b0cfd508802d6609da068f40dc57d4e3494cefc92929"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1d9ea7a7e779d7a3561aade7d596649fbecfa5c08a7674b11b423783217933f9"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ea1456df2a27c73ce51120fa2f519f1bea2f4a03a917f4a43c8707cf4cbbae1a"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf590b134eb70629e350691ecca88eac3e3b8b3c86992042fb82e3cb1830d5e1"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5c0631926c4f58e9a5ccce555ad7747d9a9f8b10619621f22f9635f069f6233e"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dce1c6912ab9ff5f179eaf6efe7365c1f425ed690b03341911bf4939ef2f3046"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0868d64af83169e4d4152ec612637a543f7a336e4a307b119e98042e852ad9c"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:141b43360bfd3bdd75f15ed811850763555a251e38b2405967f8e25fb43f7d40"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7df704ca8cf4a073334e0427ae2345323613e4df18cc224f647f251e5e75a527"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6214c5a5571802c33f80e6c84713b2c79e024995b9c5897f794b43e714daeec9"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:cd6c8fca38178e12c00418de737aef1261576bd1b6e8c6134d3e729a4e858b38"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e02021f87a5b6932fa6ce916ca004c4d441509d33bbdbeca70d05dff5e9d2479"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ebd8d160f91a764652d3e51ce0d2956b38efe37c9231cd82cfc0bed2e40b581c"}, + {file = "multidict-6.0.5-cp39-cp39-win32.whl", hash = "sha256:04da1bb8c8dbadf2a18a452639771951c662c5ad03aefe4884775454be322c9b"}, + {file = "multidict-6.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:d6f6d4f185481c9669b9447bf9d9cf3b95a0e9df9d169bbc17e363b7d5487755"}, + {file = "multidict-6.0.5-py3-none-any.whl", hash = "sha256:0d63c74e3d7ab26de115c49bffc92cc77ed23395303d496eae515d4204a625e7"}, + {file = "multidict-6.0.5.tar.gz", hash = "sha256:f7e301075edaf50500f0b341543c41194d8df3ae5caf4702f2095f3ca73dd8da"}, +] + +[[package]] +name = "multiprocess" +version = 
"0.70.15" +description = "better multiprocessing and multithreading in Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "multiprocess-0.70.15-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:aa36c7ed16f508091438687fe9baa393a7a8e206731d321e443745e743a0d4e5"}, + {file = "multiprocess-0.70.15-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:20e024018c46d0d1602024c613007ac948f9754659e3853b0aa705e83f6931d8"}, + {file = "multiprocess-0.70.15-pp37-pypy37_pp73-manylinux_2_24_i686.whl", hash = "sha256:e576062981c91f0fe8a463c3d52506e598dfc51320a8dd8d78b987dfca91c5db"}, + {file = "multiprocess-0.70.15-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:e73f497e6696a0f5433ada2b3d599ae733b87a6e8b008e387c62ac9127add177"}, + {file = "multiprocess-0.70.15-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:73db2e7b32dcc7f9b0f075c2ffa45c90b6729d3f1805f27e88534c8d321a1be5"}, + {file = "multiprocess-0.70.15-pp38-pypy38_pp73-manylinux_2_24_i686.whl", hash = "sha256:4271647bd8a49c28ecd6eb56a7fdbd3c212c45529ad5303b40b3c65fc6928e5f"}, + {file = "multiprocess-0.70.15-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:cf981fb998d6ec3208cb14f0cf2e9e80216e834f5d51fd09ebc937c32b960902"}, + {file = "multiprocess-0.70.15-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:18f9f2c7063346d1617bd1684fdcae8d33380ae96b99427260f562e1a1228b67"}, + {file = "multiprocess-0.70.15-pp39-pypy39_pp73-manylinux_2_24_i686.whl", hash = "sha256:0eac53214d664c49a34695e5824872db4006b1a465edd7459a251809c3773370"}, + {file = "multiprocess-0.70.15-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:1a51dd34096db47fb21fa2b839e615b051d51b97af9a67afbcdaa67186b44883"}, + {file = "multiprocess-0.70.15-py310-none-any.whl", hash = "sha256:7dd58e33235e83cf09d625e55cffd7b0f0eede7ee9223cdd666a87624f60c21a"}, + {file = "multiprocess-0.70.15-py311-none-any.whl", hash = "sha256:134f89053d82c9ed3b73edd3a2531eb791e602d4f4156fc92a79259590bd9670"}, + {file = "multiprocess-0.70.15-py37-none-any.whl", hash = "sha256:f7d4a1629bccb433114c3b4885f69eccc200994323c80f6feee73b0edc9199c5"}, + {file = "multiprocess-0.70.15-py38-none-any.whl", hash = "sha256:bee9afba476c91f9ebee7beeee0601face9eff67d822e893f9a893725fbd6316"}, + {file = "multiprocess-0.70.15-py39-none-any.whl", hash = "sha256:3e0953f5d52b4c76f1c973eaf8214554d146f2be5decb48e928e55c7a2d19338"}, + {file = "multiprocess-0.70.15.tar.gz", hash = "sha256:f20eed3036c0ef477b07a4177cf7c1ba520d9a2677870a4f47fe026f0cd6787e"}, +] + +[package.dependencies] +dill = ">=0.3.7" + +[[package]] +name = "numpy" +version = "1.26.4" +description = "Fundamental package for array computing in Python" +optional = false +python-versions = ">=3.9" +files = [ + {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, + {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, + {file = 
"numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, + {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, + {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, + {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, + {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, + {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, + {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, + {file = 
"numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, + {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, + {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, + {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, +] + +[[package]] +name = "openai" +version = "1.12.0" +description = "The official Python library for the openai API" +optional = false +python-versions = ">=3.7.1" +files = [ + {file = "openai-1.12.0-py3-none-any.whl", hash = "sha256:a54002c814e05222e413664f651b5916714e4700d041d5cf5724d3ae1a3e3481"}, + {file = "openai-1.12.0.tar.gz", hash = "sha256:99c5d257d09ea6533d689d1cc77caa0ac679fa21efef8893d8b0832a86877f1b"}, +] + +[package.dependencies] +anyio = ">=3.5.0,<5" +distro = ">=1.7.0,<2" +httpx = ">=0.23.0,<1" +pydantic = ">=1.9.0,<3" +sniffio = "*" +tqdm = ">4" +typing-extensions = ">=4.7,<5" + +[package.extras] +datalib = ["numpy (>=1)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"] + +[[package]] +name = "optuna" +version = "3.4.0" +description = "A hyperparameter optimization framework" +optional = false +python-versions = ">=3.7" +files = [ + {file = "optuna-3.4.0-py3-none-any.whl", hash = "sha256:4854a6e6ec68eae3f1cbf18525abde1fcf2a22dd4f3b79912e5d43f539ba8eb1"}, + {file = "optuna-3.4.0.tar.gz", hash = "sha256:aa4a2d294ca047d7dc16292707c018cc619bdde1ec435d34721311428942d2db"}, +] + +[package.dependencies] +alembic = ">=1.5.0" +colorlog = "*" +numpy = "*" +packaging = ">=20.0" +PyYAML = "*" +sqlalchemy = ">=1.3.0" +tqdm = "*" + +[package.extras] +benchmark = ["asv (>=0.5.0)", "botorch", "cma", "scikit-optimize", "virtualenv"] +checking = ["black", "blackdoc", "flake8", "isort", "mypy", "mypy-boto3-s3", "types-PyYAML", "types-redis", "types-setuptools", "types-tqdm", "typing-extensions (>=3.10.0.0)"] +document = ["ase", "botorch", "cma", "cmaes (>=0.10.0)", "distributed", "fvcore", "lightgbm", "matplotlib (!=3.6.0)", "mlflow", "pandas", "pillow", "plotly (>=4.9.0)", "scikit-learn", "scikit-optimize", "sphinx", "sphinx-copybutton", "sphinx-gallery", "sphinx-plotly-directive", "sphinx-rtd-theme (>=1.2.0)", "torch", "torchaudio", "torchvision"] +integration = ["botorch (>=0.4.0)", "catboost (>=0.26)", "catboost (>=0.26,<1.2)", "cma", "distributed", "fastai", "lightgbm", "mlflow", "pandas", "pytorch-ignite", "pytorch-lightning (>=1.6.0)", "scikit-learn (>=0.24.2)", "scikit-optimize", "shap", "tensorflow", "torch", "torchaudio", "torchvision", "wandb", "xgboost"] +optional = ["boto3", "botorch", "cmaes (>=0.10.0)", "google-cloud-storage", "matplotlib (!=3.6.0)", "pandas", "plotly (>=4.9.0)", "redis", "scikit-learn (>=0.24.2)"] +test = ["coverage", "fakeredis[lua]", "kaleido", "moto", "pytest", "scipy (>=1.9.2)"] + +[[package]] +name = "packaging" +version = "23.2" +description = "Core utilities for Python packages" +optional = 
false +python-versions = ">=3.7" +files = [ + {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, +] + +[[package]] +name = "pandas" +version = "2.1.4" +description = "Powerful data structures for data analysis, time series, and statistics" +optional = false +python-versions = ">=3.9" +files = [ + {file = "pandas-2.1.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bdec823dc6ec53f7a6339a0e34c68b144a7a1fd28d80c260534c39c62c5bf8c9"}, + {file = "pandas-2.1.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:294d96cfaf28d688f30c918a765ea2ae2e0e71d3536754f4b6de0ea4a496d034"}, + {file = "pandas-2.1.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b728fb8deba8905b319f96447a27033969f3ea1fea09d07d296c9030ab2ed1d"}, + {file = "pandas-2.1.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00028e6737c594feac3c2df15636d73ace46b8314d236100b57ed7e4b9ebe8d9"}, + {file = "pandas-2.1.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:426dc0f1b187523c4db06f96fb5c8d1a845e259c99bda74f7de97bd8a3bb3139"}, + {file = "pandas-2.1.4-cp310-cp310-win_amd64.whl", hash = "sha256:f237e6ca6421265643608813ce9793610ad09b40154a3344a088159590469e46"}, + {file = "pandas-2.1.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b7d852d16c270e4331f6f59b3e9aa23f935f5c4b0ed2d0bc77637a8890a5d092"}, + {file = "pandas-2.1.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bd7d5f2f54f78164b3d7a40f33bf79a74cdee72c31affec86bfcabe7e0789821"}, + {file = "pandas-2.1.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0aa6e92e639da0d6e2017d9ccff563222f4eb31e4b2c3cf32a2a392fc3103c0d"}, + {file = "pandas-2.1.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d797591b6846b9db79e65dc2d0d48e61f7db8d10b2a9480b4e3faaddc421a171"}, + {file = "pandas-2.1.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d2d3e7b00f703aea3945995ee63375c61b2e6aa5aa7871c5d622870e5e137623"}, + {file = "pandas-2.1.4-cp311-cp311-win_amd64.whl", hash = "sha256:dc9bf7ade01143cddc0074aa6995edd05323974e6e40d9dbde081021ded8510e"}, + {file = "pandas-2.1.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:482d5076e1791777e1571f2e2d789e940dedd927325cc3cb6d0800c6304082f6"}, + {file = "pandas-2.1.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8a706cfe7955c4ca59af8c7a0517370eafbd98593155b48f10f9811da440248b"}, + {file = "pandas-2.1.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b0513a132a15977b4a5b89aabd304647919bc2169eac4c8536afb29c07c23540"}, + {file = "pandas-2.1.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9f17f2b6fc076b2a0078862547595d66244db0f41bf79fc5f64a5c4d635bead"}, + {file = "pandas-2.1.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:45d63d2a9b1b37fa6c84a68ba2422dc9ed018bdaa668c7f47566a01188ceeec1"}, + {file = "pandas-2.1.4-cp312-cp312-win_amd64.whl", hash = "sha256:f69b0c9bb174a2342818d3e2778584e18c740d56857fc5cdb944ec8bbe4082cf"}, + {file = "pandas-2.1.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3f06bda01a143020bad20f7a85dd5f4a1600112145f126bc9e3e42077c24ef34"}, + {file = "pandas-2.1.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ab5796839eb1fd62a39eec2916d3e979ec3130509930fea17fe6f81e18108f6a"}, + {file = 
"pandas-2.1.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edbaf9e8d3a63a9276d707b4d25930a262341bca9874fcb22eff5e3da5394732"}, + {file = "pandas-2.1.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ebfd771110b50055712b3b711b51bee5d50135429364d0498e1213a7adc2be8"}, + {file = "pandas-2.1.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8ea107e0be2aba1da619cc6ba3f999b2bfc9669a83554b1904ce3dd9507f0860"}, + {file = "pandas-2.1.4-cp39-cp39-win_amd64.whl", hash = "sha256:d65148b14788b3758daf57bf42725caa536575da2b64df9964c563b015230984"}, + {file = "pandas-2.1.4.tar.gz", hash = "sha256:fcb68203c833cc735321512e13861358079a96c174a61f5116a1de89c58c0ef7"}, +] + +[package.dependencies] +numpy = [ + {version = ">=1.23.2,<2", markers = "python_version == \"3.11\""}, + {version = ">=1.26.0,<2", markers = "python_version >= \"3.12\""}, +] +python-dateutil = ">=2.8.2" +pytz = ">=2020.1" +tzdata = ">=2022.1" + +[package.extras] +all = ["PyQt5 (>=5.15.6)", "SQLAlchemy (>=1.4.36)", "beautifulsoup4 (>=4.11.1)", "bottleneck (>=1.3.4)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=0.8.1)", "fsspec (>=2022.05.0)", "gcsfs (>=2022.05.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.8.0)", "matplotlib (>=3.6.1)", "numba (>=0.55.2)", "numexpr (>=2.8.0)", "odfpy (>=1.4.1)", "openpyxl (>=3.0.10)", "pandas-gbq (>=0.17.5)", "psycopg2 (>=2.9.3)", "pyarrow (>=7.0.0)", "pymysql (>=1.0.2)", "pyreadstat (>=1.1.5)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "pyxlsb (>=1.0.9)", "qtpy (>=2.2.0)", "s3fs (>=2022.05.0)", "scipy (>=1.8.1)", "tables (>=3.7.0)", "tabulate (>=0.8.10)", "xarray (>=2022.03.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.3)", "zstandard (>=0.17.0)"] +aws = ["s3fs (>=2022.05.0)"] +clipboard = ["PyQt5 (>=5.15.6)", "qtpy (>=2.2.0)"] +compression = ["zstandard (>=0.17.0)"] +computation = ["scipy (>=1.8.1)", "xarray (>=2022.03.0)"] +consortium-standard = ["dataframe-api-compat (>=0.1.7)"] +excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.0.10)", "pyxlsb (>=1.0.9)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.3)"] +feather = ["pyarrow (>=7.0.0)"] +fss = ["fsspec (>=2022.05.0)"] +gcp = ["gcsfs (>=2022.05.0)", "pandas-gbq (>=0.17.5)"] +hdf5 = ["tables (>=3.7.0)"] +html = ["beautifulsoup4 (>=4.11.1)", "html5lib (>=1.1)", "lxml (>=4.8.0)"] +mysql = ["SQLAlchemy (>=1.4.36)", "pymysql (>=1.0.2)"] +output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.8.10)"] +parquet = ["pyarrow (>=7.0.0)"] +performance = ["bottleneck (>=1.3.4)", "numba (>=0.55.2)", "numexpr (>=2.8.0)"] +plot = ["matplotlib (>=3.6.1)"] +postgresql = ["SQLAlchemy (>=1.4.36)", "psycopg2 (>=2.9.3)"] +spss = ["pyreadstat (>=1.1.5)"] +sql-other = ["SQLAlchemy (>=1.4.36)"] +test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] +xml = ["lxml (>=4.8.0)"] + +[[package]] +name = "py-cord" +version = "2.4.1" +description = "A Python wrapper for the Discord API" +optional = false +python-versions = ">=3.8" +files = [ + {file = "py-cord-2.4.1.tar.gz", hash = "sha256:0266c9d9a9d2397622a0e5ead09826690e688ba3cf14c470167b81e6cd2d8a56"}, + {file = "py_cord-2.4.1-py3-none-any.whl", hash = "sha256:862a372c364cd263e2c8e696c64887f969c02cbdf0fdd6b09f0283e9dd67a290"}, +] + +[package.dependencies] +aiohttp = ">=3.6.0,<3.9.0" + +[package.extras] +docs = ["furo", "myst-parser (==0.18.1)", "sphinx (==5.3.0)", "sphinx-autodoc-typehints (==1.22)", "sphinx-copybutton (==0.5.1)", "sphinxcontrib-trio (==1.1.2)", "sphinxcontrib-websupport (==1.2.4)", "sphinxext-opengraph 
(==0.8.1)"] +speed = ["aiohttp[speedups]", "orjson (>=3.5.4)"] +voice = ["PyNaCl (>=1.3.0,<1.6)"] + +[[package]] +name = "pyarrow" +version = "15.0.0" +description = "Python library for Apache Arrow" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyarrow-15.0.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:0a524532fd6dd482edaa563b686d754c70417c2f72742a8c990b322d4c03a15d"}, + {file = "pyarrow-15.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:60a6bdb314affa9c2e0d5dddf3d9cbb9ef4a8dddaa68669975287d47ece67642"}, + {file = "pyarrow-15.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:66958fd1771a4d4b754cd385835e66a3ef6b12611e001d4e5edfcef5f30391e2"}, + {file = "pyarrow-15.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f500956a49aadd907eaa21d4fff75f73954605eaa41f61cb94fb008cf2e00c6"}, + {file = "pyarrow-15.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:6f87d9c4f09e049c2cade559643424da84c43a35068f2a1c4653dc5b1408a929"}, + {file = "pyarrow-15.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:85239b9f93278e130d86c0e6bb455dcb66fc3fd891398b9d45ace8799a871a1e"}, + {file = "pyarrow-15.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:5b8d43e31ca16aa6e12402fcb1e14352d0d809de70edd185c7650fe80e0769e3"}, + {file = "pyarrow-15.0.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:fa7cd198280dbd0c988df525e50e35b5d16873e2cdae2aaaa6363cdb64e3eec5"}, + {file = "pyarrow-15.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8780b1a29d3c8b21ba6b191305a2a607de2e30dab399776ff0aa09131e266340"}, + {file = "pyarrow-15.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe0ec198ccc680f6c92723fadcb97b74f07c45ff3fdec9dd765deb04955ccf19"}, + {file = "pyarrow-15.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:036a7209c235588c2f07477fe75c07e6caced9b7b61bb897c8d4e52c4b5f9555"}, + {file = "pyarrow-15.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:2bd8a0e5296797faf9a3294e9fa2dc67aa7f10ae2207920dbebb785c77e9dbe5"}, + {file = "pyarrow-15.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:e8ebed6053dbe76883a822d4e8da36860f479d55a762bd9e70d8494aed87113e"}, + {file = "pyarrow-15.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:17d53a9d1b2b5bd7d5e4cd84d018e2a45bc9baaa68f7e6e3ebed45649900ba99"}, + {file = "pyarrow-15.0.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:9950a9c9df24090d3d558b43b97753b8f5867fb8e521f29876aa021c52fda351"}, + {file = "pyarrow-15.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:003d680b5e422d0204e7287bb3fa775b332b3fce2996aa69e9adea23f5c8f970"}, + {file = "pyarrow-15.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f75fce89dad10c95f4bf590b765e3ae98bcc5ba9f6ce75adb828a334e26a3d40"}, + {file = "pyarrow-15.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ca9cb0039923bec49b4fe23803807e4ef39576a2bec59c32b11296464623dc2"}, + {file = "pyarrow-15.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:9ed5a78ed29d171d0acc26a305a4b7f83c122d54ff5270810ac23c75813585e4"}, + {file = "pyarrow-15.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:6eda9e117f0402dfcd3cd6ec9bfee89ac5071c48fc83a84f3075b60efa96747f"}, + {file = "pyarrow-15.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:9a3a6180c0e8f2727e6f1b1c87c72d3254cac909e609f35f22532e4115461177"}, + {file = "pyarrow-15.0.0-cp38-cp38-macosx_10_15_x86_64.whl", hash = 
"sha256:19a8918045993349b207de72d4576af0191beef03ea655d8bdb13762f0cd6eac"}, + {file = "pyarrow-15.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d0ec076b32bacb6666e8813a22e6e5a7ef1314c8069d4ff345efa6246bc38593"}, + {file = "pyarrow-15.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5db1769e5d0a77eb92344c7382d6543bea1164cca3704f84aa44e26c67e320fb"}, + {file = "pyarrow-15.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2617e3bf9df2a00020dd1c1c6dce5cc343d979efe10bc401c0632b0eef6ef5b"}, + {file = "pyarrow-15.0.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:d31c1d45060180131caf10f0f698e3a782db333a422038bf7fe01dace18b3a31"}, + {file = "pyarrow-15.0.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:c8c287d1d479de8269398b34282e206844abb3208224dbdd7166d580804674b7"}, + {file = "pyarrow-15.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:07eb7f07dc9ecbb8dace0f58f009d3a29ee58682fcdc91337dfeb51ea618a75b"}, + {file = "pyarrow-15.0.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:47af7036f64fce990bb8a5948c04722e4e3ea3e13b1007ef52dfe0aa8f23cf7f"}, + {file = "pyarrow-15.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:93768ccfff85cf044c418bfeeafce9a8bb0cee091bd8fd19011aff91e58de540"}, + {file = "pyarrow-15.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6ee87fd6892700960d90abb7b17a72a5abb3b64ee0fe8db6c782bcc2d0dc0b4"}, + {file = "pyarrow-15.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:001fca027738c5f6be0b7a3159cc7ba16a5c52486db18160909a0831b063c4e4"}, + {file = "pyarrow-15.0.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:d1c48648f64aec09accf44140dccb92f4f94394b8d79976c426a5b79b11d4fa7"}, + {file = "pyarrow-15.0.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:972a0141be402bb18e3201448c8ae62958c9c7923dfaa3b3d4530c835ac81aed"}, + {file = "pyarrow-15.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:f01fc5cf49081426429127aa2d427d9d98e1cb94a32cb961d583a70b7c4504e6"}, + {file = "pyarrow-15.0.0.tar.gz", hash = "sha256:876858f549d540898f927eba4ef77cd549ad8d24baa3207cf1b72e5788b50e83"}, +] + +[package.dependencies] +numpy = ">=1.16.6,<2" + +[[package]] +name = "pyarrow-hotfix" +version = "0.6" +description = "" +optional = false +python-versions = ">=3.5" +files = [ + {file = "pyarrow_hotfix-0.6-py3-none-any.whl", hash = "sha256:dcc9ae2d220dff0083be6a9aa8e0cdee5182ad358d4931fce825c545e5c89178"}, + {file = "pyarrow_hotfix-0.6.tar.gz", hash = "sha256:79d3e030f7ff890d408a100ac16d6f00b14d44a502d7897cd9fc3e3a534e9945"}, +] + +[[package]] +name = "pydantic" +version = "2.6.1" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic-2.6.1-py3-none-any.whl", hash = "sha256:0b6a909df3192245cb736509a92ff69e4fef76116feffec68e93a567347bae6f"}, + {file = "pydantic-2.6.1.tar.gz", hash = "sha256:4fd5c182a2488dc63e6d32737ff19937888001e2a6d86e94b3f233104a5d1fa9"}, +] + +[package.dependencies] +annotated-types = ">=0.4.0" +pydantic-core = "2.16.2" +typing-extensions = ">=4.6.1" + +[package.extras] +email = ["email-validator (>=2.0.0)"] + +[[package]] +name = "pydantic-core" +version = "2.16.2" +description = "" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic_core-2.16.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3fab4e75b8c525a4776e7630b9ee48aea50107fea6ca9f593c98da3f4d11bf7c"}, + {file = "pydantic_core-2.16.2-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:8bde5b48c65b8e807409e6f20baee5d2cd880e0fad00b1a811ebc43e39a00ab2"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2924b89b16420712e9bb8192396026a8fbd6d8726224f918353ac19c4c043d2a"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:16aa02e7a0f539098e215fc193c8926c897175d64c7926d00a36188917717a05"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:936a787f83db1f2115ee829dd615c4f684ee48ac4de5779ab4300994d8af325b"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:459d6be6134ce3b38e0ef76f8a672924460c455d45f1ad8fdade36796df1ddc8"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9ee4febb249c591d07b2d4dd36ebcad0ccd128962aaa1801508320896575ef"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:40a0bd0bed96dae5712dab2aba7d334a6c67cbcac2ddfca7dbcc4a8176445990"}, + {file = "pydantic_core-2.16.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:870dbfa94de9b8866b37b867a2cb37a60c401d9deb4a9ea392abf11a1f98037b"}, + {file = "pydantic_core-2.16.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:308974fdf98046db28440eb3377abba274808bf66262e042c412eb2adf852731"}, + {file = "pydantic_core-2.16.2-cp310-none-win32.whl", hash = "sha256:a477932664d9611d7a0816cc3c0eb1f8856f8a42435488280dfbf4395e141485"}, + {file = "pydantic_core-2.16.2-cp310-none-win_amd64.whl", hash = "sha256:8f9142a6ed83d90c94a3efd7af8873bf7cefed2d3d44387bf848888482e2d25f"}, + {file = "pydantic_core-2.16.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:406fac1d09edc613020ce9cf3f2ccf1a1b2f57ab00552b4c18e3d5276c67eb11"}, + {file = "pydantic_core-2.16.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ce232a6170dd6532096cadbf6185271e4e8c70fc9217ebe105923ac105da9978"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a90fec23b4b05a09ad988e7a4f4e081711a90eb2a55b9c984d8b74597599180f"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8aafeedb6597a163a9c9727d8a8bd363a93277701b7bfd2749fbefee2396469e"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9957433c3a1b67bdd4c63717eaf174ebb749510d5ea612cd4e83f2d9142f3fc8"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0d7a9165167269758145756db43a133608a531b1e5bb6a626b9ee24bc38a8f7"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dffaf740fe2e147fedcb6b561353a16243e654f7fe8e701b1b9db148242e1272"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f8ed79883b4328b7f0bd142733d99c8e6b22703e908ec63d930b06be3a0e7113"}, + {file = "pydantic_core-2.16.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:cf903310a34e14651c9de056fcc12ce090560864d5a2bb0174b971685684e1d8"}, + {file = "pydantic_core-2.16.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:46b0d5520dbcafea9a8645a8164658777686c5c524d381d983317d29687cce97"}, + {file = "pydantic_core-2.16.2-cp311-none-win32.whl", hash = "sha256:70651ff6e663428cea902dac297066d5c6e5423fda345a4ca62430575364d62b"}, + {file = 
"pydantic_core-2.16.2-cp311-none-win_amd64.whl", hash = "sha256:98dc6f4f2095fc7ad277782a7c2c88296badcad92316b5a6e530930b1d475ebc"}, + {file = "pydantic_core-2.16.2-cp311-none-win_arm64.whl", hash = "sha256:ef6113cd31411eaf9b39fc5a8848e71c72656fd418882488598758b2c8c6dfa0"}, + {file = "pydantic_core-2.16.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:88646cae28eb1dd5cd1e09605680c2b043b64d7481cdad7f5003ebef401a3039"}, + {file = "pydantic_core-2.16.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7b883af50eaa6bb3299780651e5be921e88050ccf00e3e583b1e92020333304b"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bf26c2e2ea59d32807081ad51968133af3025c4ba5753e6a794683d2c91bf6e"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:99af961d72ac731aae2a1b55ccbdae0733d816f8bfb97b41909e143de735f522"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:02906e7306cb8c5901a1feb61f9ab5e5c690dbbeaa04d84c1b9ae2a01ebe9379"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5362d099c244a2d2f9659fb3c9db7c735f0004765bbe06b99be69fbd87c3f15"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ac426704840877a285d03a445e162eb258924f014e2f074e209d9b4ff7bf380"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b94cbda27267423411c928208e89adddf2ea5dd5f74b9528513f0358bba019cb"}, + {file = "pydantic_core-2.16.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:6db58c22ac6c81aeac33912fb1af0e930bc9774166cdd56eade913d5f2fff35e"}, + {file = "pydantic_core-2.16.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:396fdf88b1b503c9c59c84a08b6833ec0c3b5ad1a83230252a9e17b7dfb4cffc"}, + {file = "pydantic_core-2.16.2-cp312-none-win32.whl", hash = "sha256:7c31669e0c8cc68400ef0c730c3a1e11317ba76b892deeefaf52dcb41d56ed5d"}, + {file = "pydantic_core-2.16.2-cp312-none-win_amd64.whl", hash = "sha256:a3b7352b48fbc8b446b75f3069124e87f599d25afb8baa96a550256c031bb890"}, + {file = "pydantic_core-2.16.2-cp312-none-win_arm64.whl", hash = "sha256:a9e523474998fb33f7c1a4d55f5504c908d57add624599e095c20fa575b8d943"}, + {file = "pydantic_core-2.16.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:ae34418b6b389d601b31153b84dce480351a352e0bb763684a1b993d6be30f17"}, + {file = "pydantic_core-2.16.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:732bd062c9e5d9582a30e8751461c1917dd1ccbdd6cafb032f02c86b20d2e7ec"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b52776a2e3230f4854907a1e0946eec04d41b1fc64069ee774876bbe0eab55"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ef551c053692b1e39e3f7950ce2296536728871110e7d75c4e7753fb30ca87f4"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ebb892ed8599b23fa8f1799e13a12c87a97a6c9d0f497525ce9858564c4575a4"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa6c8c582036275997a733427b88031a32ffa5dfc3124dc25a730658c47a572f"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4ba0884a91f1aecce75202473ab138724aa4fb26d7707f2e1fa6c3e68c84fbf"}, + {file = 
"pydantic_core-2.16.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7924e54f7ce5d253d6160090ddc6df25ed2feea25bfb3339b424a9dd591688bc"}, + {file = "pydantic_core-2.16.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69a7b96b59322a81c2203be537957313b07dd333105b73db0b69212c7d867b4b"}, + {file = "pydantic_core-2.16.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7e6231aa5bdacda78e96ad7b07d0c312f34ba35d717115f4b4bff6cb87224f0f"}, + {file = "pydantic_core-2.16.2-cp38-none-win32.whl", hash = "sha256:41dac3b9fce187a25c6253ec79a3f9e2a7e761eb08690e90415069ea4a68ff7a"}, + {file = "pydantic_core-2.16.2-cp38-none-win_amd64.whl", hash = "sha256:f685dbc1fdadb1dcd5b5e51e0a378d4685a891b2ddaf8e2bba89bd3a7144e44a"}, + {file = "pydantic_core-2.16.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:55749f745ebf154c0d63d46c8c58594d8894b161928aa41adbb0709c1fe78b77"}, + {file = "pydantic_core-2.16.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b30b0dd58a4509c3bd7eefddf6338565c4905406aee0c6e4a5293841411a1286"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18de31781cdc7e7b28678df7c2d7882f9692ad060bc6ee3c94eb15a5d733f8f7"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5864b0242f74b9dd0b78fd39db1768bc3f00d1ffc14e596fd3e3f2ce43436a33"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8f9186ca45aee030dc8234118b9c0784ad91a0bb27fc4e7d9d6608a5e3d386c"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cc6f6c9be0ab6da37bc77c2dda5f14b1d532d5dbef00311ee6e13357a418e646"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa057095f621dad24a1e906747179a69780ef45cc8f69e97463692adbcdae878"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6ad84731a26bcfb299f9eab56c7932d46f9cad51c52768cace09e92a19e4cf55"}, + {file = "pydantic_core-2.16.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3b052c753c4babf2d1edc034c97851f867c87d6f3ea63a12e2700f159f5c41c3"}, + {file = "pydantic_core-2.16.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e0f686549e32ccdb02ae6f25eee40cc33900910085de6aa3790effd391ae10c2"}, + {file = "pydantic_core-2.16.2-cp39-none-win32.whl", hash = "sha256:7afb844041e707ac9ad9acad2188a90bffce2c770e6dc2318be0c9916aef1469"}, + {file = "pydantic_core-2.16.2-cp39-none-win_amd64.whl", hash = "sha256:9da90d393a8227d717c19f5397688a38635afec89f2e2d7af0df037f3249c39a"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5f60f920691a620b03082692c378661947d09415743e437a7478c309eb0e4f82"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:47924039e785a04d4a4fa49455e51b4eb3422d6eaacfde9fc9abf8fdef164e8a"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6294e76b0380bb7a61eb8a39273c40b20beb35e8c87ee101062834ced19c545"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe56851c3f1d6f5384b3051c536cc81b3a93a73faf931f404fef95217cf1e10d"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9d776d30cde7e541b8180103c3f294ef7c1862fd45d81738d156d00551005784"}, + {file = 
"pydantic_core-2.16.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:72f7919af5de5ecfaf1eba47bf9a5d8aa089a3340277276e5636d16ee97614d7"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:4bfcbde6e06c56b30668a0c872d75a7ef3025dc3c1823a13cf29a0e9b33f67e8"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ff7c97eb7a29aba230389a2661edf2e9e06ce616c7e35aa764879b6894a44b25"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9b5f13857da99325dcabe1cc4e9e6a3d7b2e2c726248ba5dd4be3e8e4a0b6d0e"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a7e41e3ada4cca5f22b478c08e973c930e5e6c7ba3588fb8e35f2398cdcc1545"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60eb8ceaa40a41540b9acae6ae7c1f0a67d233c40dc4359c256ad2ad85bdf5e5"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7beec26729d496a12fd23cf8da9944ee338c8b8a17035a560b585c36fe81af20"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:22c5f022799f3cd6741e24f0443ead92ef42be93ffda0d29b2597208c94c3753"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:eca58e319f4fd6df004762419612122b2c7e7d95ffafc37e890252f869f3fb2a"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ed957db4c33bc99895f3a1672eca7e80e8cda8bd1e29a80536b4ec2153fa9804"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:459c0d338cc55d099798618f714b21b7ece17eb1a87879f2da20a3ff4c7628e2"}, + {file = "pydantic_core-2.16.2.tar.gz", hash = "sha256:0ba503850d8b8dcc18391f10de896ae51d37fe5fe43dbfb6a35c5c5cad271a06"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + +[[package]] +name = "python-dateutil" +version = "2.8.2" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "python-dotenv" +version = "1.0.1" +description = "Read key-value pairs from a .env file and set them as environment variables" +optional = false +python-versions = ">=3.8" +files = [ + {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, + {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, +] + +[package.extras] +cli = ["click (>=5.0)"] + +[[package]] +name = "pytz" +version = "2024.1" +description = "World timezone definitions, modern and historical" +optional = false +python-versions = "*" +files = [ + {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, + {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, +] + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false 
+python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + 
{file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "regex" +version = "2023.10.3" +description = "Alternative regular expression module, to replace re." +optional = false +python-versions = ">=3.7" +files = [ + {file = "regex-2023.10.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4c34d4f73ea738223a094d8e0ffd6d2c1a1b4c175da34d6b0de3d8d69bee6bcc"}, + {file = "regex-2023.10.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a8f4e49fc3ce020f65411432183e6775f24e02dff617281094ba6ab079ef0915"}, + {file = "regex-2023.10.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4cd1bccf99d3ef1ab6ba835308ad85be040e6a11b0977ef7ea8c8005f01a3c29"}, + {file = "regex-2023.10.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:81dce2ddc9f6e8f543d94b05d56e70d03a0774d32f6cca53e978dc01e4fc75b8"}, + {file = "regex-2023.10.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c6b4d23c04831e3ab61717a707a5d763b300213db49ca680edf8bf13ab5d91b"}, + {file = "regex-2023.10.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c15ad0aee158a15e17e0495e1e18741573d04eb6da06d8b84af726cfc1ed02ee"}, + {file = "regex-2023.10.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6239d4e2e0b52c8bd38c51b760cd870069f0bdf99700a62cd509d7a031749a55"}, + {file = "regex-2023.10.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4a8bf76e3182797c6b1afa5b822d1d5802ff30284abe4599e1247be4fd6b03be"}, + {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d9c727bbcf0065cbb20f39d2b4f932f8fa1631c3e01fcedc979bd4f51fe051c5"}, + {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:3ccf2716add72f80714b9a63899b67fa711b654be3fcdd34fa391d2d274ce767"}, + {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:107ac60d1bfdc3edb53be75e2a52aff7481b92817cfdddd9b4519ccf0e54a6ff"}, + {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:00ba3c9818e33f1fa974693fb55d24cdc8ebafcb2e4207680669d8f8d7cca79a"}, + {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f0a47efb1dbef13af9c9a54a94a0b814902e547b7f21acb29434504d18f36e3a"}, + {file = "regex-2023.10.3-cp310-cp310-win32.whl", hash = "sha256:36362386b813fa6c9146da6149a001b7bd063dabc4d49522a1f7aa65b725c7ec"}, + {file = "regex-2023.10.3-cp310-cp310-win_amd64.whl", hash = "sha256:c65a3b5330b54103e7d21cac3f6bf3900d46f6d50138d73343d9e5b2900b2353"}, + {file = "regex-2023.10.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:90a79bce019c442604662d17bf69df99090e24cdc6ad95b18b6725c2988a490e"}, + {file = "regex-2023.10.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c7964c2183c3e6cce3f497e3a9f49d182e969f2dc3aeeadfa18945ff7bdd7051"}, + {file = "regex-2023.10.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ef80829117a8061f974b2fda8ec799717242353bff55f8a29411794d635d964"}, + {file = "regex-2023.10.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:5addc9d0209a9afca5fc070f93b726bf7003bd63a427f65ef797a931782e7edc"}, + {file = "regex-2023.10.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c148bec483cc4b421562b4bcedb8e28a3b84fcc8f0aa4418e10898f3c2c0eb9b"}, + {file = "regex-2023.10.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d1f21af4c1539051049796a0f50aa342f9a27cde57318f2fc41ed50b0dbc4ac"}, + {file = "regex-2023.10.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0b9ac09853b2a3e0d0082104036579809679e7715671cfbf89d83c1cb2a30f58"}, + {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ebedc192abbc7fd13c5ee800e83a6df252bec691eb2c4bedc9f8b2e2903f5e2a"}, + {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d8a993c0a0ffd5f2d3bda23d0cd75e7086736f8f8268de8a82fbc4bd0ac6791e"}, + {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:be6b7b8d42d3090b6c80793524fa66c57ad7ee3fe9722b258aec6d0672543fd0"}, + {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4023e2efc35a30e66e938de5aef42b520c20e7eda7bb5fb12c35e5d09a4c43f6"}, + {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0d47840dc05e0ba04fe2e26f15126de7c755496d5a8aae4a08bda4dd8d646c54"}, + {file = "regex-2023.10.3-cp311-cp311-win32.whl", hash = "sha256:9145f092b5d1977ec8c0ab46e7b3381b2fd069957b9862a43bd383e5c01d18c2"}, + {file = "regex-2023.10.3-cp311-cp311-win_amd64.whl", hash = "sha256:b6104f9a46bd8743e4f738afef69b153c4b8b592d35ae46db07fc28ae3d5fb7c"}, + {file = "regex-2023.10.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:bff507ae210371d4b1fe316d03433ac099f184d570a1a611e541923f78f05037"}, + {file = "regex-2023.10.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:be5e22bbb67924dea15039c3282fa4cc6cdfbe0cbbd1c0515f9223186fc2ec5f"}, + {file = "regex-2023.10.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a992f702c9be9c72fa46f01ca6e18d131906a7180950958f766c2aa294d4b41"}, + {file = "regex-2023.10.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7434a61b158be563c1362d9071358f8ab91b8d928728cd2882af060481244c9e"}, + {file = "regex-2023.10.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c2169b2dcabf4e608416f7f9468737583ce5f0a6e8677c4efbf795ce81109d7c"}, + {file = "regex-2023.10.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9e908ef5889cda4de038892b9accc36d33d72fb3e12c747e2799a0e806ec841"}, + {file = "regex-2023.10.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12bd4bc2c632742c7ce20db48e0d99afdc05e03f0b4c1af90542e05b809a03d9"}, + {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bc72c231f5449d86d6c7d9cc7cd819b6eb30134bb770b8cfdc0765e48ef9c420"}, + {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bce8814b076f0ce5766dc87d5a056b0e9437b8e0cd351b9a6c4e1134a7dfbda9"}, + {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:ba7cd6dc4d585ea544c1412019921570ebd8a597fabf475acc4528210d7c4a6f"}, + {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b0c7d2f698e83f15228ba41c135501cfe7d5740181d5903e250e47f617eb4292"}, + {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:5a8f91c64f390ecee09ff793319f30a0f32492e99f5dc1c72bc361f23ccd0a9a"}, + {file = "regex-2023.10.3-cp312-cp312-win32.whl", hash = "sha256:ad08a69728ff3c79866d729b095872afe1e0557251da4abb2c5faff15a91d19a"}, + {file = "regex-2023.10.3-cp312-cp312-win_amd64.whl", hash = "sha256:39cdf8d141d6d44e8d5a12a8569d5a227f645c87df4f92179bd06e2e2705e76b"}, + {file = "regex-2023.10.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4a3ee019a9befe84fa3e917a2dd378807e423d013377a884c1970a3c2792d293"}, + {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76066d7ff61ba6bf3cb5efe2428fc82aac91802844c022d849a1f0f53820502d"}, + {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bfe50b61bab1b1ec260fa7cd91106fa9fece57e6beba05630afe27c71259c59b"}, + {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fd88f373cb71e6b59b7fa597e47e518282455c2734fd4306a05ca219a1991b0"}, + {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3ab05a182c7937fb374f7e946f04fb23a0c0699c0450e9fb02ef567412d2fa3"}, + {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dac37cf08fcf2094159922edc7a2784cfcc5c70f8354469f79ed085f0328ebdf"}, + {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e54ddd0bb8fb626aa1f9ba7b36629564544954fff9669b15da3610c22b9a0991"}, + {file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3367007ad1951fde612bf65b0dffc8fd681a4ab98ac86957d16491400d661302"}, + {file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:16f8740eb6dbacc7113e3097b0a36065a02e37b47c936b551805d40340fb9971"}, + {file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:f4f2ca6df64cbdd27f27b34f35adb640b5d2d77264228554e68deda54456eb11"}, + {file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:39807cbcbe406efca2a233884e169d056c35aa7e9f343d4e78665246a332f597"}, + {file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:7eece6fbd3eae4a92d7c748ae825cbc1ee41a89bb1c3db05b5578ed3cfcfd7cb"}, + {file = "regex-2023.10.3-cp37-cp37m-win32.whl", hash = "sha256:ce615c92d90df8373d9e13acddd154152645c0dc060871abf6bd43809673d20a"}, + {file = "regex-2023.10.3-cp37-cp37m-win_amd64.whl", hash = "sha256:0f649fa32fe734c4abdfd4edbb8381c74abf5f34bc0b3271ce687b23729299ed"}, + {file = "regex-2023.10.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9b98b7681a9437262947f41c7fac567c7e1f6eddd94b0483596d320092004533"}, + {file = "regex-2023.10.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:91dc1d531f80c862441d7b66c4505cd6ea9d312f01fb2f4654f40c6fdf5cc37a"}, + {file = "regex-2023.10.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82fcc1f1cc3ff1ab8a57ba619b149b907072e750815c5ba63e7aa2e1163384a4"}, + {file = "regex-2023.10.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7979b834ec7a33aafae34a90aad9f914c41fd6eaa8474e66953f3f6f7cbd4368"}, + {file = "regex-2023.10.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ef71561f82a89af6cfcbee47f0fabfdb6e63788a9258e913955d89fdd96902ab"}, + {file = "regex-2023.10.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:dd829712de97753367153ed84f2de752b86cd1f7a88b55a3a775eb52eafe8a94"}, + {file = "regex-2023.10.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:00e871d83a45eee2f8688d7e6849609c2ca2a04a6d48fba3dff4deef35d14f07"}, + {file = "regex-2023.10.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:706e7b739fdd17cb89e1fbf712d9dc21311fc2333f6d435eac2d4ee81985098c"}, + {file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:cc3f1c053b73f20c7ad88b0d1d23be7e7b3901229ce89f5000a8399746a6e039"}, + {file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6f85739e80d13644b981a88f529d79c5bdf646b460ba190bffcaf6d57b2a9863"}, + {file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:741ba2f511cc9626b7561a440f87d658aabb3d6b744a86a3c025f866b4d19e7f"}, + {file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:e77c90ab5997e85901da85131fd36acd0ed2221368199b65f0d11bca44549711"}, + {file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:979c24cbefaf2420c4e377ecd1f165ea08cc3d1fbb44bdc51bccbbf7c66a2cb4"}, + {file = "regex-2023.10.3-cp38-cp38-win32.whl", hash = "sha256:58837f9d221744d4c92d2cf7201c6acd19623b50c643b56992cbd2b745485d3d"}, + {file = "regex-2023.10.3-cp38-cp38-win_amd64.whl", hash = "sha256:c55853684fe08d4897c37dfc5faeff70607a5f1806c8be148f1695be4a63414b"}, + {file = "regex-2023.10.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2c54e23836650bdf2c18222c87f6f840d4943944146ca479858404fedeb9f9af"}, + {file = "regex-2023.10.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:69c0771ca5653c7d4b65203cbfc5e66db9375f1078689459fe196fe08b7b4930"}, + {file = "regex-2023.10.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ac965a998e1388e6ff2e9781f499ad1eaa41e962a40d11c7823c9952c77123e"}, + {file = "regex-2023.10.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1c0e8fae5b27caa34177bdfa5a960c46ff2f78ee2d45c6db15ae3f64ecadde14"}, + {file = "regex-2023.10.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6c56c3d47da04f921b73ff9415fbaa939f684d47293f071aa9cbb13c94afc17d"}, + {file = "regex-2023.10.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ef1e014eed78ab650bef9a6a9cbe50b052c0aebe553fb2881e0453717573f52"}, + {file = "regex-2023.10.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d29338556a59423d9ff7b6eb0cb89ead2b0875e08fe522f3e068b955c3e7b59b"}, + {file = "regex-2023.10.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9c6d0ced3c06d0f183b73d3c5920727268d2201aa0fe6d55c60d68c792ff3588"}, + {file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:994645a46c6a740ee8ce8df7911d4aee458d9b1bc5639bc968226763d07f00fa"}, + {file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:66e2fe786ef28da2b28e222c89502b2af984858091675044d93cb50e6f46d7af"}, + {file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:11175910f62b2b8c055f2b089e0fedd694fe2be3941b3e2633653bc51064c528"}, + {file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:06e9abc0e4c9ab4779c74ad99c3fc10d3967d03114449acc2c2762ad4472b8ca"}, + {file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:fb02e4257376ae25c6dd95a5aec377f9b18c09be6ebdefa7ad209b9137b73d48"}, + {file = "regex-2023.10.3-cp39-cp39-win32.whl", hash = "sha256:3b2c3502603fab52d7619b882c25a6850b766ebd1b18de3df23b2f939360e1bd"}, + {file = "regex-2023.10.3-cp39-cp39-win_amd64.whl", hash = "sha256:adbccd17dcaff65704c856bd29951c58a1bd4b2b0f8ad6b826dbd543fe740988"}, + {file = "regex-2023.10.3.tar.gz", hash = "sha256:3fef4f844d2290ee0ba57addcec17eec9e3df73f10a2748485dfd6a3a188cc0f"}, +] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "sniffio" +version = "1.3.0" +description = "Sniff out which async library your code is running under" +optional = false +python-versions = ">=3.7" +files = [ + {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, + {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, +] + +[[package]] +name = "sqlalchemy" +version = "2.0.27" +description = "Database Abstraction Library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "SQLAlchemy-2.0.27-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d04e579e911562f1055d26dab1868d3e0bb905db3bccf664ee8ad109f035618a"}, + {file = "SQLAlchemy-2.0.27-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fa67d821c1fd268a5a87922ef4940442513b4e6c377553506b9db3b83beebbd8"}, + {file = "SQLAlchemy-2.0.27-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c7a596d0be71b7baa037f4ac10d5e057d276f65a9a611c46970f012752ebf2d"}, + {file = "SQLAlchemy-2.0.27-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:954d9735ee9c3fa74874c830d089a815b7b48df6f6b6e357a74130e478dbd951"}, + {file = "SQLAlchemy-2.0.27-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5cd20f58c29bbf2680039ff9f569fa6d21453fbd2fa84dbdb4092f006424c2e6"}, + {file = "SQLAlchemy-2.0.27-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:03f448ffb731b48323bda68bcc93152f751436ad6037f18a42b7e16af9e91c07"}, + {file = "SQLAlchemy-2.0.27-cp310-cp310-win32.whl", hash = "sha256:d997c5938a08b5e172c30583ba6b8aad657ed9901fc24caf3a7152eeccb2f1b4"}, + {file = "SQLAlchemy-2.0.27-cp310-cp310-win_amd64.whl", hash = "sha256:eb15ef40b833f5b2f19eeae65d65e191f039e71790dd565c2af2a3783f72262f"}, + {file = "SQLAlchemy-2.0.27-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6c5bad7c60a392850d2f0fee8f355953abaec878c483dd7c3836e0089f046bf6"}, + {file = 
"SQLAlchemy-2.0.27-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a3012ab65ea42de1be81fff5fb28d6db893ef978950afc8130ba707179b4284a"}, + {file = "SQLAlchemy-2.0.27-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dbcd77c4d94b23e0753c5ed8deba8c69f331d4fd83f68bfc9db58bc8983f49cd"}, + {file = "SQLAlchemy-2.0.27-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d177b7e82f6dd5e1aebd24d9c3297c70ce09cd1d5d37b43e53f39514379c029c"}, + {file = "SQLAlchemy-2.0.27-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:680b9a36029b30cf063698755d277885d4a0eab70a2c7c6e71aab601323cba45"}, + {file = "SQLAlchemy-2.0.27-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1306102f6d9e625cebaca3d4c9c8f10588735ef877f0360b5cdb4fdfd3fd7131"}, + {file = "SQLAlchemy-2.0.27-cp311-cp311-win32.whl", hash = "sha256:5b78aa9f4f68212248aaf8943d84c0ff0f74efc65a661c2fc68b82d498311fd5"}, + {file = "SQLAlchemy-2.0.27-cp311-cp311-win_amd64.whl", hash = "sha256:15e19a84b84528f52a68143439d0c7a3a69befcd4f50b8ef9b7b69d2628ae7c4"}, + {file = "SQLAlchemy-2.0.27-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:0de1263aac858f288a80b2071990f02082c51d88335a1db0d589237a3435fe71"}, + {file = "SQLAlchemy-2.0.27-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce850db091bf7d2a1f2fdb615220b968aeff3849007b1204bf6e3e50a57b3d32"}, + {file = "SQLAlchemy-2.0.27-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8dfc936870507da96aebb43e664ae3a71a7b96278382bcfe84d277b88e379b18"}, + {file = "SQLAlchemy-2.0.27-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4fbe6a766301f2e8a4519f4500fe74ef0a8509a59e07a4085458f26228cd7cc"}, + {file = "SQLAlchemy-2.0.27-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4535c49d961fe9a77392e3a630a626af5baa967172d42732b7a43496c8b28876"}, + {file = "SQLAlchemy-2.0.27-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:0fb3bffc0ced37e5aa4ac2416f56d6d858f46d4da70c09bb731a246e70bff4d5"}, + {file = "SQLAlchemy-2.0.27-cp312-cp312-win32.whl", hash = "sha256:7f470327d06400a0aa7926b375b8e8c3c31d335e0884f509fe272b3c700a7254"}, + {file = "SQLAlchemy-2.0.27-cp312-cp312-win_amd64.whl", hash = "sha256:f9374e270e2553653d710ece397df67db9d19c60d2647bcd35bfc616f1622dcd"}, + {file = "SQLAlchemy-2.0.27-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e97cf143d74a7a5a0f143aa34039b4fecf11343eed66538610debc438685db4a"}, + {file = "SQLAlchemy-2.0.27-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7b5a3e2120982b8b6bd1d5d99e3025339f7fb8b8267551c679afb39e9c7c7f1"}, + {file = "SQLAlchemy-2.0.27-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e36aa62b765cf9f43a003233a8c2d7ffdeb55bc62eaa0a0380475b228663a38f"}, + {file = "SQLAlchemy-2.0.27-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5ada0438f5b74c3952d916c199367c29ee4d6858edff18eab783b3978d0db16d"}, + {file = "SQLAlchemy-2.0.27-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b1d9d1bfd96eef3c3faedb73f486c89e44e64e40e5bfec304ee163de01cf996f"}, + {file = "SQLAlchemy-2.0.27-cp37-cp37m-win32.whl", hash = "sha256:ca891af9f3289d24a490a5fde664ea04fe2f4984cd97e26de7442a4251bd4b7c"}, + {file = "SQLAlchemy-2.0.27-cp37-cp37m-win_amd64.whl", hash = "sha256:fd8aafda7cdff03b905d4426b714601c0978725a19efc39f5f207b86d188ba01"}, + {file = "SQLAlchemy-2.0.27-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ec1f5a328464daf7a1e4e385e4f5652dd9b1d12405075ccba1df842f7774b4fc"}, + {file = 
"SQLAlchemy-2.0.27-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ad862295ad3f644e3c2c0d8b10a988e1600d3123ecb48702d2c0f26771f1c396"}, + {file = "SQLAlchemy-2.0.27-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48217be1de7d29a5600b5c513f3f7664b21d32e596d69582be0a94e36b8309cb"}, + {file = "SQLAlchemy-2.0.27-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e56afce6431450442f3ab5973156289bd5ec33dd618941283847c9fd5ff06bf"}, + {file = "SQLAlchemy-2.0.27-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:611068511b5531304137bcd7fe8117c985d1b828eb86043bd944cebb7fae3910"}, + {file = "SQLAlchemy-2.0.27-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b86abba762ecfeea359112b2bb4490802b340850bbee1948f785141a5e020de8"}, + {file = "SQLAlchemy-2.0.27-cp38-cp38-win32.whl", hash = "sha256:30d81cc1192dc693d49d5671cd40cdec596b885b0ce3b72f323888ab1c3863d5"}, + {file = "SQLAlchemy-2.0.27-cp38-cp38-win_amd64.whl", hash = "sha256:120af1e49d614d2525ac247f6123841589b029c318b9afbfc9e2b70e22e1827d"}, + {file = "SQLAlchemy-2.0.27-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d07ee7793f2aeb9b80ec8ceb96bc8cc08a2aec8a1b152da1955d64e4825fcbac"}, + {file = "SQLAlchemy-2.0.27-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cb0845e934647232b6ff5150df37ceffd0b67b754b9fdbb095233deebcddbd4a"}, + {file = "SQLAlchemy-2.0.27-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fc19ae2e07a067663dd24fca55f8ed06a288384f0e6e3910420bf4b1270cc51"}, + {file = "SQLAlchemy-2.0.27-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b90053be91973a6fb6020a6e44382c97739736a5a9d74e08cc29b196639eb979"}, + {file = "SQLAlchemy-2.0.27-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2f5c9dfb0b9ab5e3a8a00249534bdd838d943ec4cfb9abe176a6c33408430230"}, + {file = "SQLAlchemy-2.0.27-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:33e8bde8fff203de50399b9039c4e14e42d4d227759155c21f8da4a47fc8053c"}, + {file = "SQLAlchemy-2.0.27-cp39-cp39-win32.whl", hash = "sha256:d873c21b356bfaf1589b89090a4011e6532582b3a8ea568a00e0c3aab09399dd"}, + {file = "SQLAlchemy-2.0.27-cp39-cp39-win_amd64.whl", hash = "sha256:ff2f1b7c963961d41403b650842dc2039175b906ab2093635d8319bef0b7d620"}, + {file = "SQLAlchemy-2.0.27-py3-none-any.whl", hash = "sha256:1ab4e0448018d01b142c916cc7119ca573803a4745cfe341b8f95657812700ac"}, + {file = "SQLAlchemy-2.0.27.tar.gz", hash = "sha256:86a6ed69a71fe6b88bf9331594fa390a2adda4a49b5c06f98e47bf0d392534f8"}, +] + +[package.dependencies] +greenlet = {version = "!=0.4.17", markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\""} +typing-extensions = ">=4.6.0" + +[package.extras] +aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] +aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] +aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] +asyncio = ["greenlet (!=0.4.17)"] +asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"] +mssql = ["pyodbc"] +mssql-pymssql = ["pymssql"] +mssql-pyodbc = ["pyodbc"] +mypy = ["mypy (>=0.910)"] +mysql = ["mysqlclient (>=1.4.0)"] +mysql-connector = ["mysql-connector-python"] +oracle = ["cx_oracle (>=8)"] +oracle-oracledb = ["oracledb (>=1.0.1)"] +postgresql = ["psycopg2 (>=2.7)"] +postgresql-asyncpg = ["asyncpg", 
"greenlet (!=0.4.17)"] +postgresql-pg8000 = ["pg8000 (>=1.29.1)"] +postgresql-psycopg = ["psycopg (>=3.0.7)"] +postgresql-psycopg2binary = ["psycopg2-binary"] +postgresql-psycopg2cffi = ["psycopg2cffi"] +postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] +pymysql = ["pymysql"] +sqlcipher = ["sqlcipher3_binary"] + +[[package]] +name = "tenacity" +version = "8.2.3" +description = "Retry code until it succeeds" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tenacity-8.2.3-py3-none-any.whl", hash = "sha256:ce510e327a630c9e1beaf17d42e6ffacc88185044ad85cf74c0a8887c6a0f88c"}, + {file = "tenacity-8.2.3.tar.gz", hash = "sha256:5398ef0d78e63f40007c1fb4c0bff96e1911394d2fa8d194f77619c05ff6cc8a"}, +] + +[package.extras] +doc = ["reno", "sphinx", "tornado (>=4.5)"] + +[[package]] +name = "tiktoken" +version = "0.6.0" +description = "tiktoken is a fast BPE tokeniser for use with OpenAI's models" +optional = false +python-versions = ">=3.8" +files = [ + {file = "tiktoken-0.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:277de84ccd8fa12730a6b4067456e5cf72fef6300bea61d506c09e45658d41ac"}, + {file = "tiktoken-0.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9c44433f658064463650d61387623735641dcc4b6c999ca30bc0f8ba3fccaf5c"}, + {file = "tiktoken-0.6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afb9a2a866ae6eef1995ab656744287a5ac95acc7e0491c33fad54d053288ad3"}, + {file = "tiktoken-0.6.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c62c05b3109fefca26fedb2820452a050074ad8e5ad9803f4652977778177d9f"}, + {file = "tiktoken-0.6.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0ef917fad0bccda07bfbad835525bbed5f3ab97a8a3e66526e48cdc3e7beacf7"}, + {file = "tiktoken-0.6.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e095131ab6092d0769a2fda85aa260c7c383072daec599ba9d8b149d2a3f4d8b"}, + {file = "tiktoken-0.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:05b344c61779f815038292a19a0c6eb7098b63c8f865ff205abb9ea1b656030e"}, + {file = "tiktoken-0.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cefb9870fb55dca9e450e54dbf61f904aab9180ff6fe568b61f4db9564e78871"}, + {file = "tiktoken-0.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:702950d33d8cabc039845674107d2e6dcabbbb0990ef350f640661368df481bb"}, + {file = "tiktoken-0.6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8d49d076058f23254f2aff9af603863c5c5f9ab095bc896bceed04f8f0b013a"}, + {file = "tiktoken-0.6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:430bc4e650a2d23a789dc2cdca3b9e5e7eb3cd3935168d97d43518cbb1f9a911"}, + {file = "tiktoken-0.6.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:293cb8669757301a3019a12d6770bd55bec38a4d3ee9978ddbe599d68976aca7"}, + {file = "tiktoken-0.6.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7bd1a288b7903aadc054b0e16ea78e3171f70b670e7372432298c686ebf9dd47"}, + {file = "tiktoken-0.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:ac76e000183e3b749634968a45c7169b351e99936ef46f0d2353cd0d46c3118d"}, + {file = "tiktoken-0.6.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:17cc8a4a3245ab7d935c83a2db6bb71619099d7284b884f4b2aea4c74f2f83e3"}, + {file = "tiktoken-0.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:284aebcccffe1bba0d6571651317df6a5b376ff6cfed5aeb800c55df44c78177"}, + {file = "tiktoken-0.6.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:0c1a3a5d33846f8cd9dd3b7897c1d45722f48625a587f8e6f3d3e85080559be8"}, + {file = "tiktoken-0.6.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6318b2bb2337f38ee954fd5efa82632c6e5ced1d52a671370fa4b2eff1355e91"}, + {file = "tiktoken-0.6.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f5f0f2ed67ba16373f9a6013b68da298096b27cd4e1cf276d2d3868b5c7efd1"}, + {file = "tiktoken-0.6.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:75af4c0b16609c2ad02581f3cdcd1fb698c7565091370bf6c0cf8624ffaba6dc"}, + {file = "tiktoken-0.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:45577faf9a9d383b8fd683e313cf6df88b6076c034f0a16da243bb1c139340c3"}, + {file = "tiktoken-0.6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7c1492ab90c21ca4d11cef3a236ee31a3e279bb21b3fc5b0e2210588c4209e68"}, + {file = "tiktoken-0.6.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e2b380c5b7751272015400b26144a2bab4066ebb8daae9c3cd2a92c3b508fe5a"}, + {file = "tiktoken-0.6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9f497598b9f58c99cbc0eb764b4a92272c14d5203fc713dd650b896a03a50ad"}, + {file = "tiktoken-0.6.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e65e8bd6f3f279d80f1e1fbd5f588f036b9a5fa27690b7f0cc07021f1dfa0839"}, + {file = "tiktoken-0.6.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5f1495450a54e564d236769d25bfefbf77727e232d7a8a378f97acddee08c1ae"}, + {file = "tiktoken-0.6.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6c4e4857d99f6fb4670e928250835b21b68c59250520a1941618b5b4194e20c3"}, + {file = "tiktoken-0.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:168d718f07a39b013032741867e789971346df8e89983fe3c0ef3fbd5a0b1cb9"}, + {file = "tiktoken-0.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:47fdcfe11bd55376785a6aea8ad1db967db7f66ea81aed5c43fad497521819a4"}, + {file = "tiktoken-0.6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fb7d2ccbf1a7784810aff6b80b4012fb42c6fc37eaa68cb3b553801a5cc2d1fc"}, + {file = "tiktoken-0.6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ccb7a111ee76af5d876a729a347f8747d5ad548e1487eeea90eaf58894b3138"}, + {file = "tiktoken-0.6.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2048e1086b48e3c8c6e2ceeac866561374cd57a84622fa49a6b245ffecb7744"}, + {file = "tiktoken-0.6.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:07f229a5eb250b6403a61200199cecf0aac4aa23c3ecc1c11c1ca002cbb8f159"}, + {file = "tiktoken-0.6.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:432aa3be8436177b0db5a2b3e7cc28fd6c693f783b2f8722539ba16a867d0c6a"}, + {file = "tiktoken-0.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:8bfe8a19c8b5c40d121ee7938cd9c6a278e5b97dc035fd61714b4f0399d2f7a1"}, + {file = "tiktoken-0.6.0.tar.gz", hash = "sha256:ace62a4ede83c75b0374a2ddfa4b76903cf483e9cb06247f566be3bf14e6beed"}, +] + +[package.dependencies] +regex = ">=2022.1.18" +requests = ">=2.26.0" + +[package.extras] +blobfile = ["blobfile (>=2)"] + +[[package]] +name = "tqdm" +version = "4.66.2" +description = "Fast, Extensible Progress Meter" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tqdm-4.66.2-py3-none-any.whl", hash = "sha256:1ee4f8a893eb9bef51c6e35730cebf234d5d0b6bd112b0271e10ed7c24a02bd9"}, + {file = "tqdm-4.66.2.tar.gz", hash = "sha256:6cd52cdf0fef0e0f543299cfc96fec90d7b8a7e88745f411ec33eb44d5ed3531"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +dev = 
["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"] +notebook = ["ipywidgets (>=6)"] +slack = ["slack-sdk"] +telegram = ["requests"] + +[[package]] +name = "typing-extensions" +version = "4.9.0" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, + {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, +] + +[[package]] +name = "tzdata" +version = "2024.1" +description = "Provider of IANA time zone data" +optional = false +python-versions = ">=2" +files = [ + {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, + {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, +] + +[[package]] +name = "ujson" +version = "5.8.0" +description = "Ultra fast JSON encoder and decoder for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "ujson-5.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f4511560d75b15ecb367eef561554959b9d49b6ec3b8d5634212f9fed74a6df1"}, + {file = "ujson-5.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9399eaa5d1931a0ead49dce3ffacbea63f3177978588b956036bfe53cdf6af75"}, + {file = "ujson-5.8.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4e7bb7eba0e1963f8b768f9c458ecb193e5bf6977090182e2b4f4408f35ac76"}, + {file = "ujson-5.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40931d7c08c4ce99adc4b409ddb1bbb01635a950e81239c2382cfe24251b127a"}, + {file = "ujson-5.8.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d53039d39de65360e924b511c7ca1a67b0975c34c015dd468fca492b11caa8f7"}, + {file = "ujson-5.8.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:bdf04c6af3852161be9613e458a1fb67327910391de8ffedb8332e60800147a2"}, + {file = "ujson-5.8.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a70f776bda2e5072a086c02792c7863ba5833d565189e09fabbd04c8b4c3abba"}, + {file = "ujson-5.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f26629ac531d712f93192c233a74888bc8b8212558bd7d04c349125f10199fcf"}, + {file = "ujson-5.8.0-cp310-cp310-win32.whl", hash = "sha256:7ecc33b107ae88405aebdb8d82c13d6944be2331ebb04399134c03171509371a"}, + {file = "ujson-5.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:3b27a8da7a080add559a3b73ec9ebd52e82cc4419f7c6fb7266e62439a055ed0"}, + {file = "ujson-5.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:193349a998cd821483a25f5df30b44e8f495423840ee11b3b28df092ddfd0f7f"}, + {file = "ujson-5.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4ddeabbc78b2aed531f167d1e70387b151900bc856d61e9325fcdfefb2a51ad8"}, + {file = "ujson-5.8.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ce24909a9c25062e60653073dd6d5e6ec9d6ad7ed6e0069450d5b673c854405"}, + {file = "ujson-5.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27a2a3c7620ebe43641e926a1062bc04e92dbe90d3501687957d71b4bdddaec4"}, + {file = "ujson-5.8.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b852bdf920fe9f84e2a2c210cc45f1b64f763b4f7d01468b33f7791698e455e"}, + {file = 
"ujson-5.8.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:20768961a6a706170497129960762ded9c89fb1c10db2989c56956b162e2a8a3"}, + {file = "ujson-5.8.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e0147d41e9fb5cd174207c4a2895c5e24813204499fd0839951d4c8784a23bf5"}, + {file = "ujson-5.8.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e3673053b036fd161ae7a5a33358ccae6793ee89fd499000204676baafd7b3aa"}, + {file = "ujson-5.8.0-cp311-cp311-win32.whl", hash = "sha256:a89cf3cd8bf33a37600431b7024a7ccf499db25f9f0b332947fbc79043aad879"}, + {file = "ujson-5.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:3659deec9ab9eb19e8646932bfe6fe22730757c4addbe9d7d5544e879dc1b721"}, + {file = "ujson-5.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:102bf31c56f59538cccdfec45649780ae00657e86247c07edac434cb14d5388c"}, + {file = "ujson-5.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:299a312c3e85edee1178cb6453645217ba23b4e3186412677fa48e9a7f986de6"}, + {file = "ujson-5.8.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f2e385a7679b9088d7bc43a64811a7713cc7c33d032d020f757c54e7d41931ae"}, + {file = "ujson-5.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad24ec130855d4430a682c7a60ca0bc158f8253ec81feed4073801f6b6cb681b"}, + {file = "ujson-5.8.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:16fde596d5e45bdf0d7de615346a102510ac8c405098e5595625015b0d4b5296"}, + {file = "ujson-5.8.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:6d230d870d1ce03df915e694dcfa3f4e8714369cce2346686dbe0bc8e3f135e7"}, + {file = "ujson-5.8.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:9571de0c53db5cbc265945e08f093f093af2c5a11e14772c72d8e37fceeedd08"}, + {file = "ujson-5.8.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:7cba16b26efe774c096a5e822e4f27097b7c81ed6fb5264a2b3f5fd8784bab30"}, + {file = "ujson-5.8.0-cp312-cp312-win32.whl", hash = "sha256:48c7d373ff22366eecfa36a52b9b55b0ee5bd44c2b50e16084aa88b9de038916"}, + {file = "ujson-5.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:5ac97b1e182d81cf395ded620528c59f4177eee024b4b39a50cdd7b720fdeec6"}, + {file = "ujson-5.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2a64cc32bb4a436e5813b83f5aab0889927e5ea1788bf99b930fad853c5625cb"}, + {file = "ujson-5.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e54578fa8838ddc722539a752adfce9372474114f8c127bb316db5392d942f8b"}, + {file = "ujson-5.8.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9721cd112b5e4687cb4ade12a7b8af8b048d4991227ae8066d9c4b3a6642a582"}, + {file = "ujson-5.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d9707e5aacf63fb919f6237d6490c4e0244c7f8d3dc2a0f84d7dec5db7cb54c"}, + {file = "ujson-5.8.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0be81bae295f65a6896b0c9030b55a106fb2dec69ef877253a87bc7c9c5308f7"}, + {file = "ujson-5.8.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ae7f4725c344bf437e9b881019c558416fe84ad9c6b67426416c131ad577df67"}, + {file = "ujson-5.8.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:9ab282d67ef3097105552bf151438b551cc4bedb3f24d80fada830f2e132aeb9"}, + {file = "ujson-5.8.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:94c7bd9880fa33fcf7f6d7f4cc032e2371adee3c5dba2922b918987141d1bf07"}, + {file = "ujson-5.8.0-cp38-cp38-win32.whl", hash = 
"sha256:bf5737dbcfe0fa0ac8fa599eceafae86b376492c8f1e4b84e3adf765f03fb564"}, + {file = "ujson-5.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:11da6bed916f9bfacf13f4fc6a9594abd62b2bb115acfb17a77b0f03bee4cfd5"}, + {file = "ujson-5.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:69b3104a2603bab510497ceabc186ba40fef38ec731c0ccaa662e01ff94a985c"}, + {file = "ujson-5.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9249fdefeb021e00b46025e77feed89cd91ffe9b3a49415239103fc1d5d9c29a"}, + {file = "ujson-5.8.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2873d196725a8193f56dde527b322c4bc79ed97cd60f1d087826ac3290cf9207"}, + {file = "ujson-5.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a4dafa9010c366589f55afb0fd67084acd8added1a51251008f9ff2c3e44042"}, + {file = "ujson-5.8.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7a42baa647a50fa8bed53d4e242be61023bd37b93577f27f90ffe521ac9dc7a3"}, + {file = "ujson-5.8.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f3554eaadffe416c6f543af442066afa6549edbc34fe6a7719818c3e72ebfe95"}, + {file = "ujson-5.8.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:fb87decf38cc82bcdea1d7511e73629e651bdec3a43ab40985167ab8449b769c"}, + {file = "ujson-5.8.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:407d60eb942c318482bbfb1e66be093308bb11617d41c613e33b4ce5be789adc"}, + {file = "ujson-5.8.0-cp39-cp39-win32.whl", hash = "sha256:0fe1b7edaf560ca6ab023f81cbeaf9946a240876a993b8c5a21a1c539171d903"}, + {file = "ujson-5.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:3f9b63530a5392eb687baff3989d0fb5f45194ae5b1ca8276282fb647f8dcdb3"}, + {file = "ujson-5.8.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:efeddf950fb15a832376c0c01d8d7713479fbeceaed1eaecb2665aa62c305aec"}, + {file = "ujson-5.8.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7d8283ac5d03e65f488530c43d6610134309085b71db4f675e9cf5dff96a8282"}, + {file = "ujson-5.8.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb0142f6f10f57598655340a3b2c70ed4646cbe674191da195eb0985a9813b83"}, + {file = "ujson-5.8.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07d459aca895eb17eb463b00441986b021b9312c6c8cc1d06880925c7f51009c"}, + {file = "ujson-5.8.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:d524a8c15cfc863705991d70bbec998456a42c405c291d0f84a74ad7f35c5109"}, + {file = "ujson-5.8.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d6f84a7a175c75beecde53a624881ff618e9433045a69fcfb5e154b73cdaa377"}, + {file = "ujson-5.8.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b748797131ac7b29826d1524db1cc366d2722ab7afacc2ce1287cdafccddbf1f"}, + {file = "ujson-5.8.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e72ba76313d48a1a3a42e7dc9d1db32ea93fac782ad8dde6f8b13e35c229130"}, + {file = "ujson-5.8.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f504117a39cb98abba4153bf0b46b4954cc5d62f6351a14660201500ba31fe7f"}, + {file = "ujson-5.8.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a8c91b6f4bf23f274af9002b128d133b735141e867109487d17e344d38b87d94"}, + {file = "ujson-5.8.0.tar.gz", hash = "sha256:78e318def4ade898a461b3d92a79f9441e7e0e4d2ad5419abed4336d702c7425"}, +] + +[[package]] +name = "urllib3" +version = "2.2.0" 
+description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, + {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "xxhash" +version = "3.4.1" +description = "Python binding for xxHash" +optional = false +python-versions = ">=3.7" +files = [ + {file = "xxhash-3.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:91dbfa55346ad3e18e738742236554531a621042e419b70ad8f3c1d9c7a16e7f"}, + {file = "xxhash-3.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:665a65c2a48a72068fcc4d21721510df5f51f1142541c890491afc80451636d2"}, + {file = "xxhash-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb11628470a6004dc71a09fe90c2f459ff03d611376c1debeec2d648f44cb693"}, + {file = "xxhash-3.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5bef2a7dc7b4f4beb45a1edbba9b9194c60a43a89598a87f1a0226d183764189"}, + {file = "xxhash-3.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c0f7b2d547d72c7eda7aa817acf8791f0146b12b9eba1d4432c531fb0352228"}, + {file = "xxhash-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00f2fdef6b41c9db3d2fc0e7f94cb3db86693e5c45d6de09625caad9a469635b"}, + {file = "xxhash-3.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:23cfd9ca09acaf07a43e5a695143d9a21bf00f5b49b15c07d5388cadf1f9ce11"}, + {file = "xxhash-3.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6a9ff50a3cf88355ca4731682c168049af1ca222d1d2925ef7119c1a78e95b3b"}, + {file = "xxhash-3.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:f1d7c69a1e9ca5faa75546fdd267f214f63f52f12692f9b3a2f6467c9e67d5e7"}, + {file = "xxhash-3.4.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:672b273040d5d5a6864a36287f3514efcd1d4b1b6a7480f294c4b1d1ee1b8de0"}, + {file = "xxhash-3.4.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:4178f78d70e88f1c4a89ff1ffe9f43147185930bb962ee3979dba15f2b1cc799"}, + {file = "xxhash-3.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9804b9eb254d4b8cc83ab5a2002128f7d631dd427aa873c8727dba7f1f0d1c2b"}, + {file = "xxhash-3.4.1-cp310-cp310-win32.whl", hash = "sha256:c09c49473212d9c87261d22c74370457cfff5db2ddfc7fd1e35c80c31a8c14ce"}, + {file = "xxhash-3.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:ebbb1616435b4a194ce3466d7247df23499475c7ed4eb2681a1fa42ff766aff6"}, + {file = "xxhash-3.4.1-cp310-cp310-win_arm64.whl", hash = "sha256:25dc66be3db54f8a2d136f695b00cfe88018e59ccff0f3b8f545869f376a8a46"}, + {file = "xxhash-3.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:58c49083801885273e262c0f5bbeac23e520564b8357fbb18fb94ff09d3d3ea5"}, + {file = "xxhash-3.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b526015a973bfbe81e804a586b703f163861da36d186627e27524f5427b0d520"}, + {file = "xxhash-3.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36ad4457644c91a966f6fe137d7467636bdc51a6ce10a1d04f365c70d6a16d7e"}, + {file = "xxhash-3.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash 
= "sha256:248d3e83d119770f96003271fe41e049dd4ae52da2feb8f832b7a20e791d2920"}, + {file = "xxhash-3.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2070b6d5bbef5ee031666cf21d4953c16e92c2f8a24a94b5c240f8995ba3b1d0"}, + {file = "xxhash-3.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2746035f518f0410915e247877f7df43ef3372bf36cfa52cc4bc33e85242641"}, + {file = "xxhash-3.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a8ba6181514681c2591840d5632fcf7356ab287d4aff1c8dea20f3c78097088"}, + {file = "xxhash-3.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0aac5010869240e95f740de43cd6a05eae180c59edd182ad93bf12ee289484fa"}, + {file = "xxhash-3.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4cb11d8debab1626181633d184b2372aaa09825bde709bf927704ed72765bed1"}, + {file = "xxhash-3.4.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:b29728cff2c12f3d9f1d940528ee83918d803c0567866e062683f300d1d2eff3"}, + {file = "xxhash-3.4.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:a15cbf3a9c40672523bdb6ea97ff74b443406ba0ab9bca10ceccd9546414bd84"}, + {file = "xxhash-3.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6e66df260fed01ed8ea790c2913271641c58481e807790d9fca8bfd5a3c13844"}, + {file = "xxhash-3.4.1-cp311-cp311-win32.whl", hash = "sha256:e867f68a8f381ea12858e6d67378c05359d3a53a888913b5f7d35fbf68939d5f"}, + {file = "xxhash-3.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:200a5a3ad9c7c0c02ed1484a1d838b63edcf92ff538770ea07456a3732c577f4"}, + {file = "xxhash-3.4.1-cp311-cp311-win_arm64.whl", hash = "sha256:1d03f1c0d16d24ea032e99f61c552cb2b77d502e545187338bea461fde253583"}, + {file = "xxhash-3.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c4bbba9b182697a52bc0c9f8ec0ba1acb914b4937cd4a877ad78a3b3eeabefb3"}, + {file = "xxhash-3.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9fd28a9da300e64e434cfc96567a8387d9a96e824a9be1452a1e7248b7763b78"}, + {file = "xxhash-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6066d88c9329ab230e18998daec53d819daeee99d003955c8db6fc4971b45ca3"}, + {file = "xxhash-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:93805bc3233ad89abf51772f2ed3355097a5dc74e6080de19706fc447da99cd3"}, + {file = "xxhash-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64da57d5ed586ebb2ecdde1e997fa37c27fe32fe61a656b77fabbc58e6fbff6e"}, + {file = "xxhash-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a97322e9a7440bf3c9805cbaac090358b43f650516486746f7fa482672593df"}, + {file = "xxhash-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bbe750d512982ee7d831838a5dee9e9848f3fb440e4734cca3f298228cc957a6"}, + {file = "xxhash-3.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:fd79d4087727daf4d5b8afe594b37d611ab95dc8e29fe1a7517320794837eb7d"}, + {file = "xxhash-3.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:743612da4071ff9aa4d055f3f111ae5247342931dedb955268954ef7201a71ff"}, + {file = "xxhash-3.4.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:b41edaf05734092f24f48c0958b3c6cbaaa5b7e024880692078c6b1f8247e2fc"}, + {file = "xxhash-3.4.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:a90356ead70d715fe64c30cd0969072de1860e56b78adf7c69d954b43e29d9fa"}, + {file = "xxhash-3.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:ac56eebb364e44c85e1d9e9cc5f6031d78a34f0092fea7fc80478139369a8b4a"}, + {file = "xxhash-3.4.1-cp312-cp312-win32.whl", hash = "sha256:911035345932a153c427107397c1518f8ce456f93c618dd1c5b54ebb22e73747"}, + {file = "xxhash-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:f31ce76489f8601cc7b8713201ce94b4bd7b7ce90ba3353dccce7e9e1fee71fa"}, + {file = "xxhash-3.4.1-cp312-cp312-win_arm64.whl", hash = "sha256:b5beb1c6a72fdc7584102f42c4d9df232ee018ddf806e8c90906547dfb43b2da"}, + {file = "xxhash-3.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6d42b24d1496deb05dee5a24ed510b16de1d6c866c626c2beb11aebf3be278b9"}, + {file = "xxhash-3.4.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b685fab18876b14a8f94813fa2ca80cfb5ab6a85d31d5539b7cd749ce9e3624"}, + {file = "xxhash-3.4.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:419ffe34c17ae2df019a4685e8d3934d46b2e0bbe46221ab40b7e04ed9f11137"}, + {file = "xxhash-3.4.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0e041ce5714f95251a88670c114b748bca3bf80cc72400e9f23e6d0d59cf2681"}, + {file = "xxhash-3.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc860d887c5cb2f524899fb8338e1bb3d5789f75fac179101920d9afddef284b"}, + {file = "xxhash-3.4.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:312eba88ffe0a05e332e3a6f9788b73883752be63f8588a6dc1261a3eaaaf2b2"}, + {file = "xxhash-3.4.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:e01226b6b6a1ffe4e6bd6d08cfcb3ca708b16f02eb06dd44f3c6e53285f03e4f"}, + {file = "xxhash-3.4.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9f3025a0d5d8cf406a9313cd0d5789c77433ba2004b1c75439b67678e5136537"}, + {file = "xxhash-3.4.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:6d3472fd4afef2a567d5f14411d94060099901cd8ce9788b22b8c6f13c606a93"}, + {file = "xxhash-3.4.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:43984c0a92f06cac434ad181f329a1445017c33807b7ae4f033878d860a4b0f2"}, + {file = "xxhash-3.4.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a55e0506fdb09640a82ec4f44171273eeabf6f371a4ec605633adb2837b5d9d5"}, + {file = "xxhash-3.4.1-cp37-cp37m-win32.whl", hash = "sha256:faec30437919555b039a8bdbaba49c013043e8f76c999670aef146d33e05b3a0"}, + {file = "xxhash-3.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:c9e1b646af61f1fc7083bb7b40536be944f1ac67ef5e360bca2d73430186971a"}, + {file = "xxhash-3.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:961d948b7b1c1b6c08484bbce3d489cdf153e4122c3dfb07c2039621243d8795"}, + {file = "xxhash-3.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:719a378930504ab159f7b8e20fa2aa1896cde050011af838af7e7e3518dd82de"}, + {file = "xxhash-3.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74fb5cb9406ccd7c4dd917f16630d2e5e8cbbb02fc2fca4e559b2a47a64f4940"}, + {file = "xxhash-3.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5dab508ac39e0ab988039bc7f962c6ad021acd81fd29145962b068df4148c476"}, + {file = "xxhash-3.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8c59f3e46e7daf4c589e8e853d700ef6607afa037bfad32c390175da28127e8c"}, + {file = "xxhash-3.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cc07256eff0795e0f642df74ad096f8c5d23fe66bc138b83970b50fc7f7f6c5"}, + {file = "xxhash-3.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:e9f749999ed80f3955a4af0eb18bb43993f04939350b07b8dd2f44edc98ffee9"}, + {file = "xxhash-3.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7688d7c02149a90a3d46d55b341ab7ad1b4a3f767be2357e211b4e893efbaaf6"}, + {file = "xxhash-3.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a8b4977963926f60b0d4f830941c864bed16aa151206c01ad5c531636da5708e"}, + {file = "xxhash-3.4.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:8106d88da330f6535a58a8195aa463ef5281a9aa23b04af1848ff715c4398fb4"}, + {file = "xxhash-3.4.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4c76a77dbd169450b61c06fd2d5d436189fc8ab7c1571d39265d4822da16df22"}, + {file = "xxhash-3.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:11f11357c86d83e53719c592021fd524efa9cf024dc7cb1dfb57bbbd0d8713f2"}, + {file = "xxhash-3.4.1-cp38-cp38-win32.whl", hash = "sha256:0c786a6cd74e8765c6809892a0d45886e7c3dc54de4985b4a5eb8b630f3b8e3b"}, + {file = "xxhash-3.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:aabf37fb8fa27430d50507deeab2ee7b1bcce89910dd10657c38e71fee835594"}, + {file = "xxhash-3.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6127813abc1477f3a83529b6bbcfeddc23162cece76fa69aee8f6a8a97720562"}, + {file = "xxhash-3.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef2e194262f5db16075caea7b3f7f49392242c688412f386d3c7b07c7733a70a"}, + {file = "xxhash-3.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71be94265b6c6590f0018bbf73759d21a41c6bda20409782d8117e76cd0dfa8b"}, + {file = "xxhash-3.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:10e0a619cdd1c0980e25eb04e30fe96cf8f4324758fa497080af9c21a6de573f"}, + {file = "xxhash-3.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fa122124d2e3bd36581dd78c0efa5f429f5220313479fb1072858188bc2d5ff1"}, + {file = "xxhash-3.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17032f5a4fea0a074717fe33477cb5ee723a5f428de7563e75af64bfc1b1e10"}, + {file = "xxhash-3.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca7783b20e3e4f3f52f093538895863f21d18598f9a48211ad757680c3bd006f"}, + {file = "xxhash-3.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d77d09a1113899fad5f354a1eb4f0a9afcf58cefff51082c8ad643ff890e30cf"}, + {file = "xxhash-3.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:21287bcdd299fdc3328cc0fbbdeaa46838a1c05391264e51ddb38a3f5b09611f"}, + {file = "xxhash-3.4.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:dfd7a6cc483e20b4ad90224aeb589e64ec0f31e5610ab9957ff4314270b2bf31"}, + {file = "xxhash-3.4.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:543c7fcbc02bbb4840ea9915134e14dc3dc15cbd5a30873a7a5bf66039db97ec"}, + {file = "xxhash-3.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:fe0a98d990e433013f41827b62be9ab43e3cf18e08b1483fcc343bda0d691182"}, + {file = "xxhash-3.4.1-cp39-cp39-win32.whl", hash = "sha256:b9097af00ebf429cc7c0e7d2fdf28384e4e2e91008130ccda8d5ae653db71e54"}, + {file = "xxhash-3.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:d699b921af0dcde50ab18be76c0d832f803034d80470703700cb7df0fbec2832"}, + {file = "xxhash-3.4.1-cp39-cp39-win_arm64.whl", hash = "sha256:2be491723405e15cc099ade1280133ccfbf6322d2ef568494fb7d07d280e7eee"}, + {file = "xxhash-3.4.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:431625fad7ab5649368c4849d2b49a83dc711b1f20e1f7f04955aab86cd307bc"}, + {file = 
"xxhash-3.4.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc6dbd5fc3c9886a9e041848508b7fb65fd82f94cc793253990f81617b61fe49"}, + {file = "xxhash-3.4.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3ff8dbd0ec97aec842476cb8ccc3e17dd288cd6ce3c8ef38bff83d6eb927817"}, + {file = "xxhash-3.4.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef73a53fe90558a4096e3256752268a8bdc0322f4692ed928b6cd7ce06ad4fe3"}, + {file = "xxhash-3.4.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:450401f42bbd274b519d3d8dcf3c57166913381a3d2664d6609004685039f9d3"}, + {file = "xxhash-3.4.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a162840cf4de8a7cd8720ff3b4417fbc10001eefdd2d21541a8226bb5556e3bb"}, + {file = "xxhash-3.4.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b736a2a2728ba45017cb67785e03125a79d246462dfa892d023b827007412c52"}, + {file = "xxhash-3.4.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d0ae4c2e7698adef58710d6e7a32ff518b66b98854b1c68e70eee504ad061d8"}, + {file = "xxhash-3.4.1-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6322c4291c3ff174dcd104fae41500e75dad12be6f3085d119c2c8a80956c51"}, + {file = "xxhash-3.4.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:dd59ed668801c3fae282f8f4edadf6dc7784db6d18139b584b6d9677ddde1b6b"}, + {file = "xxhash-3.4.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:92693c487e39523a80474b0394645b393f0ae781d8db3474ccdcead0559ccf45"}, + {file = "xxhash-3.4.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4603a0f642a1e8d7f3ba5c4c25509aca6a9c1cc16f85091004a7028607ead663"}, + {file = "xxhash-3.4.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fa45e8cbfbadb40a920fe9ca40c34b393e0b067082d94006f7f64e70c7490a6"}, + {file = "xxhash-3.4.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:595b252943b3552de491ff51e5bb79660f84f033977f88f6ca1605846637b7c6"}, + {file = "xxhash-3.4.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:562d8b8f783c6af969806aaacf95b6c7b776929ae26c0cd941d54644ea7ef51e"}, + {file = "xxhash-3.4.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:41ddeae47cf2828335d8d991f2d2b03b0bdc89289dc64349d712ff8ce59d0647"}, + {file = "xxhash-3.4.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c44d584afdf3c4dbb3277e32321d1a7b01d6071c1992524b6543025fb8f4206f"}, + {file = "xxhash-3.4.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd7bddb3a5b86213cc3f2c61500c16945a1b80ecd572f3078ddbbe68f9dabdfb"}, + {file = "xxhash-3.4.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9ecb6c987b62437c2f99c01e97caf8d25660bf541fe79a481d05732e5236719c"}, + {file = "xxhash-3.4.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:696b4e18b7023527d5c50ed0626ac0520edac45a50ec7cf3fc265cd08b1f4c03"}, + {file = "xxhash-3.4.1.tar.gz", hash = "sha256:0379d6cf1ff987cd421609a264ce025e74f346e3e145dd106c0cc2e3ec3f99a9"}, +] + +[[package]] +name = "yarl" +version = "1.9.4" +description = "Yet another URL library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = 
"sha256:a8c1df72eb746f4136fe9a2e72b0c9dc1da1cbd23b5372f94b5820ff8ae30e0e"}, + {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a3a6ed1d525bfb91b3fc9b690c5a21bb52de28c018530ad85093cc488bee2dd2"}, + {file = "yarl-1.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c38c9ddb6103ceae4e4498f9c08fac9b590c5c71b0370f98714768e22ac6fa66"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9e09c9d74f4566e905a0b8fa668c58109f7624db96a2171f21747abc7524234"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8477c1ee4bd47c57d49621a062121c3023609f7a13b8a46953eb6c9716ca392"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5ff2c858f5f6a42c2a8e751100f237c5e869cbde669a724f2062d4c4ef93551"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:357495293086c5b6d34ca9616a43d329317feab7917518bc97a08f9e55648455"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54525ae423d7b7a8ee81ba189f131054defdb122cde31ff17477951464c1691c"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:801e9264d19643548651b9db361ce3287176671fb0117f96b5ac0ee1c3530d53"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e516dc8baf7b380e6c1c26792610230f37147bb754d6426462ab115a02944385"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:7d5aaac37d19b2904bb9dfe12cdb08c8443e7ba7d2852894ad448d4b8f442863"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:54beabb809ffcacbd9d28ac57b0db46e42a6e341a030293fb3185c409e626b8b"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bac8d525a8dbc2a1507ec731d2867025d11ceadcb4dd421423a5d42c56818541"}, + {file = "yarl-1.9.4-cp310-cp310-win32.whl", hash = "sha256:7855426dfbddac81896b6e533ebefc0af2f132d4a47340cee6d22cac7190022d"}, + {file = "yarl-1.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:848cd2a1df56ddbffeb375535fb62c9d1645dde33ca4d51341378b3f5954429b"}, + {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:35a2b9396879ce32754bd457d31a51ff0a9d426fd9e0e3c33394bf4b9036b099"}, + {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c7d56b293cc071e82532f70adcbd8b61909eec973ae9d2d1f9b233f3d943f2c"}, + {file = "yarl-1.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8a1c6c0be645c745a081c192e747c5de06e944a0d21245f4cf7c05e457c36e0"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b3c1ffe10069f655ea2d731808e76e0f452fc6c749bea04781daf18e6039525"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:549d19c84c55d11687ddbd47eeb348a89df9cb30e1993f1b128f4685cd0ebbf8"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7409f968456111140c1c95301cadf071bd30a81cbd7ab829169fb9e3d72eae9"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e23a6d84d9d1738dbc6e38167776107e63307dfc8ad108e580548d1f2c587f42"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8b889777de69897406c9fb0b76cdf2fd0f31267861ae7501d93003d55f54fbe"}, + {file = 
"yarl-1.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:03caa9507d3d3c83bca08650678e25364e1843b484f19986a527630ca376ecce"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e9035df8d0880b2f1c7f5031f33f69e071dfe72ee9310cfc76f7b605958ceb9"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:c0ec0ed476f77db9fb29bca17f0a8fcc7bc97ad4c6c1d8959c507decb22e8572"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:ee04010f26d5102399bd17f8df8bc38dc7ccd7701dc77f4a68c5b8d733406958"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:49a180c2e0743d5d6e0b4d1a9e5f633c62eca3f8a86ba5dd3c471060e352ca98"}, + {file = "yarl-1.9.4-cp311-cp311-win32.whl", hash = "sha256:81eb57278deb6098a5b62e88ad8281b2ba09f2f1147c4767522353eaa6260b31"}, + {file = "yarl-1.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d1d2532b340b692880261c15aee4dc94dd22ca5d61b9db9a8a361953d36410b1"}, + {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0d2454f0aef65ea81037759be5ca9947539667eecebca092733b2eb43c965a81"}, + {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:44d8ffbb9c06e5a7f529f38f53eda23e50d1ed33c6c869e01481d3fafa6b8142"}, + {file = "yarl-1.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aaaea1e536f98754a6e5c56091baa1b6ce2f2700cc4a00b0d49eca8dea471074"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3777ce5536d17989c91696db1d459574e9a9bd37660ea7ee4d3344579bb6f129"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fc5fc1eeb029757349ad26bbc5880557389a03fa6ada41703db5e068881e5f2"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea65804b5dc88dacd4a40279af0cdadcfe74b3e5b4c897aa0d81cf86927fee78"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa102d6d280a5455ad6a0f9e6d769989638718e938a6a0a2ff3f4a7ff8c62cc4"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09efe4615ada057ba2d30df871d2f668af661e971dfeedf0c159927d48bbeff0"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:008d3e808d03ef28542372d01057fd09168419cdc8f848efe2804f894ae03e51"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6f5cb257bc2ec58f437da2b37a8cd48f666db96d47b8a3115c29f316313654ff"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:992f18e0ea248ee03b5a6e8b3b4738850ae7dbb172cc41c966462801cbf62cf7"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0e9d124c191d5b881060a9e5060627694c3bdd1fe24c5eecc8d5d7d0eb6faabc"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3986b6f41ad22988e53d5778f91855dc0399b043fc8946d4f2e68af22ee9ff10"}, + {file = "yarl-1.9.4-cp312-cp312-win32.whl", hash = "sha256:4b21516d181cd77ebd06ce160ef8cc2a5e9ad35fb1c5930882baff5ac865eee7"}, + {file = "yarl-1.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:a9bd00dc3bc395a662900f33f74feb3e757429e545d831eef5bb280252631984"}, + {file = "yarl-1.9.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:63b20738b5aac74e239622d2fe30df4fca4942a86e31bf47a81a0e94c14df94f"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7d7f7de27b8944f1fee2c26a88b4dabc2409d2fea7a9ed3df79b67277644e17"}, + 
{file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c74018551e31269d56fab81a728f683667e7c28c04e807ba08f8c9e3bba32f14"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ca06675212f94e7a610e85ca36948bb8fc023e458dd6c63ef71abfd482481aa5"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5aef935237d60a51a62b86249839b51345f47564208c6ee615ed2a40878dccdd"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b134fd795e2322b7684155b7855cc99409d10b2e408056db2b93b51a52accc7"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d25039a474c4c72a5ad4b52495056f843a7ff07b632c1b92ea9043a3d9950f6e"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f7d6b36dd2e029b6bcb8a13cf19664c7b8e19ab3a58e0fefbb5b8461447ed5ec"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:957b4774373cf6f709359e5c8c4a0af9f6d7875db657adb0feaf8d6cb3c3964c"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d7eeb6d22331e2fd42fce928a81c697c9ee2d51400bd1a28803965883e13cead"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6a962e04b8f91f8c4e5917e518d17958e3bdee71fd1d8b88cdce74dd0ebbf434"}, + {file = "yarl-1.9.4-cp37-cp37m-win32.whl", hash = "sha256:f3bc6af6e2b8f92eced34ef6a96ffb248e863af20ef4fde9448cc8c9b858b749"}, + {file = "yarl-1.9.4-cp37-cp37m-win_amd64.whl", hash = "sha256:ad4d7a90a92e528aadf4965d685c17dacff3df282db1121136c382dc0b6014d2"}, + {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ec61d826d80fc293ed46c9dd26995921e3a82146feacd952ef0757236fc137be"}, + {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8be9e837ea9113676e5754b43b940b50cce76d9ed7d2461df1af39a8ee674d9f"}, + {file = "yarl-1.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bef596fdaa8f26e3d66af846bbe77057237cb6e8efff8cd7cc8dff9a62278bbf"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d47552b6e52c3319fede1b60b3de120fe83bde9b7bddad11a69fb0af7db32f1"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84fc30f71689d7fc9168b92788abc977dc8cefa806909565fc2951d02f6b7d57"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4aa9741085f635934f3a2583e16fcf62ba835719a8b2b28fb2917bb0537c1dfa"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:206a55215e6d05dbc6c98ce598a59e6fbd0c493e2de4ea6cc2f4934d5a18d130"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07574b007ee20e5c375a8fe4a0789fad26db905f9813be0f9fef5a68080de559"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5a2e2433eb9344a163aced6a5f6c9222c0786e5a9e9cac2c89f0b28433f56e23"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6ad6d10ed9b67a382b45f29ea028f92d25bc0bc1daf6c5b801b90b5aa70fb9ec"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6fe79f998a4052d79e1c30eeb7d6c1c1056ad33300f682465e1b4e9b5a188b78"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a825ec844298c791fd28ed14ed1bffc56a98d15b8c58a20e0e08c1f5f2bea1be"}, + {file = 
"yarl-1.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8619d6915b3b0b34420cf9b2bb6d81ef59d984cb0fde7544e9ece32b4b3043c3"}, + {file = "yarl-1.9.4-cp38-cp38-win32.whl", hash = "sha256:686a0c2f85f83463272ddffd4deb5e591c98aac1897d65e92319f729c320eece"}, + {file = "yarl-1.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:a00862fb23195b6b8322f7d781b0dc1d82cb3bcac346d1e38689370cc1cc398b"}, + {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:604f31d97fa493083ea21bd9b92c419012531c4e17ea6da0f65cacdcf5d0bd27"}, + {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8a854227cf581330ffa2c4824d96e52ee621dd571078a252c25e3a3b3d94a1b1"}, + {file = "yarl-1.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ba6f52cbc7809cd8d74604cce9c14868306ae4aa0282016b641c661f981a6e91"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6327976c7c2f4ee6816eff196e25385ccc02cb81427952414a64811037bbc8b"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8397a3817d7dcdd14bb266283cd1d6fc7264a48c186b986f32e86d86d35fbac5"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e0381b4ce23ff92f8170080c97678040fc5b08da85e9e292292aba67fdac6c34"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23d32a2594cb5d565d358a92e151315d1b2268bc10f4610d098f96b147370136"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ddb2a5c08a4eaaba605340fdee8fc08e406c56617566d9643ad8bf6852778fc7"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:26a1dc6285e03f3cc9e839a2da83bcbf31dcb0d004c72d0730e755b33466c30e"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:18580f672e44ce1238b82f7fb87d727c4a131f3a9d33a5e0e82b793362bf18b4"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:29e0f83f37610f173eb7e7b5562dd71467993495e568e708d99e9d1944f561ec"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:1f23e4fe1e8794f74b6027d7cf19dc25f8b63af1483d91d595d4a07eca1fb26c"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db8e58b9d79200c76956cefd14d5c90af54416ff5353c5bfd7cbe58818e26ef0"}, + {file = "yarl-1.9.4-cp39-cp39-win32.whl", hash = "sha256:c7224cab95645c7ab53791022ae77a4509472613e839dab722a72abe5a684575"}, + {file = "yarl-1.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:824d6c50492add5da9374875ce72db7a0733b29c2394890aef23d533106e2b15"}, + {file = "yarl-1.9.4-py3-none-any.whl", hash = "sha256:928cecb0ef9d5a7946eb6ff58417ad2fe9375762382f1bf5c55e61645f2c43ad"}, + {file = "yarl-1.9.4.tar.gz", hash = "sha256:566db86717cf8080b99b58b083b773a908ae40f06681e87e589a976faf8246bf"}, +] + +[package.dependencies] +idna = ">=2.0" +multidict = ">=4.0" + +[metadata] +lock-version = "2.0" +python-versions = "^3.11" +content-hash = "86f366fc87ad2958cf4b80fc161a0c5eb374c34119d938c78821ad2fabdb2e72" diff --git a/example/discord/honcho-dspy-personas/pyproject.toml b/example/discord/honcho-dspy-personas/pyproject.toml new file mode 100644 index 0000000..74275ea --- /dev/null +++ b/example/discord/honcho-dspy-personas/pyproject.toml @@ -0,0 +1,20 @@ +[tool.poetry] +name = "honcho-dspy-personas" +version = "0.1.0" +description = "" +authors = ["vintro "] +readme = "README.md" + +[tool.poetry.dependencies] +python = "^3.11" +honcho-ai = "^0.0.3" +dspy-ai = "^2.1.10" 
+python-dotenv = "^1.0.1" +langchain-core = "^0.1.23" +langchain-openai = "^0.0.6" +py-cord = "^2.4.1" + + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" From 53d952ff04926922a2d79de8f356c32484f05304 Mon Sep 17 00:00:00 2001 From: vintro Date: Tue, 20 Feb 2024 16:51:35 -0500 Subject: [PATCH 32/46] working, hit token limit and can't test dspy optimization --- example/discord/honcho-dspy-personas/bot.py | 9 +++--- example/discord/honcho-dspy-personas/chain.py | 28 ++++++++++-------- example/discord/honcho-dspy-personas/graph.py | 29 ++++++++++--------- .../langchain_prompts/state_check.yaml | 2 +- .../discord/honcho-dspy-personas/poetry.lock | 16 +++++----- .../honcho-dspy-personas/pyproject.toml | 1 + .../{metric.py => response_metric.py} | 3 +- 7 files changed, 49 insertions(+), 39 deletions(-) rename example/discord/honcho-dspy-personas/{metric.py => response_metric.py} (84%) diff --git a/example/discord/honcho-dspy-personas/bot.py b/example/discord/honcho-dspy-personas/bot.py index caab5ee..a79d7e9 100644 --- a/example/discord/honcho-dspy-personas/bot.py +++ b/example/discord/honcho-dspy-personas/bot.py @@ -2,15 +2,16 @@ from uuid import uuid1 import discord from honcho import Client as HonchoClient -from graph import langchain_message_converter, chat - +from graph import chat +from chain import langchain_message_converter intents = discord.Intents.default() intents.messages = True intents.message_content = True intents.members = True -app_id = "vince/dspy-personas" +# app_id = str(uuid1()) +app_id = "vince-dspy-personas" #honcho = HonchoClient(app_id=app_id, base_url="http://localhost:8000") # uncomment to use local honcho = HonchoClient(app_id=app_id) # uses demo server at https://demo.honcho.dev @@ -49,7 +50,7 @@ async def on_message(message): else: session = honcho.create_session(user_id, location_id) - history = list(session.get_messages(page_size=10)) + history = list(session.get_messages_generator()) chat_history = langchain_message_converter(history) inp = message.content diff --git a/example/discord/honcho-dspy-personas/chain.py b/example/discord/honcho-dspy-personas/chain.py index 8d19707..43c7b5e 100644 --- a/example/discord/honcho-dspy-personas/chain.py +++ b/example/discord/honcho-dspy-personas/chain.py @@ -36,9 +36,9 @@ class StateExtractor: """Wrapper class for all the DSPy and LangChain code for user state labeling and pipeline optimization""" lc_gpt_4: ChatOpenAI = ChatOpenAI(model_name = "gpt-4") lc_gpt_turbo: ChatOpenAI = ChatOpenAI(model_name = "gpt-3.5-turbo") - system_state_commentary: SystemMessagePromptTemplate = SystemMessagePromptTemplate(SYSTEM_STATE_COMMENTARY) - system_state_labeling: SystemMessagePromptTemplate = SystemMessagePromptTemplate(SYSTEM_STATE_LABELING) - system_state_check: SystemMessagePromptTemplate = SystemMessagePromptTemplate(SYSTEM_STATE_CHECK) + system_state_commentary: SystemMessagePromptTemplate = SystemMessagePromptTemplate(prompt=SYSTEM_STATE_COMMENTARY) + system_state_labeling: SystemMessagePromptTemplate = SystemMessagePromptTemplate(prompt=SYSTEM_STATE_LABELING) + system_state_check: SystemMessagePromptTemplate = SystemMessagePromptTemplate(prompt=SYSTEM_STATE_CHECK) def __init__(self) -> None: pass @@ -61,7 +61,7 @@ async def generate_state_commentary(cls, chat_history: List[Message], input: str return response.content @classmethod - async def generate_state_label(cls, state_commetary: str) -> str: + async def generate_state_label(cls, state_commentary: str) -> str: """Generate a state label 
from a commetary on the user's state""" # format prompt state_labeling = ChatPromptTemplate.from_messages([ @@ -71,7 +71,7 @@ async def generate_state_label(cls, state_commetary: str) -> str: chain = state_labeling | cls.lc_gpt_4 # inference response = await chain.ainvoke({ - "state_commetary": state_commetary + "state_commentary": state_commentary }) # return output return response.content @@ -95,20 +95,24 @@ async def check_state_exists(cls, existing_states: List[str], state: str): "state": state, }) # return output - return response.output + return response.content @classmethod async def generate_state(cls, existing_states: List[str], chat_history: List[Message], input: str): """"Determine the user's state from the current conversation state""" # Generate label - state_commetary = cls.generate_state_commentary(chat_history, input) - state_label = cls.generate_state_label(state_commetary) + state_commentary = await cls.generate_state_commentary(chat_history, input) + state_label = await cls.generate_state_label(state_commentary) # Determine if state is new - existing_state = cls.check_state_exists(existing_states, state_label) - is_state_new = existing_state is None + # if True, it doesn't exist, state is new + # if False, it does exist, state is not new, existing_state was returned + existing_state = await cls.check_state_exists(existing_states, state_label) + is_state_new = existing_state == "None" # return existing state if we found one - return is_state_new, existing_state or state_label - \ No newline at end of file + if is_state_new: + return is_state_new, state_label + else: + return is_state_new, existing_state diff --git a/example/discord/honcho-dspy-personas/graph.py b/example/discord/honcho-dspy-personas/graph.py index 6283a0e..9976371 100644 --- a/example/discord/honcho-dspy-personas/graph.py +++ b/example/discord/honcho-dspy-personas/graph.py @@ -4,6 +4,7 @@ from dspy.teleprompt import BootstrapFewShot from dotenv import load_dotenv from chain import StateExtractor, format_chat_history +from response_metric import metric from honcho import Message, Session @@ -47,37 +48,39 @@ def forward(self, user_message: Message, session: Session, chat_input: str): user_state_storage = {} async def chat(user_message: Message, session: Session, chat_history: List[Message], input: str, optimization_threshold=5): - # first we need to take the user input and determine the user's state/dimension/persona - is_state_new, user_state = await StateExtractor.generate_state(chat_history, input) + # first we need to see if the user has any existing states + existing_states = list(user_state_storage.keys()) + + # then we need to take the user input and determine the user's state/dimension/persona + is_state_new, user_state = await StateExtractor.generate_state(existing_states=existing_states, chat_history=chat_history, input=input) + print(f"USER STATE: {user_state}") + print(f"IS STATE NEW: {is_state_new}") + + user_chat_module = ChatWithThought() # Save the user_state if it's new if is_state_new: user_state_storage[user_state] = { - "chat_module": ChatWithThought(), + "chat_module": {}, "examples": [] } - # then, we need to select the pipeline for that derived state/dimension/persona - # way this would work is to define the optimizer and optimize a chain once examples in a certain dimension exceed a threshold - # need a way to store the optimized chain and call it given a state/dimension/persona - # this is the reward model for a user within a state/dimension/persona user_state_data = 
user_state_storage[user_state] # Optimize the state's chat module if we've reached the optimization threshold examples = user_state_data["examples"] if len(examples) >= optimization_threshold: - metric = None # TODO: Define this - # Optimize chat module optimizer = BootstrapFewShot(metric=metric) - compiled_chat_module = optimizer.compile(trainset=examples) + compiled_chat_module = optimizer.compile(user_chat_module, trainset=examples) + + user_state_data["chat_module"] = compiled_chat_module.dump_state() + user_chat_module = compiled_chat_module - user_state_data["chat_module"] = compiled_chat_module # use that pipeline to generate a response - chat_module = user_state_data["chat_module"] chat_input = format_chat_history(chat_history, user_input=input) - response = chat_module(user_message=user_message, session=session, input=chat_input) + response = user_chat_module(user_message=user_message, session=session, chat_input=chat_input) return response diff --git a/example/discord/honcho-dspy-personas/langchain_prompts/state_check.yaml b/example/discord/honcho-dspy-personas/langchain_prompts/state_check.yaml index ac6bdd1..6fe2f0f 100644 --- a/example/discord/honcho-dspy-personas/langchain_prompts/state_check.yaml +++ b/example/discord/honcho-dspy-personas/langchain_prompts/state_check.yaml @@ -7,4 +7,4 @@ template: > existing states: ```{existing_states}``` new state: ```{state}``` - If the new state represented in the existing states, return the existing state value. If the new state is NOT represented in existing states, return "None". Output a single value only. \ No newline at end of file + If the new state is sufficiently similar to a value in the list of existing states, return that existing state value. If the new state is NOT sufficiently similar to anything in existing states, return "None". Output a single value only. \ No newline at end of file diff --git a/example/discord/honcho-dspy-personas/poetry.lock b/example/discord/honcho-dspy-personas/poetry.lock index 3e03a95..7329a15 100644 --- a/example/discord/honcho-dspy-personas/poetry.lock +++ b/example/discord/honcho-dspy-personas/poetry.lock @@ -814,19 +814,19 @@ files = [ [[package]] name = "langchain-core" -version = "0.1.23" +version = "0.1.24" description = "Building applications with LLMs through composability" optional = false python-versions = ">=3.8.1,<4.0" files = [ - {file = "langchain_core-0.1.23-py3-none-any.whl", hash = "sha256:d42fac013c39a8b0bcd7e337a4cb6c17c16046c60d768f89df582ad73ec3c5cb"}, - {file = "langchain_core-0.1.23.tar.gz", hash = "sha256:34359cc8b6f8c3d45098c54a6a9b35c9f538ef58329cd943a2249d6d7b4e5806"}, + {file = "langchain_core-0.1.24-py3-none-any.whl", hash = "sha256:1887bb2e0c12e0d94c1e805eb56d08dbb670232daf0906761f726bd507324319"}, + {file = "langchain_core-0.1.24.tar.gz", hash = "sha256:ce70f4b97695eb55637e00ee33d480fffc6db1f95726f99b076b55cb1a42927d"}, ] [package.dependencies] anyio = ">=3,<5" jsonpatch = ">=1.33,<2.0" -langsmith = ">=0.0.87,<0.0.88" +langsmith = ">=0.1.0,<0.2.0" packaging = ">=23.2,<24.0" pydantic = ">=1,<3" PyYAML = ">=5.3" @@ -855,13 +855,13 @@ tiktoken = ">=0.5.2,<1" [[package]] name = "langsmith" -version = "0.0.87" +version = "0.1.3" description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." 
optional = false python-versions = ">=3.8.1,<4.0" files = [ - {file = "langsmith-0.0.87-py3-none-any.whl", hash = "sha256:8903d3811b9fc89eb18f5961c8e6935fbd2d0f119884fbf30dc70b8f8f4121fc"}, - {file = "langsmith-0.0.87.tar.gz", hash = "sha256:36c4cc47e5b54be57d038036a30fb19ce6e4c73048cd7a464b8f25b459694d34"}, + {file = "langsmith-0.1.3-py3-none-any.whl", hash = "sha256:b290f951d1ebff9abe2b52cc09d63acea75a9ca6e003a617310fb024eaf00f63"}, + {file = "langsmith-0.1.3.tar.gz", hash = "sha256:197bd1f5baa83db69a0eab644bab1eba8dcdf0c2d8b7c900a45916f7b3dd50ab"}, ] [package.dependencies] @@ -2188,4 +2188,4 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "86f366fc87ad2958cf4b80fc161a0c5eb374c34119d938c78821ad2fabdb2e72" +content-hash = "85bddbf515ca00359d2b25db6dc48e2943a136e3b65a9ed8a9537ad79bfd4eec" diff --git a/example/discord/honcho-dspy-personas/pyproject.toml b/example/discord/honcho-dspy-personas/pyproject.toml index 74275ea..6f2082b 100644 --- a/example/discord/honcho-dspy-personas/pyproject.toml +++ b/example/discord/honcho-dspy-personas/pyproject.toml @@ -13,6 +13,7 @@ python-dotenv = "^1.0.1" langchain-core = "^0.1.23" langchain-openai = "^0.0.6" py-cord = "^2.4.1" +langsmith = "^0.1.3" [build-system] diff --git a/example/discord/honcho-dspy-personas/metric.py b/example/discord/honcho-dspy-personas/response_metric.py similarity index 84% rename from example/discord/honcho-dspy-personas/metric.py rename to example/discord/honcho-dspy-personas/response_metric.py index 9ff3ce5..65c4fbb 100644 --- a/example/discord/honcho-dspy-personas/metric.py +++ b/example/discord/honcho-dspy-personas/response_metric.py @@ -10,7 +10,8 @@ class MessageResponseAssess(dspy.Signature): assessment_answer = dspy.OutputField(desc="Good or not") -def assess_response_quality(user_message, ai_response, assessment_dimension): +def metric(user_message, ai_response, assessment_dimension): + """Assess the quality of a response along the specified dimension.""" with dspy.context(lm=gpt4T): assessment_result = dspy.Predict(MessageResponseAssess)( user_message=user_message, From 8c5845f4f7ff75b5c8c616e6aa6b9ea0e6a130b9 Mon Sep 17 00:00:00 2001 From: vintro Date: Tue, 20 Feb 2024 21:51:19 -0500 Subject: [PATCH 33/46] initial version working, need to test optimization --- example/discord/honcho-dspy-personas/bot.py | 2 +- example/discord/honcho-dspy-personas/chain.py | 2 +- example/discord/honcho-dspy-personas/graph.py | 3 --- 3 files changed, 2 insertions(+), 5 deletions(-) diff --git a/example/discord/honcho-dspy-personas/bot.py b/example/discord/honcho-dspy-personas/bot.py index a79d7e9..fdee670 100644 --- a/example/discord/honcho-dspy-personas/bot.py +++ b/example/discord/honcho-dspy-personas/bot.py @@ -50,7 +50,7 @@ async def on_message(message): else: session = honcho.create_session(user_id, location_id) - history = list(session.get_messages_generator()) + history = list(session.get_messages_generator())[:5] chat_history = langchain_message_converter(history) inp = message.content diff --git a/example/discord/honcho-dspy-personas/chain.py b/example/discord/honcho-dspy-personas/chain.py index 43c7b5e..37316fd 100644 --- a/example/discord/honcho-dspy-personas/chain.py +++ b/example/discord/honcho-dspy-personas/chain.py @@ -54,7 +54,7 @@ async def generate_state_commentary(cls, chat_history: List[Message], input: str chain = state_commentary | cls.lc_gpt_4 # inference response = await chain.ainvoke({ - "chat_history": format_chat_history(chat_history, user_input=input), + 
"chat_history": chat_history, "user_input": input }) # return output diff --git a/example/discord/honcho-dspy-personas/graph.py b/example/discord/honcho-dspy-personas/graph.py index 9976371..c49c473 100644 --- a/example/discord/honcho-dspy-personas/graph.py +++ b/example/discord/honcho-dspy-personas/graph.py @@ -34,15 +34,12 @@ class ChatWithThought(dspy.Module): generate_response = dspy.Predict(Response) def forward(self, user_message: Message, session: Session, chat_input: str): - session.create_message(is_user=True, content=chat_input) - # call the thought predictor thought = self.generate_thought(user_input=chat_input) session.create_metamessage(user_message, metamessage_type="thought", content=thought.thought) # call the response predictor response = self.generate_response(user_input=chat_input, thought=thought.thought) - session.create_message(is_user=False, content=response.response) return response.response From 07651cb026b8d5d9811dd77454c5c2588ebb0727 Mon Sep 17 00:00:00 2001 From: vintro Date: Wed, 21 Feb 2024 20:30:13 -0500 Subject: [PATCH 34/46] optimizers working, but appending any example --- example/discord/honcho-dspy-personas/bot.py | 26 ++++++++++++++++ example/discord/honcho-dspy-personas/chain.py | 20 ++++++++----- example/discord/honcho-dspy-personas/graph.py | 30 ++++++++++++++----- .../langchain_prompts/state_commentary.yaml | 3 +- .../langchain_prompts/state_labeling.yaml | 10 +++++-- .../honcho-dspy-personas/response_metric.py | 17 ++++++++--- 6 files changed, 84 insertions(+), 22 deletions(-) diff --git a/example/discord/honcho-dspy-personas/bot.py b/example/discord/honcho-dspy-personas/bot.py index fdee670..5efd9e2 100644 --- a/example/discord/honcho-dspy-personas/bot.py +++ b/example/discord/honcho-dspy-personas/bot.py @@ -9,6 +9,7 @@ intents.messages = True intents.message_content = True intents.members = True +intents.reactions = True # Enable reactions intent # app_id = str(uuid1()) app_id = "vince-dspy-personas" @@ -18,6 +19,9 @@ bot = discord.Bot(intents=intents) +thumbs_up_messages = [] +thumbs_down_messages = [] + @bot.event async def on_ready(): print(f'We have logged in as {bot.user}') @@ -67,6 +71,28 @@ async def on_message(message): session.create_message(is_user=False, content=response) +@bot.event +async def on_reaction_add(reaction, user): + # Ensure the bot does not react to its own reactions + if user == bot.user: + return + + user_id = f"discord_{str(reaction.message.author.id)}" + location_id = str(reaction.message.channel.id) + + # Check if the reaction is a thumbs up + if str(reaction.emoji) == '👍': + thumbs_up_messages.append(reaction.message.content) + print(f"Added to thumbs up: {reaction.message.content}") + # Check if the reaction is a thumbs down + elif str(reaction.emoji) == '👎': + thumbs_down_messages.append(reaction.message.content) + print(f"Added to thumbs down: {reaction.message.content}") + + # TODO: we need to append these to the examples list within the user state json object + + + @bot.slash_command(name = "restart", description = "Restart the Conversation") async def restart(ctx): user_id=f"discord_{str(ctx.author.id)}" diff --git a/example/discord/honcho-dspy-personas/chain.py b/example/discord/honcho-dspy-personas/chain.py index 37316fd..aa114e1 100644 --- a/example/discord/honcho-dspy-personas/chain.py +++ b/example/discord/honcho-dspy-personas/chain.py @@ -44,8 +44,10 @@ def __init__(self) -> None: pass @classmethod - async def generate_state_commentary(cls, chat_history: List[Message], input: str) -> str: + async def 
generate_state_commentary(cls, existing_states: List[str], chat_history: List[Message], input: str) -> str: """Generate a commentary on the current state of the user""" + # format existing states + existing_states = "\n".join(existing_states) # format prompt state_commentary = ChatPromptTemplate.from_messages([ cls.system_state_commentary @@ -55,23 +57,27 @@ async def generate_state_commentary(cls, chat_history: List[Message], input: str # inference response = await chain.ainvoke({ "chat_history": chat_history, - "user_input": input + "user_input": input, + "existing_states": existing_states, }) # return output return response.content @classmethod - async def generate_state_label(cls, state_commentary: str) -> str: + async def generate_state_label(cls, existing_states: List[str], state_commentary: str) -> str: """Generate a state label from a commetary on the user's state""" + # format existing states + existing_states = "\n".join(existing_states) # format prompt state_labeling = ChatPromptTemplate.from_messages([ - cls.system_state_labeling + cls.system_state_labeling, ]) # LCEL chain = state_labeling | cls.lc_gpt_4 # inference response = await chain.ainvoke({ - "state_commentary": state_commentary + "state_commentary": state_commentary, + "existing_states": existing_states, }) # return output return response.content @@ -102,8 +108,8 @@ async def generate_state(cls, existing_states: List[str], chat_history: List[Mes """"Determine the user's state from the current conversation state""" # Generate label - state_commentary = await cls.generate_state_commentary(chat_history, input) - state_label = await cls.generate_state_label(state_commentary) + state_commentary = await cls.generate_state_commentary(existing_states, chat_history, input) + state_label = await cls.generate_state_label(existing_states, state_commentary) # Determine if state is new # if True, it doesn't exist, state is new diff --git a/example/discord/honcho-dspy-personas/graph.py b/example/discord/honcho-dspy-personas/graph.py index c49c473..7b46d05 100644 --- a/example/discord/honcho-dspy-personas/graph.py +++ b/example/discord/honcho-dspy-personas/graph.py @@ -1,6 +1,7 @@ import os import dspy -from typing import List +from dspy import Example +from typing import List, Optional from dspy.teleprompt import BootstrapFewShot from dotenv import load_dotenv from chain import StateExtractor, format_chat_history @@ -11,7 +12,7 @@ load_dotenv() # Configure DSPy -dspy_gpt4 = dspy.OpenAI(model="gpt-4") +dspy_gpt4 = dspy.OpenAI(model="gpt-4", max_tokens=1000) dspy.settings.configure(lm=dspy_gpt4) @@ -33,18 +34,23 @@ class ChatWithThought(dspy.Module): generate_thought = dspy.Predict(Thought) generate_response = dspy.Predict(Response) - def forward(self, user_message: Message, session: Session, chat_input: str): + def forward(self, chat_input: str, user_message: Optional[Message] = None, session: Optional[Session] = None): # call the thought predictor thought = self.generate_thought(user_input=chat_input) - session.create_metamessage(user_message, metamessage_type="thought", content=thought.thought) + + if session and user_message: + session.create_metamessage(user_message, metamessage_type="thought", content=thought.thought) # call the response predictor response = self.generate_response(user_input=chat_input, thought=thought.thought) - return response.response + # remove ai prefix + response = response.response.replace("ai:", "").strip() + + return response user_state_storage = {} -async def chat(user_message: Message, session: 
Session, chat_history: List[Message], input: str, optimization_threshold=5): +async def chat(user_message: Message, session: Session, chat_history: List[Message], input: str, optimization_threshold=3): # first we need to see if the user has any existing states existing_states = list(user_state_storage.keys()) @@ -66,6 +72,8 @@ async def chat(user_message: Message, session: Session, chat_history: List[Messa # Optimize the state's chat module if we've reached the optimization threshold examples = user_state_data["examples"] + print(f"Num examples: {len(examples)}") + if len(examples) >= optimization_threshold: # Optimize chat module optimizer = BootstrapFewShot(metric=metric) @@ -74,10 +82,18 @@ async def chat(user_message: Message, session: Session, chat_history: List[Messa user_state_data["chat_module"] = compiled_chat_module.dump_state() user_chat_module = compiled_chat_module + # save to file for debugging purposes + # compiled_chat_module.save("module.json") + # use that pipeline to generate a response chat_input = format_chat_history(chat_history, user_input=input) - response = user_chat_module(user_message=user_message, session=session, chat_input=chat_input) + dspy_gpt4.inspect_history(n=2) + + # append example + example = Example(chat_input=chat_input, assessment_dimension=user_state, response=response).with_inputs('chat_input') + examples.append(example) + user_state_storage[user_state]["examples"] = examples return response diff --git a/example/discord/honcho-dspy-personas/langchain_prompts/state_commentary.yaml b/example/discord/honcho-dspy-personas/langchain_prompts/state_commentary.yaml index d0fbf2e..f1e2b90 100644 --- a/example/discord/honcho-dspy-personas/langchain_prompts/state_commentary.yaml +++ b/example/discord/honcho-dspy-personas/langchain_prompts/state_commentary.yaml @@ -1,8 +1,9 @@ _type: prompt input_variables: - ["chat_history", "user_input"] + ["existing_states", "chat_history", "user_input"] template: > Your job is to make a prediction about the task the user might be engaging in. Some people might be researching, exploring curiosities, or just asking questions for general inquiry. Provide commentary that would shed light on the "mode" the user might be in. + existing states: ```{existing_states}``` chat history: ```{chat_history}``` user input: ```{user_input}``` \ No newline at end of file diff --git a/example/discord/honcho-dspy-personas/langchain_prompts/state_labeling.yaml b/example/discord/honcho-dspy-personas/langchain_prompts/state_labeling.yaml index 61e0353..c3dd8fb 100644 --- a/example/discord/honcho-dspy-personas/langchain_prompts/state_labeling.yaml +++ b/example/discord/honcho-dspy-personas/langchain_prompts/state_labeling.yaml @@ -1,9 +1,13 @@ _type: prompt input_variables: - ["state_commentary"] + ["state_commentary", "existing_states"] template: > - Your job is to label the task the user might be engaging in. Some people might be conducting research, exploring a interest, or just asking questions for general inquiry. + Your job is to label the state the user might be in. Some people might be conducting research, exploring a interest, or just asking questions for general inquiry. commentary: ```{state_commentary}``` + Prior states, from oldest to most recent: ``` + {existing_states} + ```` + + Take into account the user's prior states when making your prediction. Output your prediction as a concise, single word label. - Output your prediction as a concise, single word label. 
\ No newline at end of file diff --git a/example/discord/honcho-dspy-personas/response_metric.py b/example/discord/honcho-dspy-personas/response_metric.py index 65c4fbb..1383c11 100644 --- a/example/discord/honcho-dspy-personas/response_metric.py +++ b/example/discord/honcho-dspy-personas/response_metric.py @@ -4,21 +4,30 @@ class MessageResponseAssess(dspy.Signature): """Assess the quality of a response along the specified dimension.""" - user_message = dspy.InputField() + chat_input = dspy.InputField() ai_response = dspy.InputField() + gold_response = dspy.InputField() assessment_dimension = dspy.InputField() assessment_answer = dspy.OutputField(desc="Good or not") -def metric(user_message, ai_response, assessment_dimension): +def metric(example, ai_response, trace=None): """Assess the quality of a response along the specified dimension.""" + + assessment_dimension = example.assessment_dimension + chat_input = example.chat_input + gold_response = example.response + with dspy.context(lm=gpt4T): assessment_result = dspy.Predict(MessageResponseAssess)( - user_message=user_message, + chat_input=chat_input, ai_response=ai_response, + gold_response=gold_response, assessment_dimension=assessment_dimension ) is_positive = assessment_result.assessment_answer.lower() == 'good' + + gpt4T.inspect_history(n=3) - return is_positive \ No newline at end of file + return is_positive From 364ba9a237fee4b58c4a6bf9a03a85cd781f6fe3 Mon Sep 17 00:00:00 2001 From: vintro Date: Wed, 21 Feb 2024 22:44:33 -0500 Subject: [PATCH 35/46] ready for user object (tbomk) --- example/discord/honcho-dspy-personas/bot.py | 5 ++++- example/discord/honcho-dspy-personas/graph.py | 7 ++----- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/example/discord/honcho-dspy-personas/bot.py b/example/discord/honcho-dspy-personas/bot.py index 5efd9e2..948bdb3 100644 --- a/example/discord/honcho-dspy-personas/bot.py +++ b/example/discord/honcho-dspy-personas/bot.py @@ -90,7 +90,10 @@ async def on_reaction_add(reaction, user): print(f"Added to thumbs down: {reaction.message.content}") # TODO: we need to append these to the examples list within the user state json object - + # append example + # example = Example(chat_input=chat_input, assessment_dimension=user_state, response=response).with_inputs('chat_input') + # examples.append(example) + # user_state_storage[user_state]["examples"] = examples @bot.slash_command(name = "restart", description = "Restart the Conversation") diff --git a/example/discord/honcho-dspy-personas/graph.py b/example/discord/honcho-dspy-personas/graph.py index 7b46d05..9295a43 100644 --- a/example/discord/honcho-dspy-personas/graph.py +++ b/example/discord/honcho-dspy-personas/graph.py @@ -61,6 +61,7 @@ async def chat(user_message: Message, session: Session, chat_history: List[Messa user_chat_module = ChatWithThought() + # TODO: you'd want to initialize user state object from Honcho # Save the user_state if it's new if is_state_new: user_state_storage[user_state] = { @@ -71,6 +72,7 @@ async def chat(user_message: Message, session: Session, chat_history: List[Messa user_state_data = user_state_storage[user_state] # Optimize the state's chat module if we've reached the optimization threshold + # TODO: read in examples from Honcho User Object examples = user_state_data["examples"] print(f"Num examples: {len(examples)}") @@ -91,9 +93,4 @@ async def chat(user_message: Message, session: Session, chat_history: List[Messa response = user_chat_module(user_message=user_message, session=session, 
chat_input=chat_input) dspy_gpt4.inspect_history(n=2) - # append example - example = Example(chat_input=chat_input, assessment_dimension=user_state, response=response).with_inputs('chat_input') - examples.append(example) - user_state_storage[user_state]["examples"] = examples - return response From ad4cec37600309be7ce03c78944d95a974a569b6 Mon Sep 17 00:00:00 2001 From: Ayush Paul Date: Thu, 22 Feb 2024 17:00:41 -0500 Subject: [PATCH 36/46] Revert "add test actions and coverage" --- .github/workflows/run_coverage.yml | 51 ----------------- .github/workflows/run_tests.yml | 38 ------------ README.md | 12 ++-- sdk/poetry.lock | 92 +++++++----------------------- sdk/pyproject.toml | 1 - sdk/tests/test_sync.py | 2 - 6 files changed, 27 insertions(+), 169 deletions(-) delete mode 100644 .github/workflows/run_coverage.yml delete mode 100644 .github/workflows/run_tests.yml diff --git a/.github/workflows/run_coverage.yml b/.github/workflows/run_coverage.yml deleted file mode 100644 index bc2d780..0000000 --- a/.github/workflows/run_coverage.yml +++ /dev/null @@ -1,51 +0,0 @@ -name: Run Coverage -on: [pull_request] -jobs: - test: - permissions: - pull-requests: write - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - name: Set up Python 3.10 - uses: actions/setup-python@v3 - with: - python-version: "3.10" - - name: Install poetry - run: | - pip install poetry - - name: Syncify Client - run: | - python scripts/syncronizer.py - - name: Start Server - run: | - cd api - poetry install --no-root - poetry run uvicorn src.main:app & - sleep 5 - cd .. - env: - DATABASE_TYPE: sqlite - CONNECTION_URI: sqlite:///api.db - - name: Run Tests - run: | - cd sdk - poetry install - poetry run coverage run -m pytest - poetry run coverage xml -o coverage.xml - cd .. - - name: Code Coverage - uses: irongut/CodeCoverageSummary@v1.3.0 - with: - filename: sdk/coverage.xml - badge: true - output: file - format: markdown - - name: Add Coverage PR Comment - uses: marocchino/sticky-pull-request-comment@v2 - with: - recreate: true - path: code-coverage-results.md - - name: Stop Server - run: | - kill $(jobs -p) || true \ No newline at end of file diff --git a/.github/workflows/run_tests.yml b/.github/workflows/run_tests.yml deleted file mode 100644 index 05e3aa8..0000000 --- a/.github/workflows/run_tests.yml +++ /dev/null @@ -1,38 +0,0 @@ -name: Run Tests -on: [push, pull_request] -jobs: - test: - permissions: - pull-requests: write - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - name: Set up Python 3.10 - uses: actions/setup-python@v3 - with: - python-version: "3.10" - - name: Install poetry - run: | - pip install poetry - - name: Syncify Client - run: | - python scripts/syncronizer.py - - name: Start Server - run: | - cd api - poetry install --no-root - poetry run uvicorn src.main:app & - sleep 5 - cd .. - env: - DATABASE_TYPE: sqlite - CONNECTION_URI: sqlite:///api.db - - name: Run Tests - run: | - cd sdk - poetry install - poetry run pytest - cd .. 
- - name: Stop Server - run: | - kill $(jobs -p) || true \ No newline at end of file diff --git a/README.md b/README.md index bfd56d0..e67c360 100644 --- a/README.md +++ b/README.md @@ -1,13 +1,10 @@ # Honcho - ![Static Badge](https://img.shields.io/badge/Version-0.0.3-blue) [![Discord](https://img.shields.io/discord/1016845111637839922?style=flat&logo=discord&logoColor=23ffffff&label=Plastic%20Labs&labelColor=235865F2)](https://discord.gg/plasticlabs) ![GitHub License](https://img.shields.io/github/license/plastic-labs/honcho) ![GitHub Repo stars](https://img.shields.io/github/stars/plastic-labs/honcho) [![X (formerly Twitter) URL](https://img.shields.io/twitter/url?url=https%3A%2F%2Ftwitter.com%2Fplastic_labs)](https://twitter.com/plastic_labs) -[![Run Tests](https://github.com/plastic-labs/honcho/actions/workflows/api_testing.yml/badge.svg?branch=staging)](https://github.com/plastic-labs/honcho/actions/workflows/api_testing.yml) - A User context management solution for building AI Agents and LLM powered applications. @@ -51,7 +48,7 @@ poetry install # install dependencies 2. Copy the `.env.template` file and specify the type of database and connection_uri. For testing sqlite is fine. The below example uses an - in-memory sqlite database. + in-memory sqlite database. > Honcho has been tested with Postgresql and PGVector @@ -93,7 +90,8 @@ docker run --env-file .env -p 8000:8000 honcho-api:latest The API can also be deployed on fly.io. Follow the [Fly.io Docs](https://fly.io/docs/getting-started/) to setup your environment and the -`flyctl`. +`flyctl`. + Once `flyctl` is set up use the following commands to launch the application: @@ -136,12 +134,12 @@ See more information [here](https://python-poetry.org/docs/cli/#add) This project is completely open source and welcomes any and all open source contributions. The workflow for contributing is to make a fork of the repository. You can claim an issue in the issues tab or start a new thread to -indicate a feature or bug fix you are working on. +indicate a feature or bug fix you are working on. Once you have finished your contribution make a PR pointed at the `staging` branch, and it will be reviewed by a project manager. Feel free to join us in our [discord](http://discord.gg/plasticlabs) to discuss your changes or get -help. +help. Once your changes are accepted and merged into staging they will undergo a period of live testing before entering the upstream into `main` diff --git a/sdk/poetry.lock b/sdk/poetry.lock index 965b0ac..e450ca8 100644 --- a/sdk/poetry.lock +++ b/sdk/poetry.lock @@ -1,9 +1,10 @@ -# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.4.1 and should not be changed by hand. [[package]] name = "anyio" version = "4.2.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -26,6 +27,7 @@ trio = ["trio (>=0.23)"] name = "certifi" version = "2023.11.17" description = "Python package for providing Mozilla's CA Bundle." +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -37,6 +39,7 @@ files = [ name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." 
+category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ @@ -44,74 +47,11 @@ files = [ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] -[[package]] -name = "coverage" -version = "7.4.1" -description = "Code coverage measurement for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "coverage-7.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:077d366e724f24fc02dbfe9d946534357fda71af9764ff99d73c3c596001bbd7"}, - {file = "coverage-7.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0193657651f5399d433c92f8ae264aff31fc1d066deee4b831549526433f3f61"}, - {file = "coverage-7.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d17bbc946f52ca67adf72a5ee783cd7cd3477f8f8796f59b4974a9b59cacc9ee"}, - {file = "coverage-7.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3277f5fa7483c927fe3a7b017b39351610265308f5267ac6d4c2b64cc1d8d25"}, - {file = "coverage-7.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dceb61d40cbfcf45f51e59933c784a50846dc03211054bd76b421a713dcdf19"}, - {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6008adeca04a445ea6ef31b2cbaf1d01d02986047606f7da266629afee982630"}, - {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c61f66d93d712f6e03369b6a7769233bfda880b12f417eefdd4f16d1deb2fc4c"}, - {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b9bb62fac84d5f2ff523304e59e5c439955fb3b7f44e3d7b2085184db74d733b"}, - {file = "coverage-7.4.1-cp310-cp310-win32.whl", hash = "sha256:f86f368e1c7ce897bf2457b9eb61169a44e2ef797099fb5728482b8d69f3f016"}, - {file = "coverage-7.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:869b5046d41abfea3e381dd143407b0d29b8282a904a19cb908fa24d090cc018"}, - {file = "coverage-7.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b8ffb498a83d7e0305968289441914154fb0ef5d8b3157df02a90c6695978295"}, - {file = "coverage-7.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3cacfaefe6089d477264001f90f55b7881ba615953414999c46cc9713ff93c8c"}, - {file = "coverage-7.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d6850e6e36e332d5511a48a251790ddc545e16e8beaf046c03985c69ccb2676"}, - {file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18e961aa13b6d47f758cc5879383d27b5b3f3dcd9ce8cdbfdc2571fe86feb4dd"}, - {file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfd1e1b9f0898817babf840b77ce9fe655ecbe8b1b327983df485b30df8cc011"}, - {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6b00e21f86598b6330f0019b40fb397e705135040dbedc2ca9a93c7441178e74"}, - {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:536d609c6963c50055bab766d9951b6c394759190d03311f3e9fcf194ca909e1"}, - {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7ac8f8eb153724f84885a1374999b7e45734bf93a87d8df1e7ce2146860edef6"}, - {file = "coverage-7.4.1-cp311-cp311-win32.whl", hash = "sha256:f3771b23bb3675a06f5d885c3630b1d01ea6cac9e84a01aaf5508706dba546c5"}, - {file = "coverage-7.4.1-cp311-cp311-win_amd64.whl", hash = 
"sha256:9d2f9d4cc2a53b38cabc2d6d80f7f9b7e3da26b2f53d48f05876fef7956b6968"}, - {file = "coverage-7.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f68ef3660677e6624c8cace943e4765545f8191313a07288a53d3da188bd8581"}, - {file = "coverage-7.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:23b27b8a698e749b61809fb637eb98ebf0e505710ec46a8aa6f1be7dc0dc43a6"}, - {file = "coverage-7.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e3424c554391dc9ef4a92ad28665756566a28fecf47308f91841f6c49288e66"}, - {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0860a348bf7004c812c8368d1fc7f77fe8e4c095d661a579196a9533778e156"}, - {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe558371c1bdf3b8fa03e097c523fb9645b8730399c14fe7721ee9c9e2a545d3"}, - {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3468cc8720402af37b6c6e7e2a9cdb9f6c16c728638a2ebc768ba1ef6f26c3a1"}, - {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:02f2edb575d62172aa28fe00efe821ae31f25dc3d589055b3fb64d51e52e4ab1"}, - {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ca6e61dc52f601d1d224526360cdeab0d0712ec104a2ce6cc5ccef6ed9a233bc"}, - {file = "coverage-7.4.1-cp312-cp312-win32.whl", hash = "sha256:ca7b26a5e456a843b9b6683eada193fc1f65c761b3a473941efe5a291f604c74"}, - {file = "coverage-7.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:85ccc5fa54c2ed64bd91ed3b4a627b9cce04646a659512a051fa82a92c04a448"}, - {file = "coverage-7.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8bdb0285a0202888d19ec6b6d23d5990410decb932b709f2b0dfe216d031d218"}, - {file = "coverage-7.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:918440dea04521f499721c039863ef95433314b1db00ff826a02580c1f503e45"}, - {file = "coverage-7.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:379d4c7abad5afbe9d88cc31ea8ca262296480a86af945b08214eb1a556a3e4d"}, - {file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b094116f0b6155e36a304ff912f89bbb5067157aff5f94060ff20bbabdc8da06"}, - {file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2f5968608b1fe2a1d00d01ad1017ee27efd99b3437e08b83ded9b7af3f6f766"}, - {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:10e88e7f41e6197ea0429ae18f21ff521d4f4490aa33048f6c6f94c6045a6a75"}, - {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a4a3907011d39dbc3e37bdc5df0a8c93853c369039b59efa33a7b6669de04c60"}, - {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6d224f0c4c9c98290a6990259073f496fcec1b5cc613eecbd22786d398ded3ad"}, - {file = "coverage-7.4.1-cp38-cp38-win32.whl", hash = "sha256:23f5881362dcb0e1a92b84b3c2809bdc90db892332daab81ad8f642d8ed55042"}, - {file = "coverage-7.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:a07f61fc452c43cd5328b392e52555f7d1952400a1ad09086c4a8addccbd138d"}, - {file = "coverage-7.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8e738a492b6221f8dcf281b67129510835461132b03024830ac0e554311a5c54"}, - {file = "coverage-7.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:46342fed0fff72efcda77040b14728049200cbba1279e0bf1188f1f2078c1d70"}, - {file = 
"coverage-7.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9641e21670c68c7e57d2053ddf6c443e4f0a6e18e547e86af3fad0795414a628"}, - {file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aeb2c2688ed93b027eb0d26aa188ada34acb22dceea256d76390eea135083950"}, - {file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d12c923757de24e4e2110cf8832d83a886a4cf215c6e61ed506006872b43a6d1"}, - {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0491275c3b9971cdbd28a4595c2cb5838f08036bca31765bad5e17edf900b2c7"}, - {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8dfc5e195bbef80aabd81596ef52a1277ee7143fe419efc3c4d8ba2754671756"}, - {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1a78b656a4d12b0490ca72651fe4d9f5e07e3c6461063a9b6265ee45eb2bdd35"}, - {file = "coverage-7.4.1-cp39-cp39-win32.whl", hash = "sha256:f90515974b39f4dea2f27c0959688621b46d96d5a626cf9c53dbc653a895c05c"}, - {file = "coverage-7.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:64e723ca82a84053dd7bfcc986bdb34af8d9da83c521c19d6b472bc6880e191a"}, - {file = "coverage-7.4.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:32a8d985462e37cfdab611a6f95b09d7c091d07668fdc26e47a725ee575fe166"}, - {file = "coverage-7.4.1.tar.gz", hash = "sha256:1ed4b95480952b1a26d863e546fa5094564aa0065e1e5f0d4d0041f293251d04"}, -] - -[package.extras] -toml = ["tomli"] - [[package]] name = "exceptiongroup" version = "1.2.0" description = "Backport of PEP 654 (exception groups)" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -126,6 +66,7 @@ test = ["pytest (>=6)"] name = "h11" version = "0.14.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -137,6 +78,7 @@ files = [ name = "httpcore" version = "1.0.2" description = "A minimal low-level HTTP client." +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -151,13 +93,14 @@ h11 = ">=0.13,<0.15" [package.extras] asyncio = ["anyio (>=4.0,<5.0)"] http2 = ["h2 (>=3,<5)"] -socks = ["socksio (==1.*)"] +socks = ["socksio (>=1.0.0,<2.0.0)"] trio = ["trio (>=0.22.0,<0.23.0)"] [[package]] name = "httpx" version = "0.26.0" description = "The next generation HTTP client." 
+category = "main" optional = false python-versions = ">=3.8" files = [ @@ -168,20 +111,21 @@ files = [ [package.dependencies] anyio = "*" certifi = "*" -httpcore = "==1.*" +httpcore = ">=1.0.0,<2.0.0" idna = "*" sniffio = "*" [package.extras] brotli = ["brotli", "brotlicffi"] -cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] +cli = ["click (>=8.0.0,<9.0.0)", "pygments (>=2.0.0,<3.0.0)", "rich (>=10,<14)"] http2 = ["h2 (>=3,<5)"] -socks = ["socksio (==1.*)"] +socks = ["socksio (>=1.0.0,<2.0.0)"] [[package]] name = "idna" version = "3.6" description = "Internationalized Domain Names in Applications (IDNA)" +category = "main" optional = false python-versions = ">=3.5" files = [ @@ -193,6 +137,7 @@ files = [ name = "iniconfig" version = "2.0.0" description = "brain-dead simple config-ini parsing" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -204,6 +149,7 @@ files = [ name = "packaging" version = "23.2" description = "Core utilities for Python packages" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -215,6 +161,7 @@ files = [ name = "pluggy" version = "1.3.0" description = "plugin and hook calling mechanisms for python" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -230,6 +177,7 @@ testing = ["pytest", "pytest-benchmark"] name = "pytest" version = "7.4.4" description = "pytest: simple powerful testing with Python" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -252,6 +200,7 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no name = "pytest-asyncio" version = "0.23.4" description = "Pytest support for asyncio" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -270,6 +219,7 @@ testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] name = "sniffio" version = "1.3.0" description = "Sniff out which async library your code is running under" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -281,6 +231,7 @@ files = [ name = "tomli" version = "2.0.1" description = "A lil' TOML parser" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -292,6 +243,7 @@ files = [ name = "typing-extensions" version = "4.9.0" description = "Backported and Experimental Type Hints for Python 3.8+" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -302,4 +254,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "cfdd3c0dc8dba3a70135da5b63a3b027968bb935d4846491c7bba2f30ac20a32" +content-hash = "6ccea662fa5a5bae88618123d5d05e0d4955c234b7e1a688d2fae2f90cd9f7f8" diff --git a/sdk/pyproject.toml b/sdk/pyproject.toml index eadc19e..1455bca 100644 --- a/sdk/pyproject.toml +++ b/sdk/pyproject.toml @@ -14,7 +14,6 @@ httpx = "^0.26.0" [tool.poetry.group.test.dependencies] pytest = "^7.4.4" pytest-asyncio = "^0.23.4" -coverage = "^7.4.1" [build-system] requires = ["poetry-core"] diff --git a/sdk/tests/test_sync.py b/sdk/tests/test_sync.py index 28ad2b9..a0367ad 100644 --- a/sdk/tests/test_sync.py +++ b/sdk/tests/test_sync.py @@ -1,6 +1,5 @@ import pytest from honcho import GetSessionPage, GetMessagePage, GetMetamessagePage, GetDocumentPage, Session, Message, Metamessage, Document - from honcho import Client as Honcho from uuid import uuid1 @@ -344,7 +343,6 @@ def test_collection_query(): assert doc3.metadata == {"test": "test"} assert doc3.content == "the user has owned pets in the past" - result = collection.query(query="does the user own pets", 
top_k=2) assert result is not None From dee6ba768cc67080254ecda7f0ee8258285dbee8 Mon Sep 17 00:00:00 2001 From: Vineeth Voruganti <13438633+VVoruganti@users.noreply.github.com> Date: Wed, 21 Feb 2024 20:05:44 -0800 Subject: [PATCH 37/46] Refactor to add User and App Tables --- api/.vscode/settings.json | 3 + api/src/crud.py | 327 ++++++++++++++++++++++++++------------ api/src/main.py | 297 ++++++++++++++++++++++++++-------- api/src/models.py | 56 +++++-- api/src/schemas.py | 65 +++++++- 5 files changed, 570 insertions(+), 178 deletions(-) create mode 100644 api/.vscode/settings.json diff --git a/api/.vscode/settings.json b/api/.vscode/settings.json new file mode 100644 index 0000000..457f44d --- /dev/null +++ b/api/.vscode/settings.json @@ -0,0 +1,3 @@ +{ + "python.analysis.typeCheckingMode": "basic" +} \ No newline at end of file diff --git a/api/src/crud.py b/api/src/crud.py index e18c0ff..cde2359 100644 --- a/api/src/crud.py +++ b/api/src/crud.py @@ -12,13 +12,127 @@ openai_client = OpenAI() +######################################################## +# app methods +######################################################## + + +def get_app(db: Session, app_id: uuid.UUID) -> Optional[models.App]: + stmt = ( + select(models.App) + .where(models.App.id == app_id) + ) + app = db.scalars(stmt).one_or_none() + return app + +def get_app_by_name(db: Session, app_name: str) -> Optional[models.App]: + stmt = ( + select(models.App) + .where(models.App.name == app_name) + ) + app = db.scalars(stmt).one_or_none() + return app + + +# def get_apps(db: Session) -> Sequence[models.App]: +# return db.query(models.App).all() + +def create_app(db: Session, app: schemas.AppCreate) -> models.App: + honcho_app = models.App( + name=app.name, + h_metadata=app.metadata + ) + db.add(honcho_app) + db.commit() + db.refresh(honcho_app) + return honcho_app + +def update_app(db: Session, app_id: uuid.UUID, app: schemas.AppUpdate) -> models.App: + honcho_app = get_app(db, app_id) + if honcho_app is None: + raise ValueError("App not found") + if app.name is not None: + honcho_app.content = app.name + if app.metadata is not None: + honcho_app.h_metadata = app.metadata + + db.commit() + db.refresh(honcho_app) + return honcho_app + +# def delete_app(db: Session, app_id: uuid.UUID) -> bool: +# existing_app = get_app(db, app_id) +# if existing_app is None: +# return False +# db.delete(existing_app) +# db.commit() +# return True + + +######################################################## +# user methods +######################################################## + +def create_user(db: Session, app_id: uuid.UUID, user: schemas.UserCreate) -> models.User: + honcho_user = models.User( + app_id=app_id, + name=user.name, + h_metadata=user.metadata, + ) + db.add(honcho_user) + db.commit() + db.refresh(honcho_user) + return honcho_user + +def get_user(db: Session, app_id: uuid.UUID, user_id: uuid.UUID) -> Optional[models.User]: + stmt = ( + select(models.User) + .where(models.User.app_id == app_id) + .where(models.User.id == user_id) + + ) + user = db.scalars(stmt).one_or_none() + return user + +def get_users(db: Session, app_id: uuid.UUID) -> Select: + stmt = ( + select(models.User) + .where(models.User.app_id == app_id) + ) + return stmt + +def update_user(db: Session, app_id: uuid.UUID, user_id: uuid.UUID, user: schemas.UserUpdate) -> models.User: + honcho_user = get_user(db, app_id, user_id) + if honcho_user is None: + raise ValueError("User not found") + if user.name is not None: + honcho_user.content = user.name + if 
user.metadata is not None: + honcho_user.h_metadata = user.metadata + + db.commit() + db.refresh(honcho_user) + return honcho_user + +# def delete_user(db: Session, app_id: uuid.UUID, user_id: uuid.UUID) -> bool: +# existing_user = get_user(db, app_id, user_id) +# if existing_user is None: +# return False +# db.delete(existing_user) +# db.commit() +# return True + +######################################################## +# session methods +######################################################## def get_session( - db: Session, app_id: str, session_id: uuid.UUID, user_id: Optional[str] = None + db: Session, app_id: uuid.UUID, session_id: uuid.UUID, user_id: Optional[uuid.UUID] = None ) -> Optional[models.Session]: stmt = ( select(models.Session) - .where(models.Session.app_id == app_id) + .join(models.User, models.User.id == models.Session.user_id) + .where(models.User.app_id == app_id) .where(models.Session.id == session_id) ) if user_id is not None: @@ -26,19 +140,19 @@ def get_session( session = db.scalars(stmt).one_or_none() return session - def get_sessions( db: Session, - app_id: str, - user_id: str, + app_id: uuid.UUID, + user_id: uuid.UUID, location_id: Optional[str] = None, reverse: Optional[bool] = False, ) -> Select: stmt = ( select(models.Session) - .where(models.Session.app_id == app_id) + .join(models.User, models.User.id == models.Session.user_id) + .where(models.User.app_id == app_id) .where(models.Session.user_id == user_id) - .where(models.Session.is_active.is_(True)) +# .where(models.Session.is_active.is_(True)) ) if reverse: @@ -53,10 +167,9 @@ def get_sessions( def create_session( - db: Session, session: schemas.SessionCreate, app_id: str, user_id: str + db: Session, session: schemas.SessionCreate, app_id: uuid.UUID, user_id: uuid.UUID ) -> models.Session: honcho_session = models.Session( - app_id=app_id, user_id=user_id, location_id=session.location_id, h_metadata=session.metadata, @@ -70,8 +183,8 @@ def create_session( def update_session( db: Session, session: schemas.SessionUpdate, - app_id: str, - user_id: str, + app_id: uuid.UUID, + user_id: uuid.UUID, session_id: uuid.UUID, ) -> bool: honcho_session = get_session( @@ -79,9 +192,7 @@ def update_session( ) if honcho_session is None: raise ValueError("Session not found or does not belong to user") - if ( - session.metadata is not None - ): # Need to explicitly be there won't make it empty by default + if session.metadata is not None: # Need to explicitly be there won't make it empty by default honcho_session.h_metadata = session.metadata db.commit() db.refresh(honcho_session) @@ -89,12 +200,13 @@ def update_session( def delete_session( - db: Session, app_id: str, user_id: str, session_id: uuid.UUID + db: Session, app_id: uuid.UUID, user_id: uuid.UUID, session_id: uuid.UUID ) -> bool: stmt = ( select(models.Session) + .join(models.User, models.User.id == models.Session.user_id) .where(models.Session.id == session_id) - .where(models.Session.app_id == app_id) + .where(models.User.app_id == app_id) .where(models.Session.user_id == user_id) ) honcho_session = db.scalars(stmt).one_or_none() @@ -104,12 +216,15 @@ def delete_session( db.commit() return True +######################################################## +# Message Methods +######################################################## def create_message( db: Session, message: schemas.MessageCreate, - app_id: str, - user_id: str, + app_id: uuid.UUID, + user_id: uuid.UUID, session_id: uuid.UUID, ) -> models.Message: honcho_session = get_session( @@ -131,16 +246,18 @@ 
def create_message( def get_messages( db: Session, - app_id: str, - user_id: str, + app_id: uuid.UUID, + user_id: uuid.UUID, session_id: uuid.UUID, reverse: Optional[bool] = False, ) -> Select: stmt = ( select(models.Message) .join(models.Session, models.Session.id == models.Message.session_id) - .where(models.Session.app_id == app_id) - .where(models.Session.user_id == user_id) + .join(models.User, models.User.id == models.Session.user_id) + .join(models.App, models.App.id == models.User.app_id) + .where(models.App.id == app_id) + .where(models.User.id == user_id) .where(models.Message.session_id == session_id) ) @@ -153,28 +270,56 @@ def get_messages( def get_message( - db: Session, app_id: str, user_id: str, session_id: uuid.UUID, message_id: uuid.UUID + db: Session, app_id: uuid.UUID, user_id: uuid.UUID, session_id: uuid.UUID, message_id: uuid.UUID ) -> Optional[models.Message]: stmt = ( select(models.Message) .join(models.Session, models.Session.id == models.Message.session_id) - .where(models.Session.app_id == app_id) - .where(models.Session.user_id == user_id) + .join(models.User, models.User.id == models.Session.user_id) + .join(models.App, models.App.id == models.User.app_id) + .where(models.App.id == app_id) + .where(models.User.id == user_id) .where(models.Message.session_id == session_id) .where(models.Message.id == message_id) ) return db.scalars(stmt).one_or_none() - ######################################################## # metamessage methods ######################################################## +def create_metamessage( + db: Session, + metamessage: schemas.MetamessageCreate, + app_id: uuid.UUID, + user_id: uuid.UUID, + session_id: uuid.UUID, +): + message = get_message( + db, + app_id=app_id, + session_id=session_id, + user_id=user_id, + message_id=metamessage.message_id, + ) + if message is None: + raise ValueError("Session not found or does not belong to user") + + honcho_metamessage = models.Metamessage( + message_id=metamessage.message_id, + metamessage_type=metamessage.metamessage_type, + content=metamessage.content, + ) + + db.add(honcho_metamessage) + db.commit() + db.refresh(honcho_metamessage) + return honcho_metamessage def get_metamessages( db: Session, - app_id: str, - user_id: str, + app_id: uuid.UUID, + user_id: uuid.UUID, session_id: uuid.UUID, message_id: Optional[uuid.UUID], metamessage_type: Optional[str] = None, @@ -184,8 +329,10 @@ def get_metamessages( select(models.Metamessage) .join(models.Message, models.Message.id == models.Metamessage.message_id) .join(models.Session, models.Message.session_id == models.Session.id) - .where(models.Session.app_id == app_id) - .where(models.Session.user_id == user_id) + .join(models.User, models.User.id == models.Session.user_id) + .join(models.App, models.App.id == models.User.app_id) + .where(models.App.id == app_id) + .where(models.User.id == user_id) .where(models.Message.session_id == session_id) ) @@ -205,8 +352,8 @@ def get_metamessages( def get_metamessage( db: Session, - app_id: str, - user_id: str, + app_id: uuid.UUID, + user_id: uuid.UUID, session_id: uuid.UUID, message_id: uuid.UUID, metamessage_id: uuid.UUID, @@ -215,8 +362,10 @@ def get_metamessage( select(models.Metamessage) .join(models.Message, models.Message.id == models.Metamessage.message_id) .join(models.Session, models.Message.session_id == models.Session.id) - .where(models.Session.app_id == app_id) - .where(models.Session.user_id == user_id) + .join(models.User, models.User.id == models.Session.user_id) + .join(models.App, models.App.id == 
models.User.app_id) + .where(models.App.id == app_id) + .where(models.User.id == user_id) .where(models.Message.session_id == session_id) .where(models.Metamessage.message_id == message_id) .where(models.Metamessage.id == metamessage_id) @@ -224,35 +373,6 @@ def get_metamessage( return db.scalars(stmt).one_or_none() -def create_metamessage( - db: Session, - metamessage: schemas.MetamessageCreate, - app_id: str, - user_id: str, - session_id: uuid.UUID, -): - message = get_message( - db, - app_id=app_id, - session_id=session_id, - user_id=user_id, - message_id=metamessage.message_id, - ) - if message is None: - raise ValueError("Session not found or does not belong to user") - - honcho_metamessage = models.Metamessage( - message_id=metamessage.message_id, - metamessage_type=metamessage.metamessage_type, - content=metamessage.content, - ) - - db.add(honcho_metamessage) - db.commit() - db.refresh(honcho_metamessage) - return honcho_metamessage - - ######################################################## # collection methods ######################################################## @@ -261,13 +381,14 @@ def create_metamessage( def get_collections( - db: Session, app_id: str, user_id: str, reverse: Optional[bool] = False + db: Session, app_id: uuid.UUID, user_id: uuid.UUID, reverse: Optional[bool] = False ) -> Select: """Get a distinct list of the names of collections associated with a user""" stmt = ( select(models.Collection) - .where(models.Collection.app_id == app_id) - .where(models.Collection.user_id == user_id) + .join(models.User, models.User.id == models.Collection.user_id) + .where(models.User.app_id == app_id) + .where(models.User.id == user_id) ) if reverse: @@ -279,12 +400,13 @@ def get_collections( def get_collection_by_id( - db: Session, app_id: str, user_id: str, collection_id: uuid.UUID + db: Session, app_id: uuid.UUID, user_id: uuid.UUID, collection_id: uuid.UUID ) -> Optional[models.Collection]: stmt = ( select(models.Collection) - .where(models.Collection.app_id == app_id) - .where(models.Collection.user_id == user_id) + .join(models.User, models.User.id == models.Collection.user_id) + .where(models.User.app_id == app_id) + .where(models.User.id == user_id) .where(models.Collection.id == collection_id) ) collection = db.scalars(stmt).one_or_none() @@ -292,12 +414,13 @@ def get_collection_by_id( def get_collection_by_name( - db: Session, app_id: str, user_id: str, name: str + db: Session, app_id: uuid.UUID, user_id: uuid.UUID, name: str ) -> Optional[models.Collection]: stmt = ( select(models.Collection) - .where(models.Collection.app_id == app_id) - .where(models.Collection.user_id == user_id) + .join(models.User, models.User.id == models.Collection.user_id) + .where(models.User.app_id == app_id) + .where(models.User.id == user_id) .where(models.Collection.name == name) ) collection = db.scalars(stmt).one_or_none() @@ -305,10 +428,9 @@ def get_collection_by_name( def create_collection( - db: Session, collection: schemas.CollectionCreate, app_id: str, user_id: str + db: Session, collection: schemas.CollectionCreate, app_id: uuid.UUID, user_id: uuid.UUID ) -> models.Collection: honcho_collection = models.Collection( - app_id=app_id, user_id=user_id, name=collection.name, ) @@ -325,8 +447,8 @@ def create_collection( def update_collection( db: Session, collection: schemas.CollectionUpdate, - app_id: str, - user_id: str, + app_id: uuid.UUID, + user_id: uuid.UUID, collection_id: uuid.UUID, ) -> models.Collection: honcho_collection = get_collection_by_id( @@ -345,7 +467,7 @@ def 
update_collection( def delete_collection( - db: Session, app_id: str, user_id: str, collection_id: uuid.UUID + db: Session, app_id: uuid.UUID, user_id: uuid.UUID, collection_id: uuid.UUID ) -> bool: """ Delete a Collection and all documents associated with it. Takes advantage of @@ -353,9 +475,10 @@ def delete_collection( """ stmt = ( select(models.Collection) + .join(models.User, models.User.id == models.Collection.user_id) + .where(models.User.app_id == app_id) + .where(models.User.id == user_id) .where(models.Collection.id == collection_id) - .where(models.Collection.app_id == app_id) - .where(models.Collection.user_id == user_id) ) honcho_collection = db.scalars(stmt).one_or_none() if honcho_collection is None: @@ -374,16 +497,17 @@ def delete_collection( def get_documents( db: Session, - app_id: str, - user_id: str, + app_id: uuid.UUID, + user_id: uuid.UUID, collection_id: uuid.UUID, reverse: Optional[bool] = False, ) -> Select: stmt = ( select(models.Document) .join(models.Collection, models.Collection.id == models.Document.collection_id) - .where(models.Collection.app_id == app_id) - .where(models.Collection.user_id == user_id) + .join(models.User, models.User.id == models.Collection.user_id) + .where(models.User.app_id == app_id) + .where(models.User.id == user_id) .where(models.Document.collection_id == collection_id) ) @@ -397,16 +521,17 @@ def get_documents( def get_document( db: Session, - app_id: str, - user_id: str, + app_id: uuid.UUID, + user_id: uuid.UUID, collection_id: uuid.UUID, document_id: uuid.UUID, ) -> Optional[models.Document]: stmt = ( select(models.Document) .join(models.Collection, models.Collection.id == models.Document.collection_id) - .where(models.Collection.app_id == app_id) - .where(models.Collection.user_id == user_id) + .join(models.User, models.User.id == models.Collection.user_id) + .where(models.User.app_id == app_id) + .where(models.User.id == user_id) .where(models.Document.collection_id == collection_id) .where(models.Document.id == document_id) ) @@ -417,8 +542,8 @@ def get_document( def query_documents( db: Session, - app_id: str, - user_id: str, + app_id: uuid.UUID, + user_id: uuid.UUID, collection_id: uuid.UUID, query: str, top_k: int = 5, @@ -430,8 +555,9 @@ def query_documents( stmt = ( select(models.Document) .join(models.Collection, models.Collection.id == models.Document.collection_id) - .where(models.Collection.app_id == app_id) - .where(models.Collection.user_id == user_id) + .join(models.User, models.User.id == models.Collection.user_id) + .where(models.User.app_id == app_id) + .where(models.User.id == user_id) .where(models.Document.collection_id == collection_id) .order_by(models.Document.embedding.cosine_distance(embedding_query)) .limit(top_k) @@ -444,8 +570,8 @@ def query_documents( def create_document( db: Session, document: schemas.DocumentCreate, - app_id: str, - user_id: str, + app_id: uuid.UUID, + user_id: uuid.UUID, collection_id: uuid.UUID, ) -> models.Document: """Embed a message as a vector and create a document""" @@ -476,8 +602,8 @@ def create_document( def update_document( db: Session, document: schemas.DocumentUpdate, - app_id: str, - user_id: str, + app_id: uuid.UUID, + user_id: uuid.UUID, collection_id: uuid.UUID, document_id: uuid.UUID, ) -> bool: @@ -497,7 +623,7 @@ def update_document( ) embedding = response.data[0].embedding honcho_document.embedding = embedding - honcho_document.created_at = datetime.datetime.now() + honcho_document.created_at = datetime.datetime.utcnow() if document.metadata is not None: 
honcho_document.h_metadata = document.metadata @@ -508,16 +634,17 @@ def update_document( def delete_document( db: Session, - app_id: str, - user_id: str, + app_id: uuid.UUID, + user_id: uuid.UUID, collection_id: uuid.UUID, document_id: uuid.UUID, ) -> bool: stmt = ( select(models.Document) .join(models.Collection, models.Collection.id == models.Document.collection_id) - .where(models.Collection.app_id == app_id) - .where(models.Collection.user_id == user_id) + .join(models.User, models.User.id == models.Collection.user_id) + .where(models.User.app_id == app_id) + .where(models.User.id == user_id) .where(models.Document.collection_id == collection_id) .where(models.Document.id == document_id) ) diff --git a/api/src/main.py b/api/src/main.py index 0c658e9..bbdf46c 100644 --- a/api/src/main.py +++ b/api/src/main.py @@ -39,6 +39,175 @@ def get_db(): finally: db.close() +######################################################## +# App Routes +######################################################## +@app.get("/apps/{app_id}", response_model=schemas.App) +def get_app( + request: Request, + app_id: uuid.UUID, + db: Session = Depends(get_db), +): + """Get an App by ID + + Args: + app_id (uuid.UUID): The ID of the app + + Returns: + schemas.App: App object + + """ + app = crud.get_app(db, app_id=app_id) + if app is None: + raise HTTPException(status_code=404, detail="App not found") + return app + +@app.get("/apps/name/{app_name}", response_model=schemas.App) +def get_app_by_name( + request: Request, + app_name: str, + db: Session = Depends(get_db), +): + """Get an App by Name + + Args: + app_name (str): The name of the app + + Returns: + schemas.App: App object + + """ + app = crud.get_app_by_name(db, app_name=app_name) + if app is None: + raise HTTPException(status_code=404, detail="App not found") + return app + + +@app.post("/apps", response_model=schemas.App) +def create_app( + request: Request, + app: schemas.AppCreate, + db: Session = Depends(get_db), +): + """Create an App + + Args: + app (schemas.AppCreate): The App object containing any metadata + + Returns: + schemas.App: Created App object + + """ + return crud.create_app(db, app=app) + + +@app.put("/apps/{app_id}", response_model=schemas.App) +def update_app( + request: Request, + app_id: uuid.UUID, + app: schemas.AppUpdate, + db: Session = Depends(get_db), +): + """Update an App + + Args: + app_id (uuid.UUID): The ID of the app to update + app (schemas.AppUpdate): The App object containing any new metadata + + Returns: + schemas.App: The App object of the updated App + + """ + honcho_app = crud.update_app(db, app_id=app_id, app=app) + if honcho_app is None: + raise HTTPException(status_code=404, detail="App not found") + return honcho_app + + +######################################################## +# User Routes +######################################################## + + +@app.post("/apps/{app_id}/users", response_model=schemas.User) +def create_user( + request: Request, + app_id: uuid.UUID, + user: schemas.UserCreate, + db: Session = Depends(get_db), +): + """Create a User + + Args: + app_id (uuid.UUID): The ID of the app representing the client application using honcho + user (schemas.UserCreate): The User object containing any metadata + + Returns: + schemas.User: Created User object + + """ + return crud.create_user(db, app_id=app_id, user=user) + + +@router.get("/apps/{app_id}/users", response_model=Page[schemas.User]) +def get_users( + request: Request, + app_id: uuid.UUID, + db: Session = Depends(get_db), +): + """Get All 
Users for an App + + Args: + app_id (uuid.UUID): The ID of the app representing the client application using honcho + + Returns: + list[schemas.User]: List of User objects + + """ + return paginate(db, crud.get_users(db, app_id=app_id)) + + +@router.get("/apps/{app_id}/users/{user_id}", response_model=schemas.User) +def get_user( + request: Request, + app_id: uuid.UUID, + user_id: uuid.UUID, + db: Session = Depends(get_db), +): + """Get a User + + Args: + app_id (uuid.UUID): The ID of the app representing the client application using honcho + user_id (str): The User ID representing the user, managed by the user + + Returns: + schemas.User: User object + + """ + return crud.get_user(db, app_id=app_id, user_id=user_id) + + +@router.put("/users/{user_id}", response_model=schemas.User) +def update_user( + request: Request, + app_id: uuid.UUID, + user_id: uuid.UUID, + user: schemas.UserCreate, + db: Session = Depends(get_db), +): + """Update a User + + Args: + app_id (uuid.UUID): The ID of the app representing the client application using honcho + user_id (str): The User ID representing the user, managed by the user + user (schemas.UserCreate): The User object containing any metadata + + Returns: + schemas.User: Updated User object + + """ + return crud.update_user(db, app_id=app_id, user_id=user_id, user=user) + ######################################################## # Session Routes @@ -48,8 +217,8 @@ def get_db(): @router.get("/sessions", response_model=Page[schemas.Session]) def get_sessions( request: Request, - app_id: str, - user_id: str, + app_id: uuid.UUID, + user_id: uuid.UUID, location_id: Optional[str] = None, reverse: Optional[bool] = False, db: Session = Depends(get_db), @@ -57,8 +226,8 @@ def get_sessions( """Get All Sessions for a User Args: - app_id (str): The ID of the app representing the client application using honcho - user_id (str): The User ID representing the user, managed by the user + app_id (uuid.UUID): The ID of the app representing the client application using honcho + user_id (uuid.UUID): The User ID representing the user, managed by the user location_id (str, optional): Optional Location ID representing the location of a session Returns: @@ -76,16 +245,16 @@ def get_sessions( @router.post("/sessions", response_model=schemas.Session) def create_session( request: Request, - app_id: str, - user_id: str, + app_id: uuid.UUID, + user_id: uuid.UUID, session: schemas.SessionCreate, db: Session = Depends(get_db), ): """Create a Session for a User Args: - app_id (str): The ID of the app representing the client application using honcho - user_id (str): The User ID representing the user, managed by the user + app_id (uuid.UUID): The ID of the app representing the client application using honcho + user_id (uuid.UUID): The User ID representing the user, managed by the user session (schemas.SessionCreate): The Session object containing any metadata and a location ID Returns: @@ -99,8 +268,8 @@ def create_session( @router.put("/sessions/{session_id}", response_model=schemas.Session) def update_session( request: Request, - app_id: str, - user_id: str, + app_id: uuid.UUID, + user_id: uuid.UUID, session_id: uuid.UUID, session: schemas.SessionUpdate, db: Session = Depends(get_db), @@ -108,9 +277,9 @@ def update_session( """Update the metadata of a Session Args: - app_id (str): The ID of the app representing the client application using honcho - user_id (str): The User ID representing the user, managed by the user - session_id (int): The ID of the Session to update + app_id 
(uuid.UUID): The ID of the app representing the client application using honcho + user_id (uuid.UUID): The User ID representing the user, managed by the user + session_id (uuid.UUID): The ID of the Session to update session (schemas.SessionUpdate): The Session object containing any new metadata Returns: @@ -132,17 +301,17 @@ def update_session( @router.delete("/sessions/{session_id}") def delete_session( request: Request, - app_id: str, - user_id: str, + app_id: uuid.UUID, + user_id: uuid.UUID, session_id: uuid.UUID, db: Session = Depends(get_db), ): """Delete a session by marking it as inactive Args: - app_id (str): The ID of the app representing the client application using honcho - user_id (str): The User ID representing the user, managed by the user - session_id (int): The ID of the Session to delete + app_id (uuid.UUID): The ID of the app representing the client application using honcho + user_id (uuid.UUID): The User ID representing the user, managed by the user + session_id (uuid.UUID): The ID of the Session to delete Returns: dict: A message indicating that the session was deleted @@ -163,17 +332,17 @@ def delete_session( @router.get("/sessions/{session_id}", response_model=schemas.Session) def get_session( request: Request, - app_id: str, - user_id: str, + app_id: uuid.UUID, + user_id: uuid.UUID, session_id: uuid.UUID, db: Session = Depends(get_db), ): """Get a specific session for a user by ID Args: - app_id (str): The ID of the app representing the client application using honcho - user_id (str): The User ID representing the user, managed by the user - session_id (int): The ID of the Session to retrieve + app_id (uuid.UUID): The ID of the app representing the client application using honcho + user_id (uuid.UUID): The User ID representing the user, managed by the user + session_id (uuid.UUID): The ID of the Session to retrieve Returns: schemas.Session: The Session object of the requested Session @@ -197,8 +366,8 @@ def get_session( @router.post("/sessions/{session_id}/messages", response_model=schemas.Message) def create_message_for_session( request: Request, - app_id: str, - user_id: str, + app_id: uuid.UUID, + user_id: uuid.UUID, session_id: uuid.UUID, message: schemas.MessageCreate, db: Session = Depends(get_db), @@ -206,7 +375,7 @@ def create_message_for_session( """Adds a message to a session Args: - app_id (str): The ID of the app representing the client application using honcho + app_id (uuid.UUID): The ID of the app representing the client application using honcho user_id (str): The User ID representing the user, managed by the user session_id (int): The ID of the Session to add the message to message (schemas.MessageCreate): The Message object to add containing the message content and type @@ -229,8 +398,8 @@ def create_message_for_session( @router.get("/sessions/{session_id}/messages", response_model=Page[schemas.Message]) def get_messages( request: Request, - app_id: str, - user_id: str, + app_id: uuid.UUID, + user_id: uuid.UUID, session_id: uuid.UUID, reverse: Optional[bool] = False, db: Session = Depends(get_db), @@ -238,7 +407,7 @@ def get_messages( """Get all messages for a session Args: - app_id (str): The ID of the app representing the client application using honcho + app_id (uuid.UUID): The ID of the app representing the client application using honcho user_id (str): The User ID representing the user, managed by the user session_id (int): The ID of the Session to retrieve reverse (bool): Whether to reverse the order of the messages @@ -270,8 +439,8 @@ def 
get_messages( ) def get_message( request: Request, - app_id: str, - user_id: str, + app_id: uuid.UUID, + user_id: uuid.UUID, session_id: uuid.UUID, message_id: uuid.UUID, db: Session = Depends(get_db), @@ -293,8 +462,8 @@ def get_message( @router.post("/sessions/{session_id}/metamessages", response_model=schemas.Metamessage) def create_metamessage( request: Request, - app_id: str, - user_id: str, + app_id: uuid.UUID, + user_id: uuid.UUID, session_id: uuid.UUID, metamessage: schemas.MetamessageCreate, db: Session = Depends(get_db), @@ -302,7 +471,7 @@ def create_metamessage( """Adds a message to a session Args: - app_id (str): The ID of the app representing the client application using honcho + app_id (uuid.UUID): The ID of the app representing the client application using honcho user_id (str): The User ID representing the user, managed by the user session_id (int): The ID of the Session to add the message to message (schemas.MessageCreate): The Message object to add containing the message content and type @@ -331,8 +500,8 @@ def create_metamessage( ) def get_metamessages( request: Request, - app_id: str, - user_id: str, + app_id: uuid.UUID, + user_id: uuid.UUID, session_id: uuid.UUID, message_id: Optional[uuid.UUID] = None, metamessage_type: Optional[str] = None, @@ -342,7 +511,7 @@ def get_metamessages( """Get all messages for a session Args: - app_id (str): The ID of the app representing the client application using honcho + app_id (uuid.UUID): The ID of the app representing the client application using honcho user_id (str): The User ID representing the user, managed by the user session_id (int): The ID of the Session to retrieve reverse (bool): Whether to reverse the order of the metamessages @@ -377,8 +546,8 @@ def get_metamessages( ) def get_metamessage( request: Request, - app_id: str, - user_id: str, + app_id: uuid.UUID, + user_id: uuid.UUID, session_id: uuid.UUID, message_id: uuid.UUID, metamessage_id: uuid.UUID, @@ -387,7 +556,7 @@ def get_metamessage( """Get a specific session for a user by ID Args: - app_id (str): The ID of the app representing the client application using honcho + app_id (uuid.UUID): The ID of the app representing the client application using honcho user_id (str): The User ID representing the user, managed by the user session_id (int): The ID of the Session to retrieve @@ -418,8 +587,8 @@ def get_metamessage( @router.get("/collections/all", response_model=Page[schemas.Collection]) def get_collections( request: Request, - app_id: str, - user_id: str, + app_id: uuid.UUID, + user_id: uuid.UUID, reverse: Optional[bool] = False, db: Session = Depends(get_db), ): @@ -431,8 +600,8 @@ def get_collections( @router.get("/collections/id/{collection_id}", response_model=schemas.Collection) def get_collection_by_id( request: Request, - app_id: str, - user_id: str, + app_id: uuid.UUID, + user_id: uuid.UUID, collection_id: uuid.UUID, db: Session = Depends(get_db), ) -> schemas.Collection: @@ -449,8 +618,8 @@ def get_collection_by_id( @router.get("/collections/name/{name}", response_model=schemas.Collection) def get_collection_by_name( request: Request, - app_id: str, - user_id: str, + app_id: uuid.UUID, + user_id: uuid.UUID, name: str, db: Session = Depends(get_db), ) -> schemas.Collection: @@ -467,8 +636,8 @@ def get_collection_by_name( @router.post("/collections", response_model=schemas.Collection) def create_collection( request: Request, - app_id: str, - user_id: str, + app_id: uuid.UUID, + user_id: uuid.UUID, collection: schemas.CollectionCreate, db: Session = 
Depends(get_db), ): @@ -486,8 +655,8 @@ def create_collection( @router.put("/collections/{collection_id}", response_model=schemas.Collection) def update_collection( request: Request, - app_id: str, - user_id: str, + app_id: uuid.UUID, + user_id: uuid.UUID, collection_id: uuid.UUID, collection: schemas.CollectionUpdate, db: Session = Depends(get_db), @@ -515,8 +684,8 @@ def update_collection( @router.delete("/collections/{collection_id}") def delete_collection( request: Request, - app_id: str, - user_id: str, + app_id: uuid.UUID, + user_id: uuid.UUID, collection_id: uuid.UUID, db: Session = Depends(get_db), ): @@ -541,8 +710,8 @@ def delete_collection( ) def get_documents( request: Request, - app_id: str, - user_id: str, + app_id: uuid.UUID, + user_id: uuid.UUID, collection_id: uuid.UUID, reverse: Optional[bool] = False, db: Session = Depends(get_db), @@ -574,8 +743,8 @@ def get_documents( def get_document( request: Request, - app_id: str, - user_id: str, + app_id: uuid.UUID, + user_id: uuid.UUID, collection_id: uuid.UUID, document_id: uuid.UUID, db: Session = Depends(get_db), @@ -599,8 +768,8 @@ def get_document( ) def query_documents( request: Request, - app_id: str, - user_id: str, + app_id: uuid.UUID, + user_id: uuid.UUID, collection_id: uuid.UUID, query: str, top_k: int = 5, @@ -621,8 +790,8 @@ def query_documents( @router.post("/collections/{collection_id}/documents", response_model=schemas.Document) def create_document( request: Request, - app_id: str, - user_id: str, + app_id: uuid.UUID, + user_id: uuid.UUID, collection_id: uuid.UUID, document: schemas.DocumentCreate, db: Session = Depends(get_db), @@ -647,8 +816,8 @@ def create_document( ) def update_document( request: Request, - app_id: str, - user_id: str, + app_id: uuid.UUID, + user_id: uuid.UUID, collection_id: uuid.UUID, document_id: uuid.UUID, document: schemas.DocumentUpdate, @@ -671,8 +840,8 @@ def update_document( @router.delete("/collections/{collection_id}/documents/{document_id}") def delete_document( request: Request, - app_id: str, - user_id: str, + app_id: uuid.UUID, + user_id: uuid.UUID, collection_id: uuid.UUID, document_id: uuid.UUID, db: Session = Depends(get_db), diff --git a/api/src/models.py b/api/src/models.py index ce20747..a4c166c 100644 --- a/api/src/models.py +++ b/api/src/models.py @@ -4,7 +4,7 @@ from dotenv import load_dotenv from pgvector.sqlalchemy import Vector -from sqlalchemy import JSON, Column, ForeignKey, String, UniqueConstraint, Uuid +from sqlalchemy import JSON, Column, DateTime, ForeignKey, String, UniqueConstraint, Uuid from sqlalchemy.dialects.postgresql import JSONB from sqlalchemy.orm import Mapped, mapped_column, relationship @@ -15,22 +15,55 @@ DATABASE_TYPE = os.getenv("DATABASE_TYPE", "postgres") ColumnType = JSONB if DATABASE_TYPE == "postgres" else JSON + +class App(Base): + __tablename__ = "apps" + id: Mapped[uuid.UUID] = mapped_column( + primary_key=True, index=True, default=uuid.uuid4 + ) + name: Mapped[str] = mapped_column(String(512), index=True, unique=True) + users = relationship("User", back_populates="app") + created_at: Mapped[datetime.datetime] = mapped_column( + DateTime(timezone=True), + default=datetime.datetime.utcnow + ) + h_metadata: Mapped[dict] = mapped_column("metadata", ColumnType, default={}) + # Add any additional fields for an app here + +class User(Base): + __tablename__ = "users" + id: Mapped[uuid.UUID] = mapped_column( + primary_key=True, index=True, default=uuid.uuid4 + ) + name: Mapped[str] = mapped_column(String(512), index=True) + h_metadata: 
Mapped[dict] = mapped_column("metadata", ColumnType, default={}) + created_at: Mapped[datetime.datetime] = mapped_column( + DateTime(timezone=True), + default=datetime.datetime.utcnow + ) + app_id: Mapped[uuid.UUID] = mapped_column(ForeignKey("apps.id"), index=True) + app = relationship("App", back_populates="users") + sessions = relationship("Session", back_populates="user") + collections = relationship("Collection", back_populates="user") + def __repr__(self) -> str: + return f"User(id={self.id}, app_id={self.app_id}, user_id={self.user_id}, created_at={self.created_at}, h_metadata={self.h_metadata})" class Session(Base): __tablename__ = "sessions" id: Mapped[uuid.UUID] = mapped_column( primary_key=True, index=True, default=uuid.uuid4 ) - app_id: Mapped[str] = mapped_column(String(512), index=True) - user_id: Mapped[str] = mapped_column(String(512), index=True) location_id: Mapped[str] = mapped_column(String(512), index=True, default="default") is_active: Mapped[bool] = mapped_column(default=True) h_metadata: Mapped[dict] = mapped_column("metadata", ColumnType, default={}) created_at: Mapped[datetime.datetime] = mapped_column( + DateTime(timezone=True), default=datetime.datetime.utcnow ) messages = relationship("Message", back_populates="session") + user_id: Mapped[uuid.UUID] = mapped_column(ForeignKey("users.id"), index=True) + user = relationship("User", back_populates="sessions") def __repr__(self) -> str: return f"Session(id={self.id}, app_id={self.app_id}, user_id={self.user_id}, location_id={self.location_id}, is_active={self.is_active}, created_at={self.created_at}, h_metadata={self.h_metadata})" @@ -41,11 +74,12 @@ class Message(Base): id: Mapped[uuid.UUID] = mapped_column( primary_key=True, index=True, default=uuid.uuid4 ) - session_id: Mapped[uuid.UUID] = mapped_column(ForeignKey("sessions.id")) + session_id: Mapped[uuid.UUID] = mapped_column(ForeignKey("sessions.id"), index=True) is_user: Mapped[bool] content: Mapped[str] = mapped_column(String(65535)) created_at: Mapped[datetime.datetime] = mapped_column( + DateTime(timezone=True), default=datetime.datetime.utcnow ) session = relationship("Session", back_populates="messages") @@ -62,10 +96,11 @@ class Metamessage(Base): ) metamessage_type: Mapped[str] = mapped_column(String(512), index=True) content: Mapped[str] = mapped_column(String(65535)) - message_id = Column(Uuid, ForeignKey("messages.id")) + message_id = Column(Uuid, ForeignKey("messages.id"), index=True) message = relationship("Message", back_populates="metamessages") created_at: Mapped[datetime.datetime] = mapped_column( + DateTime(timezone=True), default=datetime.datetime.utcnow ) @@ -79,20 +114,20 @@ class Collection(Base): primary_key=True, index=True, default=uuid.uuid4 ) name: Mapped[str] = mapped_column(String(512), index=True) - app_id: Mapped[str] = mapped_column(String(512), index=True) - user_id: Mapped[str] = mapped_column(String(512), index=True) created_at: Mapped[datetime.datetime] = mapped_column( + DateTime(timezone=True), default=datetime.datetime.utcnow ) documents = relationship( "Document", back_populates="collection", cascade="all, delete, delete-orphan" ) + user = relationship("User", back_populates="collections") + user_id: Mapped[uuid.UUID] = mapped_column(ForeignKey("users.id"), index=True) __table_args__ = ( - UniqueConstraint("name", "app_id", "user_id", name="unique_name_app_user"), + UniqueConstraint("name", "user_id", name="unique_name_app_user"), ) - class Document(Base): __tablename__ = "documents" id: Mapped[uuid.UUID] = 
mapped_column( @@ -102,8 +137,9 @@ class Document(Base): content: Mapped[str] = mapped_column(String(65535)) embedding = mapped_column(Vector(1536)) created_at: Mapped[datetime.datetime] = mapped_column( + DateTime(timezone=True), default=datetime.datetime.utcnow ) - collection_id = Column(Uuid, ForeignKey("collections.id")) + collection_id = Column(Uuid, ForeignKey("collections.id"), index=True) collection = relationship("Collection", back_populates="documents") diff --git a/api/src/schemas.py b/api/src/schemas.py index fe164aa..429fb1b 100644 --- a/api/src/schemas.py +++ b/api/src/schemas.py @@ -2,6 +2,65 @@ import datetime import uuid +class AppBase(BaseModel): + pass + +class AppCreate(AppBase): + name: str + metadata: dict | None = {} + +class AppUpdate(AppBase): + name: str | None = None + metadata: dict | None = None + +class App(AppBase): + id: uuid.UUID + name: str + h_metadata: dict + metadata: dict + created_at: datetime.datetime + + @validator('metadata', pre=True, allow_reuse=True) + def fetch_h_metadata(cls, value, values): + if 'h_metadata' in values: + return values['h_metadata'] + return {} + + class Config: + from_attributes = True + schema_extra ={ + "exclude": ["h_metadata"] + } + +class UserBase(BaseModel): + pass + +class UserCreate(UserBase): + name: str + metadata: dict | None = {} + +class UserUpdate(UserBase): + name: str | None = None + metadata: dict | None = None + +class User(UserBase): + id: uuid.UUID + app_id: uuid.UUID + created_at: datetime.datetime + h_metadata: dict + metadata: dict + + @validator('metadata', pre=True, allow_reuse=True) + def fetch_h_metadata(cls, value, values): + if 'h_metadata' in values: + return values['h_metadata'] + return {} + + class Config: + from_attributes = True + schema_extra = { + "exclude": ["h_metadata"] + } class MessageBase(BaseModel): content: str @@ -37,7 +96,6 @@ class Session(SessionBase): is_active: bool user_id: str location_id: str - app_id: str h_metadata: dict metadata: dict created_at: datetime.datetime @@ -70,7 +128,7 @@ class Metamessage(MetamessageBase): created_at: datetime.datetime class Config: - orm_mode = True + from_attributes = True class CollectionBase(BaseModel): pass @@ -84,12 +142,11 @@ class CollectionUpdate(CollectionBase): class Collection(CollectionBase): id: uuid.UUID name: str - app_id: str user_id: str created_at: datetime.datetime class Config: - orm_mode = True + from_attributes = True class DocumentBase(BaseModel): content: str From 569870c48b582f2135b0ee8b1fea72e8ca2bd43e Mon Sep 17 00:00:00 2001 From: Vineeth Voruganti <13438633+VVoruganti@users.noreply.github.com> Date: Thu, 22 Feb 2024 02:54:52 -0800 Subject: [PATCH 38/46] User Object passing test cases --- api/pyproject.toml | 17 ++ api/src/crud.py | 89 +++++-- api/src/main.py | 101 ++++--- api/src/models.py | 41 +-- api/src/schemas.py | 69 +++-- sdk/honcho/__init__.py | 24 +- sdk/honcho/client.py | 546 +++++++++++++++++++++++++------------- sdk/honcho/sync_client.py | 546 +++++++++++++++++++++++++------------- sdk/pyproject.toml | 19 ++ sdk/tests/test_async.py | 290 ++++++++++++-------- sdk/tests/test_sync.py | 290 ++++++++++++-------- 11 files changed, 1339 insertions(+), 693 deletions(-) diff --git a/api/pyproject.toml b/api/pyproject.toml index 2c828a9..2aaee65 100644 --- a/api/pyproject.toml +++ b/api/pyproject.toml @@ -17,6 +17,23 @@ fastapi-pagination = "^0.12.14" pgvector = "^0.2.5" openai = "^1.12.0" +[tool.ruff.lint] +# from https://docs.astral.sh/ruff/linter/#rule-selection example +select = [ + # pycodestyle + "E", + # 
Pyflakes + "F", + # pyupgrade + "UP", + # flake8-bugbear + "B", + # flake8-simplify + "SIM", + # isort + "I", +] + [build-system] requires = ["poetry-core"] diff --git a/api/src/crud.py b/api/src/crud.py index cde2359..a4670b0 100644 --- a/api/src/crud.py +++ b/api/src/crud.py @@ -1,12 +1,11 @@ -import uuid import datetime +import uuid from typing import Optional, Sequence from openai import OpenAI - -from sqlalchemy import select, Select -from sqlalchemy.orm import Session +from sqlalchemy import Select, select from sqlalchemy.exc import IntegrityError +from sqlalchemy.orm import Session from . import models, schemas @@ -18,18 +17,13 @@ def get_app(db: Session, app_id: uuid.UUID) -> Optional[models.App]: - stmt = ( - select(models.App) - .where(models.App.id == app_id) - ) + stmt = select(models.App).where(models.App.id == app_id) app = db.scalars(stmt).one_or_none() return app + def get_app_by_name(db: Session, app_name: str) -> Optional[models.App]: - stmt = ( - select(models.App) - .where(models.App.name == app_name) - ) + stmt = select(models.App).where(models.App.name == app_name) app = db.scalars(stmt).one_or_none() return app @@ -37,16 +31,15 @@ def get_app_by_name(db: Session, app_name: str) -> Optional[models.App]: # def get_apps(db: Session) -> Sequence[models.App]: # return db.query(models.App).all() + def create_app(db: Session, app: schemas.AppCreate) -> models.App: - honcho_app = models.App( - name=app.name, - h_metadata=app.metadata - ) + honcho_app = models.App(name=app.name, h_metadata=app.metadata) db.add(honcho_app) db.commit() db.refresh(honcho_app) return honcho_app + def update_app(db: Session, app_id: uuid.UUID, app: schemas.AppUpdate) -> models.App: honcho_app = get_app(db, app_id) if honcho_app is None: @@ -60,6 +53,7 @@ def update_app(db: Session, app_id: uuid.UUID, app: schemas.AppUpdate) -> models db.refresh(honcho_app) return honcho_app + # def delete_app(db: Session, app_id: uuid.UUID) -> bool: # existing_app = get_app(db, app_id) # if existing_app is None: @@ -73,7 +67,10 @@ def update_app(db: Session, app_id: uuid.UUID, app: schemas.AppUpdate) -> models # user methods ######################################################## -def create_user(db: Session, app_id: uuid.UUID, user: schemas.UserCreate) -> models.User: + +def create_user( + db: Session, app_id: uuid.UUID, user: schemas.UserCreate +) -> models.User: honcho_user = models.User( app_id=app_id, name=user.name, @@ -84,24 +81,44 @@ def create_user(db: Session, app_id: uuid.UUID, user: schemas.UserCreate) -> mod db.refresh(honcho_user) return honcho_user -def get_user(db: Session, app_id: uuid.UUID, user_id: uuid.UUID) -> Optional[models.User]: + +def get_user( + db: Session, app_id: uuid.UUID, user_id: uuid.UUID +) -> Optional[models.User]: stmt = ( select(models.User) .where(models.User.app_id == app_id) .where(models.User.id == user_id) - ) user = db.scalars(stmt).one_or_none() return user -def get_users(db: Session, app_id: uuid.UUID) -> Select: + +def get_user_by_name( + db: Session, app_id: uuid.UUID, name: str +) -> Optional[models.User]: stmt = ( select(models.User) .where(models.User.app_id == app_id) + .where(models.User.name == name) ) + user = db.scalars(stmt).one_or_none() + return user + + +def get_users(db: Session, app_id: uuid.UUID, reverse: bool = False) -> Select: + stmt = select(models.User).where(models.User.app_id == app_id) + if reverse: + stmt = stmt.order_by(models.User.created_at.desc()) + else: + stmt = stmt.order_by(models.User.created_at) + return stmt -def update_user(db: 
Session, app_id: uuid.UUID, user_id: uuid.UUID, user: schemas.UserUpdate) -> models.User: + +def update_user( + db: Session, app_id: uuid.UUID, user_id: uuid.UUID, user: schemas.UserUpdate +) -> models.User: honcho_user = get_user(db, app_id, user_id) if honcho_user is None: raise ValueError("User not found") @@ -114,6 +131,7 @@ def update_user(db: Session, app_id: uuid.UUID, user_id: uuid.UUID, user: schema db.refresh(honcho_user) return honcho_user + # def delete_user(db: Session, app_id: uuid.UUID, user_id: uuid.UUID) -> bool: # existing_user = get_user(db, app_id, user_id) # if existing_user is None: @@ -126,8 +144,12 @@ def update_user(db: Session, app_id: uuid.UUID, user_id: uuid.UUID, user: schema # session methods ######################################################## + def get_session( - db: Session, app_id: uuid.UUID, session_id: uuid.UUID, user_id: Optional[uuid.UUID] = None + db: Session, + app_id: uuid.UUID, + session_id: uuid.UUID, + user_id: Optional[uuid.UUID] = None, ) -> Optional[models.Session]: stmt = ( select(models.Session) @@ -140,6 +162,7 @@ def get_session( session = db.scalars(stmt).one_or_none() return session + def get_sessions( db: Session, app_id: uuid.UUID, @@ -152,7 +175,7 @@ def get_sessions( .join(models.User, models.User.id == models.Session.user_id) .where(models.User.app_id == app_id) .where(models.Session.user_id == user_id) -# .where(models.Session.is_active.is_(True)) + # .where(models.Session.is_active.is_(True)) ) if reverse: @@ -192,7 +215,9 @@ def update_session( ) if honcho_session is None: raise ValueError("Session not found or does not belong to user") - if session.metadata is not None: # Need to explicitly be there won't make it empty by default + if ( + session.metadata is not None + ): # Need to explicitly be there won't make it empty by default honcho_session.h_metadata = session.metadata db.commit() db.refresh(honcho_session) @@ -216,10 +241,12 @@ def delete_session( db.commit() return True + ######################################################## # Message Methods ######################################################## + def create_message( db: Session, message: schemas.MessageCreate, @@ -270,7 +297,11 @@ def get_messages( def get_message( - db: Session, app_id: uuid.UUID, user_id: uuid.UUID, session_id: uuid.UUID, message_id: uuid.UUID + db: Session, + app_id: uuid.UUID, + user_id: uuid.UUID, + session_id: uuid.UUID, + message_id: uuid.UUID, ) -> Optional[models.Message]: stmt = ( select(models.Message) @@ -284,10 +315,12 @@ def get_message( ) return db.scalars(stmt).one_or_none() + ######################################################## # metamessage methods ######################################################## + def create_metamessage( db: Session, metamessage: schemas.MetamessageCreate, @@ -316,6 +349,7 @@ def create_metamessage( db.refresh(honcho_metamessage) return honcho_metamessage + def get_metamessages( db: Session, app_id: uuid.UUID, @@ -428,7 +462,10 @@ def get_collection_by_name( def create_collection( - db: Session, collection: schemas.CollectionCreate, app_id: uuid.UUID, user_id: uuid.UUID + db: Session, + collection: schemas.CollectionCreate, + app_id: uuid.UUID, + user_id: uuid.UUID, ) -> models.Collection: honcho_collection = models.Collection( user_id=user_id, diff --git a/api/src/main.py b/api/src/main.py index bbdf46c..fefcfbc 100644 --- a/api/src/main.py +++ b/api/src/main.py @@ -1,14 +1,14 @@ import uuid -from fastapi import Depends, FastAPI, HTTPException, APIRouter, Request from typing import Optional, 
Sequence -from sqlalchemy.orm import Session -from slowapi import Limiter, _rate_limit_exceeded_handler -from slowapi.middleware import SlowAPIMiddleware -from slowapi.util import get_remote_address -from slowapi.errors import RateLimitExceeded +from fastapi import APIRouter, Depends, FastAPI, HTTPException, Request from fastapi_pagination import Page, add_pagination from fastapi_pagination.ext.sqlalchemy import paginate +from slowapi import Limiter, _rate_limit_exceeded_handler +from slowapi.errors import RateLimitExceeded +from slowapi.middleware import SlowAPIMiddleware +from slowapi.util import get_remote_address +from sqlalchemy.orm import Session from . import crud, models, schemas from .db import SessionLocal, engine @@ -39,6 +39,7 @@ def get_db(): finally: db.close() + ######################################################## # App Routes ######################################################## @@ -62,6 +63,7 @@ def get_app( raise HTTPException(status_code=404, detail="App not found") return app + @app.get("/apps/name/{app_name}", response_model=schemas.App) def get_app_by_name( request: Request, @@ -101,6 +103,27 @@ def create_app( return crud.create_app(db, app=app) +@app.get("/apps/get_or_create/{app_name}", response_model=schemas.App) +def get_or_create_app( + request: Request, + app_name: str, + db: Session = Depends(get_db), +): + """Get or Create an App + + Args: + app_name (str): The name of the app + + Returns: + schemas.App: App object + + """ + app = crud.get_app_by_name(db, app_name=app_name) + if app is None: + app = crud.create_app(db, app=schemas.AppCreate(name=app_name)) + return app + + @app.put("/apps/{app_id}", response_model=schemas.App) def update_app( request: Request, @@ -149,29 +172,31 @@ def create_user( return crud.create_user(db, app_id=app_id, user=user) -@router.get("/apps/{app_id}/users", response_model=Page[schemas.User]) +@app.get("/apps/{app_id}/users", response_model=Page[schemas.User]) def get_users( request: Request, app_id: uuid.UUID, + reverse: bool = False, db: Session = Depends(get_db), ): """Get All Users for an App Args: - app_id (uuid.UUID): The ID of the app representing the client application using honcho + app_id (uuid.UUID): The ID of the app representing the client + application using honcho Returns: list[schemas.User]: List of User objects """ - return paginate(db, crud.get_users(db, app_id=app_id)) + return paginate(db, crud.get_users(db, app_id=app_id, reverse=reverse)) -@router.get("/apps/{app_id}/users/{user_id}", response_model=schemas.User) -def get_user( +@app.get("/apps/{app_id}/users/{name}", response_model=schemas.User) +def get_user_by_name( request: Request, app_id: uuid.UUID, - user_id: uuid.UUID, + name: str, db: Session = Depends(get_db), ): """Get a User @@ -184,15 +209,15 @@ def get_user( schemas.User: User object """ - return crud.get_user(db, app_id=app_id, user_id=user_id) + return crud.get_user_by_name(db, app_id=app_id, name=name) -@router.put("/users/{user_id}", response_model=schemas.User) +@app.put("/apps/{app_id}/users/{user_id}", response_model=schemas.User) def update_user( request: Request, app_id: uuid.UUID, user_id: uuid.UUID, - user: schemas.UserCreate, + user: schemas.UserUpdate, db: Session = Depends(get_db), ): """Update a User @@ -253,9 +278,11 @@ def create_session( """Create a Session for a User Args: - app_id (uuid.UUID): The ID of the app representing the client application using honcho + app_id (uuid.UUID): The ID of the app representing the client + application using honcho user_id 
(uuid.UUID): The User ID representing the user, managed by the user - session (schemas.SessionCreate): The Session object containing any metadata and a location ID + session (schemas.SessionCreate): The Session object containing any + metadata and a location ID Returns: schemas.Session: The Session object of the new Session @@ -584,7 +611,7 @@ def get_metamessage( ######################################################## -@router.get("/collections/all", response_model=Page[schemas.Collection]) +@router.get("/collections", response_model=Page[schemas.Collection]) def get_collections( request: Request, app_id: uuid.UUID, @@ -597,25 +624,25 @@ def get_collections( ) -@router.get("/collections/id/{collection_id}", response_model=schemas.Collection) -def get_collection_by_id( - request: Request, - app_id: uuid.UUID, - user_id: uuid.UUID, - collection_id: uuid.UUID, - db: Session = Depends(get_db), -) -> schemas.Collection: - honcho_collection = crud.get_collection_by_id( - db, app_id=app_id, user_id=user_id, collection_id=collection_id - ) - if honcho_collection is None: - raise HTTPException( - status_code=404, detail="collection not found or does not belong to user" - ) - return honcho_collection - - -@router.get("/collections/name/{name}", response_model=schemas.Collection) +# @router.get("/collections/id/{collection_id}", response_model=schemas.Collection) +# def get_collection_by_id( +# request: Request, +# app_id: uuid.UUID, +# user_id: uuid.UUID, +# collection_id: uuid.UUID, +# db: Session = Depends(get_db), +# ) -> schemas.Collection: +# honcho_collection = crud.get_collection_by_id( +# db, app_id=app_id, user_id=user_id, collection_id=collection_id +# ) +# if honcho_collection is None: +# raise HTTPException( +# status_code=404, detail="collection not found or does not belong to user" +# ) +# return honcho_collection + + +@router.get("/collections/{name}", response_model=schemas.Collection) def get_collection_by_name( request: Request, app_id: uuid.UUID, diff --git a/api/src/models.py b/api/src/models.py index a4c166c..ebd7cdd 100644 --- a/api/src/models.py +++ b/api/src/models.py @@ -4,7 +4,15 @@ from dotenv import load_dotenv from pgvector.sqlalchemy import Vector -from sqlalchemy import JSON, Column, DateTime, ForeignKey, String, UniqueConstraint, Uuid +from sqlalchemy import ( + JSON, + Column, + DateTime, + ForeignKey, + String, + UniqueConstraint, + Uuid, +) from sqlalchemy.dialects.postgresql import JSONB from sqlalchemy.orm import Mapped, mapped_column, relationship @@ -15,7 +23,8 @@ DATABASE_TYPE = os.getenv("DATABASE_TYPE", "postgres") ColumnType = JSONB if DATABASE_TYPE == "postgres" else JSON - + + class App(Base): __tablename__ = "apps" id: Mapped[uuid.UUID] = mapped_column( @@ -24,12 +33,12 @@ class App(Base): name: Mapped[str] = mapped_column(String(512), index=True, unique=True) users = relationship("User", back_populates="app") created_at: Mapped[datetime.datetime] = mapped_column( - DateTime(timezone=True), - default=datetime.datetime.utcnow + DateTime(timezone=True), default=datetime.datetime.utcnow ) h_metadata: Mapped[dict] = mapped_column("metadata", ColumnType, default={}) # Add any additional fields for an app here + class User(Base): __tablename__ = "users" id: Mapped[uuid.UUID] = mapped_column( @@ -38,17 +47,19 @@ class User(Base): name: Mapped[str] = mapped_column(String(512), index=True) h_metadata: Mapped[dict] = mapped_column("metadata", ColumnType, default={}) created_at: Mapped[datetime.datetime] = mapped_column( - DateTime(timezone=True), - 
default=datetime.datetime.utcnow + DateTime(timezone=True), default=datetime.datetime.utcnow ) app_id: Mapped[uuid.UUID] = mapped_column(ForeignKey("apps.id"), index=True) app = relationship("App", back_populates="users") sessions = relationship("Session", back_populates="user") collections = relationship("Collection", back_populates="user") + __table_args__ = (UniqueConstraint("name", "app_id", name="unique_name_app_user"),) + def __repr__(self) -> str: return f"User(id={self.id}, app_id={self.app_id}, user_id={self.user_id}, created_at={self.created_at}, h_metadata={self.h_metadata})" + class Session(Base): __tablename__ = "sessions" id: Mapped[uuid.UUID] = mapped_column( @@ -58,8 +69,7 @@ class Session(Base): is_active: Mapped[bool] = mapped_column(default=True) h_metadata: Mapped[dict] = mapped_column("metadata", ColumnType, default={}) created_at: Mapped[datetime.datetime] = mapped_column( - DateTime(timezone=True), - default=datetime.datetime.utcnow + DateTime(timezone=True), default=datetime.datetime.utcnow ) messages = relationship("Message", back_populates="session") user_id: Mapped[uuid.UUID] = mapped_column(ForeignKey("users.id"), index=True) @@ -79,8 +89,7 @@ class Message(Base): content: Mapped[str] = mapped_column(String(65535)) created_at: Mapped[datetime.datetime] = mapped_column( - DateTime(timezone=True), - default=datetime.datetime.utcnow + DateTime(timezone=True), default=datetime.datetime.utcnow ) session = relationship("Session", back_populates="messages") metamessages = relationship("Metamessage", back_populates="message") @@ -100,8 +109,7 @@ class Metamessage(Base): message = relationship("Message", back_populates="metamessages") created_at: Mapped[datetime.datetime] = mapped_column( - DateTime(timezone=True), - default=datetime.datetime.utcnow + DateTime(timezone=True), default=datetime.datetime.utcnow ) def __repr__(self) -> str: @@ -115,8 +123,7 @@ class Collection(Base): ) name: Mapped[str] = mapped_column(String(512), index=True) created_at: Mapped[datetime.datetime] = mapped_column( - DateTime(timezone=True), - default=datetime.datetime.utcnow + DateTime(timezone=True), default=datetime.datetime.utcnow ) documents = relationship( "Document", back_populates="collection", cascade="all, delete, delete-orphan" @@ -125,9 +132,10 @@ class Collection(Base): user_id: Mapped[uuid.UUID] = mapped_column(ForeignKey("users.id"), index=True) __table_args__ = ( - UniqueConstraint("name", "user_id", name="unique_name_app_user"), + UniqueConstraint("name", "user_id", name="unique_name_collection_user"), ) + class Document(Base): __tablename__ = "documents" id: Mapped[uuid.UUID] = mapped_column( @@ -137,8 +145,7 @@ class Document(Base): content: Mapped[str] = mapped_column(String(65535)) embedding = mapped_column(Vector(1536)) created_at: Mapped[datetime.datetime] = mapped_column( - DateTime(timezone=True), - default=datetime.datetime.utcnow + DateTime(timezone=True), default=datetime.datetime.utcnow ) collection_id = Column(Uuid, ForeignKey("collections.id"), index=True) diff --git a/api/src/schemas.py b/api/src/schemas.py index 429fb1b..a0a779c 100644 --- a/api/src/schemas.py +++ b/api/src/schemas.py @@ -2,17 +2,21 @@ import datetime import uuid + class AppBase(BaseModel): pass + class AppCreate(AppBase): name: str metadata: dict | None = {} + class AppUpdate(AppBase): name: str | None = None metadata: dict | None = None + class App(AppBase): id: uuid.UUID name: str @@ -20,47 +24,48 @@ class App(AppBase): metadata: dict created_at: datetime.datetime - @validator('metadata', 
pre=True, allow_reuse=True) + @validator("metadata", pre=True, allow_reuse=True) def fetch_h_metadata(cls, value, values): - if 'h_metadata' in values: - return values['h_metadata'] + if "h_metadata" in values: + return values["h_metadata"] return {} class Config: from_attributes = True - schema_extra ={ - "exclude": ["h_metadata"] - } + schema_extra = {"exclude": ["h_metadata"]} + class UserBase(BaseModel): pass + class UserCreate(UserBase): name: str metadata: dict | None = {} + class UserUpdate(UserBase): name: str | None = None metadata: dict | None = None + class User(UserBase): id: uuid.UUID app_id: uuid.UUID created_at: datetime.datetime h_metadata: dict metadata: dict - - @validator('metadata', pre=True, allow_reuse=True) + + @validator("metadata", pre=True, allow_reuse=True) def fetch_h_metadata(cls, value, values): - if 'h_metadata' in values: - return values['h_metadata'] + if "h_metadata" in values: + return values["h_metadata"] return {} class Config: from_attributes = True - schema_extra = { - "exclude": ["h_metadata"] - } + schema_extra = {"exclude": ["h_metadata"]} + class MessageBase(BaseModel): content: str @@ -79,6 +84,7 @@ class Message(MessageBase): class Config: from_attributes = True + class SessionBase(BaseModel): pass @@ -86,31 +92,31 @@ class SessionBase(BaseModel): class SessionCreate(SessionBase): location_id: str metadata: dict | None = {} - + + class SessionUpdate(SessionBase): metadata: dict | None = None + class Session(SessionBase): id: uuid.UUID # messages: list[Message] is_active: bool - user_id: str + user_id: uuid.UUID location_id: str h_metadata: dict metadata: dict created_at: datetime.datetime - @validator('metadata', pre=True, allow_reuse=True) + @validator("metadata", pre=True, allow_reuse=True) def fetch_h_metadata(cls, value, values): - if 'h_metadata' in values: - return values['h_metadata'] + if "h_metadata" in values: + return values["h_metadata"] return {} class Config: from_attributes = True - schema_extra = { - "exclude": ["h_metadata"] - } + schema_extra = {"exclude": ["h_metadata"]} class MetamessageBase(BaseModel): @@ -130,34 +136,42 @@ class Metamessage(MetamessageBase): class Config: from_attributes = True + class CollectionBase(BaseModel): pass + class CollectionCreate(CollectionBase): name: str + class CollectionUpdate(CollectionBase): name: str + class Collection(CollectionBase): id: uuid.UUID name: str - user_id: str + user_id: uuid.UUID created_at: datetime.datetime class Config: from_attributes = True + class DocumentBase(BaseModel): content: str + class DocumentCreate(DocumentBase): metadata: dict | None = {} + class DocumentUpdate(DocumentBase): metadata: dict | None = None content: str | None = None + class Document(DocumentBase): id: uuid.UUID content: str @@ -166,15 +180,12 @@ class Document(DocumentBase): created_at: datetime.datetime collection_id: uuid.UUID - @validator('metadata', pre=True, allow_reuse=True) + @validator("metadata", pre=True, allow_reuse=True) def fetch_h_metadata(cls, value, values): - if 'h_metadata' in values: - return values['h_metadata'] + if "h_metadata" in values: + return values["h_metadata"] return {} class Config: from_attributes = True - schema_extra = { - "exclude": ["h_metadata"] - } - + schema_extra = {"exclude": ["h_metadata"]} diff --git a/sdk/honcho/__init__.py b/sdk/honcho/__init__.py index eda9003..6ab9451 100644 --- a/sdk/honcho/__init__.py +++ b/sdk/honcho/__init__.py @@ -1,4 +1,24 @@ -from .client import AsyncClient, AsyncSession, AsyncCollection, AsyncGetSessionPage, 
AsyncGetMessagePage, AsyncGetMetamessagePage, AsyncGetDocumentPage, AsyncGetCollectionPage -from .sync_client import Client, Session, Collection, GetSessionPage, GetMessagePage, GetMetamessagePage, GetDocumentPage, GetCollectionPage +from .client import ( + AsyncHoncho, + AsyncUser, + AsyncSession, + AsyncCollection, + AsyncGetSessionPage, + AsyncGetMessagePage, + AsyncGetMetamessagePage, + AsyncGetDocumentPage, + AsyncGetCollectionPage, +) +from .sync_client import ( + Honcho, + User, + Session, + Collection, + GetSessionPage, + GetMessagePage, + GetMetamessagePage, + GetDocumentPage, + GetCollectionPage, +) from .schemas import Message, Metamessage, Document from .cache import LRUCache diff --git a/sdk/honcho/client.py b/sdk/honcho/client.py index 6c8b17d..afdaeaf 100644 --- a/sdk/honcho/client.py +++ b/sdk/honcho/client.py @@ -1,14 +1,18 @@ -import uuid +from __future__ import annotations + import datetime -from typing import Dict, Optional, List +import uuid +from typing import Optional + import httpx -from .schemas import Message, Metamessage, Document + +from .schemas import Document, Message, Metamessage class AsyncGetPage: """Base class for receiving Paginated API results""" - def __init__(self, response: Dict) -> None: + def __init__(self, response: dict) -> None: """Constructor for Page with relevant information about the results and pages Args: @@ -25,27 +29,60 @@ async def next(self): pass +class AsyncGetUserPage(AsyncGetPage): + """Paginated Results for Get User Requests""" + + def __init__(self, response: dict, honcho: AsyncHoncho, reverse: bool): + """Constructor for Page Result from User Get Request + + Args: + honcho (AsyncHoncho): Honcho Client + reverse (bool): Whether to reverse the order of the results or not + response (dict): Response from API with pagination information + """ + super().__init__(response) + self.honcho = honcho + self.reverse = reverse + self.items = [ + AsyncUser( + honcho=honcho, + id=user["id"], + created_at=user["created_at"], + metadata=user["metadata"], + ) + for user in response["items"] + ] + + async def next(self): + if self.page >= self.pages: + return None + return await self.honcho.get_users( + page=(self.page + 1), page_size=self.page_size, reverse=self.reverse + ) + + class AsyncGetSessionPage(AsyncGetPage): """Paginated Results for Get Session Requests""" - def __init__(self, client, options: Dict, response: Dict): + def __init__( + self, response: dict, user: AsyncUser, reverse: bool, location_id: Optional[str] + ): """Constructor for Page Result from Session Get Request Args: - client (AsyncClient): Honcho Client - options (Dict): Options for the request used mainly for next() to filter queries. 
The two parameters available are user_id which is required and location_id which is optional - response (Dict): Response from API with pagination information + response (dict): Response from API with pagination information """ super().__init__(response) - self.client = client - self.user_id = options["user_id"] - self.location_id = options["location_id"] - self.reverse = options["reverse"] + self.user = user + self.location_id = location_id + self.reverse = reverse self.items = [ AsyncSession( - client=client, + user=user, id=session["id"], - user_id=session["user_id"], location_id=session["location_id"], is_active=session["is_active"], metadata=session["metadata"], @@ -57,12 +94,12 @@ def __init__(self, client, options: Dict, response: Dict): async def next(self): """Get the next page of results Returns: - AsyncGetSessionPage | None: Next Page of Results or None if there are no more sessions to retreive from a query + AsyncGetSessionPage | None: Next Page of Results or None if there + are no more sessions to retrieve from a query """ if self.page >= self.pages: return None - return await self.client.get_sessions( - user_id=self.user_id, + return await self.user.get_sessions( location_id=self.location_id, page=(self.page + 1), page_size=self.page_size, @@ -73,16 +110,16 @@ class AsyncGetMessagePage(AsyncGetPage): """Paginated Results for Get Session Requests""" - def __init__(self, session, options, response: Dict): + def __init__(self, response: dict, session: AsyncSession, reverse: bool): """Constructor for Page Result from Session Get Request Args: session (AsyncSession): Session the returned messages are associated with - response (Dict): Response from API with pagination information + response (dict): Response from API with pagination information """ super().__init__(response) self.session = session - self.reverse = options["reverse"] + self.reverse = reverse self.items = [ Message( session_id=session.id, @@ -97,7 +134,8 @@ def __init__(self, session, options, response: Dict): async def next(self): """Get the next page of results Returns: - AsyncGetMessagePage | None: Next Page of Results or None if there are no more messages to retreive from a query + AsyncGetMessagePage | None: Next Page of Results or None if there + are no more messages to retrieve from a query """ if self.page >= self.pages: return None @@ -107,21 +145,27 @@ async def next(self): class AsyncGetMetamessagePage(AsyncGetPage): - def __init__(self, session, options: Dict, response: Dict) -> None: + def __init__( + self, + response: dict, + session, + reverse: bool, + message_id: Optional[uuid.UUID], + metamessage_type: Optional[str], + ) -> None: """Constructor for Page Result from Metamessage Get Request Args: - session (AsyncSession): Session the returned messages are associated with - options (Dict): Options for the request used mainly for next() to filter queries. 
The two parameters available are message_id and metamessage_type which are both optional - response (Dict): Response from API with pagination information + response (dict): Response from API with pagination information + session (AsyncSession): Session the returned messages are + associated with + reverse (bool): Whether to reverse the order of the results """ super().__init__(response) self.session = session - self.message_id = options["message_id"] if "message_id" in options else None - self.metamessage_type = ( - options["metamessage_type"] if "metamessage_type" in options else None - ) - self.reverse = options["reverse"] + self.message_id = message_id + self.metamessage_type = metamessage_type + self.reverse = reverse self.items = [ Metamessage( id=metamessage["id"], @@ -136,7 +180,8 @@ def __init__(self, session, options: Dict, response: Dict) -> None: async def next(self): """Get the next page of results Returns: - AsyncGetMetamessagePage | None: Next Page of Results or None if there are no more metamessages to retreive from a query + AsyncGetMetamessagePage | None: Next Page of Results or None if + there are no more metamessages to retreive from a query """ if self.page >= self.pages: return None @@ -152,16 +197,18 @@ async def next(self): class AsyncGetDocumentPage(AsyncGetPage): """Paginated results for Get Document requests""" - def __init__(self, collection, options, response: Dict) -> None: + def __init__(self, response: dict, collection, reverse: bool) -> None: """Constructor for Page Result from Document Get Request Args: - collection (AsyncCollection): Collection the returned documents are associated with - response (Dict): Response from API with pagination information + response (dict): Response from API with pagination information + collection (AsyncCollection): Collection the returned documents are + associated with + reverse (bool): Whether to reverse the order of the results or not """ super().__init__(response) self.collection = collection - self.reverse = options["reverse"] + self.reverse = reverse self.items = [ Document( id=document["id"], @@ -176,7 +223,8 @@ def __init__(self, collection, options, response: Dict) -> None: async def next(self): """Get the next page of results Returns: - AsyncGetDocumentPage | None: Next Page of Results or None if there are no more sessions to retreive from a query + AsyncGetDocumentPage | None: Next Page of Results or None if there + are no more sessions to retreive from a query """ if self.page >= self.pages: return None @@ -188,23 +236,21 @@ async def next(self): class AsyncGetCollectionPage(AsyncGetPage): """Paginated results for Get Collection requests""" - def __init__(self, client, options: Dict, response: Dict): + def __init__(self, response: dict, user: AsyncUser, reverse: bool): """Constructor for page result from Get Collection Request Args: - client (Async Client): Honcho Client - options (Dict): Options for the request used mainly for next() to filter queries. 
The only parameter available is user_id which is required - response (Dict): Response from API with pagination information + response (dict): Response from API with pagination information + user (AsyncUser): Honcho User associated with the collections + reverse (bool): Whether to reverse the order of the results or not """ super().__init__(response) - self.client = client - self.user_id = options["user_id"] - self.reverse = options["reverse"] + self.user = user + self.reverse = reverse self.items = [ AsyncCollection( - client=client, + user=user, id=collection["id"], - user_id=collection["user_id"], name=collection["name"], created_at=collection["created_at"], ) @@ -214,51 +260,202 @@ def __init__(self, client, options: Dict, response: Dict): async def next(self): """Get the next page of results Returns: - AsyncGetCollectionPage | None: Next Page of Results or None if there are no more sessions to retreive from a query + AsyncGetCollectionPage | None: Next Page of Results or None if + there are no more collections to retrieve from a query """ if self.page >= self.pages: return None - return await self.client.get_collections( - user_id=self.user_id, + return await self.user.get_collections( page=self.page + 1, page_size=self.page_size, reverse=self.reverse, ) -class AsyncClient: +class AsyncHoncho: """Honcho API Client Object""" - def __init__(self, app_id: str, base_url: str = "https://demo.honcho.dev"): + def __init__(self, app_name: str, base_url: str = "https://demo.honcho.dev"): """Constructor for Client""" - self.base_url = base_url # Base URL for the instance of the Honcho API - self.app_id = app_id # Representing ID of the client application - self.client = httpx.AsyncClient() + self.server_url: str = base_url # Base URL for the instance of the Honcho API + self.client: httpx.AsyncClient = httpx.AsyncClient() + self.app_name: str = app_name # Representing name of the client application + self.app_id: uuid.UUID + self.metadata: dict + + async def initialize(self): + res = await self.client.get( + f"{self.server_url}/apps/get_or_create/{self.app_name}" + ) + res.raise_for_status() + data = res.json() + self.app_id: uuid.UUID = data["id"] + self.metadata: dict = data["metadata"] @property - def common_prefix(self): + def base_url(self): """Shorcut for common API prefix. made a property to prevent tampering""" - return f"{self.base_url}/apps/{self.app_id}" + return f"{self.server_url}/apps/{self.app_id}/users" - async def get_session(self, user_id: str, session_id: uuid.UUID): + async def create_user(self, name: str, metadata: Optional[dict] = None): + """Create a new user by name + + Args: + name (str): The name of the user + metadata (dict, optional): The metadata for the user. Defaults to {}. 
+ + Returns: + AsyncUser: The created User object + """ + if metadata is None: + metadata = {} + url = f"{self.base_url}" + response = await self.client.post( + url, json={"name": name, "metadata": metadata} + ) + response.raise_for_status() + data = response.json() + return AsyncUser( + honcho=self, + id=data["id"], + metadata=data["metadata"], + created_at=data["created_at"], + ) + + async def get_user(self, name: str): + """Get a user by name + + Args: + name (str): The name of the user + + Returns: + AsyncUser: The User object + """ + url = f"{self.base_url}/{name}" + response = await self.client.get(url) + response.raise_for_status() + data = response.json() + return AsyncUser(self, **data) + + async def get_users( + self, page: int = 1, page_size: int = 50, reverse: bool = False + ): + """Get Paginated list of users + + Returns: + AsyncGetUserPage: Paginated list of users + """ + url = f"{self.base_url}?page={page}&size={page_size}&reverse={reverse}" + response = await self.client.get(url) + response.raise_for_status() + data = response.json() + return AsyncGetUserPage(data, self, reverse) + + async def get_users_generator( + self, + reverse: bool = False, + ): + """Shortcut Generator for get_users. Generator to iterate through + all users in an app + + Args: + reverse (bool): Whether to reverse the order of the results + + Yields: + AsyncUser: The User object + + """ + page = 1 + page_size = 50 + get_user_response = await self.get_users(page, page_size, reverse) + while True: + for session in get_user_response.items: + yield session + + new_users = await get_user_response.next() + if not new_users: + break + + get_user_response = new_users + + # async def get_user_by_id(self, id: uuid.UUID): + # """Get a user by id + + # Args: + # id (uuid.UUID): The id of the user + + # Returns: + # AsyncUser: The User object + # """ + # url = f"{self.common_prefix}/users/{id}" + # response = await self.client.get(url) + # response.raise_for_status() + # data = response.json() + # return AsyncUser(self, **data) + + +class AsyncUser: + """Represents a single user in an app""" + + def __init__( + self, + honcho: AsyncHoncho, + id: uuid.UUID, + metadata: dict, + created_at: datetime.datetime, + ): + """Constructor for User""" + # self.base_url: str = honcho.base_url + self.honcho: AsyncHoncho = honcho + self.id: uuid.UUID = id + self.metadata: dict = metadata + self.created_at: datetime.datetime = created_at + + @property + def base_url(self): + """Shortcut for common API prefix. 
made a property to prevent tampering""" + return f"{self.honcho.base_url}/{self.id}" + + def __str__(self): + """String representation of User""" + return f"AsyncUser(id={self.id}, app_id={self.honcho.app_id}, metadata={self.metadata})" # noqa: E501 + + # TODO method to update metadata + async def update_user(self, metadata: dict): + """Updates a user's metadata + + Args: + metadata (dict): The new metadata for the user + + Returns: + AsyncUser: The updated User object + + """ + url = f"{self.base_url}" + response = await self.honcho.client.put(url, json=metadata) + response.raise_for_status() + data = response.json() + self.metadata = data["metadata"] + # TODO update this object's metadata field + # return AsyncUser(self.honcho, **data) + + async def get_session(self, session_id: uuid.UUID): """Get a specific session for a user by ID Args: - user_id (str): The User ID representing the user, managed by the user session_id (uuid.UUID): The ID of the Session to retrieve Returns: AsyncSession: The Session object of the requested Session """ - url = f"{self.common_prefix}/users/{user_id}/sessions/{session_id}" - response = await self.client.get(url) + url = f"{self.base_url}/sessions/{session_id}" + response = await self.honcho.client.get(url) response.raise_for_status() data = response.json() return AsyncSession( - client=self, + user=self, id=data["id"], - user_id=data["user_id"], location_id=data["location_id"], is_active=data["is_active"], metadata=data["metadata"], @@ -267,7 +464,6 @@ async def get_session(self, user_id: str, session_id: uuid.UUID): async def get_sessions( self, - user_id: str, location_id: Optional[str] = None, page: int = 1, page_size: int = 50, @@ -276,8 +472,8 @@ async def get_sessions( """Return sessions associated with a user paginated Args: - user_id (str): The User ID representing the user, managed by the user - location_id (str, optional): Optional Location ID representing the location of a session + location_id (str, optional): Optional Location ID representing the + location of a session page (int, optional): The page of results to return page_size (int, optional): The number of results to return @@ -286,26 +482,25 @@ async def get_sessions( """ url = ( - f"{self.common_prefix}/users/{user_id}/sessions?page={page}&size={page_size}&reverse={reverse}" + f"{self.base_url}/sessions?page={page}&size={page_size}&reverse={reverse}" + (f"&location_id={location_id}" if location_id else "") ) - response = await self.client.get(url) + response = await self.honcho.client.get(url) response.raise_for_status() data = response.json() - options = {"location_id": location_id, "user_id": user_id, "reverse": reverse} - return AsyncGetSessionPage(self, options, data) + return AsyncGetSessionPage(data, self, reverse, location_id) async def get_sessions_generator( self, - user_id: str, location_id: Optional[str] = None, reverse: bool = False, ): - """Shortcut Generator for get_sessions. Generator to iterate through all sessions for a user in an app + """Shortcut Generator for get_sessions. 
Generator to iterate through + all sessions for a user in an app Args: - user_id (str): The User ID representing the user, managed by the user - location_id (str, optional): Optional Location ID representing the location of a session + location_id (str, optional): Optional Location ID representing the + location of a session Yields: AsyncSession: The Session object of the requested Session @@ -314,10 +509,9 @@ async def get_sessions_generator( page = 1 page_size = 50 get_session_response = await self.get_sessions( - user_id, location_id, page, page_size, reverse + location_id, page, page_size, reverse ) while True: - # get_session_response = self.get_sessions(user_id, location_id, page, page_size) for session in get_session_response.items: yield session @@ -328,28 +522,29 @@ async def get_sessions_generator( get_session_response = new_sessions async def create_session( - self, user_id: str, location_id: str = "default", metadata: Dict = {} + self, location_id: str = "default", metadata: Optional[dict] = None ): """Create a session for a user Args: - user_id (str): The User ID representing the user, managed by the user - location_id (str, optional): Optional Location ID representing the location of a session - metadata (Dict, optional): Optional session metadata + location_id (str, optional): Optional Location ID representing the + location of a session + metadata (dict, optional): Optional session metadata Returns: AsyncSession: The Session object of the new Session """ + if metadata is None: + metadata = {} data = {"location_id": location_id, "metadata": metadata} - url = f"{self.common_prefix}/users/{user_id}/sessions" - response = await self.client.post(url, json=data) + url = f"{self.base_url}/sessions" + response = await self.honcho.client.post(url, json=data) response.raise_for_status() data = response.json() return AsyncSession( self, id=data["id"], - user_id=user_id, location_id=location_id, metadata=metadata, is_active=data["is_active"], @@ -358,13 +553,11 @@ async def create_session( async def create_collection( self, - user_id: str, name: str, ): """Create a collection for a user Args: - user_id (str): The User ID representing the user, managed by the user name (str): unique name for the collection for the user Returns: @@ -372,48 +565,44 @@ async def create_collection( """ data = {"name": name} - url = f"{self.common_prefix}/users/{user_id}/collections" - response = await self.client.post(url, json=data) + url = f"{self.base_url}/collections" + response = await self.honcho.client.post(url, json=data) response.raise_for_status() data = response.json() return AsyncCollection( self, id=data["id"], - user_id=user_id, name=name, created_at=data["created_at"], ) - async def get_collection(self, user_id: str, name: str): + async def get_collection(self, name: str): """Get a specific collection for a user by name Args: - user_id (str): The User ID representing the user, managed by the user name (str): The name of the collection to get Returns: AsyncCollection: The Session object of the requested Session """ - url = f"{self.common_prefix}/users/{user_id}/collections/name/{name}" - response = await self.client.get(url) + url = f"{self.base_url}/collections/{name}" + response = await self.honcho.client.get(url) response.raise_for_status() data = response.json() return AsyncCollection( - client=self, + user=self, id=data["id"], - user_id=data["user_id"], name=data["name"], created_at=data["created_at"], ) async def get_collections( - self, user_id: str, page: int = 1, page_size: int = 50, 
reverse: bool = False + self, page: int = 1, page_size: int = 50, reverse: bool = False ): """Return collections associated with a user paginated Args: - user_id (str): The User ID representing the user to get the collection for page (int, optional): The page of results to return page_size (int, optional): The number of results to return reverse (bool): Whether to reverse the order of the results @@ -422,18 +611,18 @@ async def get_collections( AsyncGetCollectionPage: Page or results for get_collections query """ - url = f"{self.common_prefix}/users/{user_id}/collections/all?page={page}&size={page_size}&reverse={reverse}" - response = await self.client.get(url) + url = f"{self.base_url}/collections?page={page}&size={page_size}&reverse={reverse}" # noqa: E501 + response = await self.honcho.client.get(url) response.raise_for_status() data = response.json() - options = {"user_id": user_id, "reverse": reverse} - return AsyncGetCollectionPage(self, options, data) + return AsyncGetCollectionPage(data, self, reverse) - async def get_collections_generator(self, user_id: str, reverse: bool = False): - """Shortcut Generator for get_sessions. Generator to iterate through all sessions for a user in an app + async def get_collections_generator(self, reverse: bool = False): + """Shortcut Generator for get_sessions. Generator to iterate through + all sessions for a user in an app Args: - user_id (str): The User ID representing the user, managed by the user + reverse (bool): Whether to reverse the order of the results Yields: AsyncCollection: The Session object of the requested Session @@ -441,11 +630,8 @@ async def get_collections_generator(self, user_id: str, reverse: bool = False): """ page = 1 page_size = 50 - get_collection_response = await self.get_collections( - user_id, page, page_size, reverse - ) + get_collection_response = await self.get_collections(page, page_size, reverse) while True: - # get_collection_response = self.get_collections(user_id, location_id, page, page_size) for collection in get_collection_response.items: yield collection @@ -461,33 +647,29 @@ class AsyncSession: def __init__( self, - client: AsyncClient, + user: AsyncUser, id: uuid.UUID, - user_id: str, location_id: str, metadata: dict, is_active: bool, created_at: datetime.datetime, ): """Constructor for Session""" - self.base_url: str = client.base_url - self.client: httpx.AsyncClient = client.client - self.app_id: str = client.app_id + self.user: AsyncUser = user self.id: uuid.UUID = id - self.user_id: str = user_id self.location_id: str = location_id self.metadata: dict = metadata self._is_active: bool = is_active self.created_at: datetime.datetime = created_at @property - def common_prefix(self): + def base_url(self): """Shortcut for common API prefix. 
made a property to prevent tampering""" - return f"{self.base_url}/apps/{self.app_id}" + return f"{self.user.base_url}/sessions/{self.id}" def __str__(self): """String representation of Session""" - return f"AsyncSession(id={self.id}, app_id={self.app_id}, user_id={self.user_id}, location_id={self.location_id}, metadata={self.metadata}, is_active={self.is_active})" + return f"AsyncSession(id={self.id}, location_id={self.location_id}, metadata={self.metadata}, is_active={self.is_active})" # noqa: E501 @property def is_active(self): @@ -508,8 +690,8 @@ async def create_message(self, is_user: bool, content: str): if not self.is_active: raise Exception("Session is inactive") data = {"is_user": is_user, "content": content} - url = f"{self.common_prefix}/users/{self.user_id}/sessions/{self.id}/messages" - response = await self.client.post(url, json=data) + url = f"{self.base_url}/messages" + response = await self.user.honcho.client.post(url, json=data) response.raise_for_status() data = response.json() return Message( @@ -530,8 +712,8 @@ async def get_message(self, message_id: uuid.UUID) -> Message: Message: The Message object """ - url = f"{self.common_prefix}/users/{self.user_id}/sessions/{self.id}/messages/{message_id}" - response = await self.client.get(url) + url = f"{self.base_url}/messages/{message_id}" + response = await self.user.honcho.client.get(url) response.raise_for_status() data = response.json() return Message( @@ -556,15 +738,15 @@ async def get_messages( AsyncGetMessagePage: Page of Message objects """ - url = f"{self.common_prefix}/users/{self.user_id}/sessions/{self.id}/messages?page={page}&size={page_size}&reverse={reverse}" - response = await self.client.get(url) + url = f"{self.base_url}/messages?page={page}&size={page_size}&reverse={reverse}" # noqa: E501 + response = await self.user.honcho.client.get(url) response.raise_for_status() data = response.json() - options = {"reverse": reverse} - return AsyncGetMessagePage(self, options, data) + return AsyncGetMessagePage(data, self, reverse) async def get_messages_generator(self, reverse: bool = False): - """Shortcut Generator for get_messages. Generator to iterate through all messages for a session in an app + """Shortcut Generator for get_messages. 
Generator to iterate through + all messages for a session in an app Yields: Message: The Message object of the next Message @@ -574,7 +756,6 @@ async def get_messages_generator(self, reverse: bool = False): page_size = 50 get_messages_page = await self.get_messages(page, page_size, reverse) while True: - # get_session_response = self.get_sessions(user_id, location_id, page, page_size) for message in get_messages_page.items: yield message @@ -605,10 +786,8 @@ async def create_metamessage( "content": content, "message_id": message.id, } - url = ( - f"{self.common_prefix}/users/{self.user_id}/sessions/{self.id}/metamessages" - ) - response = await self.client.post(url, json=data) + url = f"{self.base_url}/metamessages" + response = await self.user.honcho.client.post(url, json=data) response.raise_for_status() data = response.json() return Metamessage( @@ -629,8 +808,8 @@ async def get_metamessage(self, metamessage_id: uuid.UUID) -> Metamessage: Message: The Message object """ - url = f"{self.common_prefix}/users/{self.user_id}/sessions/{self.id}/metamessages/{metamessage_id}" - response = await self.client.get(url) + url = f"{self.base_url}/metamessages/{metamessage_id}" + response = await self.user.honcho.client.get(url) response.raise_for_status() data = response.json() return Metamessage( @@ -652,27 +831,28 @@ async def get_metamessages( """Get all messages for a session Args: - user_id (str): The User ID representing the user, managed by the user - session_id (int): The ID of the Session to retrieve + metamessage_type (str, optional): The type of the metamessage + message (Message, optional): The message to associate the metamessage with + page (int, optional): The page of results to return + page_size (int, optional): The number of results to return per page + reverse (bool): Whether to reverse the order of the results Returns: - list[Dict]: List of Message objects + list[dict]: List of Message objects """ - url = f"{self.common_prefix}/users/{self.user_id}/sessions/{self.id}/metamessages?page={page}&size={page_size}&reverse={reverse}" + url = f"{self.base_url}/metamessages?page={page}&size={page_size}&reverse={reverse}" # noqa: E501 if metamessage_type: url += f"&metamessage_type={metamessage_type}" if message: url += f"&message_id={message.id}" - response = await self.client.get(url) + response = await self.user.honcho.client.get(url) response.raise_for_status() data = response.json() - options = { - "metamessage_type": metamessage_type, - "message_id": message.id if message else None, - "reverse": reverse, - } - return AsyncGetMetamessagePage(self, options, data) + message_id = message.id if message else None + return AsyncGetMetamessagePage( + data, self, reverse, message_id, metamessage_type + ) async def get_metamessages_generator( self, @@ -680,7 +860,8 @@ async def get_metamessages_generator( message: Optional[Message] = None, reverse: bool = False, ): - """Shortcut Generator for get_metamessages. Generator to iterate through all metamessages for a session in an app + """Shortcut Generator for get_metamessages. 
Generator to iterate + through all metamessages for a session in an app Args: metamessage_type (str, optional): Optional Metamessage type to filter by @@ -709,26 +890,26 @@ async def get_metamessages_generator( get_metamessages_page = new_messages - async def update(self, metadata: Dict): + async def update(self, metadata: dict): """Update the metadata of a session Args: - metadata (Dict): The Session object containing any new metadata + metadata (dict): The Session object containing any new metadata Returns: boolean: Whether the session was successfully updated """ info = {"metadata": metadata} - url = f"{self.common_prefix}/users/{self.user_id}/sessions/{self.id}" - response = await self.client.put(url, json=info) + url = f"{self.base_url}" + response = await self.user.honcho.client.put(url, json=info) success = response.status_code < 400 self.metadata = metadata return success async def close(self): """Closes a session by marking it as inactive""" - url = f"{self.common_prefix}/users/{self.user_id}/sessions/{self.id}" - response = await self.client.delete(url) + url = f"{self.base_url}" + response = await self.user.honcho.client.delete(url) response.raise_for_status() self._is_active = False @@ -738,29 +919,25 @@ class AsyncCollection: def __init__( self, - client: AsyncClient, + user: AsyncUser, id: uuid.UUID, - user_id: str, name: str, created_at: datetime.datetime, ): """Constructor for Collection""" - self.base_url: str = client.base_url - self.client: httpx.AsyncClient = client.client - self.app_id: str = client.app_id + self.user = user self.id: uuid.UUID = id - self.user_id: str = user_id self.name: str = name self.created_at: datetime.datetime = created_at @property - def common_prefix(self): + def base_url(self): """Shortcut for common API prefix. 
made a property to prevent tampering""" - return f"{self.base_url}/apps/{self.app_id}" + return f"{self.user.base_url}/collections/{self.id}" def __str__(self): """String representation of Collection""" - return f"AsyncCollection(id={self.id}, app_id={self.app_id}, user_id={self.user_id}, name={self.name}, created_at={self.created_at})" + return f"AsyncCollection(id={self.id}, name={self.name}, created_at={self.created_at})" # noqa: E501 async def update(self, name: str): """Update the name of the collection Args: name (str): The new name of the collection Returns: boolean: Whether the session was successfully updated """ info = {"name": name} - url = f"{self.common_prefix}/users/{self.user_id}/collections/{self.id}" - response = await self.client.put(url, json=info) + url = f"{self.base_url}" + response = await self.user.honcho.client.put(url, json=info) response.raise_for_status() success = response.status_code < 400 self.name = name @@ -781,26 +958,27 @@ async def update(self, name: str): async def delete(self): """Delete a collection and all associated documents""" - url = f"{self.common_prefix}/users/{self.user_id}/collections/{self.id}" - response = await self.client.delete(url) + url = f"{self.base_url}" + response = await self.user.honcho.client.delete(url) response.raise_for_status() - async def create_document(self, content: str, metadata: Dict = {}): + async def create_document(self, content: str, metadata: Optional[dict] = None): """Adds a document to the collection Args: content (str): The content of the document - metadata (Dict): The metadata of the document + metadata (dict): The metadata of the document Returns: Document: The Document object of the added document """ + if metadata is None: + metadata = {} data = {"metadata": metadata, "content": content} - url = ( - f"{self.common_prefix}/users/{self.user_id}/collections/{self.id}/documents" - ) - response = await self.client.post(url, json=data) + url = f"{self.base_url}/documents" + response = await self.user.honcho.client.post(url, json=data) response.raise_for_status() data = response.json() return Document( @@ -821,8 +999,8 @@ async def get_document(self, document_id: uuid.UUID) -> Document: Document: The Document object """ - url = f"{self.common_prefix}/users/{self.user_id}/collections/{self.id}/documents/{document_id}" - response = await self.client.get(url) + url = f"{self.base_url}/documents/{document_id}" + response = await self.user.honcho.client.get(url) response.raise_for_status() data = response.json() return Document( @@ -846,15 +1024,17 @@ async def get_documents( AsyncGetDocumentPage: Page of Document objects """ - url = f"{self.common_prefix}/users/{self.user_id}/collections/{self.id}/documents?page={page}&size={page_size}&reverse={reverse}" - response = await self.client.get(url) + url = ( + f"{self.base_url}/documents?page={page}&size={page_size}&reverse={reverse}" # noqa: E501 + ) + response = await self.user.honcho.client.get(url) response.raise_for_status() data = response.json() - options = {"reverse": reverse} - return AsyncGetDocumentPage(self, options, data) + return AsyncGetDocumentPage(data, self, reverse) async def get_documents_generator(self, reverse: bool = False): """Shortcut Generator for get_documents. 
Generator to iterate through + all documents for a collection in an app Yields: Document: The Document object of the next Document @@ -873,7 +1053,7 @@ async def get_documents_generator(self, reverse: bool = False): get_documents_page = new_documents - async def query(self, query: str, top_k: int = 5) -> List[Document]: + async def query(self, query: str, top_k: int = 5) -> list[Document]: """query the documents by cosine distance Args: query (str): The query string to compare other embeddings too @@ -882,8 +1062,8 @@ async def query(self, query: str, top_k: int = 5) -> List[Document]: Returns: List[Document]: The response from the query with matching documents """ - url = f"{self.common_prefix}/users/{self.user_id}/collections/{self.id}/query?query={query}&top_k={top_k}" - response = await self.client.get(url) + url = f"{self.base_url}/query?query={query}&top_k={top_k}" + response = await self.user.honcho.client.get(url) response.raise_for_status() data = [ Document( @@ -898,13 +1078,13 @@ async def query(self, query: str, top_k: int = 5) -> List[Document]: return data async def update_document( - self, document: Document, content: Optional[str], metadata: Optional[Dict] + self, document: Document, content: Optional[str], metadata: Optional[dict] ) -> Document: """Update a document in the collection Args: document (Document): The Document to update - metadata (Dict): The metadata of the document + metadata (dict): The metadata of the document content (str): The content of the document Returns: @@ -913,8 +1093,8 @@ async def update_document( if metadata is None and content is None: raise ValueError("metadata and content cannot both be None") data = {"metadata": metadata, "content": content} - url = f"{self.common_prefix}/users/{self.user_id}/collections/{self.id}/documents/{document.id}" - response = await self.client.put(url, json=data) + url = f"{self.base_url}/documents/{document.id}" + response = await self.user.honcho.client.put(url, json=data) response.raise_for_status() data = response.json() return Document( @@ -934,8 +1114,8 @@ async def delete_document(self, document: Document) -> bool: Returns: boolean: Whether the document was successfully deleted """ - url = f"{self.common_prefix}/users/{self.user_id}/collections/{self.id}/documents/{document.id}" - response = await self.client.delete(url) + url = f"{self.base_url}/documents/{document.id}" + response = await self.user.honcho.client.delete(url) response.raise_for_status() success = response.status_code < 400 return success diff --git a/sdk/honcho/sync_client.py b/sdk/honcho/sync_client.py index 5606bb6..69a4fe7 100644 --- a/sdk/honcho/sync_client.py +++ b/sdk/honcho/sync_client.py @@ -1,14 +1,18 @@ -import uuid +from __future__ import annotations + import datetime -from typing import Dict, Optional, List +import uuid +from typing import Optional + import httpx -from .schemas import Message, Metamessage, Document + +from .schemas import Document, Message, Metamessage class GetPage: """Base class for receiving Paginated API results""" - def __init__(self, response: Dict) -> None: + def __init__(self, response: dict) -> None: """Constructor for Page with relevant information about the results and pages Args: @@ -25,27 +29,60 @@ def next(self): pass +class GetUserPage(GetPage): + """Paginated Results for Get User Requests""" + + def __init__(self, response: dict, honcho: Honcho, reverse: bool): + """Constructor for Page Result from User Get Request + + Args: + honcho (Honcho): Honcho Client + reverse (bool): Whether to reverse the 
order of the results or not + response (dict): Response from API with pagination information + """ + super().__init__(response) + self.honcho = honcho + self.reverse = reverse + self.items = [ + User( + honcho=honcho, + id=user["id"], + created_at=user["created_at"], + metadata=user["metadata"], + ) + for user in response["items"] + ] + + def next(self): + if self.page >= self.pages: + return None + return self.honcho.get_users( + page=(self.page + 1), page_size=self.page_size, reverse=self.reverse + ) + + class GetSessionPage(GetPage): """Paginated Results for Get Session Requests""" - def __init__(self, client, options: Dict, response: Dict): + def __init__( + self, response: dict, user: User, reverse: bool, location_id: Optional[str] + ): """Constructor for Page Result from Session Get Request Args: - client (Client): Honcho Client - options (Dict): Options for the request used mainly for next() to filter queries. The two parameters available are user_id which is required and location_id which is optional - response (Dict): Response from API with pagination information + user (User): Honcho User associated with the session + location_id (str): ID of the location associated with the session + reverse (bool): Whether to reverse the order of the results or not + response (dict): Response from API with pagination information """ super().__init__(response) - self.client = client - self.user_id = options["user_id"] - self.location_id = options["location_id"] - self.reverse = options["reverse"] + self.user = user + self.location_id = location_id + self.reverse = reverse self.items = [ Session( - client=client, + user=user, id=session["id"], - user_id=session["user_id"], location_id=session["location_id"], is_active=session["is_active"], metadata=session["metadata"], @@ -57,12 +94,12 @@ def __init__(self, client, options: Dict, response: Dict): def next(self): """Get the next page of results Returns: - GetSessionPage | None: Next Page of Results or None if there are no more sessions to retreive from a query + GetSessionPage | None: Next Page of Results or None if there + are no more sessions to retreive from a query """ if self.page >= self.pages: return None - return self.client.get_sessions( - user_id=self.user_id, + return self.user.get_sessions( location_id=self.location_id, page=(self.page + 1), page_size=self.page_size, @@ -73,16 +110,16 @@ def next(self): class GetMessagePage(GetPage): """Paginated Results for Get Session Requests""" - def __init__(self, session, options, response: Dict): + def __init__(self, response: dict, session: Session, reverse: bool): """Constructor for Page Result from Session Get Request Args: session (Session): Session the returned messages are associated with - response (Dict): Response from API with pagination information + response (dict): Response from API with pagination information """ super().__init__(response) self.session = session - self.reverse = options["reverse"] + self.reverse = reverse self.items = [ Message( session_id=session.id, @@ -97,7 +134,8 @@ def __init__(self, session, options, response: Dict): def next(self): """Get the next page of results Returns: - GetMessagePage | None: Next Page of Results or None if there are no more messages to retreive from a query + GetMessagePage | None: Next Page of Results or None if there + are no more messages to retreive from a query """ if self.page >= self.pages: return None @@ -107,21 +145,27 @@ def next(self): class GetMetamessagePage(GetPage): - def __init__(self, session, options: Dict, response: 
Dict) -> None: + def __init__( + self, + response: dict, + session, + reverse: bool, + message_id: Optional[uuid.UUID], + metamessage_type: Optional[str], + ) -> None: """Constructor for Page Result from Metamessage Get Request Args: - session (Session): Session the returned messages are associated with - options (Dict): Options for the request used mainly for next() to filter queries. The two parameters available are message_id and metamessage_type which are both optional - response (Dict): Response from API with pagination information + response (dict): Response from API with pagination information + session (Session): Session the returned messages are + associated with + reverse (bool): Whether to reverse the order of the results """ super().__init__(response) self.session = session - self.message_id = options["message_id"] if "message_id" in options else None - self.metamessage_type = ( - options["metamessage_type"] if "metamessage_type" in options else None - ) - self.reverse = options["reverse"] + self.message_id = message_id + self.metamessage_type = metamessage_type + self.reverse = reverse self.items = [ Metamessage( id=metamessage["id"], @@ -136,7 +180,8 @@ def __init__(self, session, options: Dict, response: Dict) -> None: def next(self): """Get the next page of results Returns: - GetMetamessagePage | None: Next Page of Results or None if there are no more metamessages to retreive from a query + GetMetamessagePage | None: Next Page of Results or None if + there are no more metamessages to retreive from a query """ if self.page >= self.pages: return None @@ -152,16 +197,18 @@ def next(self): class GetDocumentPage(GetPage): """Paginated results for Get Document requests""" - def __init__(self, collection, options, response: Dict) -> None: + def __init__(self, response: dict, collection, reverse: bool) -> None: """Constructor for Page Result from Document Get Request Args: - collection (Collection): Collection the returned documents are associated with - response (Dict): Response from API with pagination information + response (dict): Response from API with pagination information + collection (Collection): Collection the returned documents are + associated with + reverse (bool): Whether to reverse the order of the results or not """ super().__init__(response) self.collection = collection - self.reverse = options["reverse"] + self.reverse = reverse self.items = [ Document( id=document["id"], @@ -176,7 +223,8 @@ def __init__(self, collection, options, response: Dict) -> None: def next(self): """Get the next page of results Returns: - GetDocumentPage | None: Next Page of Results or None if there are no more sessions to retreive from a query + GetDocumentPage | None: Next Page of Results or None if there + are no more sessions to retreive from a query """ if self.page >= self.pages: return None @@ -188,23 +236,21 @@ def next(self): class GetCollectionPage(GetPage): """Paginated results for Get Collection requests""" - def __init__(self, client, options: Dict, response: Dict): + def __init__(self, response: dict, user: User, reverse: bool): """Constructor for page result from Get Collection Request Args: - client ( Client): Honcho Client - options (Dict): Options for the request used mainly for next() to filter queries. 
The only parameter available is user_id which is required - response (Dict): Response from API with pagination information + response (dict): Response from API with pagination information + user (User): Honcho Client + reverse (bool): Whether to reverse the order of the results or not """ super().__init__(response) - self.client = client - self.user_id = options["user_id"] - self.reverse = options["reverse"] + self.user = user + self.reverse = reverse self.items = [ Collection( - client=client, + user=user, id=collection["id"], - user_id=collection["user_id"], name=collection["name"], created_at=collection["created_at"], ) @@ -214,51 +260,202 @@ def __init__(self, client, options: Dict, response: Dict): def next(self): """Get the next page of results Returns: - GetCollectionPage | None: Next Page of Results or None if there are no more sessions to retreive from a query + GetCollectionPage | None: Next Page of Results or None if + there are no more sessions to retreive from a query """ if self.page >= self.pages: return None - return self.client.get_collections( - user_id=self.user_id, + return self.user.get_collections( page=self.page + 1, page_size=self.page_size, reverse=self.reverse, ) -class Client: +class Honcho: """Honcho API Client Object""" - def __init__(self, app_id: str, base_url: str = "https://demo.honcho.dev"): + def __init__(self, app_name: str, base_url: str = "https://demo.honcho.dev"): """Constructor for Client""" - self.base_url = base_url # Base URL for the instance of the Honcho API - self.app_id = app_id # Representing ID of the client application - self.client = httpx.Client() + self.server_url: str = base_url # Base URL for the instance of the Honcho API + self.client: httpx.Client = httpx.Client() + self.app_name: str = app_name # Representing name of the client application + self.app_id: uuid.UUID + self.metadata: dict + + def initialize(self): + res = self.client.get( + f"{self.server_url}/apps/get_or_create/{self.app_name}" + ) + res.raise_for_status() + data = res.json() + self.app_id: uuid.UUID = data["id"] + self.metadata: dict = data["metadata"] @property - def common_prefix(self): + def base_url(self): """Shorcut for common API prefix. made a property to prevent tampering""" - return f"{self.base_url}/apps/{self.app_id}" + return f"{self.server_url}/apps/{self.app_id}/users" - def get_session(self, user_id: str, session_id: uuid.UUID): + def create_user(self, name: str, metadata: Optional[dict] = None): + """Create a new user by name + + Args: + name (str): The name of the user + metadata (dict, optional): The metadata for the user. Defaults to {}. 
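As a quick sketch of the bootstrap flow this rewrite introduces (construct the client by app name, initialize it, then work through a User), modeled on the sync tests at the end of this patch; "my-app" and "alice" are placeholder names and the server URL matches the one used in the tests:

from honcho import Honcho

honcho = Honcho("my-app", "http://localhost:8000")
honcho.initialize()                          # get_or_create the app and cache its id
user = honcho.create_user("alice")           # or honcho.get_user("alice") for an existing user
session = user.create_session()              # location_id defaults to "default"
session.create_message(is_user=True, content="Hello")
session.create_message(is_user=False, content="Hi there")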
+ + Returns: + User: The created User object + """ + if metadata is None: + metadata = {} + url = f"{self.base_url}" + response = self.client.post( + url, json={"name": name, "metadata": metadata} + ) + response.raise_for_status() + data = response.json() + return User( + honcho=self, + id=data["id"], + metadata=data["metadata"], + created_at=data["created_at"], + ) + + def get_user(self, name: str): + """Get a user by name + + Args: + name (str): The name of the user + + Returns: + User: The User object + """ + url = f"{self.base_url}/{name}" + response = self.client.get(url) + response.raise_for_status() + data = response.json() + return User(self, **data) + + def get_users( + self, page: int = 1, page_size: int = 50, reverse: bool = False + ): + """Get Paginated list of users + + Returns: + GetUserPage: Paginated list of users + """ + url = f"{self.base_url}?page={page}&size={page_size}&reverse={reverse}" + response = self.client.get(url) + response.raise_for_status() + data = response.json() + return GetUserPage(data, self, reverse) + + def get_users_generator( + self, + reverse: bool = False, + ): + """Shortcut Generator for get_users. Generator to iterate through + all users in an app + + Args: + reverse (bool): Whether to reverse the order of the results + + Yields: + User: The User object + + """ + page = 1 + page_size = 50 + get_user_response = self.get_users(page, page_size, reverse) + while True: + for session in get_user_response.items: + yield session + + new_users = get_user_response.next() + if not new_users: + break + + get_user_response = new_users + + # def get_user_by_id(self, id: uuid.UUID): + # """Get a user by id + + # Args: + # id (uuid.UUID): The id of the user + + # Returns: + # User: The User object + # """ + # url = f"{self.common_prefix}/users/{id}" + # response = self.client.get(url) + # response.raise_for_status() + # data = response.json() + # return User(self, **data) + + +class User: + """Represents a single user in an app""" + + def __init__( + self, + honcho: Honcho, + id: uuid.UUID, + metadata: dict, + created_at: datetime.datetime, + ): + """Constructor for User""" + # self.base_url: str = honcho.base_url + self.honcho: Honcho = honcho + self.id: uuid.UUID = id + self.metadata: dict = metadata + self.created_at: datetime.datetime = created_at + + @property + def base_url(self): + """Shortcut for common API prefix. 
made a property to prevent tampering""" + return f"{self.honcho.base_url}/{self.id}" + + def __str__(self): + """String representation of User""" + return f"User(id={self.id}, app_id={self.honcho.app_id}, metadata={self.metadata})" # noqa: E501 + + # TODO method to update metadata + def update_user(self, metadata: dict): + """Updates a user's metadata + + Args: + metadata (dict): The new metadata for the user + + Returns: + User: The updated User object + + """ + url = f"{self.base_url}" + response = self.honcho.client.put(url, json=metadata) + response.raise_for_status() + data = response.json() + self.metadata = data["metadata"] + # TODO update this object's metadata field + # return User(self.honcho, **data) + + def get_session(self, session_id: uuid.UUID): """Get a specific session for a user by ID Args: - user_id (str): The User ID representing the user, managed by the user session_id (uuid.UUID): The ID of the Session to retrieve Returns: Session: The Session object of the requested Session """ - url = f"{self.common_prefix}/users/{user_id}/sessions/{session_id}" - response = self.client.get(url) + url = f"{self.base_url}/sessions/{session_id}" + response = self.honcho.client.get(url) response.raise_for_status() data = response.json() return Session( - client=self, + user=self, id=data["id"], - user_id=data["user_id"], location_id=data["location_id"], is_active=data["is_active"], metadata=data["metadata"], @@ -267,7 +464,6 @@ def get_session(self, user_id: str, session_id: uuid.UUID): def get_sessions( self, - user_id: str, location_id: Optional[str] = None, page: int = 1, page_size: int = 50, @@ -276,8 +472,8 @@ def get_sessions( """Return sessions associated with a user paginated Args: - user_id (str): The User ID representing the user, managed by the user - location_id (str, optional): Optional Location ID representing the location of a session + location_id (str, optional): Optional Location ID representing the + location of a session page (int, optional): The page of results to return page_size (int, optional): The number of results to return @@ -286,26 +482,25 @@ def get_sessions( """ url = ( - f"{self.common_prefix}/users/{user_id}/sessions?page={page}&size={page_size}&reverse={reverse}" + f"{self.base_url}/sessions?page={page}&size={page_size}&reverse={reverse}" + (f"&location_id={location_id}" if location_id else "") ) - response = self.client.get(url) + response = self.honcho.client.get(url) response.raise_for_status() data = response.json() - options = {"location_id": location_id, "user_id": user_id, "reverse": reverse} - return GetSessionPage(self, options, data) + return GetSessionPage(data, self, reverse, location_id) def get_sessions_generator( self, - user_id: str, location_id: Optional[str] = None, reverse: bool = False, ): - """Shortcut Generator for get_sessions. Generator to iterate through all sessions for a user in an app + """Shortcut Generator for get_sessions. 
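To show the two access styles side by side, a short sketch of paginated versus generator-based session listing, assuming user is a User obtained as in the bootstrap sketch above:

# page-by-page access
page = user.get_sessions(page=1, page_size=50)
while page is not None:
    for session in page.items:
        print(session.id, session.location_id)
    page = page.next()                       # returns None after the last page

# or let the generator shortcut walk the pages for you
for session in user.get_sessions_generator():
    print(session.id)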
Generator to iterate through + all sessions for a user in an app Args: - user_id (str): The User ID representing the user, managed by the user - location_id (str, optional): Optional Location ID representing the location of a session + location_id (str, optional): Optional Location ID representing the + location of a session Yields: Session: The Session object of the requested Session @@ -314,10 +509,9 @@ def get_sessions_generator( page = 1 page_size = 50 get_session_response = self.get_sessions( - user_id, location_id, page, page_size, reverse + location_id, page, page_size, reverse ) while True: - # get_session_response = self.get_sessions(user_id, location_id, page, page_size) for session in get_session_response.items: yield session @@ -328,28 +522,29 @@ def get_sessions_generator( get_session_response = new_sessions def create_session( - self, user_id: str, location_id: str = "default", metadata: Dict = {} + self, location_id: str = "default", metadata: Optional[dict] = None ): """Create a session for a user Args: - user_id (str): The User ID representing the user, managed by the user - location_id (str, optional): Optional Location ID representing the location of a session - metadata (Dict, optional): Optional session metadata + location_id (str, optional): Optional Location ID representing the + location of a session + metadata (dict, optional): Optional session metadata Returns: Session: The Session object of the new Session """ + if metadata is None: + metadata = {} data = {"location_id": location_id, "metadata": metadata} - url = f"{self.common_prefix}/users/{user_id}/sessions" - response = self.client.post(url, json=data) + url = f"{self.base_url}/sessions" + response = self.honcho.client.post(url, json=data) response.raise_for_status() data = response.json() return Session( self, id=data["id"], - user_id=user_id, location_id=location_id, metadata=metadata, is_active=data["is_active"], @@ -358,13 +553,11 @@ def create_session( def create_collection( self, - user_id: str, name: str, ): """Create a collection for a user Args: - user_id (str): The User ID representing the user, managed by the user name (str): unique name for the collection for the user Returns: @@ -372,48 +565,44 @@ def create_collection( """ data = {"name": name} - url = f"{self.common_prefix}/users/{user_id}/collections" - response = self.client.post(url, json=data) + url = f"{self.base_url}/collections" + response = self.honcho.client.post(url, json=data) response.raise_for_status() data = response.json() return Collection( self, id=data["id"], - user_id=user_id, name=name, created_at=data["created_at"], ) - def get_collection(self, user_id: str, name: str): + def get_collection(self, name: str): """Get a specific collection for a user by name Args: - user_id (str): The User ID representing the user, managed by the user name (str): The name of the collection to get Returns: Collection: The Session object of the requested Session """ - url = f"{self.common_prefix}/users/{user_id}/collections/name/{name}" - response = self.client.get(url) + url = f"{self.base_url}/collections/{name}" + response = self.honcho.client.get(url) response.raise_for_status() data = response.json() return Collection( - client=self, + user=self, id=data["id"], - user_id=data["user_id"], name=data["name"], created_at=data["created_at"], ) def get_collections( - self, user_id: str, page: int = 1, page_size: int = 50, reverse: bool = False + self, page: int = 1, page_size: int = 50, reverse: bool = False ): """Return collections associated 
with a user paginated Args: - user_id (str): The User ID representing the user to get the collection for page (int, optional): The page of results to return page_size (int, optional): The number of results to return reverse (bool): Whether to reverse the order of the results @@ -422,18 +611,18 @@ def get_collections( GetCollectionPage: Page or results for get_collections query """ - url = f"{self.common_prefix}/users/{user_id}/collections/all?page={page}&size={page_size}&reverse={reverse}" - response = self.client.get(url) + url = f"{self.base_url}/collections?page={page}&size={page_size}&reverse={reverse}" # noqa: E501 + response = self.honcho.client.get(url) response.raise_for_status() data = response.json() - options = {"user_id": user_id, "reverse": reverse} - return GetCollectionPage(self, options, data) + return GetCollectionPage(data, self, reverse) - def get_collections_generator(self, user_id: str, reverse: bool = False): - """Shortcut Generator for get_sessions. Generator to iterate through all sessions for a user in an app + def get_collections_generator(self, reverse: bool = False): + """Shortcut Generator for get_sessions. Generator to iterate through + all sessions for a user in an app Args: - user_id (str): The User ID representing the user, managed by the user + reverse (bool): Whether to reverse the order of the results Yields: Collection: The Session object of the requested Session @@ -441,11 +630,8 @@ def get_collections_generator(self, user_id: str, reverse: bool = False): """ page = 1 page_size = 50 - get_collection_response = self.get_collections( - user_id, page, page_size, reverse - ) + get_collection_response = self.get_collections(page, page_size, reverse) while True: - # get_collection_response = self.get_collections(user_id, location_id, page, page_size) for collection in get_collection_response.items: yield collection @@ -461,33 +647,29 @@ class Session: def __init__( self, - client: Client, + user: User, id: uuid.UUID, - user_id: str, location_id: str, metadata: dict, is_active: bool, created_at: datetime.datetime, ): """Constructor for Session""" - self.base_url: str = client.base_url - self.client: httpx.Client = client.client - self.app_id: str = client.app_id + self.user: User = user self.id: uuid.UUID = id - self.user_id: str = user_id self.location_id: str = location_id self.metadata: dict = metadata self._is_active: bool = is_active self.created_at: datetime.datetime = created_at @property - def common_prefix(self): + def base_url(self): """Shortcut for common API prefix. 
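Collections follow the same user-scoped pattern; a brief sketch modeled on test_collection_name_collision in the async tests, with placeholder collection names (names are unique per user, and lookups are by name):

collection = user.create_collection("notes")
try:
    user.create_collection("notes")               # duplicate name for the same user
except Exception:
    pass                                          # the server rejects the collision
collection.update("notes-archive")                # rename the collection
archived = user.get_collection("notes-archive")   # fetch it back by its new name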
made a property to prevent tampering""" - return f"{self.base_url}/apps/{self.app_id}" + return f"{self.user.base_url}/sessions/{self.id}" def __str__(self): """String representation of Session""" - return f"Session(id={self.id}, app_id={self.app_id}, user_id={self.user_id}, location_id={self.location_id}, metadata={self.metadata}, is_active={self.is_active})" + return f"Session(id={self.id}, location_id={self.location_id}, metadata={self.metadata}, is_active={self.is_active})" # noqa: E501 @property def is_active(self): @@ -508,8 +690,8 @@ def create_message(self, is_user: bool, content: str): if not self.is_active: raise Exception("Session is inactive") data = {"is_user": is_user, "content": content} - url = f"{self.common_prefix}/users/{self.user_id}/sessions/{self.id}/messages" - response = self.client.post(url, json=data) + url = f"{self.base_url}/messages" + response = self.user.honcho.client.post(url, json=data) response.raise_for_status() data = response.json() return Message( @@ -530,8 +712,8 @@ def get_message(self, message_id: uuid.UUID) -> Message: Message: The Message object """ - url = f"{self.common_prefix}/users/{self.user_id}/sessions/{self.id}/messages/{message_id}" - response = self.client.get(url) + url = f"{self.base_url}/messages/{message_id}" + response = self.user.honcho.client.get(url) response.raise_for_status() data = response.json() return Message( @@ -556,15 +738,15 @@ def get_messages( GetMessagePage: Page of Message objects """ - url = f"{self.common_prefix}/users/{self.user_id}/sessions/{self.id}/messages?page={page}&size={page_size}&reverse={reverse}" - response = self.client.get(url) + url = f"{self.base_url}/messages?page={page}&size={page_size}&reverse={reverse}" # noqa: E501 + response = self.user.honcho.client.get(url) response.raise_for_status() data = response.json() - options = {"reverse": reverse} - return GetMessagePage(self, options, data) + return GetMessagePage(data, self, reverse) def get_messages_generator(self, reverse: bool = False): - """Shortcut Generator for get_messages. Generator to iterate through all messages for a session in an app + """Shortcut Generator for get_messages. 
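A small sketch of the message round trip, in the style of the tests at the end of this patch, assuming session is an active Session belonging to user:

session.create_message(is_user=True, content="Hello")
session.create_message(is_user=False, content="Hi")

first_page = session.get_messages(page=1, page_size=50)
for message in first_page.items:
    print("user" if message.is_user else "ai", message.content)

# the generator shortcut transparently walks every page
for message in session.get_messages_generator():
    print(message.content)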
Generator to iterate through + all messages for a session in an app Yields: Message: The Message object of the next Message @@ -574,7 +756,6 @@ def get_messages_generator(self, reverse: bool = False): page_size = 50 get_messages_page = self.get_messages(page, page_size, reverse) while True: - # get_session_response = self.get_sessions(user_id, location_id, page, page_size) for message in get_messages_page.items: yield message @@ -605,10 +786,8 @@ def create_metamessage( "content": content, "message_id": message.id, } - url = ( - f"{self.common_prefix}/users/{self.user_id}/sessions/{self.id}/metamessages" - ) - response = self.client.post(url, json=data) + url = f"{self.base_url}/metamessages" + response = self.user.honcho.client.post(url, json=data) response.raise_for_status() data = response.json() return Metamessage( @@ -629,8 +808,8 @@ def get_metamessage(self, metamessage_id: uuid.UUID) -> Metamessage: Message: The Message object """ - url = f"{self.common_prefix}/users/{self.user_id}/sessions/{self.id}/metamessages/{metamessage_id}" - response = self.client.get(url) + url = f"{self.base_url}/metamessages/{metamessage_id}" + response = self.user.honcho.client.get(url) response.raise_for_status() data = response.json() return Metamessage( @@ -652,27 +831,28 @@ def get_metamessages( """Get all messages for a session Args: - user_id (str): The User ID representing the user, managed by the user - session_id (int): The ID of the Session to retrieve + metamessage_type (str, optional): The type of the metamessage + message (Message, optional): The message to associate the metamessage with + page (int, optional): The page of results to return + page_size (int, optional): The number of results to return per page + reverse (bool): Whether to reverse the order of the results Returns: - list[Dict]: List of Message objects + list[dict]: List of Message objects """ - url = f"{self.common_prefix}/users/{self.user_id}/sessions/{self.id}/metamessages?page={page}&size={page_size}&reverse={reverse}" + url = f"{self.base_url}/metamessages?page={page}&size={page_size}&reverse={reverse}" # noqa: E501 if metamessage_type: url += f"&metamessage_type={metamessage_type}" if message: url += f"&message_id={message.id}" - response = self.client.get(url) + response = self.user.honcho.client.get(url) response.raise_for_status() data = response.json() - options = { - "metamessage_type": metamessage_type, - "message_id": message.id if message else None, - "reverse": reverse, - } - return GetMetamessagePage(self, options, data) + message_id = message.id if message else None + return GetMetamessagePage( + data, self, reverse, message_id, metamessage_type + ) def get_metamessages_generator( self, @@ -680,7 +860,8 @@ def get_metamessages_generator( message: Optional[Message] = None, reverse: bool = False, ): - """Shortcut Generator for get_metamessages. Generator to iterate through all metamessages for a session in an app + """Shortcut Generator for get_metamessages. 
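And a companion sketch for metamessages, which hang off a specific message and can be filtered by type, mirroring the metamessage tests in this patch:

message = session.create_message(is_user=True, content="Hello")
session.create_metamessage(message=message, metamessage_type="thought", content="Test 1")
session.create_metamessage(message=message, metamessage_type="thought", content="Test 2")

# filter by type, by originating message, or both
thoughts = session.get_metamessages(metamessage_type="thought", message=message)
for meta in thoughts.items:
    print(meta.metamessage_type, meta.content)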
Generator to iterate + through all metamessages for a session in an app Args: metamessage_type (str, optional): Optional Metamessage type to filter by @@ -709,26 +890,26 @@ def get_metamessages_generator( get_metamessages_page = new_messages - def update(self, metadata: Dict): + def update(self, metadata: dict): """Update the metadata of a session Args: - metadata (Dict): The Session object containing any new metadata + metadata (dict): The Session object containing any new metadata Returns: boolean: Whether the session was successfully updated """ info = {"metadata": metadata} - url = f"{self.common_prefix}/users/{self.user_id}/sessions/{self.id}" - response = self.client.put(url, json=info) + url = f"{self.base_url}" + response = self.user.honcho.client.put(url, json=info) success = response.status_code < 400 self.metadata = metadata return success def close(self): """Closes a session by marking it as inactive""" - url = f"{self.common_prefix}/users/{self.user_id}/sessions/{self.id}" - response = self.client.delete(url) + url = f"{self.base_url}" + response = self.user.honcho.client.delete(url) response.raise_for_status() self._is_active = False @@ -738,29 +919,25 @@ class Collection: def __init__( self, - client: Client, + user: User, id: uuid.UUID, - user_id: str, name: str, created_at: datetime.datetime, ): """Constructor for Collection""" - self.base_url: str = client.base_url - self.client: httpx.Client = client.client - self.app_id: str = client.app_id + self.user = user self.id: uuid.UUID = id - self.user_id: str = user_id self.name: str = name self.created_at: datetime.datetime = created_at @property - def common_prefix(self): + def base_url(self): """Shortcut for common API prefix. made a property to prevent tampering""" - return f"{self.base_url}/apps/{self.app_id}" + return f"{self.user.base_url}/collections/{self.id}" def __str__(self): """String representation of Collection""" - return f"Collection(id={self.id}, app_id={self.app_id}, user_id={self.user_id}, name={self.name}, created_at={self.created_at})" + return f"Collection(id={self.id}, name={self.name}, created_at={self.created_at})" # noqa: E501 def update(self, name: str): """Update the name of the collection @@ -772,8 +949,8 @@ def update(self, name: str): boolean: Whether the session was successfully updated """ info = {"name": name} - url = f"{self.common_prefix}/users/{self.user_id}/collections/{self.id}" - response = self.client.put(url, json=info) + url = f"{self.base_url}" + response = self.user.honcho.client.put(url, json=info) response.raise_for_status() success = response.status_code < 400 self.name = name @@ -781,26 +958,27 @@ def update(self, name: str): def delete(self): """Delete a collection and all associated documents""" - url = f"{self.common_prefix}/users/{self.user_id}/collections/{self.id}" - response = self.client.delete(url) + url = f"{self.base_url}" + response = self.user.honcho.client.delete(url) response.raise_for_status() - def create_document(self, content: str, metadata: Dict = {}): + def create_document(self, content: str, metadata: Optional[dict] = None): """Adds a document to the collection Args: content (str): The content of the document - metadata (Dict): The metadata of the document + metadata (dict): The metadata of the document Returns: Document: The Document object of the added document """ + if metadata is None: + metadata = {} data = {"metadata": metadata, "content": content} - url = ( - f"{self.common_prefix}/users/{self.user_id}/collections/{self.id}/documents" - ) - response 
= self.client.post(url, json=data) + url = f"{self.base_url}/documents" + print(url) + response = self.user.honcho.client.post(url, json=data) response.raise_for_status() data = response.json() return Document( @@ -821,8 +999,8 @@ def get_document(self, document_id: uuid.UUID) -> Document: Document: The Document object """ - url = f"{self.common_prefix}/users/{self.user_id}/collections/{self.id}/documents/{document_id}" - response = self.client.get(url) + url = f"{self.base_url}/documents/{document_id}" + response = self.user.honcho.client.get(url) response.raise_for_status() data = response.json() return Document( @@ -846,15 +1024,17 @@ def get_documents( GetDocumentPage: Page of Document objects """ - url = f"{self.common_prefix}/users/{self.user_id}/collections/{self.id}/documents?page={page}&size={page_size}&reverse={reverse}" - response = self.client.get(url) + url = ( + f"{self.base_url}/documents?page={page}&size={page_size}&reverse={reverse}" # noqa: E501 + ) + response = self.user.honcho.client.get(url) response.raise_for_status() data = response.json() - options = {"reverse": reverse} - return GetDocumentPage(self, options, data) + return GetDocumentPage(data, self, reverse) def get_documents_generator(self, reverse: bool = False): - """Shortcut Generator for get_documents. Generator to iterate through all documents for a collection in an app + """Shortcut Generator for get_documents. Generator to iterate through + all documents for a collection in an app Yields: Document: The Document object of the next Document @@ -873,7 +1053,7 @@ def get_documents_generator(self, reverse: bool = False): get_documents_page = new_documents - def query(self, query: str, top_k: int = 5) -> List[Document]: + def query(self, query: str, top_k: int = 5) -> list[Document]: """query the documents by cosine distance Args: query (str): The query string to compare other embeddings too @@ -882,8 +1062,8 @@ def query(self, query: str, top_k: int = 5) -> List[Document]: Returns: List[Document]: The response from the query with matching documents """ - url = f"{self.common_prefix}/users/{self.user_id}/collections/{self.id}/query?query={query}&top_k={top_k}" - response = self.client.get(url) + url = f"{self.base_url}/query?query={query}&top_k={top_k}" + response = self.user.honcho.client.get(url) response.raise_for_status() data = [ Document( @@ -898,13 +1078,13 @@ def query(self, query: str, top_k: int = 5) -> List[Document]: return data def update_document( - self, document: Document, content: Optional[str], metadata: Optional[Dict] + self, document: Document, content: Optional[str], metadata: Optional[dict] ) -> Document: """Update a document in the collection Args: document (Document): The Document to update - metadata (Dict): The metadata of the document + metadata (dict): The metadata of the document content (str): The content of the document Returns: @@ -913,8 +1093,8 @@ def update_document( if metadata is None and content is None: raise ValueError("metadata and content cannot both be None") data = {"metadata": metadata, "content": content} - url = f"{self.common_prefix}/users/{self.user_id}/collections/{self.id}/documents/{document.id}" - response = self.client.put(url, json=data) + url = f"{self.base_url}/documents/{document.id}" + response = self.user.honcho.client.put(url, json=data) response.raise_for_status() data = response.json() return Document( @@ -934,8 +1114,8 @@ def delete_document(self, document: Document) -> bool: Returns: boolean: Whether the document was successfully deleted """ - url 
= f"{self.common_prefix}/users/{self.user_id}/collections/{self.id}/documents/{document.id}" - response = self.client.delete(url) + url = f"{self.base_url}/documents/{document.id}" + response = self.user.honcho.client.delete(url) response.raise_for_status() success = response.status_code < 400 return success diff --git a/sdk/pyproject.toml b/sdk/pyproject.toml index 1455bca..4a95234 100644 --- a/sdk/pyproject.toml +++ b/sdk/pyproject.toml @@ -15,6 +15,25 @@ httpx = "^0.26.0" pytest = "^7.4.4" pytest-asyncio = "^0.23.4" +[tool.ruff.lint] +# from https://docs.astral.sh/ruff/linter/#rule-selection example +select = [ + # pycodestyle + "E", + # Pyflakes + "F", + # pyupgrade + "UP", + # flake8-bugbear + "B", + # flake8-simplify + "SIM", + # isort + "I", +] +ignore = ["UP007"] + + [build-system] requires = ["poetry-core"] build-backend = "poetry.core.masonry.api" diff --git a/sdk/tests/test_async.py b/sdk/tests/test_async.py index f47942b..0c52875 100644 --- a/sdk/tests/test_async.py +++ b/sdk/tests/test_async.py @@ -1,16 +1,29 @@ -import pytest -from honcho import AsyncGetSessionPage, AsyncGetMessagePage, AsyncGetMetamessagePage, AsyncGetDocumentPage, AsyncSession, Message, Metamessage, Document -from honcho import AsyncClient as Honcho from uuid import uuid1 +import pytest + +from honcho import ( + AsyncGetDocumentPage, + AsyncGetMessagePage, + AsyncGetMetamessagePage, + AsyncGetSessionPage, + AsyncSession, + Document, + Message, + Metamessage, +) +from honcho import AsyncHoncho as Honcho + @pytest.mark.asyncio async def test_session_creation_retrieval(): - app_id = str(uuid1()) - client = Honcho(app_id, "http://localhost:8000") - user_id = str(uuid1()) - created_session = await client.create_session(user_id) - retrieved_session = await client.get_session(user_id, created_session.id) + app_name = str(uuid1()) + honcho = Honcho(app_name, "http://localhost:8000") + await honcho.initialize() + user_name = str(uuid1()) + user = await honcho.create_user(user_name) + created_session = await user.create_session() + retrieved_session = await user.get_session(created_session.id) assert retrieved_session.id == created_session.id assert retrieved_session.is_active is True assert retrieved_session.location_id == "default" @@ -19,12 +32,14 @@ async def test_session_creation_retrieval(): @pytest.mark.asyncio async def test_session_multiple_retrieval(): - app_id = str(uuid1()) - client = Honcho(app_id, "http://localhost:8000") - user_id = str(uuid1()) - created_session_1 = await client.create_session(user_id) - created_session_2 = await client.create_session(user_id) - response = await client.get_sessions(user_id) + app_name = str(uuid1()) + user_name = str(uuid1()) + honcho = Honcho(app_name, "http://localhost:8000") + await honcho.initialize() + user = await honcho.create_user(user_name) + created_session_1 = await user.create_session() + created_session_2 = await user.create_session() + response = await user.get_sessions() retrieved_sessions = response.items assert len(retrieved_sessions) == 2 @@ -34,38 +49,44 @@ async def test_session_multiple_retrieval(): @pytest.mark.asyncio async def test_session_update(): - user_id = str(uuid1()) - app_id = str(uuid1()) - client = Honcho(app_id, "http://localhost:8000") - created_session = await client.create_session(user_id) + user_name = str(uuid1()) + app_name = str(uuid1()) + honcho = Honcho(app_name, "http://localhost:8000") + await honcho.initialize() + user = await honcho.create_user(user_name) + created_session = await user.create_session() assert await 
created_session.update({"foo": "bar"}) - retrieved_session = await client.get_session(user_id, created_session.id) + retrieved_session = await user.get_session(created_session.id) assert retrieved_session.metadata == {"foo": "bar"} @pytest.mark.asyncio async def test_session_deletion(): - user_id = str(uuid1()) - app_id = str(uuid1()) - client = Honcho(app_id, "http://localhost:8000") - created_session = await client.create_session(user_id) + user_name = str(uuid1()) + app_name = str(uuid1()) + honcho = Honcho(app_name, "http://localhost:8000") + await honcho.initialize() + user = await honcho.create_user(user_name) + created_session = await user.create_session() assert created_session.is_active is True await created_session.close() assert created_session.is_active is False - retrieved_session = await client.get_session(user_id, created_session.id) + retrieved_session = await user.get_session(created_session.id) assert retrieved_session.is_active is False assert retrieved_session.id == created_session.id @pytest.mark.asyncio async def test_messages(): - user_id = str(uuid1()) - app_id = str(uuid1()) - client = Honcho(app_id, "http://localhost:8000") - created_session = await client.create_session(user_id) + user_name = str(uuid1()) + app_name = str(uuid1()) + honcho = Honcho(app_name, "http://localhost:8000") + await honcho.initialize() + user = await honcho.create_user(user_name) + created_session = await user.create_session() await created_session.create_message(is_user=True, content="Hello") await created_session.create_message(is_user=False, content="Hi") - retrieved_session = await client.get_session(user_id, created_session.id) + retrieved_session = await user.get_session(created_session.id) response = await retrieved_session.get_messages() messages = response.items assert len(messages) == 2 @@ -75,42 +96,52 @@ async def test_messages(): assert ai_message.content == "Hi" assert ai_message.is_user is False + @pytest.mark.asyncio async def test_rate_limit(): - app_id = str(uuid1()) - user_id = str(uuid1()) - client = Honcho(app_id, "http://localhost:8000") - created_session = await client.create_session(user_id) + app_name = str(uuid1()) + user_name = str(uuid1()) + honcho = Honcho(app_name, "http://localhost:8000") + await honcho.initialize() + user = await honcho.create_user(user_name) + created_session = await user.create_session() with pytest.raises(Exception): for _ in range(105): await created_session.create_message(is_user=True, content="Hello") await created_session.create_message(is_user=False, content="Hi") + @pytest.mark.asyncio -async def test_app_id_security(): - app_id_1 = str(uuid1()) - app_id_2 = str(uuid1()) - user_id = str(uuid1()) - client_1 = Honcho(app_id_1, "http://localhost:8000") - client_2 = Honcho(app_id_2, "http://localhost:8000") - created_session = await client_1.create_session(user_id) +async def test_app_name_security(): + app_name_1 = str(uuid1()) + app_name_2 = str(uuid1()) + user_name = str(uuid1()) + honcho_1 = Honcho(app_name_1, "http://localhost:8000") + await honcho_1.initialize() + honcho_2 = Honcho(app_name_2, "http://localhost:8000") + await honcho_2.initialize() + user_1 = await honcho_1.create_user(user_name) + user_2 = await honcho_2.create_user(user_name) + created_session = await user_1.create_session() await created_session.create_message(is_user=True, content="Hello") await created_session.create_message(is_user=False, content="Hi") with pytest.raises(Exception): - await client_2.get_session(user_id, created_session.id) + await 
user_2.get_session(created_session.id) @pytest.mark.asyncio async def test_paginated_sessions(): - app_id = str(uuid1()) - user_id = str(uuid1()) - client = Honcho(app_id, "http://localhost:8000") + app_name = str(uuid1()) + user_name = str(uuid1()) + honcho = Honcho(app_name, "http://localhost:8000") + await honcho.initialize() + user = await honcho.create_user(user_name) for i in range(10): - await client.create_session(user_id) - + await user.create_session() + page = 1 page_size = 2 - get_session_response = await client.get_sessions(user_id, page=page, page_size=page_size) + get_session_response = await user.get_sessions(page=page, page_size=page_size) assert len(get_session_response.items) == page_size assert get_session_response.pages == 5 @@ -120,7 +151,7 @@ async def test_paginated_sessions(): assert isinstance(new_session_response, AsyncGetSessionPage) assert len(new_session_response.items) == page_size - final_page = await client.get_sessions(user_id, page=5, page_size=page_size) + final_page = await user.get_sessions(page=5, page_size=page_size) assert len(final_page.items) == 2 next_page = await final_page.next() @@ -129,78 +160,90 @@ async def test_paginated_sessions(): @pytest.mark.asyncio async def test_paginated_sessions_generator(): - app_id = str(uuid1()) - user_id = str(uuid1()) - client = Honcho(app_id, "http://localhost:8000") + app_name = str(uuid1()) + user_name = str(uuid1()) + honcho = Honcho(app_name, "http://localhost:8000") + await honcho.initialize() + user = await honcho.create_user(user_name) for i in range(3): - await client.create_session(user_id) + await user.create_session() - gen = client.get_sessions_generator(user_id) + gen = user.get_sessions_generator() # print(type(gen)) item = await gen.__anext__() - assert item.user_id == user_id + assert item.user.id == user.id assert isinstance(item, AsyncSession) assert await gen.__anext__() is not None assert await gen.__anext__() is not None with pytest.raises(StopAsyncIteration): await gen.__anext__() + @pytest.mark.asyncio async def test_paginated_out_of_bounds(): - app_id = str(uuid1()) - user_id = str(uuid1()) - client = Honcho(app_id, "http://localhost:8000") + app_name = str(uuid1()) + user_name = str(uuid1()) + honcho = Honcho(app_name, "http://localhost:8000") + await honcho.initialize() + user = await honcho.create_user(user_name) for i in range(3): - await client.create_session(user_id) + await user.create_session() page = 2 page_size = 50 - get_session_response = await client.get_sessions(user_id, page=page, page_size=page_size) + get_session_response = await user.get_sessions(page=page, page_size=page_size) assert get_session_response.pages == 1 assert get_session_response.page == 2 assert get_session_response.page_size == 50 assert get_session_response.total == 3 - assert len(get_session_response.items) == 0 + assert len(get_session_response.items) == 0 @pytest.mark.asyncio async def test_paginated_messages(): - app_id = str(uuid1()) - user_id = str(uuid1()) - client = Honcho(app_id, "http://localhost:8000") - created_session = await client.create_session(user_id) + app_name = str(uuid1()) + user_name = str(uuid1()) + honcho = Honcho(app_name, "http://localhost:8000") + await honcho.initialize() + user = await honcho.create_user(user_name) + created_session = await user.create_session() for i in range(10): await created_session.create_message(is_user=True, content="Hello") await created_session.create_message(is_user=False, content="Hi") page_size = 7 - get_message_response = await 
created_session.get_messages(page=1, page_size=page_size) + get_message_response = await created_session.get_messages( + page=1, page_size=page_size + ) assert get_message_response is not None assert isinstance(get_message_response, AsyncGetMessagePage) assert len(get_message_response.items) == page_size new_message_response = await get_message_response.next() - + assert new_message_response is not None assert isinstance(new_message_response, AsyncGetMessagePage) assert len(new_message_response.items) == page_size final_page = await created_session.get_messages(page=3, page_size=page_size) - assert len(final_page.items) == 20 - ((3-1) * 7) + assert len(final_page.items) == 20 - ((3 - 1) * 7) next_page = await final_page.next() assert next_page is None + @pytest.mark.asyncio async def test_paginated_messages_generator(): - app_id = str(uuid1()) - user_id = str(uuid1()) - client = Honcho(app_id, "http://localhost:8000") - created_session = await client.create_session(user_id) + app_name = str(uuid1()) + user_name = str(uuid1()) + honcho = Honcho(app_name, "http://localhost:8000") + await honcho.initialize() + user = await honcho.create_user(user_name) + created_session = await user.create_session() await created_session.create_message(is_user=True, content="Hello") await created_session.create_message(is_user=False, content="Hi") gen = created_session.get_messages_generator() @@ -216,16 +259,23 @@ async def test_paginated_messages_generator(): with pytest.raises(StopAsyncIteration): await gen.__anext__() + @pytest.mark.asyncio async def test_paginated_metamessages(): - app_id = str(uuid1()) - user_id = str(uuid1()) - client = Honcho(app_id, "http://localhost:8000") - created_session = await client.create_session(user_id) + app_name = str(uuid1()) + user_name = str(uuid1()) + honcho = Honcho(app_name, "http://localhost:8000") + await honcho.initialize() + user = await honcho.create_user(user_name) + created_session = await user.create_session() message = await created_session.create_message(is_user=True, content="Hello") for i in range(10): - await created_session.create_metamessage(message=message, metamessage_type="thought", content=f"Test {i}") - await created_session.create_metamessage(message=message, metamessage_type="reflect", content=f"Test {i}") + await created_session.create_metamessage( + message=message, metamessage_type="thought", content=f"Test {i}" + ) + await created_session.create_metamessage( + message=message, metamessage_type="reflect", content=f"Test {i}" + ) page_size = 7 page = await created_session.get_metamessages(page=1, page_size=page_size) @@ -235,28 +285,35 @@ async def test_paginated_metamessages(): assert len(page.items) == page_size new_page = await page.next() - + assert new_page is not None assert isinstance(new_page, AsyncGetMetamessagePage) assert len(new_page.items) == page_size final_page = await created_session.get_metamessages(page=3, page_size=page_size) - assert len(final_page.items) == 20 - ((3-1) * 7) + assert len(final_page.items) == 20 - ((3 - 1) * 7) next_page = await final_page.next() assert next_page is None + @pytest.mark.asyncio async def test_paginated_metamessages_generator(): - app_id = str(uuid1()) - user_id = str(uuid1()) - client = Honcho(app_id, "http://localhost:8000") - created_session = await client.create_session(user_id) + app_name = str(uuid1()) + user_name = str(uuid1()) + honcho = Honcho(app_name, "http://localhost:8000") + await honcho.initialize() + user = await honcho.create_user(user_name) + created_session = await 
user.create_session() message = await created_session.create_message(is_user=True, content="Hello") - await created_session.create_metamessage(message=message, metamessage_type="thought", content="Test 1") - await created_session.create_metamessage(message=message, metamessage_type="thought", content="Test 2") + await created_session.create_metamessage( + message=message, metamessage_type="thought", content="Test 1" + ) + await created_session.create_metamessage( + message=message, metamessage_type="thought", content="Test 2" + ) gen = created_session.get_metamessages_generator() item = await gen.__anext__() @@ -274,16 +331,24 @@ async def test_paginated_metamessages_generator(): @pytest.mark.asyncio async def test_collections(): col_name = str(uuid1()) - app_id = str(uuid1()) - user_id = str(uuid1()) - client = Honcho(app_id, "http://localhost:8000") + app_name = str(uuid1()) + user_name = str(uuid1()) + honcho = Honcho(app_name, "http://localhost:8000") + await honcho.initialize() + user = await honcho.create_user(user_name) # Make a collection - collection = await client.create_collection(user_id, col_name) + collection = await user.create_collection(col_name) # Add documents - doc1 = await collection.create_document(content="This is a test of documents - 1", metadata={"foo": "bar"}) - doc2 = await collection.create_document(content="This is a test of documents - 2", metadata={}) - doc3 = await collection.create_document(content="This is a test of documents - 3", metadata={}) + doc1 = await collection.create_document( + content="This is a test of documents - 1", metadata={"foo": "bar"} + ) + doc2 = await collection.create_document( + content="This is a test of documents - 2", metadata={} + ) + doc3 = await collection.create_document( + content="This is a test of documents - 3", metadata={} + ) # Get all documents page = await collection.get_documents(page=1, page_size=3) @@ -305,47 +370,55 @@ async def test_collections(): result = await collection.delete() # confirm documents are gone with pytest.raises(Exception): - new_col = await client.get_collection(user_id, "test") + new_col = await user.get_collection(col_name) + @pytest.mark.asyncio async def test_collection_name_collision(): col_name = str(uuid1()) new_col_name = str(uuid1()) - app_id = str(uuid1()) - user_id = str(uuid1()) - client = Honcho(app_id, "http://localhost:8000") + app_name = str(uuid1()) + user_name = str(uuid1()) + honcho = Honcho(app_name, "http://localhost:8000") + await honcho.initialize() + user = await honcho.create_user(user_name) # Make a collection - collection = await client.create_collection(user_id, col_name) + collection = await user.create_collection(col_name) # Make another collection with pytest.raises(Exception): - await client.create_collection(user_id, col_name) + await user.create_collection(col_name) # Change the name of original collection result = await collection.update(new_col_name) assert result is True - + # Try again to add another collection - collection2 = await client.create_collection(user_id, col_name) + collection2 = await user.create_collection(col_name) assert collection2 is not None assert collection2.name == col_name assert collection.name == new_col_name # Get all collections - page = await client.get_collections(user_id) + page = await user.get_collections() assert page is not None assert len(page.items) == 2 + @pytest.mark.asyncio async def test_collection_query(): col_name = str(uuid1()) - app_id = str(uuid1()) - user_id = str(uuid1()) - client = Honcho(app_id, 
"http://localhost:8000") + app_name = str(uuid1()) + user_name = str(uuid1()) + honcho = Honcho(app_name, "http://localhost:8000") + await honcho.initialize() + user = await honcho.create_user(user_name) # Make a collection - collection = await client.create_collection(user_id, col_name) + collection = await user.create_collection(col_name) # Add documents - doc1 = await collection.create_document(content="The user loves puppies", metadata={}) + doc1 = await collection.create_document( + content="The user loves puppies", metadata={} + ) doc2 = await collection.create_document(content="The user owns a dog", metadata={}) doc3 = await collection.create_document(content="The user is a doctor", metadata={}) @@ -355,7 +428,9 @@ async def test_collection_query(): assert len(result) == 2 assert isinstance(result[0], Document) - doc3 = await collection.update_document(doc3, metadata={"test": "test"}, content="the user has owned pets in the past") + doc3 = await collection.update_document( + doc3, metadata={"test": "test"}, content="the user has owned pets in the past" + ) assert doc3 is not None assert doc3.metadata == {"test": "test"} assert doc3.content == "the user has owned pets in the past" @@ -365,4 +440,3 @@ async def test_collection_query(): assert result is not None assert len(result) == 2 assert isinstance(result[0], Document) - diff --git a/sdk/tests/test_sync.py b/sdk/tests/test_sync.py index a0367ad..16eba65 100644 --- a/sdk/tests/test_sync.py +++ b/sdk/tests/test_sync.py @@ -1,15 +1,28 @@ -import pytest -from honcho import GetSessionPage, GetMessagePage, GetMetamessagePage, GetDocumentPage, Session, Message, Metamessage, Document -from honcho import Client as Honcho from uuid import uuid1 +import pytest + +from honcho import ( + GetDocumentPage, + GetMessagePage, + GetMetamessagePage, + GetSessionPage, + Session, + Document, + Message, + Metamessage, +) +from honcho import Honcho as Honcho + def test_session_creation_retrieval(): - app_id = str(uuid1()) - client = Honcho(app_id, "http://localhost:8000") - user_id = str(uuid1()) - created_session = client.create_session(user_id) - retrieved_session = client.get_session(user_id, created_session.id) + app_name = str(uuid1()) + honcho = Honcho(app_name, "http://localhost:8000") + honcho.initialize() + user_name = str(uuid1()) + user = honcho.create_user(user_name) + created_session = user.create_session() + retrieved_session = user.get_session(created_session.id) assert retrieved_session.id == created_session.id assert retrieved_session.is_active is True assert retrieved_session.location_id == "default" @@ -17,12 +30,14 @@ def test_session_creation_retrieval(): def test_session_multiple_retrieval(): - app_id = str(uuid1()) - client = Honcho(app_id, "http://localhost:8000") - user_id = str(uuid1()) - created_session_1 = client.create_session(user_id) - created_session_2 = client.create_session(user_id) - response = client.get_sessions(user_id) + app_name = str(uuid1()) + user_name = str(uuid1()) + honcho = Honcho(app_name, "http://localhost:8000") + honcho.initialize() + user = honcho.create_user(user_name) + created_session_1 = user.create_session() + created_session_2 = user.create_session() + response = user.get_sessions() retrieved_sessions = response.items assert len(retrieved_sessions) == 2 @@ -31,36 +46,42 @@ def test_session_multiple_retrieval(): def test_session_update(): - user_id = str(uuid1()) - app_id = str(uuid1()) - client = Honcho(app_id, "http://localhost:8000") - created_session = client.create_session(user_id) + 
user_name = str(uuid1()) + app_name = str(uuid1()) + honcho = Honcho(app_name, "http://localhost:8000") + honcho.initialize() + user = honcho.create_user(user_name) + created_session = user.create_session() assert created_session.update({"foo": "bar"}) - retrieved_session = client.get_session(user_id, created_session.id) + retrieved_session = user.get_session(created_session.id) assert retrieved_session.metadata == {"foo": "bar"} def test_session_deletion(): - user_id = str(uuid1()) - app_id = str(uuid1()) - client = Honcho(app_id, "http://localhost:8000") - created_session = client.create_session(user_id) + user_name = str(uuid1()) + app_name = str(uuid1()) + honcho = Honcho(app_name, "http://localhost:8000") + honcho.initialize() + user = honcho.create_user(user_name) + created_session = user.create_session() assert created_session.is_active is True created_session.close() assert created_session.is_active is False - retrieved_session = client.get_session(user_id, created_session.id) + retrieved_session = user.get_session(created_session.id) assert retrieved_session.is_active is False assert retrieved_session.id == created_session.id def test_messages(): - user_id = str(uuid1()) - app_id = str(uuid1()) - client = Honcho(app_id, "http://localhost:8000") - created_session = client.create_session(user_id) + user_name = str(uuid1()) + app_name = str(uuid1()) + honcho = Honcho(app_name, "http://localhost:8000") + honcho.initialize() + user = honcho.create_user(user_name) + created_session = user.create_session() created_session.create_message(is_user=True, content="Hello") created_session.create_message(is_user=False, content="Hi") - retrieved_session = client.get_session(user_id, created_session.id) + retrieved_session = user.get_session(created_session.id) response = retrieved_session.get_messages() messages = response.items assert len(messages) == 2 @@ -70,39 +91,49 @@ def test_messages(): assert ai_message.content == "Hi" assert ai_message.is_user is False + def test_rate_limit(): - app_id = str(uuid1()) - user_id = str(uuid1()) - client = Honcho(app_id, "http://localhost:8000") - created_session = client.create_session(user_id) + app_name = str(uuid1()) + user_name = str(uuid1()) + honcho = Honcho(app_name, "http://localhost:8000") + honcho.initialize() + user = honcho.create_user(user_name) + created_session = user.create_session() with pytest.raises(Exception): for _ in range(105): created_session.create_message(is_user=True, content="Hello") created_session.create_message(is_user=False, content="Hi") -def test_app_id_security(): - app_id_1 = str(uuid1()) - app_id_2 = str(uuid1()) - user_id = str(uuid1()) - client_1 = Honcho(app_id_1, "http://localhost:8000") - client_2 = Honcho(app_id_2, "http://localhost:8000") - created_session = client_1.create_session(user_id) + +def test_app_name_security(): + app_name_1 = str(uuid1()) + app_name_2 = str(uuid1()) + user_name = str(uuid1()) + honcho_1 = Honcho(app_name_1, "http://localhost:8000") + honcho_1.initialize() + honcho_2 = Honcho(app_name_2, "http://localhost:8000") + honcho_2.initialize() + user_1 = honcho_1.create_user(user_name) + user_2 = honcho_2.create_user(user_name) + created_session = user_1.create_session() created_session.create_message(is_user=True, content="Hello") created_session.create_message(is_user=False, content="Hi") with pytest.raises(Exception): - client_2.get_session(user_id, created_session.id) + user_2.get_session(created_session.id) def test_paginated_sessions(): - app_id = str(uuid1()) - user_id = str(uuid1()) - 
client = Honcho(app_id, "http://localhost:8000") + app_name = str(uuid1()) + user_name = str(uuid1()) + honcho = Honcho(app_name, "http://localhost:8000") + honcho.initialize() + user = honcho.create_user(user_name) for i in range(10): - client.create_session(user_id) - + user.create_session() + page = 1 page_size = 2 - get_session_response = client.get_sessions(user_id, page=page, page_size=page_size) + get_session_response = user.get_sessions(page=page, page_size=page_size) assert len(get_session_response.items) == page_size assert get_session_response.pages == 5 @@ -112,7 +143,7 @@ def test_paginated_sessions(): assert isinstance(new_session_response, GetSessionPage) assert len(new_session_response.items) == page_size - final_page = client.get_sessions(user_id, page=5, page_size=page_size) + final_page = user.get_sessions(page=5, page_size=page_size) assert len(final_page.items) == 2 next_page = final_page.next() @@ -120,75 +151,87 @@ def test_paginated_sessions(): def test_paginated_sessions_generator(): - app_id = str(uuid1()) - user_id = str(uuid1()) - client = Honcho(app_id, "http://localhost:8000") + app_name = str(uuid1()) + user_name = str(uuid1()) + honcho = Honcho(app_name, "http://localhost:8000") + honcho.initialize() + user = honcho.create_user(user_name) for i in range(3): - client.create_session(user_id) + user.create_session() - gen = client.get_sessions_generator(user_id) + gen = user.get_sessions_generator() # print(type(gen)) item = gen.__next__() - assert item.user_id == user_id + assert item.user.id == user.id assert isinstance(item, Session) assert gen.__next__() is not None assert gen.__next__() is not None with pytest.raises(StopIteration): gen.__next__() + def test_paginated_out_of_bounds(): - app_id = str(uuid1()) - user_id = str(uuid1()) - client = Honcho(app_id, "http://localhost:8000") + app_name = str(uuid1()) + user_name = str(uuid1()) + honcho = Honcho(app_name, "http://localhost:8000") + honcho.initialize() + user = honcho.create_user(user_name) for i in range(3): - client.create_session(user_id) + user.create_session() page = 2 page_size = 50 - get_session_response = client.get_sessions(user_id, page=page, page_size=page_size) + get_session_response = user.get_sessions(page=page, page_size=page_size) assert get_session_response.pages == 1 assert get_session_response.page == 2 assert get_session_response.page_size == 50 assert get_session_response.total == 3 - assert len(get_session_response.items) == 0 + assert len(get_session_response.items) == 0 def test_paginated_messages(): - app_id = str(uuid1()) - user_id = str(uuid1()) - client = Honcho(app_id, "http://localhost:8000") - created_session = client.create_session(user_id) + app_name = str(uuid1()) + user_name = str(uuid1()) + honcho = Honcho(app_name, "http://localhost:8000") + honcho.initialize() + user = honcho.create_user(user_name) + created_session = user.create_session() for i in range(10): created_session.create_message(is_user=True, content="Hello") created_session.create_message(is_user=False, content="Hi") page_size = 7 - get_message_response = created_session.get_messages(page=1, page_size=page_size) + get_message_response = created_session.get_messages( + page=1, page_size=page_size + ) assert get_message_response is not None assert isinstance(get_message_response, GetMessagePage) assert len(get_message_response.items) == page_size new_message_response = get_message_response.next() - + assert new_message_response is not None assert isinstance(new_message_response, GetMessagePage) assert 
len(new_message_response.items) == page_size final_page = created_session.get_messages(page=3, page_size=page_size) - assert len(final_page.items) == 20 - ((3-1) * 7) + assert len(final_page.items) == 20 - ((3 - 1) * 7) next_page = final_page.next() assert next_page is None + def test_paginated_messages_generator(): - app_id = str(uuid1()) - user_id = str(uuid1()) - client = Honcho(app_id, "http://localhost:8000") - created_session = client.create_session(user_id) + app_name = str(uuid1()) + user_name = str(uuid1()) + honcho = Honcho(app_name, "http://localhost:8000") + honcho.initialize() + user = honcho.create_user(user_name) + created_session = user.create_session() created_session.create_message(is_user=True, content="Hello") created_session.create_message(is_user=False, content="Hi") gen = created_session.get_messages_generator() @@ -204,15 +247,22 @@ def test_paginated_messages_generator(): with pytest.raises(StopIteration): gen.__next__() + def test_paginated_metamessages(): - app_id = str(uuid1()) - user_id = str(uuid1()) - client = Honcho(app_id, "http://localhost:8000") - created_session = client.create_session(user_id) + app_name = str(uuid1()) + user_name = str(uuid1()) + honcho = Honcho(app_name, "http://localhost:8000") + honcho.initialize() + user = honcho.create_user(user_name) + created_session = user.create_session() message = created_session.create_message(is_user=True, content="Hello") for i in range(10): - created_session.create_metamessage(message=message, metamessage_type="thought", content=f"Test {i}") - created_session.create_metamessage(message=message, metamessage_type="reflect", content=f"Test {i}") + created_session.create_metamessage( + message=message, metamessage_type="thought", content=f"Test {i}" + ) + created_session.create_metamessage( + message=message, metamessage_type="reflect", content=f"Test {i}" + ) page_size = 7 page = created_session.get_metamessages(page=1, page_size=page_size) @@ -222,27 +272,34 @@ def test_paginated_metamessages(): assert len(page.items) == page_size new_page = page.next() - + assert new_page is not None assert isinstance(new_page, GetMetamessagePage) assert len(new_page.items) == page_size final_page = created_session.get_metamessages(page=3, page_size=page_size) - assert len(final_page.items) == 20 - ((3-1) * 7) + assert len(final_page.items) == 20 - ((3 - 1) * 7) next_page = final_page.next() assert next_page is None + def test_paginated_metamessages_generator(): - app_id = str(uuid1()) - user_id = str(uuid1()) - client = Honcho(app_id, "http://localhost:8000") - created_session = client.create_session(user_id) + app_name = str(uuid1()) + user_name = str(uuid1()) + honcho = Honcho(app_name, "http://localhost:8000") + honcho.initialize() + user = honcho.create_user(user_name) + created_session = user.create_session() message = created_session.create_message(is_user=True, content="Hello") - created_session.create_metamessage(message=message, metamessage_type="thought", content="Test 1") - created_session.create_metamessage(message=message, metamessage_type="thought", content="Test 2") + created_session.create_metamessage( + message=message, metamessage_type="thought", content="Test 1" + ) + created_session.create_metamessage( + message=message, metamessage_type="thought", content="Test 2" + ) gen = created_session.get_metamessages_generator() item = gen.__next__() @@ -259,16 +316,24 @@ def test_paginated_metamessages_generator(): def test_collections(): col_name = str(uuid1()) - app_id = str(uuid1()) - user_id = str(uuid1()) 
- client = Honcho(app_id, "http://localhost:8000") + app_name = str(uuid1()) + user_name = str(uuid1()) + honcho = Honcho(app_name, "http://localhost:8000") + honcho.initialize() + user = honcho.create_user(user_name) # Make a collection - collection = client.create_collection(user_id, col_name) + collection = user.create_collection(col_name) # Add documents - doc1 = collection.create_document(content="This is a test of documents - 1", metadata={"foo": "bar"}) - doc2 = collection.create_document(content="This is a test of documents - 2", metadata={}) - doc3 = collection.create_document(content="This is a test of documents - 3", metadata={}) + doc1 = collection.create_document( + content="This is a test of documents - 1", metadata={"foo": "bar"} + ) + doc2 = collection.create_document( + content="This is a test of documents - 2", metadata={} + ) + doc3 = collection.create_document( + content="This is a test of documents - 3", metadata={} + ) # Get all documents page = collection.get_documents(page=1, page_size=3) @@ -290,45 +355,53 @@ def test_collections(): result = collection.delete() # confirm documents are gone with pytest.raises(Exception): - new_col = client.get_collection(user_id, "test") + new_col = user.get_collection(col_name) + def test_collection_name_collision(): col_name = str(uuid1()) new_col_name = str(uuid1()) - app_id = str(uuid1()) - user_id = str(uuid1()) - client = Honcho(app_id, "http://localhost:8000") + app_name = str(uuid1()) + user_name = str(uuid1()) + honcho = Honcho(app_name, "http://localhost:8000") + honcho.initialize() + user = honcho.create_user(user_name) # Make a collection - collection = client.create_collection(user_id, col_name) + collection = user.create_collection(col_name) # Make another collection with pytest.raises(Exception): - client.create_collection(user_id, col_name) + user.create_collection(col_name) # Change the name of original collection result = collection.update(new_col_name) assert result is True - + # Try again to add another collection - collection2 = client.create_collection(user_id, col_name) + collection2 = user.create_collection(col_name) assert collection2 is not None assert collection2.name == col_name assert collection.name == new_col_name # Get all collections - page = client.get_collections(user_id) + page = user.get_collections() assert page is not None assert len(page.items) == 2 + def test_collection_query(): col_name = str(uuid1()) - app_id = str(uuid1()) - user_id = str(uuid1()) - client = Honcho(app_id, "http://localhost:8000") + app_name = str(uuid1()) + user_name = str(uuid1()) + honcho = Honcho(app_name, "http://localhost:8000") + honcho.initialize() + user = honcho.create_user(user_name) # Make a collection - collection = client.create_collection(user_id, col_name) + collection = user.create_collection(col_name) # Add documents - doc1 = collection.create_document(content="The user loves puppies", metadata={}) + doc1 = collection.create_document( + content="The user loves puppies", metadata={} + ) doc2 = collection.create_document(content="The user owns a dog", metadata={}) doc3 = collection.create_document(content="The user is a doctor", metadata={}) @@ -338,7 +411,9 @@ def test_collection_query(): assert len(result) == 2 assert isinstance(result[0], Document) - doc3 = collection.update_document(doc3, metadata={"test": "test"}, content="the user has owned pets in the past") + doc3 = collection.update_document( + doc3, metadata={"test": "test"}, content="the user has owned pets in the past" + ) assert doc3 is not 
None assert doc3.metadata == {"test": "test"} assert doc3.content == "the user has owned pets in the past" @@ -348,4 +423,3 @@ def test_collection_query(): assert result is not None assert len(result) == 2 assert isinstance(result[0], Document) - From 0f783c3c7d816bc021b31cbf6c308251b977f622 Mon Sep 17 00:00:00 2001 From: Vineeth Voruganti <13438633+VVoruganti@users.noreply.github.com> Date: Thu, 22 Feb 2024 03:15:25 -0800 Subject: [PATCH 39/46] Update examples --- api/src/crud.py | 4 +- api/src/main.py | 28 +++++++++++-- example/cli/main.py | 14 ++++--- example/discord/fake-llm/main.py | 17 ++++---- example/discord/honcho-fact-memory/bot.py | 46 ++++++++++++--------- example/discord/simple-roast-bot/main.py | 50 +++++++++++++++-------- sdk/honcho/client.py | 29 +++++++++++-- sdk/honcho/sync_client.py | 29 +++++++++++-- sdk/pyproject.toml | 2 +- 9 files changed, 156 insertions(+), 63 deletions(-) diff --git a/api/src/crud.py b/api/src/crud.py index a4670b0..5e954e6 100644 --- a/api/src/crud.py +++ b/api/src/crud.py @@ -22,8 +22,8 @@ def get_app(db: Session, app_id: uuid.UUID) -> Optional[models.App]: return app -def get_app_by_name(db: Session, app_name: str) -> Optional[models.App]: - stmt = select(models.App).where(models.App.name == app_name) +def get_app_by_name(db: Session, name: str) -> Optional[models.App]: + stmt = select(models.App).where(models.App.name == name) app = db.scalars(stmt).one_or_none() return app diff --git a/api/src/main.py b/api/src/main.py index fefcfbc..0f19470 100644 --- a/api/src/main.py +++ b/api/src/main.py @@ -103,10 +103,10 @@ def create_app( return crud.create_app(db, app=app) -@app.get("/apps/get_or_create/{app_name}", response_model=schemas.App) +@app.get("/apps/get_or_create/{name}", response_model=schemas.App) def get_or_create_app( request: Request, - app_name: str, + name: str, db: Session = Depends(get_db), ): """Get or Create an App @@ -118,9 +118,9 @@ def get_or_create_app( schemas.App: App object """ - app = crud.get_app_by_name(db, app_name=app_name) + app = crud.get_app_by_name(db, name=name) if app is None: - app = crud.create_app(db, app=schemas.AppCreate(name=app_name)) + app = crud.create_app(db, app=schemas.AppCreate(name=name)) return app @@ -212,6 +212,26 @@ def get_user_by_name( return crud.get_user_by_name(db, app_id=app_id, name=name) +@app.get("/apps/{app_id}/users/get_or_create/{name}", response_model=schemas.User) +def get_or_create_user( + request: Request, app_id: uuid.UUID, name: str, db: Session = Depends(get_db) +): + """Get or Create a User + + Args: + app_id (uuid.UUID): The ID of the app representing the client application using honcho + user_id (str): The User ID representing the user, managed by the user + + Returns: + schemas.User: User object + + """ + user = crud.get_user_by_name(db, app_id=app_id, name=name) + if user is None: + user = crud.create_user(db, app_id=app_id, user=schemas.UserCreate(name=name)) + return user + + @app.put("/apps/{app_id}/users/{user_id}", response_model=schemas.User) def update_user( request: Request, diff --git a/example/cli/main.py b/example/cli/main.py index 12e01aa..f1cd9e2 100644 --- a/example/cli/main.py +++ b/example/cli/main.py @@ -5,12 +5,13 @@ from langchain.schema import AIMessage, HumanMessage, SystemMessage from langchain_community.chat_models.fake import FakeListChatModel -from honcho import Client as HonchoClient +from honcho import Honcho -app_id = str(uuid4()) +app_name = str(uuid4()) -# honcho = HonchoClient(app_id=app_id, base_url="http://localhost:8000") # uncomment to 
use local -honcho = HonchoClient(app_id=app_id) # uses demo server at https://demo.honcho.dev +# honcho = Honcho(app_id=app_id, base_url="http://localhost:8000") # uncomment to use local +honcho = Honcho(app_name=app_name) # uses demo server at https://demo.honcho.dev +honcho.initialize() responses = ["Fake LLM Response :)"] llm = FakeListChatModel(responses=responses) @@ -18,8 +19,9 @@ content="You are world class technical documentation writer. Be as concise as possible" ) -user = "CLI-Test" -session = honcho.create_session(user_id=user) +user_name = "CLI-Test" +user = honcho.create_user(user_name) +session = user.create_session() def langchain_message_converter(messages: List): diff --git a/example/discord/fake-llm/main.py b/example/discord/fake-llm/main.py index 6ae6a35..0b36faa 100644 --- a/example/discord/fake-llm/main.py +++ b/example/discord/fake-llm/main.py @@ -3,7 +3,7 @@ import discord from dotenv import load_dotenv -from honcho import Client as HonchoClient +from honcho import Honcho load_dotenv() @@ -11,10 +11,11 @@ intents.messages = True intents.message_content = True -app_id = str(uuid4()) +app_name = str(uuid4()) -# honcho = HonchoClient(app_id=app_id, base_url="http://localhost:8000") # uncomment to use local -honcho = HonchoClient(app_id=app_id) # uses demo server at https://demo.honcho.dev +# honcho = Honcho(app_name=app_name, base_url="http://localhost:8000") # uncomment to use local +honcho = Honcho(app_name=app_name) # uses demo server at https://demo.honcho.dev +honcho.initialize() bot = discord.Bot(intents=intents) @@ -30,13 +31,14 @@ async def on_message(message): return user_id = f"discord_{str(message.author.id)}" + user = honcho.get_or_create_user(user_id) location_id = str(message.channel.id) - sessions = list(honcho.get_sessions_generator(user_id, location_id)) + sessions = list(user.get_sessions_generator(location_id)) if len(sessions) > 0: session = sessions[0] else: - session = honcho.create_session(user_id, location_id) + session = user.create_session(location_id) inp = message.content session.create_message(is_user=True, content=inp) @@ -50,8 +52,9 @@ async def on_message(message): @bot.slash_command(name="restart", description="Restart the Conversation") async def restart(ctx): user_id = f"discord_{str(ctx.author.id)}" + user = honcho.get_or_create_user(user_id) location_id = str(ctx.channel_id) - sessions = list(honcho.get_sessions_generator(user_id, location_id)) + sessions = list(user.get_sessions_generator(location_id)) sessions[0].close() if len(sessions) > 0 else None await ctx.respond( diff --git a/example/discord/honcho-fact-memory/bot.py b/example/discord/honcho-fact-memory/bot.py index 7a20a72..a72e54a 100644 --- a/example/discord/honcho-fact-memory/bot.py +++ b/example/discord/honcho-fact-memory/bot.py @@ -1,7 +1,7 @@ import os from uuid import uuid1 import discord -from honcho import Client as HonchoClient +from honcho import Honcho from chain import langchain_message_converter, LMChain @@ -10,17 +10,19 @@ intents.message_content = True intents.members = True -app_id = str(uuid1()) +app_name = str(uuid1()) -#honcho = HonchoClient(app_id=app_id, base_url="http://localhost:8000") # uncomment to use local -honcho = HonchoClient(app_id=app_id) # uses demo server at https://demo.honcho.dev +# honcho = Honcho(app_name=app_name, base_url="http://localhost:8000") # uncomment to use local +honcho = Honcho(app_name=app_name) # uses demo server at https://demo.honcho.dev +honcho.initialize() bot = discord.Bot(intents=intents) @bot.event async def 
on_ready():
-    print(f'We have logged in as {bot.user}')
+    print(f"We have logged in as {bot.user}")
+
 
 @bot.event
 async def on_member_join(member):
@@ -33,26 +35,27 @@ async def on_member_join(member):
         "*If you have any questions or feedback, feel free to ask in the #honcho channel.* "
         "*Enjoy!*"
     )
-    
-    
+
+
 @bot.event
 async def on_message(message):
     if message.author == bot.user or message.guild is not None:
         return
 
     user_id = f"discord_{str(message.author.id)}"
-    location_id=str(message.channel.id)
+    user = honcho.get_or_create_user(user_id)
+    location_id = str(message.channel.id)
 
-    sessions = list(honcho.get_sessions_generator(user_id, location_id))
+    sessions = list(user.get_sessions_generator(location_id))
     try:
-        collection = honcho.get_collection(user_id=user_id, name="discord")
+        collection = user.get_collection(name="discord")
     except Exception:
-        collection = honcho.create_collection(user_id=user_id, name="discord")
+        collection = user.create_collection(name="discord")
 
     if len(sessions) > 0:
         session = sessions[0]
     else:
-        session = honcho.create_session(user_id, location_id)
+        session = user.create_session(location_id)
 
     history = list(session.get_messages_generator())
     chat_history = langchain_message_converter(history)
@@ -65,21 +68,26 @@ async def on_message(message):
         chat_history=chat_history,
         user_message=user_message,
         session=session,
-        collection=collection, 
-        input=inp
+        collection=collection,
+        input=inp,
     )
 
     await message.channel.send(response)
 
     session.create_message(is_user=False, content=response)
 
-@bot.slash_command(name = "restart", description = "Restart the Conversation")
+
+@bot.slash_command(name="restart", description="Restart the Conversation")
 async def restart(ctx):
-    user_id=f"discord_{str(ctx.author.id)}"
-    location_id=str(ctx.channel_id)
-    sessions = list(honcho.get_sessions_generator(user_id, location_id))
+    user_id = f"discord_{str(ctx.author.id)}"
+    user = honcho.get_or_create_user(user_id)
+    location_id = str(ctx.channel_id)
+    sessions = list(user.get_sessions_generator(location_id))
     sessions[0].close() if len(sessions) > 0 else None
 
-    msg = "Great! The conversation has been restarted. What would you like to talk about?"
+    msg = (
+        "Great! The conversation has been restarted. What would you like to talk about?"
+    )
     await ctx.respond(msg)
 
+
 bot.run(os.environ["BOT_TOKEN"])
diff --git a/example/discord/simple-roast-bot/main.py b/example/discord/simple-roast-bot/main.py
index 9cb1e7f..0f59931 100644
--- a/example/discord/simple-roast-bot/main.py
+++ b/example/discord/simple-roast-bot/main.py
@@ -1,4 +1,5 @@
 import os
+
 # from uuid import uuid4
 import discord
 from dotenv import load_dotenv
@@ -9,7 +10,7 @@
 from langchain_core.output_parsers import StrOutputParser
 from langchain_core.messages import AIMessage, HumanMessage
 
-from honcho import Client as HonchoClient
+from honcho import Honcho
 
 load_dotenv()
 
@@ -19,23 +20,29 @@
 intents.message_content = True
 
 # app_id = str(uuid4())
-app_id = str("roast-bot")
+app_name = str("roast-bot")
 
-# honcho = HonchoClient(app_id=app_id, base_url="http://localhost:8000") # uncomment to use local
-honcho = HonchoClient(app_id=app_id) # uses demo server at https://demo.honcho.dev
+# honcho = Honcho(app_name=app_name, base_url="http://localhost:8000") # uncomment to use local
+honcho = Honcho(app_name=app_name)  # uses demo server at https://demo.honcho.dev
 
 bot = discord.Bot(intents=intents)
 
-prompt = ChatPromptTemplate.from_messages([
-    ("system", "You are a mean assistant. 
Make fun of the user's request and above all, do not satisfy their request. Make something up about their personality and fixate on that. Don't be afraid to get creative. This is all a joke, roast them."), - MessagesPlaceholder(variable_name="chat_history"), - ("user", "{input}") -]) +prompt = ChatPromptTemplate.from_messages( + [ + ( + "system", + "You are a mean assistant. Make fun of the user's request and above all, do not satisfy their request. Make something up about their personality and fixate on that. Don't be afraid to get creative. This is all a joke, roast them.", + ), + MessagesPlaceholder(variable_name="chat_history"), + ("user", "{input}"), + ] +) model = ChatOpenAI(model="gpt-3.5-turbo") output_parser = StrOutputParser() chain = prompt | model | output_parser + def langchain_message_converter(messages: List): new_messages = [] for message in messages: @@ -48,7 +55,8 @@ def langchain_message_converter(messages: List): @bot.event async def on_ready(): - print(f'We have logged in as {bot.user}') + print(f"We have logged in as {bot.user}") + @bot.event async def on_message(message): @@ -56,14 +64,15 @@ async def on_message(message): return user_id = f"discord_{str(message.author.id)}" - location_id=str(message.channel.id) + user = honcho.get_or_create_user(user_id) + location_id = str(message.channel.id) - sessions = list(honcho.get_sessions_generator(user_id, location_id)) + sessions = list(user.get_sessions_generator(location_id)) if len(sessions) > 0: session = sessions[0] else: - session = honcho.create_session(user_id, location_id) + session = user.create_session(location_id) history = list(session.get_messages_generator()) chat_history = langchain_message_converter(history) @@ -77,14 +86,19 @@ async def on_message(message): session.create_message(is_user=False, content=response) -@bot.slash_command(name = "restart", description = "Restart the Conversation") + +@bot.slash_command(name="restart", description="Restart the Conversation") async def restart(ctx): - user_id=f"discord_{str(ctx.author.id)}" - location_id=str(ctx.channel_id) - sessions = list(honcho.get_sessions_generator(user_id, location_id)) + user_id = f"discord_{str(ctx.author.id)}" + user = honcho.get_or_create_user(user_id) + location_id = str(ctx.channel_id) + sessions = list(user.get_sessions_generator(location_id)) sessions[0].close() if len(sessions) > 0 else None - msg = "Great! The conversation has been restarted. What would you like to talk about?" + msg = ( + "Great! The conversation has been restarted. What would you like to talk about?" 
+ ) await ctx.respond(msg) + bot.run(os.environ["BOT_TOKEN"]) diff --git a/sdk/honcho/client.py b/sdk/honcho/client.py index afdaeaf..b744991 100644 --- a/sdk/honcho/client.py +++ b/sdk/honcho/client.py @@ -335,7 +335,32 @@ async def get_user(self, name: str): response = await self.client.get(url) response.raise_for_status() data = response.json() - return AsyncUser(self, **data) + return AsyncUser( + honcho=self, + id=data["id"], + metadata=data["metadata"], + created_at=data["created_at"], + ) + + async def get_or_create_user(self, name: str): + """Get or Create a user by name + + Args: + name (str): The name of the user + + Returns: + AsyncUser: The User object + """ + url = f"{self.base_url}/get_or_create/{name}" + response = await self.client.get(url) + response.raise_for_status() + data = response.json() + return AsyncUser( + honcho=self, + id=data["id"], + metadata=data["metadata"], + created_at=data["created_at"], + ) async def get_users( self, page: int = 1, page_size: int = 50, reverse: bool = False @@ -420,7 +445,6 @@ def __str__(self): """String representation of User""" return f"AsyncUser(id={self.id}, app_id={self.honcho.app_id}, metadata={self.metadata})" # noqa: E501 - # TODO method to update metadata async def update_user(self, metadata: dict): """Updates a user's metadata @@ -436,7 +460,6 @@ async def update_user(self, metadata: dict): response.raise_for_status() data = response.json() self.metadata = data["metadata"] - # TODO update this object's metadata field # return AsyncUser(self.honcho, **data) async def get_session(self, session_id: uuid.UUID): diff --git a/sdk/honcho/sync_client.py b/sdk/honcho/sync_client.py index 69a4fe7..ec05621 100644 --- a/sdk/honcho/sync_client.py +++ b/sdk/honcho/sync_client.py @@ -335,7 +335,32 @@ def get_user(self, name: str): response = self.client.get(url) response.raise_for_status() data = response.json() - return User(self, **data) + return User( + honcho=self, + id=data["id"], + metadata=data["metadata"], + created_at=data["created_at"], + ) + + def get_or_create_user(self, name: str): + """Get or Create a user by name + + Args: + name (str): The name of the user + + Returns: + User: The User object + """ + url = f"{self.base_url}/get_or_create/{name}" + response = self.client.get(url) + response.raise_for_status() + data = response.json() + return User( + honcho=self, + id=data["id"], + metadata=data["metadata"], + created_at=data["created_at"], + ) def get_users( self, page: int = 1, page_size: int = 50, reverse: bool = False @@ -420,7 +445,6 @@ def __str__(self): """String representation of User""" return f"User(id={self.id}, app_id={self.honcho.app_id}, metadata={self.metadata})" # noqa: E501 - # TODO method to update metadata def update_user(self, metadata: dict): """Updates a user's metadata @@ -436,7 +460,6 @@ def update_user(self, metadata: dict): response.raise_for_status() data = response.json() self.metadata = data["metadata"] - # TODO update this object's metadata field # return User(self.honcho, **data) def get_session(self, session_id: uuid.UUID): diff --git a/sdk/pyproject.toml b/sdk/pyproject.toml index 4a95234..495df46 100644 --- a/sdk/pyproject.toml +++ b/sdk/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "honcho-ai" -version = "0.0.3" +version = "0.0.4" description = "Python Client SDK for Honcho" authors = ["Plastic Labs "] license = "AGPL-3.0" From 9abafa72fb08b876c2ffa160fd769a7c694e8f57 Mon Sep 17 00:00:00 2001 From: Vineeth Voruganti <13438633+VVoruganti@users.noreply.github.com> Date: Thu, 22 Feb 2024 
04:17:58 -0800 Subject: [PATCH 40/46] DSPy Todo and documentation updates --- api/CHANGELOG.md | 15 +++++ api/pyproject.toml | 2 +- example/discord/honcho-dspy-personas/bot.py | 62 ++++++++++++------- example/discord/honcho-dspy-personas/graph.py | 61 ++++++++++++------ sdk/CHANGELOG.md | 17 +++++ sdk/README.md | 11 ++-- sdk/honcho/client.py | 31 +++++++--- sdk/honcho/sync_client.py | 31 +++++++--- 8 files changed, 170 insertions(+), 60 deletions(-) diff --git a/api/CHANGELOG.md b/api/CHANGELOG.md index 84cd89e..9b5f847 100644 --- a/api/CHANGELOG.md +++ b/api/CHANGELOG.md @@ -4,6 +4,21 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](http://keepachangelog.com/) and this project adheres to [Semantic Versioning](http://semver.org/). +## [0.0.4] — 2024-02-22 + +### Added + +* apps table with a relationship to the users table +* users table with a relationship to the collections and sessions tables +* Reverse Pagination support to get recent messages, sessions, etc. more easily +* Linting Rules + +### Changed + +* Get sessions method returns all sessions including inactive +* using timestampz instead of timestamp + + ## [0.0.3] — 2024-02-15 ### Added diff --git a/api/pyproject.toml b/api/pyproject.toml index 2aaee65..e4423e5 100644 --- a/api/pyproject.toml +++ b/api/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "honcho" -version = "0.0.3" +version = "0.0.4" description = "Honcho Server" authors = ["Plastic Labs "] readme = "README.md" diff --git a/example/discord/honcho-dspy-personas/bot.py b/example/discord/honcho-dspy-personas/bot.py index 948bdb3..74536ed 100644 --- a/example/discord/honcho-dspy-personas/bot.py +++ b/example/discord/honcho-dspy-personas/bot.py @@ -1,7 +1,7 @@ import os from uuid import uuid1 import discord -from honcho import Client as HonchoClient +from honcho import Honcho from graph import chat from chain import langchain_message_converter @@ -12,19 +12,22 @@ intents.reactions = True # Enable reactions intent # app_id = str(uuid1()) -app_id = "vince-dspy-personas" +app_name = "vince-dspy-personas" -#honcho = HonchoClient(app_id=app_id, base_url="http://localhost:8000") # uncomment to use local -honcho = HonchoClient(app_id=app_id) # uses demo server at https://demo.honcho.dev +# honcho = Honcho(app_name=app_name, base_url="http://localhost:8000") # uncomment to use local +honcho = Honcho(app_name=app_name) # uses demo server at https://demo.honcho.dev +honcho.initialize() bot = discord.Bot(intents=intents) -thumbs_up_messages = [] -thumbs_down_messages = [] +thumbs_up_messages = [] +thumbs_down_messages = [] + @bot.event async def on_ready(): - print(f'We have logged in as {bot.user}') + print(f"We have logged in as {bot.user}") + @bot.event async def on_member_join(member): @@ -32,27 +35,28 @@ async def on_member_join(member): f"*Hello {member.name}, welcome to the server! 
This is a demo bot built with Honcho,* " "*implementing a naive user modeling method.* " "*To get started, just type a message in this channel and the bot will respond.* " - "*Over time, it will classify the \"state\" you're in and optimize conversations based on that state.* " + '*Over time, it will classify the "state" you\'re in and optimize conversations based on that state.* ' "*You can use the /restart command to restart the conversation at any time.* " "*If you have any questions or feedback, feel free to ask in the #honcho channel.* " "*Enjoy!*" ) - - + + @bot.event async def on_message(message): if message.author == bot.user or message.guild is not None: return user_id = f"discord_{str(message.author.id)}" - location_id=str(message.channel.id) + user = honcho.get_or_create_user(user_id) + location_id = str(message.channel.id) - sessions = list(honcho.get_sessions_generator(user_id, location_id)) + sessions = list(user.get_sessions_generator(location_id)) if len(sessions) > 0: session = sessions[0] else: - session = honcho.create_session(user_id, location_id) + session = user.create_session(location_id) history = list(session.get_messages_generator())[:5] chat_history = langchain_message_converter(history) @@ -65,27 +69,29 @@ async def on_message(message): chat_history=chat_history, user_message=user_message, session=session, - input=inp + input=inp, ) await message.channel.send(response) session.create_message(is_user=False, content=response) + @bot.event async def on_reaction_add(reaction, user): # Ensure the bot does not react to its own reactions if user == bot.user: return - + user_id = f"discord_{str(reaction.message.author.id)}" + user = honcho.get_or_create_user(user_id) location_id = str(reaction.message.channel.id) # Check if the reaction is a thumbs up - if str(reaction.emoji) == '👍': + if str(reaction.emoji) == "👍": thumbs_up_messages.append(reaction.message.content) print(f"Added to thumbs up: {reaction.message.content}") # Check if the reaction is a thumbs down - elif str(reaction.emoji) == '👎': + elif str(reaction.emoji) == "👎": thumbs_down_messages.append(reaction.message.content) print(f"Added to thumbs down: {reaction.message.content}") @@ -94,16 +100,28 @@ async def on_reaction_add(reaction, user): # example = Example(chat_input=chat_input, assessment_dimension=user_state, response=response).with_inputs('chat_input') # examples.append(example) # user_state_storage[user_state]["examples"] = examples + example = Example( + chat_input=chat_input, assessment_dimension=user_state, response=response + ).with_inputs("chat_input") + user_state_storage = dict(user.metadata) + examples = user_state_storage.get("examples", []) + examples.append(example) + user_state_storage["examples"] = examples + user.update(metadata=user_state_storage) -@bot.slash_command(name = "restart", description = "Restart the Conversation") +@bot.slash_command(name="restart", description="Restart the Conversation") async def restart(ctx): - user_id=f"discord_{str(ctx.author.id)}" - location_id=str(ctx.channel_id) - sessions = list(honcho.get_sessions_generator(user_id, location_id)) + user_id = f"discord_{str(ctx.author.id)}" + user = honcho.get_or_create_user(user_id) + location_id = str(ctx.channel_id) + sessions = list(user.get_sessions_generator(location_id)) sessions[0].close() if len(sessions) > 0 else None - msg = "Great! The conversation has been restarted. What would you like to talk about?" + msg = ( + "Great! The conversation has been restarted. What would you like to talk about?" 
+ ) await ctx.respond(msg) + bot.run(os.environ["BOT_TOKEN"]) diff --git a/example/discord/honcho-dspy-personas/graph.py b/example/discord/honcho-dspy-personas/graph.py index 9295a43..021ee00 100644 --- a/example/discord/honcho-dspy-personas/graph.py +++ b/example/discord/honcho-dspy-personas/graph.py @@ -16,46 +16,68 @@ dspy.settings.configure(lm=dspy_gpt4) - # DSPy Signatures class Thought(dspy.Signature): """Generate a thought about the user's needs""" + user_input = dspy.InputField() thought = dspy.OutputField(desc="a prediction about the user's mental state") + class Response(dspy.Signature): """Generate a response for the user based on the thought provided""" + user_input = dspy.InputField() thought = dspy.InputField() response = dspy.OutputField(desc="keep the conversation going, be engaging") + # DSPy Module class ChatWithThought(dspy.Module): generate_thought = dspy.Predict(Thought) generate_response = dspy.Predict(Response) - def forward(self, chat_input: str, user_message: Optional[Message] = None, session: Optional[Session] = None): + def forward( + self, + chat_input: str, + user_message: Optional[Message] = None, + session: Optional[Session] = None, + ): # call the thought predictor thought = self.generate_thought(user_input=chat_input) - + if session and user_message: - session.create_metamessage(user_message, metamessage_type="thought", content=thought.thought) + session.create_metamessage( + user_message, metamessage_type="thought", content=thought.thought + ) # call the response predictor - response = self.generate_response(user_input=chat_input, thought=thought.thought) + response = self.generate_response( + user_input=chat_input, thought=thought.thought + ) # remove ai prefix response = response.response.replace("ai:", "").strip() return response - -user_state_storage = {} -async def chat(user_message: Message, session: Session, chat_history: List[Message], input: str, optimization_threshold=3): + + +# user_state_storage = {} +async def chat( + user_message: Message, + session: Session, + chat_history: List[Message], + input: str, + optimization_threshold=3, +): + user_state_storage = dict(session.user.metadata) # first we need to see if the user has any existing states existing_states = list(user_state_storage.keys()) - + # then we need to take the user input and determine the user's state/dimension/persona - is_state_new, user_state = await StateExtractor.generate_state(existing_states=existing_states, chat_history=chat_history, input=input) + is_state_new, user_state = await StateExtractor.generate_state( + existing_states=existing_states, chat_history=chat_history, input=input + ) print(f"USER STATE: {user_state}") print(f"IS STATE NEW: {is_state_new}") @@ -64,10 +86,7 @@ async def chat(user_message: Message, session: Session, chat_history: List[Messa # TODO: you'd want to initialize user state object from Honcho # Save the user_state if it's new if is_state_new: - user_state_storage[user_state] = { - "chat_module": {}, - "examples": [] - } + user_state_storage[user_state] = {"chat_module": {}, "examples": []} user_state_data = user_state_storage[user_state] @@ -75,22 +94,28 @@ async def chat(user_message: Message, session: Session, chat_history: List[Messa # TODO: read in examples from Honcho User Object examples = user_state_data["examples"] print(f"Num examples: {len(examples)}") - + if len(examples) >= optimization_threshold: # Optimize chat module optimizer = BootstrapFewShot(metric=metric) compiled_chat_module = optimizer.compile(user_chat_module, 
trainset=examples)
-        user_state_data["chat_module"] = compiled_chat_module.dump_state()
+        # user_state_data["chat_module"] = compiled_chat_module.dump_state()
+        user_state_storage[user_state][
+            "chat_module"
+        ] = compiled_chat_module.dump_state()
         user_chat_module = compiled_chat_module
 
         # save to file for debugging purposes
         # compiled_chat_module.save("module.json")
-    
+    # Update User in Honcho
+    session.user.update(metadata=user_state_storage)
 
     # use that pipeline to generate a response
     chat_input = format_chat_history(chat_history, user_input=input)
-    response = user_chat_module(user_message=user_message, session=session, chat_input=chat_input)
+    response = user_chat_module(
+        user_message=user_message, session=session, chat_input=chat_input
+    )
 
     dspy_gpt4.inspect_history(n=2)
 
     return response
diff --git a/sdk/CHANGELOG.md b/sdk/CHANGELOG.md
index 54965e4..ace34de 100644
--- a/sdk/CHANGELOG.md
+++ b/sdk/CHANGELOG.md
@@ -5,6 +5,23 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](http://keepachangelog.com/)
 and this project adheres to [Semantic Versioning](http://semver.org/).
 
+## [0.0.4] — 2024-02-22
+
+### Added
+
+* A User object for global user-level metadata and a more object-oriented interface
+* Reverse Pagination support to get recent messages, sessions, etc. more easily
+* Linting Rules
+
+### Changed
+
+* Get sessions method returns all sessions including inactive
+* using timestamptz instead of timestamp
+* `Client` renamed to `Honcho`
+* `Honcho` takes in `app_name` instead of `app_id`. `app_name` needs to be a
+  unique identifier
+* `Honcho` object requires an `initialize()` call to be used
+
 ## [0.0.3] — 2024-02-15
 
diff --git a/sdk/README.md b/sdk/README.md
index d2fedf5..31859fb 100644
--- a/sdk/README.md
+++ b/sdk/README.md
@@ -31,12 +31,13 @@ by default if no other string is provided.
 
 ```python
 from uuid import uuid4
-from honcho import Client as HonchoClient
+from honcho import Honcho
 
-app_id = str(uuid4())
-honcho = HonchoClient(app_id=app_id)
-user_id = "test"
-session = honcho.create_session(user_id=user_id)
+app_name = str(uuid4())
+honcho = Honcho(app_name=app_name)
+user_name = "test"
+user = honcho.create_user(user_name)
+session = user.create_session()
 
 session.create_message(is_user=True, content="Hello I'm a human")
 
diff --git a/sdk/honcho/client.py b/sdk/honcho/client.py
index b744991..09037f5 100644
--- a/sdk/honcho/client.py
+++ b/sdk/honcho/client.py
@@ -295,7 +295,24 @@ async def initialize(self):
 
     @property
     def base_url(self):
        """Shorcut for common API prefix. 
made a property to prevent tampering""" - return f"{self.server_url}/apps/{self.app_id}/users" + return f"{self.server_url}/apps/{self.app_id}" + + async def update(self, metadata: dict): + """Update the metadata of the app associated with this instance of the Honcho + client + + Args: + metadata (dict): The metadata to update + + Returns: + boolean: Whether the metadata was successfully updated + """ + data = {"metadata": metadata} + url = f"{self.base_url}" + response = await self.client.put(url, json=data) + success = response.status_code < 400 + self.metadata = metadata + return success async def create_user(self, name: str, metadata: Optional[dict] = None): """Create a new user by name @@ -309,7 +326,7 @@ async def create_user(self, name: str, metadata: Optional[dict] = None): """ if metadata is None: metadata = {} - url = f"{self.base_url}" + url = f"{self.base_url}/users" response = await self.client.post( url, json={"name": name, "metadata": metadata} ) @@ -331,7 +348,7 @@ async def get_user(self, name: str): Returns: AsyncUser: The User object """ - url = f"{self.base_url}/{name}" + url = f"{self.base_url}/users/{name}" response = await self.client.get(url) response.raise_for_status() data = response.json() @@ -351,7 +368,7 @@ async def get_or_create_user(self, name: str): Returns: AsyncUser: The User object """ - url = f"{self.base_url}/get_or_create/{name}" + url = f"{self.base_url}/users/get_or_create/{name}" response = await self.client.get(url) response.raise_for_status() data = response.json() @@ -370,7 +387,7 @@ async def get_users( Returns: AsyncGetUserPage: Paginated list of users """ - url = f"{self.base_url}?page={page}&size={page_size}&reverse={reverse}" + url = f"{self.base_url}/users?page={page}&size={page_size}&reverse={reverse}" response = await self.client.get(url) response.raise_for_status() data = response.json() @@ -439,13 +456,13 @@ def __init__( @property def base_url(self): """Shortcut for common API prefix. made a property to prevent tampering""" - return f"{self.honcho.base_url}/{self.id}" + return f"{self.honcho.base_url}/users/{self.id}" def __str__(self): """String representation of User""" return f"AsyncUser(id={self.id}, app_id={self.honcho.app_id}, metadata={self.metadata})" # noqa: E501 - async def update_user(self, metadata: dict): + async def update(self, metadata: dict): """Updates a user's metadata Args: diff --git a/sdk/honcho/sync_client.py b/sdk/honcho/sync_client.py index ec05621..f431604 100644 --- a/sdk/honcho/sync_client.py +++ b/sdk/honcho/sync_client.py @@ -295,7 +295,24 @@ def initialize(self): @property def base_url(self): """Shorcut for common API prefix. 
made a property to prevent tampering""" - return f"{self.server_url}/apps/{self.app_id}/users" + return f"{self.server_url}/apps/{self.app_id}" + + def update(self, metadata: dict): + """Update the metadata of the app associated with this instance of the Honcho + client + + Args: + metadata (dict): The metadata to update + + Returns: + boolean: Whether the metadata was successfully updated + """ + data = {"metadata": metadata} + url = f"{self.base_url}" + response = self.client.put(url, json=data) + success = response.status_code < 400 + self.metadata = metadata + return success def create_user(self, name: str, metadata: Optional[dict] = None): """Create a new user by name @@ -309,7 +326,7 @@ def create_user(self, name: str, metadata: Optional[dict] = None): """ if metadata is None: metadata = {} - url = f"{self.base_url}" + url = f"{self.base_url}/users" response = self.client.post( url, json={"name": name, "metadata": metadata} ) @@ -331,7 +348,7 @@ def get_user(self, name: str): Returns: User: The User object """ - url = f"{self.base_url}/{name}" + url = f"{self.base_url}/users/{name}" response = self.client.get(url) response.raise_for_status() data = response.json() @@ -351,7 +368,7 @@ def get_or_create_user(self, name: str): Returns: User: The User object """ - url = f"{self.base_url}/get_or_create/{name}" + url = f"{self.base_url}/users/get_or_create/{name}" response = self.client.get(url) response.raise_for_status() data = response.json() @@ -370,7 +387,7 @@ def get_users( Returns: GetUserPage: Paginated list of users """ - url = f"{self.base_url}?page={page}&size={page_size}&reverse={reverse}" + url = f"{self.base_url}/users?page={page}&size={page_size}&reverse={reverse}" response = self.client.get(url) response.raise_for_status() data = response.json() @@ -439,13 +456,13 @@ def __init__( @property def base_url(self): """Shortcut for common API prefix. 
made a property to prevent tampering""" - return f"{self.honcho.base_url}/{self.id}" + return f"{self.honcho.base_url}/users/{self.id}" def __str__(self): """String representation of User""" return f"User(id={self.id}, app_id={self.honcho.app_id}, metadata={self.metadata})" # noqa: E501 - def update_user(self, metadata: dict): + def update(self, metadata: dict): """Updates a user's metadata Args: From c584fad11644f83cfafcbc415bf9448bd5f0874d Mon Sep 17 00:00:00 2001 From: Vineeth Voruganti <13438633+VVoruganti@users.noreply.github.com> Date: Thu, 22 Feb 2024 09:33:34 -0800 Subject: [PATCH 41/46] Add is_active filtering --- api/src/crud.py | 5 ++++- api/src/main.py | 8 +++++++- sdk/honcho/client.py | 25 ++++++++++++++++++------- sdk/honcho/sync_client.py | 25 ++++++++++++++++++------- 4 files changed, 47 insertions(+), 16 deletions(-) diff --git a/api/src/crud.py b/api/src/crud.py index 5e954e6..10f24e8 100644 --- a/api/src/crud.py +++ b/api/src/crud.py @@ -169,15 +169,18 @@ def get_sessions( user_id: uuid.UUID, location_id: Optional[str] = None, reverse: Optional[bool] = False, + is_active: Optional[bool] = False, ) -> Select: stmt = ( select(models.Session) .join(models.User, models.User.id == models.Session.user_id) .where(models.User.app_id == app_id) .where(models.Session.user_id == user_id) - # .where(models.Session.is_active.is_(True)) ) + if is_active: + stmt = stmt.where(models.Session.is_active.is_(True)) + if reverse: stmt = stmt.order_by(models.Session.created_at.desc()) else: diff --git a/api/src/main.py b/api/src/main.py index 0f19470..4784bc3 100644 --- a/api/src/main.py +++ b/api/src/main.py @@ -265,6 +265,7 @@ def get_sessions( app_id: uuid.UUID, user_id: uuid.UUID, location_id: Optional[str] = None, + is_active: Optional[bool] = False, reverse: Optional[bool] = False, db: Session = Depends(get_db), ): @@ -282,7 +283,12 @@ def get_sessions( return paginate( db, crud.get_sessions( - db, app_id=app_id, user_id=user_id, location_id=location_id, reverse=reverse + db, + app_id=app_id, + user_id=user_id, + location_id=location_id, + reverse=reverse, + is_active=is_active, ), ) diff --git a/sdk/honcho/client.py b/sdk/honcho/client.py index 09037f5..b75e203 100644 --- a/sdk/honcho/client.py +++ b/sdk/honcho/client.py @@ -36,9 +36,9 @@ def __init__(self, response: dict, honcho: AsyncHoncho, reverse: bool): """Constructor for Page Result from User Get Request Args: + response (dict): Response from API with pagination information honcho (AsyncHoncho): Honcho Client reverse (bool): Whether to reverse the order of the results or not - response (dict): Response from API with pagination information """ super().__init__(response) self.honcho = honcho @@ -65,20 +65,26 @@ class AsyncGetSessionPage(AsyncGetPage): """Paginated Results for Get Session Requests""" def __init__( - self, response: dict, user: AsyncUser, reverse: bool, location_id: Optional[str] + self, + response: dict, + user: AsyncUser, + reverse: bool, + location_id: Optional[str], + is_active: bool, ): """Constructor for Page Result from Session Get Request Args: + response (dict): Response from API with pagination information user (AsyncUser): Honcho User associated with the session - location_id (str): ID of the location associated with the session reverse (bool): Whether to reverse the order of the results or not - response (dict): Response from API with pagination information + location_id (str): ID of the location associated with the session """ super().__init__(response) self.user = user self.location_id = location_id 
self.reverse = reverse + self.is_active = is_active self.items = [ AsyncSession( user=user, @@ -104,6 +110,7 @@ async def next(self): page=(self.page + 1), page_size=self.page_size, reverse=self.reverse, + is_active=self.is_active, ) @@ -114,8 +121,9 @@ def __init__(self, response: dict, session: AsyncSession, reverse: bool): """Constructor for Page Result from Session Get Request Args: - session (AsyncSession): Session the returned messages are associated with response (dict): Response from API with pagination information + session (AsyncSession): Session the returned messages are associated with + reverse (bool): Whether to reverse the order of the results or not """ super().__init__(response) self.session = session @@ -160,6 +168,8 @@ def __init__( session (AsyncSession): Session the returned messages are associated with reverse (bool): Whether to reverse the order of the results + message_id (Optional[str]): ID of the message associated with the + metamessage_type (Optional[str]): Type of the metamessage """ super().__init__(response) self.session = session @@ -508,6 +518,7 @@ async def get_sessions( page: int = 1, page_size: int = 50, reverse: bool = False, + is_active: bool = False, ): """Return sessions associated with a user paginated @@ -522,13 +533,13 @@ async def get_sessions( """ url = ( - f"{self.base_url}/sessions?page={page}&size={page_size}&reverse={reverse}" + f"{self.base_url}/sessions?page={page}&size={page_size}&reverse={reverse}&is_active={is_active}" + (f"&location_id={location_id}" if location_id else "") ) response = await self.honcho.client.get(url) response.raise_for_status() data = response.json() - return AsyncGetSessionPage(data, self, reverse, location_id) + return AsyncGetSessionPage(data, self, reverse, location_id, is_active) async def get_sessions_generator( self, diff --git a/sdk/honcho/sync_client.py b/sdk/honcho/sync_client.py index f431604..3c2f950 100644 --- a/sdk/honcho/sync_client.py +++ b/sdk/honcho/sync_client.py @@ -36,9 +36,9 @@ def __init__(self, response: dict, honcho: Honcho, reverse: bool): """Constructor for Page Result from User Get Request Args: + response (dict): Response from API with pagination information honcho (Honcho): Honcho Client reverse (bool): Whether to reverse the order of the results or not - response (dict): Response from API with pagination information """ super().__init__(response) self.honcho = honcho @@ -65,20 +65,26 @@ class GetSessionPage(GetPage): """Paginated Results for Get Session Requests""" def __init__( - self, response: dict, user: User, reverse: bool, location_id: Optional[str] + self, + response: dict, + user: User, + reverse: bool, + location_id: Optional[str], + is_active: bool, ): """Constructor for Page Result from Session Get Request Args: + response (dict): Response from API with pagination information user (User): Honcho User associated with the session - location_id (str): ID of the location associated with the session reverse (bool): Whether to reverse the order of the results or not - response (dict): Response from API with pagination information + location_id (str): ID of the location associated with the session """ super().__init__(response) self.user = user self.location_id = location_id self.reverse = reverse + self.is_active = is_active self.items = [ Session( user=user, @@ -104,6 +110,7 @@ def next(self): page=(self.page + 1), page_size=self.page_size, reverse=self.reverse, + is_active=self.is_active, ) @@ -114,8 +121,9 @@ def __init__(self, response: dict, session: Session, reverse: bool): 
"""Constructor for Page Result from Session Get Request Args: - session (Session): Session the returned messages are associated with response (dict): Response from API with pagination information + session (Session): Session the returned messages are associated with + reverse (bool): Whether to reverse the order of the results or not """ super().__init__(response) self.session = session @@ -160,6 +168,8 @@ def __init__( session (Session): Session the returned messages are associated with reverse (bool): Whether to reverse the order of the results + message_id (Optional[str]): ID of the message associated with the + metamessage_type (Optional[str]): Type of the metamessage """ super().__init__(response) self.session = session @@ -508,6 +518,7 @@ def get_sessions( page: int = 1, page_size: int = 50, reverse: bool = False, + is_active: bool = False, ): """Return sessions associated with a user paginated @@ -522,13 +533,13 @@ def get_sessions( """ url = ( - f"{self.base_url}/sessions?page={page}&size={page_size}&reverse={reverse}" + f"{self.base_url}/sessions?page={page}&size={page_size}&reverse={reverse}&is_active={is_active}" + (f"&location_id={location_id}" if location_id else "") ) response = self.honcho.client.get(url) response.raise_for_status() data = response.json() - return GetSessionPage(data, self, reverse, location_id) + return GetSessionPage(data, self, reverse, location_id, is_active) def get_sessions_generator( self, From 67b16019327b02fba1e0edd6d93f7a7b3957bddf Mon Sep 17 00:00:00 2001 From: Vineeth Voruganti <13438633+VVoruganti@users.noreply.github.com> Date: Thu, 22 Feb 2024 09:34:51 -0800 Subject: [PATCH 42/46] Add is_active filtering to the generator --- sdk/honcho/client.py | 3 ++- sdk/honcho/sync_client.py | 3 ++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/sdk/honcho/client.py b/sdk/honcho/client.py index b75e203..5da3740 100644 --- a/sdk/honcho/client.py +++ b/sdk/honcho/client.py @@ -545,6 +545,7 @@ async def get_sessions_generator( self, location_id: Optional[str] = None, reverse: bool = False, + is_active: bool = False, ): """Shortcut Generator for get_sessions. Generator to iterate through all sessions for a user in an app @@ -560,7 +561,7 @@ async def get_sessions_generator( page = 1 page_size = 50 get_session_response = await self.get_sessions( - location_id, page, page_size, reverse + location_id, page, page_size, reverse, is_active ) while True: for session in get_session_response.items: diff --git a/sdk/honcho/sync_client.py b/sdk/honcho/sync_client.py index 3c2f950..2db7423 100644 --- a/sdk/honcho/sync_client.py +++ b/sdk/honcho/sync_client.py @@ -545,6 +545,7 @@ def get_sessions_generator( self, location_id: Optional[str] = None, reverse: bool = False, + is_active: bool = False, ): """Shortcut Generator for get_sessions. 
Generator to iterate through all sessions for a user in an app @@ -560,7 +561,7 @@ def get_sessions_generator( page = 1 page_size = 50 get_session_response = self.get_sessions( - location_id, page, page_size, reverse + location_id, page, page_size, reverse, is_active ) while True: for session in get_session_response.items: From 876e8e895959c4fe20fe0e4cd59f9470b43e7834 Mon Sep 17 00:00:00 2001 From: Vineeth Voruganti <13438633+VVoruganti@users.noreply.github.com> Date: Thu, 22 Feb 2024 10:59:37 -0800 Subject: [PATCH 43/46] Fix update user metadata --- sdk/honcho/client.py | 15 +++++++++++++-- sdk/honcho/sync_client.py | 15 +++++++++++++-- sdk/tests/test_async.py | 13 +++++++++++++ sdk/tests/test_sync.py | 12 ++++++++++++ 4 files changed, 51 insertions(+), 4 deletions(-) diff --git a/sdk/honcho/client.py b/sdk/honcho/client.py index 5da3740..2d25ee0 100644 --- a/sdk/honcho/client.py +++ b/sdk/honcho/client.py @@ -394,6 +394,11 @@ async def get_users( ): """Get Paginated list of users + Args: + page (int, optional): The page of results to return + page_size (int, optional): The number of results to return + reverse (bool): Whether to reverse the order of the results + Returns: AsyncGetUserPage: Paginated list of users """ @@ -482,11 +487,14 @@ async def update(self, metadata: dict): AsyncUser: The updated User object """ + data = {"metadata": metadata} url = f"{self.base_url}" - response = await self.honcho.client.put(url, json=metadata) + response = await self.honcho.client.put(url, json=data) response.raise_for_status() + success = response.status_code < 400 data = response.json() self.metadata = data["metadata"] + return success # return AsyncUser(self.honcho, **data) async def get_session(self, session_id: uuid.UUID): @@ -527,6 +535,8 @@ async def get_sessions( location of a session page (int, optional): The page of results to return page_size (int, optional): The number of results to return + reverse (bool): Whether to reverse the order of the results + is_active (bool): Whether to only return active sessions Returns: AsyncGetSessionPage: Page or results for get_sessions query @@ -553,6 +563,8 @@ async def get_sessions_generator( Args: location_id (str, optional): Optional Location ID representing the location of a session + reverse (bool): Whether to reverse the order of the results + is_active (bool): Whether to only return active sessions Yields: AsyncSession: The Session object of the requested Session @@ -1029,7 +1041,6 @@ async def create_document(self, content: str, metadata: Optional[dict] = None): metadata = {} data = {"metadata": metadata, "content": content} url = f"{self.base_url}/documents" - print(url) response = await self.user.honcho.client.post(url, json=data) response.raise_for_status() data = response.json() diff --git a/sdk/honcho/sync_client.py b/sdk/honcho/sync_client.py index 2db7423..4893f80 100644 --- a/sdk/honcho/sync_client.py +++ b/sdk/honcho/sync_client.py @@ -394,6 +394,11 @@ def get_users( ): """Get Paginated list of users + Args: + page (int, optional): The page of results to return + page_size (int, optional): The number of results to return + reverse (bool): Whether to reverse the order of the results + Returns: GetUserPage: Paginated list of users """ @@ -482,11 +487,14 @@ def update(self, metadata: dict): User: The updated User object """ + data = {"metadata": metadata} url = f"{self.base_url}" - response = self.honcho.client.put(url, json=metadata) + response = self.honcho.client.put(url, json=data) response.raise_for_status() + success = 
response.status_code < 400 data = response.json() self.metadata = data["metadata"] + return success # return User(self.honcho, **data) def get_session(self, session_id: uuid.UUID): @@ -527,6 +535,8 @@ def get_sessions( location of a session page (int, optional): The page of results to return page_size (int, optional): The number of results to return + reverse (bool): Whether to reverse the order of the results + is_active (bool): Whether to only return active sessions Returns: GetSessionPage: Page or results for get_sessions query @@ -553,6 +563,8 @@ def get_sessions_generator( Args: location_id (str, optional): Optional Location ID representing the location of a session + reverse (bool): Whether to reverse the order of the results + is_active (bool): Whether to only return active sessions Yields: Session: The Session object of the requested Session @@ -1029,7 +1041,6 @@ def create_document(self, content: str, metadata: Optional[dict] = None): metadata = {} data = {"metadata": metadata, "content": content} url = f"{self.base_url}/documents" - print(url) response = self.user.honcho.client.post(url, json=data) response.raise_for_status() data = response.json() diff --git a/sdk/tests/test_async.py b/sdk/tests/test_async.py index 0c52875..04ffc4a 100644 --- a/sdk/tests/test_async.py +++ b/sdk/tests/test_async.py @@ -15,6 +15,19 @@ from honcho import AsyncHoncho as Honcho +@pytest.mark.asyncio +async def test_user_update(): + user_name = str(uuid1()) + app_name = str(uuid1()) + honcho = Honcho(app_name, "http://localhost:8000") + await honcho.initialize() + user = await honcho.create_user(user_name) + assert user.metadata == {} + assert await user.update({"foo": "bar"}) + retrieved_user = await honcho.get_user(user_name) + assert retrieved_user.metadata == {"foo": "bar"} + + @pytest.mark.asyncio async def test_session_creation_retrieval(): app_name = str(uuid1()) diff --git a/sdk/tests/test_sync.py b/sdk/tests/test_sync.py index 16eba65..fd92234 100644 --- a/sdk/tests/test_sync.py +++ b/sdk/tests/test_sync.py @@ -15,6 +15,18 @@ from honcho import Honcho as Honcho +def test_user_update(): + user_name = str(uuid1()) + app_name = str(uuid1()) + honcho = Honcho(app_name, "http://localhost:8000") + honcho.initialize() + user = honcho.create_user(user_name) + assert user.metadata == {} + assert user.update({"foo": "bar"}) + retrieved_user = honcho.get_user(user_name) + assert retrieved_user.metadata == {"foo": "bar"} + + def test_session_creation_retrieval(): app_name = str(uuid1()) honcho = Honcho(app_name, "http://localhost:8000") From cf79652fa5eca73dc060b04ce4820af7f7fa5ab6 Mon Sep 17 00:00:00 2001 From: vintro Date: Thu, 22 Feb 2024 20:03:45 -0500 Subject: [PATCH 44/46] working, but weird compiler error --- example/discord/honcho-dspy-personas/bot.py | 65 ++++++++++++------- example/discord/honcho-dspy-personas/chain.py | 3 + example/discord/honcho-dspy-personas/graph.py | 28 ++++++-- .../langchain_prompts/state_check.yaml | 4 +- .../langchain_prompts/state_commentary.yaml | 6 +- .../langchain_prompts/state_labeling.yaml | 10 +-- .../discord/honcho-dspy-personas/poetry.lock | 50 +++++++------- .../honcho-dspy-personas/pyproject.toml | 2 +- .../honcho-dspy-personas/response_metric.py | 53 +++++++++++---- 9 files changed, 145 insertions(+), 76 deletions(-) diff --git a/example/discord/honcho-dspy-personas/bot.py b/example/discord/honcho-dspy-personas/bot.py index 74536ed..a385ae3 100644 --- a/example/discord/honcho-dspy-personas/bot.py +++ b/example/discord/honcho-dspy-personas/bot.py @@ -3,6 +3,7 
@@ import discord from honcho import Honcho from graph import chat +from dspy import Example from chain import langchain_message_converter intents = discord.Intents.default() @@ -14,8 +15,8 @@ # app_id = str(uuid1()) app_name = "vince-dspy-personas" -# honcho = Honcho(app_name=app_name, base_url="http://localhost:8000") # uncomment to use local -honcho = Honcho(app_name=app_name) # uses demo server at https://demo.honcho.dev +honcho = Honcho(app_name=app_name, base_url="http://localhost:8000") # uncomment to use local +# honcho = Honcho(app_name=app_name) # uses demo server at https://demo.honcho.dev honcho.initialize() bot = discord.Bot(intents=intents) @@ -51,7 +52,7 @@ async def on_message(message): user = honcho.get_or_create_user(user_id) location_id = str(message.channel.id) - sessions = list(user.get_sessions_generator(location_id)) + sessions = list(user.get_sessions_generator(location_id, is_active=True, reverse=True)) if len(sessions) > 0: session = sessions[0] @@ -82,32 +83,48 @@ async def on_reaction_add(reaction, user): if user == bot.user: return - user_id = f"discord_{str(reaction.message.author.id)}" - user = honcho.get_or_create_user(user_id) + user_id = f"discord_{str(user.id)}" + honcho_user = honcho.get_or_create_user(user_id) location_id = str(reaction.message.channel.id) + sessions = list(honcho_user.get_sessions_generator(location_id, is_active=True, reverse=True)) + if len(sessions) > 0: + session = sessions[0] + else: + session = honcho_user.create_session(location_id) + + messages = list(session.get_messages_generator(reverse=True)) + ai_responses = [message for message in messages if not message.is_user] + user_responses = [message for message in messages if message.is_user] + # most recent AI response + ai_response = ai_responses[0].content + user_response = user_responses[0] + + user_state_storage = dict(honcho_user.metadata) + user_state = list(session.get_metamessages_generator(metamessage_type="user_state", message=user_response, reverse=True))[0].content + examples = user_state_storage[user_state]["examples"] + # Check if the reaction is a thumbs up if str(reaction.emoji) == "👍": - thumbs_up_messages.append(reaction.message.content) - print(f"Added to thumbs up: {reaction.message.content}") + example = Example( + chat_input=user_response.content, + response=ai_response, + assessment_dimension=user_state, + label='yes' + ).with_inputs("chat_input", "response", "assessment_dimension") + examples.append(example.toDict()) # Check if the reaction is a thumbs down elif str(reaction.emoji) == "👎": - thumbs_down_messages.append(reaction.message.content) - print(f"Added to thumbs down: {reaction.message.content}") - - # TODO: we need to append these to the examples list within the user state json object - # append example - # example = Example(chat_input=chat_input, assessment_dimension=user_state, response=response).with_inputs('chat_input') - # examples.append(example) - # user_state_storage[user_state]["examples"] = examples - example = Example( - chat_input=chat_input, assessment_dimension=user_state, response=response - ).with_inputs("chat_input") - user_state_storage = dict(user.metadata) - examples = user_state_storage.get("examples", []) - examples.append(example) - user_state_storage["examples"] = examples - user.update(metadata=user_state_storage) + example = Example( + chat_input=user_response.content, + response=ai_response, + assessment_dimension=user_state, + label='no' + ).with_inputs("chat_input", "response", "assessment_dimension") + 
examples.append(example.toDict()) + + user_state_storage[user_state]["examples"] = examples + honcho_user.update(metadata=user_state_storage) @bot.slash_command(name="restart", description="Restart the Conversation") @@ -115,7 +132,7 @@ async def restart(ctx): user_id = f"discord_{str(ctx.author.id)}" user = honcho.get_or_create_user(user_id) location_id = str(ctx.channel_id) - sessions = list(user.get_sessions_generator(location_id)) + sessions = list(user.get_sessions_generator(location_id, reverse=True)) sessions[0].close() if len(sessions) > 0 else None msg = ( diff --git a/example/discord/honcho-dspy-personas/chain.py b/example/discord/honcho-dspy-personas/chain.py index aa114e1..119d33b 100644 --- a/example/discord/honcho-dspy-personas/chain.py +++ b/example/discord/honcho-dspy-personas/chain.py @@ -1,11 +1,14 @@ import os from typing import List, Union +from dotenv import load_dotenv from langchain_openai import ChatOpenAI from langchain_core.prompts import ChatPromptTemplate, SystemMessagePromptTemplate, load_prompt from langchain_core.messages import AIMessage, HumanMessage from honcho import Message +load_dotenv() + # langchain prompts SYSTEM_STATE_COMMENTARY = load_prompt(os.path.join(os.path.dirname(__file__), 'langchain_prompts/state_commentary.yaml')) SYSTEM_STATE_LABELING = load_prompt(os.path.join(os.path.dirname(__file__), 'langchain_prompts/state_labeling.yaml')) diff --git a/example/discord/honcho-dspy-personas/graph.py b/example/discord/honcho-dspy-personas/graph.py index 021ee00..b3b17ba 100644 --- a/example/discord/honcho-dspy-personas/graph.py +++ b/example/discord/honcho-dspy-personas/graph.py @@ -2,7 +2,7 @@ import dspy from dspy import Example from typing import List, Optional -from dspy.teleprompt import BootstrapFewShot +from dspy.teleprompt import BootstrapFewShotWithRandomSearch from dotenv import load_dotenv from chain import StateExtractor, format_chat_history from response_metric import metric @@ -42,6 +42,7 @@ def forward( chat_input: str, user_message: Optional[Message] = None, session: Optional[Session] = None, + assessment_dimension = None, ): # call the thought predictor thought = self.generate_thought(user_input=chat_input) @@ -56,13 +57,9 @@ def forward( user_input=chat_input, thought=thought.thought ) - # remove ai prefix - response = response.response.replace("ai:", "").strip() - return response -# user_state_storage = {} async def chat( user_message: Message, session: Session, @@ -81,6 +78,12 @@ async def chat( print(f"USER STATE: {user_state}") print(f"IS STATE NEW: {is_state_new}") + # add metamessage to message to keep track of what label got assigned to what message + if session and user_message: + session.create_metamessage( + user_message, metamessage_type="user_state", content=user_state + ) + user_chat_module = ChatWithThought() # TODO: you'd want to initialize user state object from Honcho @@ -94,16 +97,27 @@ async def chat( # TODO: read in examples from Honcho User Object examples = user_state_data["examples"] print(f"Num examples: {len(examples)}") + session.user.update(metadata=user_state_storage) if len(examples) >= optimization_threshold: + # convert example from dicts to dspy Example objects + examples = [dspy.Example(**example).with_inputs("chat_input", "ai_response", "assessment_dimension") for example in examples] + print(examples) + # Splitting the examples list into train and validation sets + # train_examples = examples[:-1] # All but the last item for training + # val_examples = examples[-1:] # The last item for validation + 
# Optimize chat module - optimizer = BootstrapFewShot(metric=metric) + optimizer = BootstrapFewShotWithRandomSearch(metric=metric, max_bootstrapped_demos=3, max_labeled_demos=3, num_candidate_programs=10, num_threads=4) + # compiled_chat_module = optimizer.compile(ChatWithThought(), trainset=train_examples, valset=val_examples) compiled_chat_module = optimizer.compile(user_chat_module, trainset=examples) + print(f"COMPILED_CHAT_MODULE: {compiled_chat_module}") # user_state_data["chat_module"] = compiled_chat_module.dump_state() user_state_storage[user_state][ "chat_module" ] = compiled_chat_module.dump_state() + print(f"DUMPED_STATE: {compiled_chat_module.dump_state()}") user_chat_module = compiled_chat_module # save to file for debugging purposes @@ -116,6 +130,8 @@ async def chat( response = user_chat_module( user_message=user_message, session=session, chat_input=chat_input ) + # remove ai prefix + response = response.response.replace("ai:", "").strip() dspy_gpt4.inspect_history(n=2) return response diff --git a/example/discord/honcho-dspy-personas/langchain_prompts/state_check.yaml b/example/discord/honcho-dspy-personas/langchain_prompts/state_check.yaml index 6fe2f0f..d997adb 100644 --- a/example/discord/honcho-dspy-personas/langchain_prompts/state_check.yaml +++ b/example/discord/honcho-dspy-personas/langchain_prompts/state_check.yaml @@ -4,7 +4,7 @@ input_variables: template: > Given the list of existing states, determine whether or not the new state is represented in the list of existing states. - existing states: ```{existing_states}``` - new state: ```{state}``` + existing states: """{existing_states}""" + new state: """{state}""" If the new state is sufficiently similar to a value in the list of existing states, return that existing state value. If the new state is NOT sufficiently similar to anything in existing states, return "None". Output a single value only. \ No newline at end of file diff --git a/example/discord/honcho-dspy-personas/langchain_prompts/state_commentary.yaml b/example/discord/honcho-dspy-personas/langchain_prompts/state_commentary.yaml index f1e2b90..bd4ee0c 100644 --- a/example/discord/honcho-dspy-personas/langchain_prompts/state_commentary.yaml +++ b/example/discord/honcho-dspy-personas/langchain_prompts/state_commentary.yaml @@ -4,6 +4,6 @@ input_variables: template: > Your job is to make a prediction about the task the user might be engaging in. Some people might be researching, exploring curiosities, or just asking questions for general inquiry. Provide commentary that would shed light on the "mode" the user might be in. - existing states: ```{existing_states}``` - chat history: ```{chat_history}``` - user input: ```{user_input}``` \ No newline at end of file + existing states: """{existing_states}""" + chat history: """{chat_history}""" + user input: """{user_input}""" \ No newline at end of file diff --git a/example/discord/honcho-dspy-personas/langchain_prompts/state_labeling.yaml b/example/discord/honcho-dspy-personas/langchain_prompts/state_labeling.yaml index c3dd8fb..a2e3105 100644 --- a/example/discord/honcho-dspy-personas/langchain_prompts/state_labeling.yaml +++ b/example/discord/honcho-dspy-personas/langchain_prompts/state_labeling.yaml @@ -4,10 +4,10 @@ input_variables: template: > Your job is to label the state the user might be in. Some people might be conducting research, exploring a interest, or just asking questions for general inquiry. 
- commentary: ```{state_commentary}``` - Prior states, from oldest to most recent: ``` + commentary: """{state_commentary}""" + Prior states, from oldest to most recent:""" {existing_states} - ```` - - Take into account the user's prior states when making your prediction. Output your prediction as a concise, single word label. + """ + Take into account the user's prior states when making your prediction. Output your prediction as a concise, single word label. + \ No newline at end of file diff --git a/example/discord/honcho-dspy-personas/poetry.lock b/example/discord/honcho-dspy-personas/poetry.lock index 7329a15..589f511 100644 --- a/example/discord/honcho-dspy-personas/poetry.lock +++ b/example/discord/honcho-dspy-personas/poetry.lock @@ -154,13 +154,13 @@ files = [ [[package]] name = "anyio" -version = "4.2.0" +version = "4.3.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false python-versions = ">=3.8" files = [ - {file = "anyio-4.2.0-py3-none-any.whl", hash = "sha256:745843b39e829e108e518c489b31dc757de7d2131d53fac32bd8df268227bfee"}, - {file = "anyio-4.2.0.tar.gz", hash = "sha256:e1875bb4b4e2de1669f4bc7869b6d3f54231cdced71605e6e64c9be77e3be50f"}, + {file = "anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8"}, + {file = "anyio-4.3.0.tar.gz", hash = "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6"}, ] [package.dependencies] @@ -676,27 +676,29 @@ files = [ [[package]] name = "honcho-ai" -version = "0.0.3" +version = "0.0.4" description = "Python Client SDK for Honcho" optional = false -python-versions = ">=3.10,<4.0" -files = [ - {file = "honcho_ai-0.0.3-py3-none-any.whl", hash = "sha256:a817ec62c4fd8dad1d629927511ce98a3f626f4bc55474187b80010e208e61ba"}, - {file = "honcho_ai-0.0.3.tar.gz", hash = "sha256:ca52bb8c5036bfdbeee0c71ca754c580c672b28a4824240123b783f8679ca18e"}, -] +python-versions = "^3.10" +files = [] +develop = true [package.dependencies] -httpx = ">=0.26.0,<0.27.0" +httpx = "^0.26.0" + +[package.source] +type = "directory" +url = "../../../sdk" [[package]] name = "httpcore" -version = "1.0.3" +version = "1.0.4" description = "A minimal low-level HTTP client." 
optional = false python-versions = ">=3.8" files = [ - {file = "httpcore-1.0.3-py3-none-any.whl", hash = "sha256:9a6a501c3099307d9fd76ac244e08503427679b1e81ceb1d922485e2f2462ad2"}, - {file = "httpcore-1.0.3.tar.gz", hash = "sha256:5c0f9546ad17dac4d0772b0808856eb616eb8b48ce94f49ed819fd6982a8a544"}, + {file = "httpcore-1.0.4-py3-none-any.whl", hash = "sha256:ac418c1db41bade2ad53ae2f3834a3a0f5ae76b56cf5aa497d2d033384fc7d73"}, + {file = "httpcore-1.0.4.tar.gz", hash = "sha256:cb2839ccfcba0d2d3c1131d3c3e26dfc327326fbe7a5dc0dbfe9f6c9151bb022"}, ] [package.dependencies] @@ -707,7 +709,7 @@ h11 = ">=0.13,<0.15" asyncio = ["anyio (>=4.0,<5.0)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] -trio = ["trio (>=0.22.0,<0.24.0)"] +trio = ["trio (>=0.22.0,<0.25.0)"] [[package]] name = "httpx" @@ -814,13 +816,13 @@ files = [ [[package]] name = "langchain-core" -version = "0.1.24" +version = "0.1.25" description = "Building applications with LLMs through composability" optional = false python-versions = ">=3.8.1,<4.0" files = [ - {file = "langchain_core-0.1.24-py3-none-any.whl", hash = "sha256:1887bb2e0c12e0d94c1e805eb56d08dbb670232daf0906761f726bd507324319"}, - {file = "langchain_core-0.1.24.tar.gz", hash = "sha256:ce70f4b97695eb55637e00ee33d480fffc6db1f95726f99b076b55cb1a42927d"}, + {file = "langchain_core-0.1.25-py3-none-any.whl", hash = "sha256:ff0a0ad1ed877878e7b9c7601870cd12145abf3c814aae41995968d05ea6c09d"}, + {file = "langchain_core-0.1.25.tar.gz", hash = "sha256:065ff8b4e383c5645d175b20ae44b258330ed06457b0fc0179efee310b6f2af6"}, ] [package.dependencies] @@ -855,13 +857,13 @@ tiktoken = ">=0.5.2,<1" [[package]] name = "langsmith" -version = "0.1.3" +version = "0.1.5" description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." optional = false python-versions = ">=3.8.1,<4.0" files = [ - {file = "langsmith-0.1.3-py3-none-any.whl", hash = "sha256:b290f951d1ebff9abe2b52cc09d63acea75a9ca6e003a617310fb024eaf00f63"}, - {file = "langsmith-0.1.3.tar.gz", hash = "sha256:197bd1f5baa83db69a0eab644bab1eba8dcdf0c2d8b7c900a45916f7b3dd50ab"}, + {file = "langsmith-0.1.5-py3-none-any.whl", hash = "sha256:a1811821a923d90e53bcbacdd0988c3c366aff8f4c120d8777e7af8ecda06268"}, + {file = "langsmith-0.1.5.tar.gz", hash = "sha256:aa7a2861aa3d9ae563a077c622953533800466c4e2e539b0d567b84d5fd5b157"}, ] [package.dependencies] @@ -1950,13 +1952,13 @@ files = [ [[package]] name = "urllib3" -version = "2.2.0" +version = "2.2.1" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, - {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, ] [package.extras] @@ -2188,4 +2190,4 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "85bddbf515ca00359d2b25db6dc48e2943a136e3b65a9ed8a9537ad79bfd4eec" +content-hash = "f27a3578cc155c9eccf7d771fe3c47196500adaae5c92b0bbb55252ee338fa50" diff --git a/example/discord/honcho-dspy-personas/pyproject.toml b/example/discord/honcho-dspy-personas/pyproject.toml index 6f2082b..2a4144f 100644 --- a/example/discord/honcho-dspy-personas/pyproject.toml +++ b/example/discord/honcho-dspy-personas/pyproject.toml @@ -7,13 +7,13 @@ readme = "README.md" [tool.poetry.dependencies] python = "^3.11" -honcho-ai = "^0.0.3" dspy-ai = "^2.1.10" python-dotenv = "^1.0.1" langchain-core = "^0.1.23" langchain-openai = "^0.0.6" py-cord = "^2.4.1" langsmith = "^0.1.3" +honcho-ai = {path = "../../../sdk", develop = true} [build-system] diff --git a/example/discord/honcho-dspy-personas/response_metric.py b/example/discord/honcho-dspy-personas/response_metric.py index 1383c11..76bb078 100644 --- a/example/discord/honcho-dspy-personas/response_metric.py +++ b/example/discord/honcho-dspy-personas/response_metric.py @@ -5,29 +5,60 @@ class MessageResponseAssess(dspy.Signature): """Assess the quality of a response along the specified dimension.""" chat_input = dspy.InputField() + assessment_dimension = dspy.InputField() # user state + example_response = dspy.InputField() + example_label = dspy.InputField() ai_response = dspy.InputField() - gold_response = dspy.InputField() - assessment_dimension = dspy.InputField() - assessment_answer = dspy.OutputField(desc="Good or not") + ai_response_label = dspy.OutputField(desc="yes or no") -def metric(example, ai_response, trace=None): +def metric(example, pred, trace=None): """Assess the quality of a response along the specified dimension.""" - assessment_dimension = example.assessment_dimension chat_input = example.chat_input - gold_response = example.response + assessment_dimension = f"The user is in the following state: {example.assessment_dimension}. Is the AI response appropriate for this state? Respond with Yes or No." 
+ example_response = example.response + example_label = example.label + ai_response = pred with dspy.context(lm=gpt4T): assessment_result = dspy.Predict(MessageResponseAssess)( chat_input=chat_input, - ai_response=ai_response, - gold_response=gold_response, - assessment_dimension=assessment_dimension + assessment_dimension=assessment_dimension, + example_response=example_response, + example_label=example_label, + ai_response=ai_response, ) - is_positive = assessment_result.assessment_answer.lower() == 'good' + is_appropriate = assessment_result.ai_response_label.lower() == 'yes' gpt4T.inspect_history(n=3) - return is_positive + return is_appropriate + + + + + + +# def metric(example, ai_response, trace=None): +# """Assess the quality of a response along the specified dimension.""" +# example = dspy.Example(**example).with_inputs("chat_input", "ai_response", "assessment_dimension") + +# label = example.label +# chat_input = example.chat_input +# ai_response = example.ai_response +# assessment_dimension = example.assessment_dimension + +# with dspy.context(lm=gpt4T): +# assessment_result = dspy.Predict(MessageResponseAssess)( +# chat_input=chat_input, +# ai_response=ai_response, +# assessment_dimension=assessment_dimension +# ) + +# is_positive = assessment_result.assessment_answer.lower() == 'positive' + +# gpt4T.inspect_history(n=3) + +# return is_positive \ No newline at end of file From 3a2f5ea229b21c7cba89ff4a82f707fe67d05ced Mon Sep 17 00:00:00 2001 From: vintro Date: Fri, 23 Feb 2024 13:33:06 -0500 Subject: [PATCH 45/46] fixed str error in optimizer --- example/discord/honcho-dspy-personas/graph.py | 30 +++++++++++++------ .../honcho-dspy-personas/response_metric.py | 16 ++++------ 2 files changed, 27 insertions(+), 19 deletions(-) diff --git a/example/discord/honcho-dspy-personas/graph.py b/example/discord/honcho-dspy-personas/graph.py index b3b17ba..4f53dae 100644 --- a/example/discord/honcho-dspy-personas/graph.py +++ b/example/discord/honcho-dspy-personas/graph.py @@ -42,7 +42,8 @@ def forward( chat_input: str, user_message: Optional[Message] = None, session: Optional[Session] = None, - assessment_dimension = None, + response: Optional[str] = None, + assessment_dimension: Optional[str] = None, ): # call the thought predictor thought = self.generate_thought(user_input=chat_input) @@ -57,7 +58,7 @@ def forward( user_input=chat_input, thought=thought.thought ) - return response + return response # this is a prediction object async def chat( @@ -65,7 +66,7 @@ async def chat( session: Session, chat_history: List[Message], input: str, - optimization_threshold=3, + optimization_threshold=5, ): user_state_storage = dict(session.user.metadata) # first we need to see if the user has any existing states @@ -101,16 +102,24 @@ async def chat( if len(examples) >= optimization_threshold: # convert example from dicts to dspy Example objects - examples = [dspy.Example(**example).with_inputs("chat_input", "ai_response", "assessment_dimension") for example in examples] - print(examples) + optimizer_examples = [] + for example in examples: + optimizer_example = Example(**example).with_inputs("chat_input", "response", "assessment_dimension") + optimizer_examples.append(optimizer_example) + print(isinstance(optimizer_example, Example)) + print(optimizer_example._store) + # examples = [dspy.Example(example).with_inputs("chat_input", "response", "assessment_dimension") for example in examples] + # print(examples) # Splitting the examples list into train and validation sets - # train_examples = 
examples[:-1] # All but the last item for training - # val_examples = examples[-1:] # The last item for validation + train_examples = examples[:-1] # All but the last item for training + val_examples = examples[-1:] # The last item for validation # Optimize chat module optimizer = BootstrapFewShotWithRandomSearch(metric=metric, max_bootstrapped_demos=3, max_labeled_demos=3, num_candidate_programs=10, num_threads=4) - # compiled_chat_module = optimizer.compile(ChatWithThought(), trainset=train_examples, valset=val_examples) - compiled_chat_module = optimizer.compile(user_chat_module, trainset=examples) + # optimizer = BootstrapFewShot(metric=metric, max_rounds=5) + + compiled_chat_module = optimizer.compile(ChatWithThought(), trainset=train_examples, valset=val_examples) + # compiled_chat_module = optimizer.compile(user_chat_module, trainset=optimizer_examples) print(f"COMPILED_CHAT_MODULE: {compiled_chat_module}") # user_state_data["chat_module"] = compiled_chat_module.dump_state() @@ -132,6 +141,9 @@ async def chat( ) # remove ai prefix response = response.response.replace("ai:", "").strip() + + print("========== CHAT HISTORY ==========") dspy_gpt4.inspect_history(n=2) + print("======= END CHAT HISTORY =========") return response diff --git a/example/discord/honcho-dspy-personas/response_metric.py b/example/discord/honcho-dspy-personas/response_metric.py index 76bb078..3d9a5f1 100644 --- a/example/discord/honcho-dspy-personas/response_metric.py +++ b/example/discord/honcho-dspy-personas/response_metric.py @@ -7,8 +7,6 @@ class MessageResponseAssess(dspy.Signature): chat_input = dspy.InputField() assessment_dimension = dspy.InputField() # user state example_response = dspy.InputField() - example_label = dspy.InputField() - ai_response = dspy.InputField() ai_response_label = dspy.OutputField(desc="yes or no") @@ -17,22 +15,20 @@ def metric(example, pred, trace=None): chat_input = example.chat_input assessment_dimension = f"The user is in the following state: {example.assessment_dimension}. Is the AI response appropriate for this state? Respond with Yes or No." 
- example_response = example.response - example_label = example.label - ai_response = pred + example_response = pred.response with dspy.context(lm=gpt4T): assessment_result = dspy.Predict(MessageResponseAssess)( chat_input=chat_input, assessment_dimension=assessment_dimension, - example_response=example_response, - example_label=example_label, - ai_response=ai_response, + example_response=example_response ) - + is_appropriate = assessment_result.ai_response_label.lower() == 'yes' - gpt4T.inspect_history(n=3) + print("======== OPTIMIZER HISTORY ========") + gpt4T.inspect_history(n=5) + print("======== END OPTIMIZER HISTORY ========") return is_appropriate From 21c325bd355ee630afcefd2266ab75227f7d81b4 Mon Sep 17 00:00:00 2001 From: vintro Date: Fri, 23 Feb 2024 14:16:02 -0500 Subject: [PATCH 46/46] ship --- example/discord/honcho-dspy-personas/bot.py | 7 ++--- example/discord/honcho-dspy-personas/chain.py | 5 +++- example/discord/honcho-dspy-personas/graph.py | 22 +++----------- .../honcho-dspy-personas/response_metric.py | 29 +------------------ 4 files changed, 12 insertions(+), 51 deletions(-) diff --git a/example/discord/honcho-dspy-personas/bot.py b/example/discord/honcho-dspy-personas/bot.py index a385ae3..77a7abf 100644 --- a/example/discord/honcho-dspy-personas/bot.py +++ b/example/discord/honcho-dspy-personas/bot.py @@ -12,11 +12,10 @@ intents.members = True intents.reactions = True # Enable reactions intent -# app_id = str(uuid1()) -app_name = "vince-dspy-personas" +app_name = str(uuid1()) -honcho = Honcho(app_name=app_name, base_url="http://localhost:8000") # uncomment to use local -# honcho = Honcho(app_name=app_name) # uses demo server at https://demo.honcho.dev +# honcho = Honcho(app_name=app_name, base_url="http://localhost:8000") # uncomment to use local +honcho = Honcho(app_name=app_name) # uses demo server at https://demo.honcho.dev honcho.initialize() bot = discord.Bot(intents=intents) diff --git a/example/discord/honcho-dspy-personas/chain.py b/example/discord/honcho-dspy-personas/chain.py index 119d33b..eede40e 100644 --- a/example/discord/honcho-dspy-personas/chain.py +++ b/example/discord/honcho-dspy-personas/chain.py @@ -82,8 +82,11 @@ async def generate_state_label(cls, existing_states: List[str], state_commentar "state_commentary": state_commentary, "existing_states": existing_states, }) + + # strip anything that's not letters + clean_response = ''.join(c for c in response.content if c.isalpha()) # return output - return response.content + return clean_response @classmethod async def check_state_exists(cls, existing_states: List[str], state: str): diff --git a/example/discord/honcho-dspy-personas/graph.py b/example/discord/honcho-dspy-personas/graph.py index 4f53dae..5c6f6a8 100644 --- a/example/discord/honcho-dspy-personas/graph.py +++ b/example/discord/honcho-dspy-personas/graph.py @@ -2,7 +2,7 @@ import dspy from dspy import Example from typing import List, Optional -from dspy.teleprompt import BootstrapFewShotWithRandomSearch +from dspy.teleprompt import BootstrapFewShot from dotenv import load_dotenv from chain import StateExtractor, format_chat_history from response_metric import metric @@ -66,7 +66,7 @@ async def chat( session: Session, chat_history: List[Message], input: str, - optimization_threshold=5, + optimization_threshold=3, ): user_state_storage = dict(session.user.metadata) # first we need to see if the user has any existing states @@ -87,7 +87,6 @@ async def chat( user_chat_module = ChatWithThought() - # TODO: you'd want to initialize user state 
object from Honcho # Save the user_state if it's new if is_state_new: user_state_storage[user_state] = {"chat_module": {}, "examples": []} @@ -95,7 +94,6 @@ async def chat( user_state_data = user_state_storage[user_state] # Optimize the state's chat module if we've reached the optimization threshold - # TODO: read in examples from Honcho User Object examples = user_state_data["examples"] print(f"Num examples: {len(examples)}") session.user.update(metadata=user_state_storage) @@ -106,31 +104,19 @@ async def chat( for example in examples: optimizer_example = Example(**example).with_inputs("chat_input", "response", "assessment_dimension") optimizer_examples.append(optimizer_example) - print(isinstance(optimizer_example, Example)) - print(optimizer_example._store) - # examples = [dspy.Example(example).with_inputs("chat_input", "response", "assessment_dimension") for example in examples] - # print(examples) - # Splitting the examples list into train and validation sets - train_examples = examples[:-1] # All but the last item for training - val_examples = examples[-1:] # The last item for validation # Optimize chat module - optimizer = BootstrapFewShotWithRandomSearch(metric=metric, max_bootstrapped_demos=3, max_labeled_demos=3, num_candidate_programs=10, num_threads=4) - # optimizer = BootstrapFewShot(metric=metric, max_rounds=5) + optimizer = BootstrapFewShot(metric=metric, max_rounds=5) - compiled_chat_module = optimizer.compile(ChatWithThought(), trainset=train_examples, valset=val_examples) - # compiled_chat_module = optimizer.compile(user_chat_module, trainset=optimizer_examples) + compiled_chat_module = optimizer.compile(user_chat_module, trainset=optimizer_examples) print(f"COMPILED_CHAT_MODULE: {compiled_chat_module}") - # user_state_data["chat_module"] = compiled_chat_module.dump_state() user_state_storage[user_state][ "chat_module" ] = compiled_chat_module.dump_state() print(f"DUMPED_STATE: {compiled_chat_module.dump_state()}") user_chat_module = compiled_chat_module - # save to file for debugging purposes - # compiled_chat_module.save("module.json") # Update User in Honcho session.user.update(metadata=user_state_storage) diff --git a/example/discord/honcho-dspy-personas/response_metric.py b/example/discord/honcho-dspy-personas/response_metric.py index 3d9a5f1..09ffeb5 100644 --- a/example/discord/honcho-dspy-personas/response_metric.py +++ b/example/discord/honcho-dspy-personas/response_metric.py @@ -30,31 +30,4 @@ def metric(example, pred, trace=None): gpt4T.inspect_history(n=5) print("======== END OPTIMIZER HISTORY ========") - return is_appropriate - - - - - - -# def metric(example, ai_response, trace=None): -# """Assess the quality of a response along the specified dimension.""" -# example = dspy.Example(**example).with_inputs("chat_input", "ai_response", "assessment_dimension") - -# label = example.label -# chat_input = example.chat_input -# ai_response = example.ai_response -# assessment_dimension = example.assessment_dimension - -# with dspy.context(lm=gpt4T): -# assessment_result = dspy.Predict(MessageResponseAssess)( -# chat_input=chat_input, -# ai_response=ai_response, -# assessment_dimension=assessment_dimension -# ) - -# is_positive = assessment_result.assessment_answer.lower() == 'positive' - -# gpt4T.inspect_history(n=3) - -# return is_positive \ No newline at end of file + return is_appropriate \ No newline at end of file