feat(deriver): Implement Baseline VOE into deriver (#62)
* initial prompts, logic

* ready for process_ai_message fn

* voe deriver logic probably mostly complete, switch to anthropic too

* docker compose working

* verbosity and fixes

* add colors

* Remove logging for debugging

* metamessage query not working

* use the right message id

* shit show status

* call sql directly

* mvp

* chore: Azure and New Mirascope

* fix check dups

* fix fn def

* code review

---------

Co-authored-by: vintro <[email protected]>
VVoruganti and vintrocode committed Jun 1, 2024
1 parent a4632d4 commit 659b13a
Showing 7 changed files with 691 additions and 414 deletions.
9 changes: 9 additions & 0 deletions .env.template
@@ -3,11 +3,20 @@ CONNECTION_URI=postgresql+psycopg://testuser:testpwd@localhost:5432/honcho # sam
 # CONNECTION_URI=postgresql+psycopg://testuser:testpwd@database:5432/honcho # sample for docker-compose database
 
 OPENAI_API_KEY=
+ANTHROPIC_API_KEY=
 
+# Azure
+
+AZURE_OPENAI_ENDPOINT=
+AZURE_OPENAI_API_KEY=
+AZURE_OPENAI_API_VERSION=
+AZURE_OPENAI_DEPLOYMENT=
+
 # Logging
 
 OPENTELEMETRY_ENABLED=false # Set to true to enable OpenTelemetry logging and tracing
 SENTRY_ENABLED=false # Set to true to enable Sentry logging and tracing
+LOGFIRE_TOKEN= # optional logfire config
 
 # Auth
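The new Azure variables are consumed further down in src/agent.py through Mirascope's azure_client_wrapper. As a minimal sketch of what they map to, assuming only the openai Python SDK (already a project dependency) and the variable names added above, a raw Azure client could be built like this; note that on Azure the "model" argument is the deployment name, not an OpenAI model id:

import os

from dotenv import load_dotenv
from openai import AzureOpenAI

load_dotenv()  # reads the variables declared in .env.template

# Assumption: the four AZURE_* variables are populated. The PR itself wires
# them into Mirascope's azure_client_wrapper rather than a raw client.
client = AzureOpenAI(
    api_key=os.getenv("AZURE_OPENAI_API_KEY"),
    api_version=os.getenv("AZURE_OPENAI_API_VERSION"),
    azure_endpoint=os.getenv("AZURE_OPENAI_ENDPOINT"),
)

# On Azure, pass the deployment name where an OpenAI model id would normally go.
completion = client.chat.completions.create(
    model=os.getenv("AZURE_OPENAI_DEPLOYMENT"),
    messages=[{"role": "user", "content": "ping"}],
)
print(completion.choices[0].message.content)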
515 changes: 254 additions & 261 deletions poetry.lock

Large diffs are not rendered by default.

13 changes: 6 additions & 7 deletions pyproject.toml
@@ -16,18 +16,17 @@ fastapi-pagination = "^0.12.24"
 pgvector = "^0.2.5"
 openai = "^1.12.0"
 sentry-sdk = "^2.3.0"
-opentelemetry-instrumentation-fastapi = "^0.44b0"
-opentelemetry-api = "^1.23.0"
-opentelemetry-sdk = "^1.23.0"
-opentelemetry-exporter-otlp = "^1.23.0"
-opentelemetry-instrumentation-sqlalchemy = "^0.44b0"
-opentelemetry-instrumentation-logging = "^0.44b0"
 greenlet = "^3.0.3"
 psycopg = {extras= ["binary"], version="^3.1.19"}
 httpx = "^0.27.0"
 uvloop = "^0.19.0"
 httptools = "^0.6.1"
-mirascope = "^0.14.0"
+mirascope = "^0.15.1"
+opentelemetry-instrumentation-fastapi = "^0.45b0"
+opentelemetry-sdk = "^1.24.0"
+opentelemetry-exporter-otlp = "^1.24.0"
+opentelemetry-instrumentation-sqlalchemy = "^0.45b0"
+opentelemetry-instrumentation-logging = "^0.45b0"
 
 [tool.ruff.lint]
 # from https://docs.astral.sh/ruff/linter/#rule-selection example
18 changes: 16 additions & 2 deletions src/agent.py
@@ -1,7 +1,9 @@
+import os
 import uuid
 
 from dotenv import load_dotenv
-from mirascope.openai import OpenAICall, OpenAICallParams
+from mirascope.base import BaseConfig
+from mirascope.openai import OpenAICall, OpenAICallParams, azure_client_wrapper
 from sqlalchemy.ext.asyncio import AsyncSession
 
 from . import crud, schemas
@@ -21,7 +23,19 @@ class Dialectic(OpenAICall):
     agent_input: str
     retrieved_facts: str
 
-    call_params = OpenAICallParams(model="gpt-4o-2024-05-13")
+    configuration = BaseConfig(
+        client_wrappers=[
+            azure_client_wrapper(
+                api_key=os.getenv("AZURE_OPENAI_API_KEY"),
+                api_version=os.getenv("AZURE_OPENAI_API_VERSION"),
+                azure_endpoint=os.getenv("AZURE_OPENAI_ENDPOINT"),
+            )
+        ]
+    )
+    call_params = OpenAICallParams(
+        model=os.getenv("AZURE_OPENAI_DEPLOYMENT"), temperature=1.2, top_p=0.5
+    )
+    # call_params = OpenAICallParams(model="gpt-4o-2024-05-13")
 
 
 async def prep_inference(
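For context, here is a minimal, self-contained sketch of how an Azure-backed call class like Dialectic above would be exercised, assuming Mirascope 0.15's OpenAICall interface (a prompt_template class attribute plus call()/call_async()). The class name, prompt, and script scaffolding are illustrative assumptions, not part of the PR:

import asyncio
import os

from dotenv import load_dotenv
from mirascope.base import BaseConfig
from mirascope.openai import OpenAICall, OpenAICallParams, azure_client_wrapper

load_dotenv()


class AzureSmokeTest(OpenAICall):
    """Hypothetical call class mirroring the Dialectic setup in src/agent.py."""

    prompt_template = "Reply with the single word: pong"

    # Same wiring as the diff above: route the OpenAI client through Azure.
    configuration = BaseConfig(
        client_wrappers=[
            azure_client_wrapper(
                api_key=os.getenv("AZURE_OPENAI_API_KEY"),
                api_version=os.getenv("AZURE_OPENAI_API_VERSION"),
                azure_endpoint=os.getenv("AZURE_OPENAI_ENDPOINT"),
            )
        ]
    )
    # With Azure, the model field carries the deployment name.
    call_params = OpenAICallParams(model=os.getenv("AZURE_OPENAI_DEPLOYMENT"))


async def main() -> None:
    response = await AzureSmokeTest().call_async()  # sync .call() also exists
    print(response.content)


if __name__ == "__main__":
    asyncio.run(main())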
