Skip to content

Commit

Permalink
feat: improve otel events to include more of the prompt context
Browse files Browse the repository at this point in the history
  • Loading branch information
elviskahoro committed Oct 15, 2024
1 parent c135e98 commit a8dfefa
Showing 1 changed file with 44 additions and 33 deletions.
77 changes: 44 additions & 33 deletions chat_v2/chat_v2/page_chat/chat_state.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,20 +8,25 @@
from openai import OpenAI

# Import open-telemetry dependencies
from opentelemetry import trace as trace_api
from opentelemetry import trace
from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter
from opentelemetry.sdk import trace as trace_sdk
from opentelemetry.sdk.resources import Resource
from opentelemetry.sdk.trace.export import BatchSpanProcessor, ConsoleSpanExporter
from openinference.semconv.trace import SpanAttributes

from openinference.instrumentation import using_prompt_template
from sqlalchemy import or_, select
from together import Together

from .chat_messages.model_chat_interaction import ChatInteraction

AI_MODEL: str = "UNKNOWN"
AI_MODEL: str = "UNKNOWN"
OTEL_HEADERS: str | None = None
OTEL_ENDPOINT: str | None = None
RUN_WITH_OTEL: bool = False


def get_ai_client() -> OpenAI | Together:
ai_provider = os.environ.get("AI_PROVIDER")
match ai_provider:
Expand Down Expand Up @@ -64,7 +69,6 @@ def get_otel_headers() -> None:
OTEL_ENDPOINT = "https://otlp.arize.com/v1"
RUN_WITH_OTEL = True


case "phoenix":
OTEL_HEADERS = f"api_key={os.environ.get('PHOENIX_API_KEY')}"
os.environ["PHOENIX_CLIENT_HEADERS"] = OTEL_HEADERS
Expand Down Expand Up @@ -99,19 +103,19 @@ def get_otel_headers() -> None:
tracer_provider.add_span_processor(
BatchSpanProcessor(
OTLPSpanExporter(
endpoint=OTEL_ENDPOINT,
endpoint=OTEL_ENDPOINT,
),
),
),
)
)
else:
tracer_provider.add_span_processor(BatchSpanProcessor(ConsoleSpanExporter()))

trace_api.set_tracer_provider(
trace.set_tracer_provider(
tracer_provider=tracer_provider,
)

# To get your tracer
tracer = trace_api.get_tracer(__name__)
tracer = trace.get_tracer(__name__)


MAX_QUESTIONS = 10
Expand Down Expand Up @@ -376,31 +380,38 @@ def add_new_chat_interaction() -> None:

yield

stream = await _fetch_chat_completion_session(prompt)
clear_ui_loading_state()
add_new_chat_interaction()
yield

try:
for item in stream:
if item.choices and item.choices[0] and item.choices[0].delta:
answer_text = item.choices[0].delta.content
# Ensure answer_text is not None before concatenation
if answer_text is not None:
self.chat_history[-1].answer += answer_text

else:
answer_text = ""
self.chat_history[-1].answer += answer_text

yield rx.scroll_to(
elem_id=INPUT_BOX_ID,
)

except StopAsyncIteration:
raise

self.result = self.chat_history[-1].answer
with using_prompt_template(
template=prompt,
):
stream = await _fetch_chat_completion_session(prompt)
clear_ui_loading_state()
add_new_chat_interaction()
yield

try:
for item in stream:
if item.choices and item.choices[0] and item.choices[0].delta:
answer_text = item.choices[0].delta.content
# Ensure answer_text is not None before concatenation
if answer_text is not None:
self.chat_history[-1].answer += answer_text

else:
answer_text = ""
self.chat_history[-1].answer += answer_text

yield rx.scroll_to(
elem_id=INPUT_BOX_ID,
)

except StopAsyncIteration:
raise

self.result = self.chat_history[-1].answer
trace.get_current_span().set_attribute(
SpanAttributes.OUTPUT_VALUE,
self.result,
)

self._save_resulting_chat_interaction(
chat_interaction=self.chat_history[-1],
Expand Down

0 comments on commit a8dfefa

Please sign in to comment.