fix(core): Fix chat tracer no spans bug
fangyinc committed Oct 24, 2023
1 parent 48cd2d6 commit e5e4f54
Showing 9 changed files with 24 additions and 15 deletions.
@@ -1,7 +1,6 @@
from pandas import DataFrame

from pilot.base_modules.agent.commands.command_mange import command
from pilot.configs.config import Config
import pandas as pd
import uuid
import os
2 changes: 1 addition & 1 deletion pilot/base_modules/agent/hub/agent_hub.py
@@ -12,7 +12,7 @@
from ..common.schema import PluginStorageType
from ..plugins_util import scan_plugins, update_from_git

logger = logging.getLogger("agent_hub")
logger = logging.getLogger(__name__)
Default_User = "default"
DEFAULT_PLUGIN_REPO = "https://github.com/eosphoros-ai/DB-GPT-Plugins.git"
TEMP_PLUGIN_PATH = ""
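Several files in this commit make the same change: module loggers with hard-coded names (and the custom pilot.logs logger) are replaced by the standard logging.getLogger(__name__) convention, so logger names follow the package hierarchy and can be configured centrally. A minimal sketch of the pattern, standard library only; the function and plugin names below are illustrative:

import logging

# One logger per module: __name__ resolves to the dotted module path
# (e.g. "pilot.base_modules.agent.hub.agent_hub"), so levels and
# handlers can be configured per package in one place.
logger = logging.getLogger(__name__)


def load_plugin(name: str) -> None:
    # Records inherit handler/level configuration from parent loggers.
    logger.info("loading plugin %s", name)


if __name__ == "__main__":
    # Typical one-time setup at the application entry point.
    logging.basicConfig(level=logging.INFO)
    load_plugin("db-gpt-plugins")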
4 changes: 3 additions & 1 deletion pilot/base_modules/agent/plugins_util.py
@@ -9,6 +9,7 @@
import git
import threading
import datetime
import logging
from pathlib import Path
from typing import List
from urllib.parse import urlparse
@@ -19,7 +20,8 @@

from pilot.configs.config import Config
from pilot.configs.model_config import PLUGINS_DIR
from pilot.logs import logger

logger = logging.getLogger(__name__)


def inspect_zip_for_modules(zip_path: str, debug: bool = False) -> list[str]:
2 changes: 1 addition & 1 deletion pilot/base_modules/meta_data/meta_data.py
@@ -20,7 +20,7 @@
from pilot.configs.config import Config


logger = logging.getLogger("meta_data")
logger = logging.getLogger(__name__)

CFG = Config()
default_db_path = os.path.join(os.getcwd(), "meta_data")
3 changes: 2 additions & 1 deletion pilot/logs.py
@@ -249,7 +249,8 @@ def remove_color_codes(s: str) -> str:
return ansi_escape.sub("", s)


logger: Logger = Logger()
# Remove current logger
# logger: Logger = Logger()


def print_assistant_thoughts(
2 changes: 1 addition & 1 deletion pilot/memory/chat_history/store_type/meta_db_history.py
@@ -14,7 +14,7 @@
from pilot.memory.chat_history.base import MemoryStoreType

CFG = Config()
logger = logging.getLogger("db_chat_history")
logger = logging.getLogger(__name__)


class DbHistoryMemory(BaseChatHistoryMemory):
13 changes: 10 additions & 3 deletions pilot/openapi/api_v1/api_v1.py
@@ -47,6 +47,7 @@
from pilot.memory.chat_history.chat_hisotry_factory import ChatHistory
from pilot.model.cluster import BaseModelController, WorkerManager, WorkerManagerFactory
from pilot.model.base import FlatSupportedModel
from pilot.utils.tracer import root_tracer, SpanType
from pilot.utils.executor_utils import ExecutorFactory, blocking_func_to_async

router = APIRouter()
@@ -389,7 +390,10 @@ async def chat_completions(dialogue: ConversationVo = Body()):
print(
f"chat_completions:{dialogue.chat_mode},{dialogue.select_param},{dialogue.model_name}"
)
chat: BaseChat = await get_chat_instance(dialogue)
with root_tracer.start_span(
"get_chat_instance", span_type=SpanType.CHAT, metadata=dialogue.dict()
):
chat: BaseChat = await get_chat_instance(dialogue)
# background_tasks = BackgroundTasks()
# background_tasks.add_task(release_model_semaphore)
headers = {
@@ -440,8 +444,9 @@ async def model_supports(worker_manager: WorkerManager = Depends(get_worker_mana


async def no_stream_generator(chat):
msg = await chat.nostream_call()
yield f"data: {msg}\n\n"
with root_tracer.start_span("no_stream_generator"):
msg = await chat.nostream_call()
yield f"data: {msg}\n\n"


async def stream_generator(chat, incremental: bool, model_name: str):
@@ -458,6 +463,7 @@ async def stream_generator(chat, incremental: bool, model_name: str):
Yields:
_type_: streaming responses
"""
span = root_tracer.start_span("stream_generator")
msg = "[LLM_ERROR]: llm server has no output, maybe your prompt template is wrong."

stream_id = f"chatcmpl-{str(uuid.uuid1())}"
@@ -483,6 +489,7 @@ async def stream_generator(chat, incremental: bool, model_name: str):
await asyncio.sleep(0.02)
if incremental:
yield "data: [DONE]\n\n"
span.end()


def message2Vo(message: dict, order, model_name) -> MessageVo:
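The tracer calls added in this file use root_tracer.start_span in two forms: as a context manager around short calls such as get_chat_instance and no_stream_generator, and with an explicit span.end() in stream_generator, where the span must stay open across the generator's yields. A self-contained sketch of both forms; the Span and Tracer stubs below are illustrative stand-ins, not the real pilot.utils.tracer implementation:

import asyncio
from typing import AsyncIterator, Optional


class Span:
    # Illustrative stand-in for a tracer span.
    def __init__(self, name: str, metadata: Optional[dict] = None):
        self.name = name
        self.metadata = metadata or {}

    def end(self) -> None:
        print(f"span ended: {self.name}")

    def __enter__(self) -> "Span":
        return self

    def __exit__(self, *exc) -> None:
        self.end()


class Tracer:
    # Illustrative stand-in for root_tracer.
    def start_span(
        self,
        name: str,
        span_type: Optional[str] = None,
        metadata: Optional[dict] = None,
    ) -> Span:
        return Span(name, metadata)


root_tracer = Tracer()


async def stream_generator() -> AsyncIterator[str]:
    # Mirrors the commit: the span is opened on entry and closed
    # explicitly after the last chunk has been yielded.
    span = root_tracer.start_span("stream_generator")
    for chunk in ("a", "b", "c"):
        await asyncio.sleep(0.02)
        yield f"data: {chunk}\n\n"
    span.end()


async def main() -> None:
    # Context-manager form for short, non-yielding work.
    with root_tracer.start_span("get_chat_instance", span_type="chat", metadata={}):
        pass
    async for data in stream_generator():
        print(data, end="")


if __name__ == "__main__":
    asyncio.run(main())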
5 changes: 0 additions & 5 deletions pilot/scene/chat_agent/out_parser.py
@@ -1,11 +1,6 @@
import json
from typing import Dict, NamedTuple
from pilot.utils import build_logger
from pilot.out_parser.base import BaseOutputParser, T
from pilot.configs.model_config import LOGDIR


logger = build_logger("webserver", LOGDIR + "DbChatOutputParser.log")


class PluginAction(NamedTuple):
7 changes: 6 additions & 1 deletion pilot/scene/chat_factory.py
@@ -1,5 +1,6 @@
from pilot.scene.base_chat import BaseChat
from pilot.singleton import Singleton
from pilot.utils.tracer import root_tracer


class ChatFactory(metaclass=Singleton):
@@ -20,7 +21,11 @@ def get_implementation(chat_mode, **kwargs):
implementation = None
for cls in chat_classes:
if cls.chat_scene == chat_mode:
implementation = cls(**kwargs)
metadata = {"cls": str(cls), "params": kwargs}
with root_tracer.start_span(
"get_implementation_of_chat", metadata=metadata
):
implementation = cls(**kwargs)
if implementation == None:
raise Exception(f"Invalid implementation name:{chat_mode}")
return implementation
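A note on the change above: only the branch whose chat_scene matches is wrapped, so each request produces exactly one get_implementation_of_chat span rather than one per registered class, and the attached metadata (cls and params) records which chat implementation was constructed with which arguments.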
