diff --git a/dbgpt/app/openapi/api_v2.py b/dbgpt/app/openapi/api_v2.py index 8141aba2b..92857eadd 100644 --- a/dbgpt/app/openapi/api_v2.py +++ b/dbgpt/app/openapi/api_v2.py @@ -72,7 +72,6 @@ async def check_api_key( @router.post("/v2/chat/completions", dependencies=[Depends(check_api_key)]) async def chat_completions( request: ChatCompletionRequestBody = Body(), - flow_service: FlowService = Depends(get_chat_flow), ): """Chat V2 completions Args: @@ -121,7 +120,9 @@ async def chat_completions( media_type="text/event-stream", ) elif ( - request.chat_mode is None or request.chat_mode == ChatMode.CHAT_KNOWLEDGE.value + request.chat_mode is None + or request.chat_mode == ChatMode.CHAT_NORMAL.value + or request.chat_mode == ChatMode.CHAT_KNOWLEDGE.value ): with root_tracer.start_span( "get_chat_instance", span_type=SpanType.CHAT, metadata=request.dict() diff --git a/dbgpt/client/app.py b/dbgpt/client/app.py index f25589d26..594576cb0 100644 --- a/dbgpt/client/app.py +++ b/dbgpt/client/app.py @@ -1,21 +1,49 @@ """App Client API.""" -from dbgpt.client.client import Client +from typing import List +from dbgpt.client.client import Client, ClientException +from dbgpt.client.schemas import AppModel +from dbgpt.serve.core import Result -async def get_app(client: Client, app_id: str): + +async def get_app(client: Client, app_id: str) -> AppModel: """Get an app. Args: client (Client): The dbgpt client. app_id (str): The app id. + Returns: + AppModel: The app model. + Raises: + ClientException: If the request failed. """ - return await client.get("/apps/" + app_id) + try: + res = await client.get("/apps/" + app_id) + result: Result = res.json() + if result["success"]: + return AppModel(**result["data"]) + else: + raise ClientException(status=result["err_code"], reason=result) + except Exception as e: + raise ClientException(f"Failed to get app: {e}") -async def list_app(client: Client): +async def list_app(client: Client) -> List[AppModel]: """List apps. Args: client (Client): The dbgpt client. + Returns: + List[AppModel]: The list of app models. + Raises: + ClientException: If the request failed. """ - return await client.get("/apps") + try: + res = await client.get("/apps") + result: Result = res.json() + if result["success"]: + return [AppModel(**app) for app in result["data"]["app_list"]] + else: + raise ClientException(status=result["err_code"], reason=result) + except Exception as e: + raise ClientException(f"Failed to list apps: {e}") diff --git a/dbgpt/client/client.py b/dbgpt/client/client.py index 84a20b238..b2c7f40ad 100644 --- a/dbgpt/client/client.py +++ b/dbgpt/client/client.py @@ -24,17 +24,11 @@ def __init__(self, status=None, reason=None, http_resp=None): reason: Optional[str], the reason for the exception. http_resp: Optional[httpx.Response], the HTTP response object. 
""" - reason = json.loads(reason) - if http_resp: - self.status = http_resp.status_code - self.reason = http_resp.content - self.body = http_resp.content - self.headers = None - else: - self.status = status - self.reason = reason - self.body = None - self.headers = None + self.status = status + self.reason = reason + self.http_resp = http_resp + self.headers = http_resp.headers if http_resp else None + self.body = http_resp.text if http_resp else None def __str__(self): """Return the error message.""" diff --git a/dbgpt/client/flow.py b/dbgpt/client/flow.py index cb58e325d..489b99ae2 100644 --- a/dbgpt/client/flow.py +++ b/dbgpt/client/flow.py @@ -1,55 +1,114 @@ """this module contains the flow client functions.""" -from dbgpt.client.client import Client +from typing import List + +from dbgpt.client.client import Client, ClientException from dbgpt.core.awel.flow.flow_factory import FlowPanel +from dbgpt.serve.core import Result -async def create_flow(client: Client, flow: FlowPanel): +async def create_flow(client: Client, flow: FlowPanel) -> FlowPanel: """Create a new flow. Args: client (Client): The dbgpt client. flow (FlowPanel): The flow panel. """ - return await client.get("/awel/flows", flow.dict()) + try: + res = await client.get("/awel/flows", flow.dict()) + result: Result = res.json() + if result["success"]: + return FlowPanel(**result["data"]) + else: + raise ClientException(status=result["err_code"], reason=result) + except Exception as e: + raise ClientException(f"Failed to create flow: {e}") -async def update_flow(client: Client, flow: FlowPanel): +async def update_flow(client: Client, flow: FlowPanel) -> FlowPanel: """Update a flow. Args: client (Client): The dbgpt client. flow (FlowPanel): The flow panel. + Returns: + FlowPanel: The flow panel. + Raises: + ClientException: If the request failed. """ - return await client.put("/awel/flows", flow.dict()) + try: + res = await client.put("/awel/flows", flow.dict()) + result: Result = res.json() + if result["success"]: + return FlowPanel(**result["data"]) + else: + raise ClientException(status=result["err_code"], reason=result) + except Exception as e: + raise ClientException(f"Failed to update flow: {e}") -async def delete_flow(client: Client, flow_id: str): +async def delete_flow(client: Client, flow_id: str) -> FlowPanel: """ Delete a flow. Args: client (Client): The dbgpt client. flow_id (str): The flow id. + Returns: + FlowPanel: The flow panel. + Raises: + ClientException: If the request failed. """ - return await client.get("/awel/flows/" + flow_id) + try: + res = await client.delete("/awel/flows/" + flow_id) + result: Result = res.json() + if result["success"]: + return FlowPanel(**result["data"]) + else: + raise ClientException(status=result["err_code"], reason=result) + except Exception as e: + raise ClientException(f"Failed to delete flow: {e}") -async def get_flow(client: Client, flow_id: str): +async def get_flow(client: Client, flow_id: str) -> FlowPanel: """ Get a flow. Args: client (Client): The dbgpt client. flow_id (str): The flow id. + Returns: + FlowPanel: The flow panel. + Raises: + ClientException: If the request failed. 
""" - return await client.get("/awel/flows/" + flow_id) + try: + res = await client.get("/awel/flows/" + flow_id) + result: Result = res.json() + if result["success"]: + return FlowPanel(**result["data"]) + else: + raise ClientException(status=result["err_code"], reason=result) + except Exception as e: + raise ClientException(f"Failed to get flow: {e}") -async def list_flow(client: Client): +async def list_flow(client: Client) -> List[FlowPanel]: """ List flows. Args: client (Client): The dbgpt client. + Returns: + List[FlowPanel]: The list of flow panels. + Raises: + ClientException: If the request failed. """ - return await client.get("/awel/flows") + try: + res = await client.get("/awel/flows") + result: Result = res.json() + if result["success"]: + return [FlowPanel(**flow) for flow in result["data"]["items"]] + else: + raise ClientException(status=result["err_code"], reason=result) + except Exception as e: + raise ClientException(f"Failed to list flows: {e}") diff --git a/dbgpt/client/knowledge.py b/dbgpt/client/knowledge.py index ed5950ceb..5ed1cdd71 100644 --- a/dbgpt/client/knowledge.py +++ b/dbgpt/client/knowledge.py @@ -1,104 +1,220 @@ """Knowledge API client.""" import json +from typing import List -from dbgpt.client.client import Client +from dbgpt.client.client import Client, ClientException from dbgpt.client.schemas import DocumentModel, SpaceModel, SyncModel +from dbgpt.serve.core import Result -async def create_space(client: Client, app_model: SpaceModel): +async def create_space(client: Client, space_model: SpaceModel) -> SpaceModel: """Create a new space. Args: client (Client): The dbgpt client. - app_model (SpaceModel): The app model. + space_model (SpaceModel): The space model. + Returns: + SpaceModel: The space model. + Raises: + ClientException: If the request failed. """ - return await client.post("/knowledge/spaces", app_model.dict()) - - -async def update_space(client: Client, app_model: SpaceModel): + try: + res = await client.post("/knowledge/spaces", space_model.dict()) + result: Result = res.json() + if result["success"]: + return SpaceModel(**result["data"]) + else: + raise ClientException(status=result["err_code"], reason=result) + except Exception as e: + raise ClientException(f"Failed to create space: {e}") + + +async def update_space(client: Client, space_model: SpaceModel) -> SpaceModel: """Update a document. Args: client (Client): The dbgpt client. - app_model (SpaceModel): The app model. + space_model (SpaceModel): The space model. + Returns: + SpaceModel: The space model. + Raises: + ClientException: If the request failed. """ - return await client.put("/knowledge/spaces", app_model.dict()) - - -async def delete_space(client: Client, space_id: str): + try: + res = await client.put("/knowledge/spaces", space_model.dict()) + result: Result = res.json() + if result["success"]: + return SpaceModel(**result["data"]) + else: + raise ClientException(status=result["err_code"], reason=result) + except Exception as e: + raise ClientException(f"Failed to update space: {e}") + + +async def delete_space(client: Client, space_id: str) -> SpaceModel: """Delete a space. Args: client (Client): The dbgpt client. - app_id (str): The app id. + space_id (str): The space id. + Returns: + SpaceModel: The space model. + Raises: + ClientException: If the request failed. 
""" - return await client.delete("/knowledge/spaces/" + space_id) - - -async def get_space(client: Client, space_id: str): + try: + res = await client.delete("/knowledge/spaces/" + space_id) + result: Result = res.json() + if result["success"]: + return SpaceModel(**result["data"]) + else: + raise ClientException(status=result["err_code"], reason=result) + except Exception as e: + raise ClientException(f"Failed to delete space: {e}") + + +async def get_space(client: Client, space_id: str) -> SpaceModel: """Get a document. Args: client (Client): The dbgpt client. - app_id (str): The app id. + space_id (str): The space id. + Returns: + SpaceModel: The space model. + Raises: + ClientException: If the request failed. """ - return await client.get("/knowledge/spaces/" + space_id) + try: + res = await client.get("/knowledge/spaces/" + space_id) + result: Result = res.json() + if result["success"]: + return SpaceModel(**result["data"]) + else: + raise ClientException(status=result["err_code"], reason=result) + except Exception as e: + raise ClientException(f"Failed to get space: {e}") -async def list_space(client: Client): - """List apps. +async def list_space(client: Client) -> List[SpaceModel]: + """List spaces. Args: client (Client): The dbgpt client. + Returns: + List[SpaceModel]: The list of space models. + Raises: + ClientException: If the request failed. """ - return await client.get("/knowledge/spaces") + try: + res = await client.get("/knowledge/spaces") + result: Result = res.json() + if result["success"]: + return [SpaceModel(**space) for space in result["data"]["items"]] + else: + raise ClientException(status=result["err_code"], reason=result) + except Exception as e: + raise ClientException(f"Failed to list spaces: {e}") -async def create_document(client: Client, doc_model: DocumentModel): - """Create a new space. +async def create_document(client: Client, doc_model: DocumentModel) -> DocumentModel: + """Create a new document. Args: client (Client): The dbgpt client. doc_model (SpaceModel): The document model. - """ - return await client.post_param("/knowledge/documents", doc_model.dict()) - -async def delete_document(client: Client, document_id: str): + """ + try: + res = await client.post_param("/knowledge/documents", doc_model.dict()) + result: Result = res.json() + if result["success"]: + return DocumentModel(**result["data"]) + else: + raise ClientException(status=result["err_code"], reason=result) + except Exception as e: + raise ClientException(f"Failed to create document: {e}") + + +async def delete_document(client: Client, document_id: str) -> DocumentModel: """Delete a document. Args: client (Client): The dbgpt client. - app_id (str): The app id. + document_id (str): The document id. + Returns: + DocumentModel: The document model. + Raises: + ClientException: If the request failed. """ - return await client.delete("/knowledge/documents/" + document_id) - - -async def get_document(client: Client, document_id: str): + try: + res = await client.delete("/knowledge/documents/" + document_id) + result: Result = res.json() + if result["success"]: + return DocumentModel(**result["data"]) + else: + raise ClientException(status=result["err_code"], reason=result) + except Exception as e: + raise ClientException(f"Failed to delete document: {e}") + + +async def get_document(client: Client, document_id: str) -> DocumentModel: """Get a document. Args: client (Client): The dbgpt client. - app_id (str): The app id. + document_id (str): The document id. 
+ Returns: + DocumentModel: The document model. + Raises: + ClientException: If the request failed. """ - return await client.get("/knowledge/documents/" + document_id) - - -async def list_document(client: Client): + try: + res = await client.get("/knowledge/documents/" + document_id) + result: Result = res.json() + if result["success"]: + return DocumentModel(**result["data"]) + else: + raise ClientException(status=result["err_code"], reason=result) + except Exception as e: + raise ClientException(f"Failed to get document: {e}") + + +async def list_document(client: Client) -> List[DocumentModel]: """List documents. Args: client (Client): The dbgpt client. """ - return await client.get("/knowledge/documents") - - -async def sync_document(client: Client, sync_model: SyncModel): + try: + res = await client.get("/knowledge/documents") + result: Result = res.json() + if result["success"]: + return [DocumentModel(**document) for document in result["data"]["items"]] + else: + raise ClientException(status=result["err_code"], reason=result) + except Exception as e: + raise ClientException(f"Failed to list documents: {e}") + + +async def sync_document(client: Client, sync_model: SyncModel) -> List: """Sync document. Args: client (Client): The dbgpt client. + sync_model (SyncModel): The sync model. + Returns: + List: The list of document ids. + Raises: + ClientException: If the request failed. """ - return await client.post( - "/knowledge/documents/sync", [json.loads(sync_model.json())] - ) + try: + res = await client.post( + "/knowledge/documents/sync", [json.loads(sync_model.json())] + ) + result: Result = res.json() + if result["success"]: + return result["data"] + else: + raise ClientException(status=result["err_code"], reason=result) + except Exception as e: + raise ClientException(f"Failed to list documents: {e}") diff --git a/dbgpt/client/schemas.py b/dbgpt/client/schemas.py index c2bb15524..aab0f0211 100644 --- a/dbgpt/client/schemas.py +++ b/dbgpt/client/schemas.py @@ -21,7 +21,7 @@ class ChatCompletionRequestBody(BaseModel): messages: Union[str, List[str]] = Field( ..., description="User input messages", examples=["Hello", "How are you?"] ) - stream: bool = Field(default=False, description="Whether return stream") + stream: bool = Field(default=True, description="Whether return stream") temperature: Optional[float] = Field( default=None, @@ -174,6 +174,10 @@ class AppModel(BaseModel): class SpaceModel(BaseModel): """Space model.""" + id: str = Field( + default=None, + description="space id", + ) name: str = Field( default=None, description="knowledge space name", @@ -190,6 +194,10 @@ class SpaceModel(BaseModel): default=None, description="space owner", ) + context: Optional[str] = Field( + default=None, + description="space argument context", + ) class DocumentModel(BaseModel): diff --git a/dbgpt/serve/flow/api/endpoints.py b/dbgpt/serve/flow/api/endpoints.py index 2b2d8b723..c3939ceb8 100644 --- a/dbgpt/serve/flow/api/endpoints.py +++ b/dbgpt/serve/flow/api/endpoints.py @@ -147,8 +147,8 @@ async def delete(uid: str, service: Service = Depends(get_service)) -> Result[No Returns: Result[None]: The response """ - service.delete(uid) - return Result.succ(None) + inst = service.delete(uid) + return Result.succ(inst) @router.get("/flows/{uid}") diff --git a/dbgpt/serve/rag/api/schemas.py b/dbgpt/serve/rag/api/schemas.py index 0ffb986d6..1b9a91357 100644 --- a/dbgpt/serve/rag/api/schemas.py +++ b/dbgpt/serve/rag/api/schemas.py @@ -15,11 +15,17 @@ class SpaceServeRequest(BaseModel): id: 
Optional[int] = Field(None, description="The space id") name: str = Field(None, description="The space name") """vector_type: vector type""" - vector_type: str = Field(None, description="The vector type") + vector_type: str = Field("Chroma", description="The vector type") """desc: description""" - desc: str = Field(None, description="The description") + desc: Optional[str] = Field(None, description="The description") """owner: owner""" - owner: str = Field(None, description="The owner") + owner: Optional[str] = Field(None, description="The owner") + """context: argument context""" + context: Optional[str] = Field(None, description="The context") + """gmt_created: created time""" + gmt_created: Optional[str] = Field(None, description="The created time") + """gmt_modified: modified time""" + gmt_modified: Optional[str] = Field(None, description="The modified time") class DocumentServeRequest(BaseModel): diff --git a/dbgpt/serve/rag/models/models.py b/dbgpt/serve/rag/models/models.py index 809bc3eea..f8fd2986c 100644 --- a/dbgpt/serve/rag/models/models.py +++ b/dbgpt/serve/rag/models/models.py @@ -38,7 +38,7 @@ def create_knowledge_space(self, space: SpaceServeRequest): session.commit() space_id = knowledge_space.id session.close() - return space_id + return self.to_response(knowledge_space) def get_knowledge_space(self, query: KnowledgeSpaceEntity): """Get knowledge space by query""" @@ -81,11 +81,21 @@ def get_knowledge_space(self, query: KnowledgeSpaceEntity): def update_knowledge_space(self, space: KnowledgeSpaceEntity): """Update knowledge space""" + session = self.get_raw_session() - session.merge(space) + request = SpaceServeRequest(id=space.id) + update_request = self.to_request(space) + query = self._create_query_object(session, request) + entry = query.first() + if entry is None: + raise Exception("Invalid request") + for key, value in update_request.dict().items(): # type: ignore + if value is not None: + setattr(entry, key, value) + session.merge(entry) session.commit() session.close() - return True + return self.to_response(space) def delete_knowledge_space(self, space: KnowledgeSpaceEntity): """Delete knowledge space""" @@ -127,6 +137,7 @@ def to_request(self, entity: KnowledgeSpaceEntity) -> SpaceServeRequest: vector_type=entity.vector_type, desc=entity.desc, owner=entity.owner, + context=entity.context, ) def to_response(self, entity: KnowledgeSpaceEntity) -> SpaceServeResponse: diff --git a/dbgpt/serve/rag/service/service.py b/dbgpt/serve/rag/service/service.py index 3910fcc34..4cb100c50 100644 --- a/dbgpt/serve/rag/service/service.py +++ b/dbgpt/serve/rag/service/service.py @@ -145,9 +145,7 @@ def update_space(self, request: SpaceServeRequest) -> SpaceServeResponse: status_code=400, detail=f"no space name named {request.name}", ) - space = spaces[0] - query_request = {"id": space.id} - update_obj = self._dao.update(query_request, update_request=request) + update_obj = self._dao.update_knowledge_space(self._dao.from_request(request)) return update_obj async def create_document( diff --git a/docs/docs/api/app.md b/docs/docs/api/app.md new file mode 100644 index 000000000..1972719cd --- /dev/null +++ b/docs/docs/api/app.md @@ -0,0 +1,188 @@ +# App + +Get started with the App API + +# Chat App + +```python +POST /api/v2/chat/completions +``` +### Examples + +import Tabs from '@theme/Tabs'; +import TabItem from '@theme/TabItem'; + +### Stream Chat App + + + + + + +```shell + DBGPT_API_KEY="dbgpt" + APP_ID="{YOUR_APP_ID}" + + curl -X POST 
"http://localhost:5000/api/v2/chat/completions" \ + -H "Authorization: Bearer $DBGPT_API_KEY" \ + -H "accept: application/json" \ + -H "Content-Type: application/json" \ + -d "{\"messages\":\"Hello\",\"model\":\"chatgpt_proxyllm\", \"chat_mode\": \"chat_app\", \"chat_param\": "$APP_ID"}" + +``` + + + + +```python +from dbgpt.client.client import Client + +DBGPT_API_KEY = "dbgpt" +APP_ID="{YOUR_APP_ID}" + +client = Client(api_key=DBGPT_API_KEY) +response = client.chat_stream(messages="Introduce AWEL", model="chatgpt_proxyllm", chat_mode="chat_app", chat_param=APP_ID) +``` + + + +### Chat Completion Stream Response +```commandline +data: {"id": "109bfc28-fe87-452c-8e1f-d4fe43283b7d", "created": 1710919480, "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": "```agent-plans\n[{\"name\": \"Introduce Awel\", \"num\": 2, \"status\": \"complete\", \"agent\": \"Human\", \"markdown\": \"```agent-messages\\n[{\\\"sender\\\": \\\"Summarizer\\\", \\\"receiver\\\": \\\"Human\\\", \\\"model\\\": \\\"chatgpt_proxyllm\\\", \\\"markdown\\\": \\\"Agentic Workflow Expression Language (AWEL) is a specialized language designed for developing large model applications with intelligent agent workflows. It offers flexibility and functionality, allowing developers to focus on business logic for LLMs applications without getting bogged down in model and environment details. AWEL uses a layered API design architecture, making it easier to work with. You can find examples and source code to get started with AWEL, and it supports various operators and environments. AWEL is a powerful tool for building native data applications through workflows and agents.\"}]\n```"}}]} + +data: [DONE] +``` +### Get App + +```python +GET /api/v2/serve/apps/{app_id} +``` + +#### Query Parameters +________ +app_id string Required + +app id +________ + +#### Response body +Return App Object + +### List App + +```python +GET /api/v2/serve/apps +``` + +#### Response body +Return App Object List + +### The App Model +________ +id string + +space id +________ +app_code string + +app code +________ +app_name string + +app name +________ + +app_describe string + +app describe +________ +team_mode string + +team mode +________ +language string + +language +________ +team_context string + +team context +________ +user_code string + +user code +________ +sys_code string + +sys code +________ +is_collected string + +is collected +________ +icon string + +icon +________ +created_at string + +created at +________ +updated_at string + +updated at +________ +details string + +app details List[AppDetailModel] +________ + +### The App Detail Model +________ +app_code string + +app code +________ +app_name string + +app name +________ +agent_name string + +agent name +________ +node_id string + +node id +________ +resources string + +resources +________ +prompt_template string + +prompt template +________ +llm_strategy string + +llm strategy +________ +llm_strategy_value string + +llm strategy value +________ +created_at string + +created at +________ +updated_at string + +updated at +________ diff --git a/docs/docs/api/chat.md b/docs/docs/api/chat.md new file mode 100644 index 000000000..5ea2d8ef6 --- /dev/null +++ b/docs/docs/api/chat.md @@ -0,0 +1,280 @@ +# Chat + +Given a list of messages comprising a conversation, the model will return a response. 
+ +# Create Chat Completion + +```python +POST /api/v2/chat/completions +``` + +### Examples + +import Tabs from '@theme/Tabs'; +import TabItem from '@theme/TabItem'; + +### Stream Chat Completion + + + + + + +```shell + DBGPT_API_KEY="dbgpt" + + curl -X POST "http://localhost:5000/api/v2/chat/completions" \ + -H "Authorization: Bearer $DBGPT_API_KEY" \ + -H "accept: application/json" \ + -H "Content-Type: application/json" \ + -d "{\"messages\":\"Hello\",\"model\":\"chatgpt_proxyllm\", \"stream\": true}" + +``` + + + + +```python +from dbgpt.client.client import Client + +DBGPT_API_KEY = "dbgpt" +client = Client(api_key=DBGPT_API_KEY) +response = client.chat_stream(messages="Hello", model="chatgpt_proxyllm") +``` + + + +### Chat Completion Stream Response +```commandline +data: {"id": "chatcmpl-ba6fb52e-e5b2-11ee-b031-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": "Hello"}}]} + +data: {"id": "chatcmpl-ba6fb52e-e5b2-11ee-b031-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": "!"}}]} + +data: {"id": "chatcmpl-ba6fb52e-e5b2-11ee-b031-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " How"}}]} + +data: {"id": "chatcmpl-ba6fb52e-e5b2-11ee-b031-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " can"}}]} + +data: {"id": "chatcmpl-ba6fb52e-e5b2-11ee-b031-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " I"}}]} + +data: {"id": "chatcmpl-ba6fb52e-e5b2-11ee-b031-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " assist"}}]} + +data: {"id": "chatcmpl-ba6fb52e-e5b2-11ee-b031-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " you"}}]} + +data: {"id": "chatcmpl-ba6fb52e-e5b2-11ee-b031-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " today"}}]} + +data: {"id": "chatcmpl-ba6fb52e-e5b2-11ee-b031-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": "?"}}]} + +data: [DONE] +``` + +### Chat Completion + + + + +```shell + DBGPT_API_KEY="dbgpt" + + curl -X POST "http://localhost:5000/api/v2/chat/completions" \ + -H "Authorization: Bearer $DBGPT_API_KEY" \ + -H "accept: application/json" \ + -H "Content-Type: application/json" \ + -d "{\"messages\":\"Hello\",\"model\":\"chatgpt_proxyllm\", \"stream\": false}" +``` + + + + +```python +from dbgpt.client.client import Client + +DBGPT_API_KEY = "dbgpt" +client = Client(api_key=DBGPT_API_KEY) +response = client.chat(messages="Hello", model="chatgpt_proxyllm") +``` + + + +### Chat Completion Response +```json +{ + "id": "a8321543-52e9-47a5-a0b6-3d997463f6a3", + "object": "chat.completion", + "created": 1710826792, + "model": "chatgpt_proxyllm", + "choices": [ + { + "index": 0, + "message": { + "role": "assistant", + "content": "Hello! How can I assist you today?" + }, + "finish_reason": null + } + ], + "usage": { + "prompt_tokens": 0, + "total_tokens": 0, + "completion_tokens": 0 + } +} +``` + + + +### Request body +________ +messages string Required + +A list of messages comprising the conversation so far. Example Python code. +________ +model string Required + +ID of the model to use. 
The model must be the name of a model deployed in your DB-GPT server, for example `chatgpt_proxyllm`. +________ +chat_mode string Optional + +The DB-GPT chat mode, which can be one of the following: `chat_normal`, `chat_app`, `chat_knowledge`, `chat_flow`, default is `chat_normal`. +________ +chat_param string Optional + +The chat param value of the chat mode: `{app_id}`, `{space_id}`, `{flow_id}`, default is `None`. +________ +max_new_tokens integer Optional + +The maximum number of tokens that can be generated in the chat completion. + +The total length of input tokens and generated tokens is limited by the model's context length. +________ +stream boolean Optional + +If set, partial message deltas will be sent. +Tokens will be sent as data-only server-sent events as they become available, with the stream terminated by a `data: [DONE]` message. +________ +temperature number Optional + +What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. +________ +conv_uid string Optional + +The conversation id of the model inference, default is `None`. +________ +span_id string Optional + +The span id of the model inference, default is `None`. +________ +sys_code string Optional + +The system code, default is `None`. +________ +user_name string Optional + +The web server user name, default is `None`. +________ + + +### Chat Stream Response Body +________ +id string + +conv_uid of the conversation. +________ +model string + +The model used for the chat completion. + +________ +created string + +The Unix timestamp (in seconds) of when the chat completion was created. +________ +choices array + +A list of chat completion choices. + + - index integer + + The index of the choice in the list of choices. + - delta object + + The chat completion delta. + - role string + + The role of the speaker. Can be `user` or `assistant`. + - content string + + The content of the message. + - finish_reason string + + The reason the chat completion finished. Can be `max_tokens` or `stop`. +________ + + +### Chat Response Body +________ +id string + +conv_uid of the conversation. +________ +model string + +The model used for the chat completion. + +________ +created string + +The Unix timestamp (in seconds) of when the chat completion was created. +________ +object string + +The object type of the chat completion. +________ +choices array + +A list of chat completion choices. + + - index integer + + The index of the choice in the list of choices. + + - delta object + + The chat completion delta. + - role string + + The role of the speaker. Can be `user` or `assistant`. + - content string + + The content of the message. + - finish_reason string + + The reason the chat completion finished. Can be `max_tokens` or `stop`. +________ +usage object + + The usage statistics for the chat completion. + - prompt_tokens integer + + The number of tokens in the prompt. + - total_tokens integer + + The total number of tokens in the chat completion. + - completion_tokens integer + + The number of tokens in the chat completion.
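Putting the request and response bodies above together, here is a minimal sketch that calls the chat completions endpoint directly over HTTP with `httpx` instead of the Python client; the local server address and the `dbgpt` API key are placeholder assumptions, and the parsing follows the Chat Response Body fields documented above.

```python
import asyncio

import httpx

BASE_URL = "http://localhost:5000/api/v2"  # assumption: local DB-GPT server
DBGPT_API_KEY = "dbgpt"  # assumption: one of the keys configured in API_KEYS


async def chat_once(messages: str, model: str) -> str:
    """Send a non-streaming chat completion request and return the reply text."""
    headers = {"Authorization": f"Bearer {DBGPT_API_KEY}"}
    payload = {"messages": messages, "model": model, "stream": False}
    async with httpx.AsyncClient(timeout=60) as client:
        res = await client.post(
            f"{BASE_URL}/chat/completions", json=payload, headers=headers
        )
        res.raise_for_status()
        body = res.json()
    # Per the Chat Response Body above, the answer is in choices[0].message.content.
    return body["choices"][0]["message"]["content"]


if __name__ == "__main__":
    print(asyncio.run(chat_once("Hello", "chatgpt_proxyllm")))
```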
+ + diff --git a/docs/docs/api/flow.md b/docs/docs/api/flow.md new file mode 100644 index 000000000..e91438c20 --- /dev/null +++ b/docs/docs/api/flow.md @@ -0,0 +1,306 @@ +# Flow + +Get started with the Flow API + +# Chat Flow + +```python +POST /api/v2/chat/completions +``` +### Examples + +import Tabs from '@theme/Tabs'; +import TabItem from '@theme/TabItem'; + +### Stream Chat Flow + + + + + + +```shell + DBGPT_API_KEY="dbgpt" + FLOW_ID="{YOUR_FLOW_ID}" + + curl -X POST "http://localhost:5000/api/v2/chat/completions" \ + -H "Authorization: Bearer $DBGPT_API_KEY" \ + -H "accept: application/json" \ + -H "Content-Type: application/json" \ + -d "{\"messages\":\"Hello\",\"model\":\"chatgpt_proxyllm\", \"chat_mode\": \"chat_flow\", \"chat_param\": "$FLOW_ID"}" + +``` + + + + +```python +from dbgpt.client.client import Client + +DBGPT_API_KEY = "dbgpt" +FLOW_ID="{YOUR_FLOW_ID}" + +client = Client(api_key=DBGPT_API_KEY) +response = client.chat_stream(messages="Hello", model="chatgpt_proxyllm", chat_mode="chat_flow", chat_param=FLOW_ID) +``` + + + +#### Chat Completion Stream Response +```commandline +data: {"id": "579f8862-fc4b-481e-af02-a127e6d036c8", "created": 1710918094, "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": "\n\n"}}]} +``` +### Create Flow + +```python +POST /api/v2/serve/awel/flows +``` +#### Request body +Request Flow Object + +#### Response body +Return Flow Object + + +### Update Flow + +```python +PUT /api/v2/serve/awel/flows +``` + +#### Request body +Request Flow Object + +#### Response body +Return Flow Object + +### Delete Flow + +```python +DELETE /api/v2/serve/awel/flows +``` + + + + + +```shell + DBGPT_API_KEY="dbgpt" + FLOW_ID="{YOUR_FLOW_ID}" + + curl -X DELETE "http://localhost:5000/api/v2/serve/awel/flows/$FLOW_ID" \ + -H "Authorization: Bearer $DBGPT_API_KEY" \ + +``` + + + + + +```python +from dbgpt.client.client import Client +from dbgpt.client.flow import delete_flow + +DBGPT_API_KEY = "dbgpt" +flow_id = "{your_flow_id}" + +client = Client(api_key=DBGPT_API_KEY) +res = await delete_flow(client=client, flow_id=flow_id) + +``` + + + + +#### Delete Parameters +________ +uid string Required + +flow id +________ + +#### Response body +Return Flow Object + +### Get Flow + +```python +GET /api/v2/serve/awel/flows/{flow_id} +``` + + + + +```shell + DBGPT_API_KEY="dbgpt" + FLOW_ID="{YOUR_FLOW_ID}" + + curl --location --request GET 'http://localhost:5000/api/v2/serve/awel/flows/$FLOW_ID' \ + --header 'Authorization: Bearer $DBGPT_API_KEY' +``` + + + + + +```python +from dbgpt.client.client import Client +from dbgpt.client.flow import get_flow + +DBGPT_API_KEY = "dbgpt" +flow_id = "{your_flow_id}" + +client = Client(api_key=DBGPT_API_KEY) +res = await get_flow(client=client, flow_id=flow_id) + +``` + + + + +#### Query Parameters +________ +uid string Required + +flow id +________ + +#### Response body +Return Flow Object + +### List Flow + +```python +GET /api/v2/serve/awel/flows +``` + + + + + + +```shell + DBGPT_API_KEY="dbgpt" + + curl -X GET "http://localhost:5000/api/v2/serve/awel/flows" \ + -H "Authorization: Bearer $DBGPT_API_KEY" \ +``` + + + + + +```python +from dbgpt.client.client import Client +from dbgpt.client.flow import list_flow + +DBGPT_API_KEY = "dbgpt" + +client = Client(api_key=DBGPT_API_KEY) +res = await list_flow(client=client) + +``` + + + + +#### Response body +Return Flow Object + +### The Flow Object + +________ +uid string + +The unique id for the flow. +________ +name string + +The name of the flow.
+________ +description string + +The description of the flow. +________ +label string + +The label of the flow. +________ +flow_category string + +The category of the flow. Default is FlowCategory.COMMON. +________ +flow_data object + +The flow data. +________ +state string + +The state of the flow. Default is INITIALIZING. +________ +error_message string + +The error message of the flow. +________ +source string + +The source of the flow. Default is DBGPT-WEB. +________ +source_url string + +The source url of the flow. +________ +version string + +The version of the flow. Default is 0.1.0. +________ +editable boolean + +Whether the flow is editable. Default is True. +________ +user_name string + +The user name of the flow. +________ +sys_code string + +The system code of the flow. +________ +dag_id string + +The dag id of the flow. +________ +gmt_created string + +The created time of the flow. +________ +gmt_modified string + +The modified time of the flow. +________ \ No newline at end of file diff --git a/docs/docs/api/introduction.md b/docs/docs/api/introduction.md new file mode 100644 index 000000000..d195b7c56 --- /dev/null +++ b/docs/docs/api/introduction.md @@ -0,0 +1,37 @@ +# Introduction + +This is the introduction to the DB-GPT API documentation. You can interact with the API through HTTP requests from any language, or via our official Python client bindings. + +# Authentication +The DB-GPT API uses API keys for authentication. Use the API key configured for your DB-GPT server (see the `API_KEYS` setting below) in your requests. + +Production requests must be routed through your own backend server where your API key can be securely loaded from an environment variable or key management service. + +All API requests should include your API key in an Authorization HTTP header as follows: + + ```http + Authorization: Bearer DBGPT_API_KEY + ``` +Example with the DB-GPT API curl command: + + ```bash + curl "http://localhost:5000/api/v2/chat/completions" \ + -H "Authorization: Bearer $DBGPT_API_KEY" \ + ``` +Example with the DB-GPT Client Python package: + + ```python + from dbgpt.client.client import Client + + DBGPT_API_KEY = "dbgpt" + client = Client(api_key=DBGPT_API_KEY) + ``` +Set the API key in the `.env` file as follows: +:::info note +API_KEYS - The list of API keys that are allowed to access the API. Multiple keys can be provided, separated by commas.
+::: +```python +API_KEYS=dbgpt +``` + + diff --git a/docs/docs/api/knowledge.md b/docs/docs/api/knowledge.md new file mode 100644 index 000000000..f2bdf2b32 --- /dev/null +++ b/docs/docs/api/knowledge.md @@ -0,0 +1,657 @@ +# Knowledge + +Get started with the Knowledge API + +# Chat Knowledge Space + +```python +POST /api/v2/chat/completions +``` +### Examples + +import Tabs from '@theme/Tabs'; +import TabItem from '@theme/TabItem'; + +### Chat Knowledge + + + + + + +```shell + DBGPT_API_KEY="dbgpt" + SPACE_NAME="{YOUR_SPACE_NAME}" + + curl --location --request POST 'http://127.0.0.1:5000/api/v2/chat/completions' \ +--header 'Authorization: Bearer $DBGPT_API_KEY' \ +--header 'Content-Type: application/json' \ +--data-raw '{ + + "model": "chatgpt_proxyllm", + "messages": "introduce awel", + "chat_mode":"chat_knowledge", + "chat_param":$SPACE_NAME +}' + +``` + + + + +```python +from dbgpt.client.client import Client + +DBGPT_API_KEY = "dbgpt" +SPACE_NAME="{YOUR_SPACE_NAME}" + +client = Client(api_key=DBGPT_API_KEY) +response = client.chat_stream(messages="Hello", model="chatgpt_proxyllm", chat_mode="chat_knowledge", chat_param=SPACE_NAME) +``` + + + +#### Chat Completion Response +```json +{ + "id": "acb050ab-eb2c-4754-97e4-6f3b94b7dac2", + "object": "chat.completion", + "created": 1710917272, + "model": "chatgpt_proxyllm", + "choices": [ + { + "index": 0, + "message": { + "role": "assistant", + "content": "Agentic Workflow Expression Language (AWEL) is a specialized language designed for developing large model applications with intelligent agent workflows. It offers flexibility and functionality, allowing developers to focus on business logic for LLMs applications without getting bogged down in model and environment details. AWEL uses a layered API design architecture, making it easier to work with. You can find examples and source code to get started with AWEL, and it supports various operators and environments. AWEL is a powerful tool for building native data applications through workflows and agents." 
+ }, + "finish_reason": null + } + ], + "usage": { + "prompt_tokens": 0, + "total_tokens": 0, + "completion_tokens": 0 + } +} +``` + +#### Chat Completion Stream Response +```commandline +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": "AW"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": "EL"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": ","}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " which"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " stands"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " for"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " Ag"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": "entic"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " Workflow"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " Expression"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " Language"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": ","}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " is"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " a"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " powerful"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " tool"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " designed"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " for"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " developing"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": 
"chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " large"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " model"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " applications"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": "."}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " It"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " simpl"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": "ifies"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " the"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " process"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " by"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " allowing"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " developers"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " to"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " focus"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " on"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " business"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " logic"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " without"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " getting"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " bog"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": 
"assistant", "content": "ged"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " down"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " in"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " complex"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " model"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " and"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " environment"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " details"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": "."}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " AW"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": "EL"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " offers"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " great"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " functionality"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " and"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " flexibility"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " through"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " its"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " layered"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " API"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " design"}}]} + +data: {"id": 
"chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " architecture"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": "."}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " It"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " provides"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " a"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " set"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " of"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " intelligent"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " agent"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " workflow"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " expression"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " language"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " that"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " enhances"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " efficiency"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " in"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " application"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " development"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": "."}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " If"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", 
"model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " you"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " want"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " to"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " learn"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " more"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " about"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " AW"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": "EL"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": ","}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " you"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " can"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " check"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " out"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " the"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " built"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": "-in"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " examples"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " and"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " resources"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " available"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": 
" on"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " platforms"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " like"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " Github"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": ","}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " Docker"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": "hub"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": ","}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " and"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " more"}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": "."}}]} + +data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": "\n\n"}}]} + +data: [DONE] +``` +### Create Knowledge Space + +```python +POST /api/v2/serve/knowledge/spaces +``` + + + + + + + +```shell + DBGPT_API_KEY="dbgpt" + + curl --location --request POST 'http://localhost:5000/api/v2/serve/knowledge/spaces' \ +--header 'Authorization: Bearer $DBGPT_API_KEY' \ +--header 'Content-Type: application/json' \ +--data-raw '{"desc": "for client space desc", "name": "test_space_2", "owner": "dbgpt", "vector_type": "Chroma" +}' +``` + + + + + +```python +from dbgpt.client.client import Client +from dbgpt.client.knowledge import create_space +from dbgpt.client.schemas import SpaceModel + +DBGPT_API_KEY = "dbgpt" + +client = Client(api_key=DBGPT_API_KEY) +res = await create_space(client,SpaceModel( + name="test_space", + vector_type="Chroma", + desc="for client space", + owner="dbgpt")) + +``` + + + + +#### Request body + +________ +name string Required + +knowledge space name +________ +vector_type string Required + +vector db type, `Chroma`, `Milvus`, default is `Chroma` +________ +desc string Optional + +description of the knowledge space +________ +owner integer Optional + +The owner of the knowledge space +________ +context integer Optional + +The context of the knowledge space argument +________ + +#### Response body +Return Space Object + +### Update Knowledge Space + +```python +PUT /api/v2/serve/knowledge/spaces +``` + + + + + +```shell + DBGPT_API_KEY="dbgpt" + + curl --location --request PUT 'http://localhost:5000/api/v2/serve/knowledge/spaces' \ +--header 'Authorization: Bearer $DBGPT_API_KEY' \ +--header 'Content-Type: application/json' \ +--data-raw '{"desc": "for client space desc 
v2", "id": "49", "name": "test_space_2", "owner": "dbgpt", "vector_type": "Chroma" +}' +``` + + + + + +```python +from dbgpt.client.client import Client +from dbgpt.client.knowledge import update_space +from dbgpt.client.schemas import SpaceModel + +DBGPT_API_KEY = "dbgpt" + +client = Client(api_key=DBGPT_API_KEY) +res = await update_space(client, SpaceModel( + name="test_space", + vector_type="Chroma", + desc="for client space update", + owner="dbgpt")) + +``` + + + + +#### Request body + +________ +id string Required + +knowledge space id +________ +name string Required + +knowledge space name +________ +vector_type string Optional + +vector db type, `Chroma`, `Milvus`, default is `Chroma` +________ +desc string Optional + +description of the knowledge space +________ +owner integer Optional + +The owner of the knowledge space +________ +context integer Optional + +The context of the knowledge space argument +________ + +#### Response body +Return Space Object + +### Delete Knowledge Space + +```python +DELETE /api/v2/serve/knowledge/spaces +``` + + + + + + +```shell + DBGPT_API_KEY="dbgpt" + SPACE_ID="{YOUR_SPACE_ID}" + + curl -X DELETE "http://localhost:5000/api/v2/serve/knowledge/spaces/$SPACE_ID" \ + -H "Authorization: Bearer $DBGPT_API_KEY" \ + -H "accept: application/json" \ + -H "Content-Type: application/json" \ +``` + + + + + +```python +from dbgpt.client.client import Client +from dbgpt.client.knowledge import delete_space + +DBGPT_API_KEY = "dbgpt" +space_id = "{your_space_id}" + +client = Client(api_key=DBGPT_API_KEY) +res = await delete_space(client=client, space_id=space_id) + +``` + + + + +#### Delete Parameters +________ +id string Required + +knowledge space id +________ + +#### Response body +Return Space Object + +### Get Knowledge Space + +```python +GET /api/v2/serve/knowledge/spaces/{space_id} +``` + + + + + +```shell + DBGPT_API_KEY="dbgpt" + SPACE_ID="{YOUR_SPACE_ID}" + + curl --location --request GET 'http://localhost:5000/api/v2/serve/knowledge/spaces/$SPACE_ID' \ + --header 'Authorization: Bearer $DBGPT_API_KEY' +``` + + + + + +```python +from dbgpt.client.client import Client +from dbgpt.client.knowledge import get_space + +DBGPT_API_KEY = "dbgpt" +space_id = "{your_space_id}" + +client = Client(api_key=DBGPT_API_KEY) +res = await get_space(client=client, space_id=space_id) + +``` + + + + +#### Query Parameters +________ +id string Required + +knowledge space id +________ + +#### Response body +Return Space Object + +### List Knowledge Space + +```python +GET /api/v2/serve/knowledge/spaces +``` + + + + + +```shell + DBGPT_API_KEY="dbgpt" + +curl --location --request GET 'http://localhost:5000/api/v2/serve/knowledge/spaces' \ +--header 'Authorization: Bearer dbgpt' +``` + + + + + +```python +from dbgpt.client.client import Client +from dbgpt.client.knowledge import list_space + +DBGPT_API_KEY = "dbgpt" +space_id = "{your_space_id}" + +client = Client(api_key=DBGPT_API_KEY) +res = await list_space(client=client) + +``` + + + + +#### Response body +Return Space Object List + +### The Space Object + +________ +id string + +space id +________ +name string + +knowledge space name +________ +vector_type string + +vector db type, `Chroma`, `Milvus`, default is `Chroma` +________ +desc string Optional + +description of the knowledge space +________ +owner integer Optional + +The owner of the knowledge space +________ +context integer Optional + +The context of the knowledge space argument +________ \ No newline at end of file diff --git a/docs/sidebars.js 
diff --git a/docs/sidebars.js b/docs/sidebars.js
index ecb2dbec6..4f76fdcc8 100755
--- a/docs/sidebars.js
+++ b/docs/sidebars.js
@@ -356,6 +356,39 @@ const sidebars = {
        },
      },
+    {
+      type: "category",
+      label: "API Reference",
+      collapsed: false,
+      collapsible: false,
+      items: [
+        {
+          type: 'doc',
+          id: 'api/introduction'
+        },
+        {
+          type: 'doc',
+          id: 'api/chat'
+        },
+        {
+          type: 'doc',
+          id: 'api/app'
+        },
+        {
+          type: 'doc',
+          id: 'api/flow'
+        },
+        {
+          type: 'doc',
+          id: 'api/knowledge'
+        },
+      ],
+      link: {
+        type: 'generated-index',
+        slug: "api",
+      },
+    },
+
     {
       type: "category",
       label: "Modules",
diff --git a/examples/client/app_crud_example.py b/examples/client/app_crud_example.py
index f634959a5..2f7e9bae5 100644
--- a/examples/client/app_crud_example.py
+++ b/examples/client/app_crud_example.py
@@ -27,7 +27,7 @@ async def main():
     DBGPT_API_KEY = "dbgpt"
     client = Client(api_key=DBGPT_API_KEY)
     res = await list_app(client)
-    print(res.json())
+    print(res)


 if __name__ == "__main__":
diff --git a/examples/client/flow_crud_example.py b/examples/client/flow_crud_example.py
index 90834b7e0..29dc95f87 100644
--- a/examples/client/flow_crud_example.py
+++ b/examples/client/flow_crud_example.py
@@ -1,6 +1,5 @@
 import asyncio

-from dbgpt.client.app import list_app
 from dbgpt.client.client import Client
 from dbgpt.client.flow import list_flow

@@ -40,7 +39,7 @@ async def main():
     DBGPT_API_KEY = "dbgpt"
     client = Client(api_key=DBGPT_API_KEY)
     res = await list_flow(client)
-    print(res.json())
+    print(res)


 if __name__ == "__main__":
diff --git a/examples/client/knowledge_crud_example.py b/examples/client/knowledge_crud_example.py
index 17a2fed01..b00d01e53 100644
--- a/examples/client/knowledge_crud_example.py
+++ b/examples/client/knowledge_crud_example.py
@@ -1,7 +1,8 @@
 import asyncio

 from dbgpt.client.client import Client
-from dbgpt.client.knowledge import list_space
+from dbgpt.client.knowledge import create_space
+from dbgpt.client.schemas import SpaceModel

 """Client: Simple Knowledge CRUD example
@@ -72,9 +73,20 @@ async def main():
     DBGPT_API_KEY = "dbgpt"
     client = Client(api_key=DBGPT_API_KEY)

+    res = await create_space(
+        client,
+        SpaceModel(
+            name="test_space_1",
+            vector_type="Chroma",
+            desc="for client space desc",
+            owner="dbgpt",
+        ),
+    )
+    print(res)
+
     # list all spaces
-    res = await list_space(client)
-    print(res.json())
+    # res = await list_space(client)
+    # print(res)

     # get space
     # res = await get_space(client, space_id='5')
@@ -86,7 +98,8 @@ async def main():
     # res = await update_space(client, SpaceModel(name="test_space", vector_type="Chroma", desc="for client space333", owner="dbgpt"))

     # delete space
-    # res = await delete_space(client, space_id='37')
+    # res = await delete_space(client, space_id='31')
+    # print(res)

     # list all documents
     # res = await list_document(client)
@@ -102,7 +115,7 @@ async def main():
     #     , doc_file=('your_file_name', open('{your_file_path}', 'rb'))))

     # sync document
-    # res = await sync_document(client, sync_model=SyncModel(doc_id="153", space_id="40", model_name="text2vec", chunk_parameters=ChunkParameters(chunk_strategy="Automatic")))
+    # res = await sync_document(client, sync_model=SyncModel(doc_id="157", space_id="49", model_name="text2vec", chunk_parameters=ChunkParameters(chunk_strategy="Automatic")))


 if __name__ == "__main__":
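The example scripts above now print the returned objects directly (`print(res)` instead of `print(res.json())`). For reference, a complete runnable version of the two listing examples combined might look like the following minimal sketch; it assumes a locally running DB-GPT server and the `dbgpt` API key used throughout these examples, and the import paths follow the client modules shown in the examples.

```python
import asyncio

from dbgpt.client.app import list_app
from dbgpt.client.client import Client
from dbgpt.client.flow import list_flow

DBGPT_API_KEY = "dbgpt"  # assumed local API key, matching the examples above


async def main():
    client = Client(api_key=DBGPT_API_KEY)

    # The client helpers return parsed models, so print the results directly
    apps = await list_app(client)
    print(apps)

    flows = await list_flow(client)
    print(flows)


if __name__ == "__main__":
    asyncio.run(main())
```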