[Bug] [Knowledge] knowledge create_document error #2406

Open
15089677014 opened this issue Mar 6, 2025 · 0 comments
Labels: bug (Something isn't working), Waiting for reply

Search before asking

  • I had searched in the issues and found no similar issues.

Operating system information

Windows

Python version information

=3.11

DB-GPT version

main

Related scenes

  • Chat Data
  • Chat Excel
  • Chat DB
  • Chat Knowledge
  • Model Management
  • Dashboard
  • Plugins

Installation Information

Device information

Not applicable.

Models information

Not applicable.

What happened

Not applicable.

What you expected to happen

Not applicable.

How to reproduce

Not applicable.

Additional context

Running the following code raised an error:

"""Client: Simple Knowledge CRUD example.

This example demonstrates how to use the dbgpt client to create, get, update, and
delete knowledge spaces and documents.

Example:
.. code-block:: python

    DBGPT_API_KEY = "dbgpt"
    client = Client(api_key=DBGPT_API_KEY)
    # 1. Create a space
    res = await create_space(
        client,
        SpaceModel(
            name="test_space",
            vector_type="Chroma",
            desc="for client space",
            owner="dbgpt",
        ),
    )
    # 2. Update a space
    res = await update_space(
        client,
        SpaceModel(
            name="test_space",
            vector_type="Chroma",
            desc="for client space333",
            owner="dbgpt",
        ),
    )
    # 3. Delete a space
    res = await delete_space(client, space_id="37")
    # 4. Get a space
    res = await get_space(client, space_id="5")
    # 5. List all spaces
    res = await list_space(client)
    # 6. Create a document
    res = await create_document(
        client,
        DocumentModel(
            space_id="5",
            doc_name="test_doc",
            doc_type="TEXT",
            doc_content="test content",
            doc_source="",
        ),
    )
    # 7. Sync a document
    res = await sync_document(
        client,
        sync_model=SyncModel(
            doc_id="153",
            space_id="40",
            model_name="text2vec",
            chunk_parameters=ChunkParameters(chunk_strategy="Automatic"),
        ),
    )
    # 8. Get a document
    res = await get_document(client, "52")
    # 9. List all documents
    res = await list_document(client)
    # 10. Delete a document
    res = await delete_document(client, "150")

"""

import asyncio

from dbgpt_client import Client
from dbgpt_client.knowledge import *
from dbgpt_client.schema import SpaceModelNow
from fastapi import File, UploadFile

async def main():
    # initialize client
    DBGPT_API_KEY = "dbgpt"
    client = Client(api_key=DBGPT_API_KEY)
    try:
        # res = await create_space(
        #     client,
        #     SpaceModelNow(
        #         name="test_space_1",
        #         vector_type="Chroma",
        #         desc="for client space desc",
        #         owner="dbgpt",
        #     ),
        # )
        # print(res)

        # # list all spaces
        # res = await list_space(client)
        # print(res)

        # get space
        # res = await get_space(client, space_id='5')

        # create space
        # res = await create_space(client, SpaceModel(name="test_space", vector_type="Chroma", desc="for client space", owner="dbgpt"))

        # update space
        # res = await update_space(client, SpaceModelNow(name="test_space_1", vector_type="Chroma", desc="for client space444", owner="dbgpt"))

        # delete space
        # res = await delete_space(client, space_id='31')
        # print(res)

        # list all documents
        # res = await list_document(client)
        # print(res)

        # get document
        # res = await get_document(client, "52")

        # delete document
        # res = await delete_document(client, "150")

        # create document
        from dbgpt._private.pydantic import Field

        res = await create_document(
            client,
            DocumentModel(
                id=1,
                space_id="1",
                body={
                    Field("space_id", description="body name"): Field('1', description="body name"),
                    Field("doc_type", description="body name"): Field('TEXT', description="body name"),
                    Field("doc_name", description="body name"): Field('test_doc', description="body name")
                },
                doc_name="test_doc",
                doc_type="TEXT",
                content="test content",
                doc_file=UploadFile(
                    file=open('C:\\Users\\12073\\Desktop\\新建 文本文档 (3).txt', 'rb'),
                    filename="knowledge.txt"
                ),
                doc_source="local"
            )
        )

        # doc_file=(
        #     '知识表', open('C:\\Users\\12073\\Desktop\\新建 文本文档 (3).txt', 'rb')),
        # )
        # sync document
        # res = await sync_document(client, sync_model=SyncModel(doc_id="157", space_id="49", model_name="text2vec", chunk_parameters=ChunkParameters(chunk_strategy="Automatic")))
    finally:
        await client.aclose()  # close the client asynchronously

if __name__ == "__main__":
    asyncio.run(main())

C:\WWW\DB-GPT-v0.7.0\.venv\Scripts\python.exe C:\WWW\DB-GPT-v0.7.0\examples\client\knowledge_crud_example.py
Traceback (most recent call last):
  File "C:\WWW\DB-GPT-v0.7.0\examples\client\knowledge_crud_example.py", line 68, in <module>
    from dbgpt_client import Client
  File "C:\WWW\DB-GPT-v0.7.0\packages\dbgpt-client\src\dbgpt_client\__init__.py", line 3, in <module>
    from .client import Client, ClientException  # noqa: F401
    ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "C:\WWW\DB-GPT-v0.7.0\packages\dbgpt-client\src\dbgpt_client\client.py", line 14, in <module>
    from .schema import ChatCompletionRequestBody
  File "C:\WWW\DB-GPT-v0.7.0\packages\dbgpt-client\src\dbgpt_client\schema.py", line 265, in <module>
    class DocumentModel(BaseModel):
  File "C:\WWW\DB-GPT-v0.7.0\.venv\Lib\site-packages\pydantic\_internal\_model_construction.py", line 224, in __new__
    complete_model_class(
  File "C:\WWW\DB-GPT-v0.7.0\.venv\Lib\site-packages\pydantic\_internal\_model_construction.py", line 602, in complete_model_class
    schema = cls.__get_pydantic_core_schema__(cls, handler)
             ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "C:\WWW\DB-GPT-v0.7.0\.venv\Lib\site-packages\pydantic\main.py", line 702, in __get_pydantic_core_schema__
    return handler(source)
           ^^^^^^^^^^^^^^^
  File "C:\WWW\DB-GPT-v0.7.0\.venv\Lib\site-packages\pydantic\_internal\_schema_generation_shared.py", line 84, in __call__
    schema = self._handler(source_type)
             ^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "C:\WWW\DB-GPT-v0.7.0\.venv\Lib\site-packages\pydantic\_internal\_generate_schema.py", line 610, in generate_schema
    schema = self._generate_schema_inner(obj)
             ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "C:\WWW\DB-GPT-v0.7.0\.venv\Lib\site-packages\pydantic\_internal\_generate_schema.py", line 879, in _generate_schema_inner
    return self._model_schema(obj)
           ^^^^^^^^^^^^^^^^^^^^^^^
  File "C:\WWW\DB-GPT-v0.7.0\.venv\Lib\site-packages\pydantic\_internal\_generate_schema.py", line 691, in _model_schema
    {k: self._generate_md_field_schema(k, v, decorators) for k, v in fields.items()},
    ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "C:\WWW\DB-GPT-v0.7.0\.venv\Lib\site-packages\pydantic\_internal\_generate_schema.py", line 691, in <dictcomp>
    {k: self._generate_md_field_schema(k, v, decorators) for k, v in fields.items()},
        ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "C:\WWW\DB-GPT-v0.7.0\.venv\Lib\site-packages\pydantic\_internal\_generate_schema.py", line 1071, in _generate_md_field_schema
    common_field = self._common_field_schema(name, field_info, decorators)
                   ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "C:\WWW\DB-GPT-v0.7.0\.venv\Lib\site-packages\pydantic\_internal\_generate_schema.py", line 1263, in _common_field_schema
    schema = self._apply_annotations(
             ^^^^^^^^^^^^^^^^^^^^^^^^
  File "C:\WWW\DB-GPT-v0.7.0\.venv\Lib\site-packages\pydantic\_internal\_generate_schema.py", line 2056, in _apply_annotations
    schema = get_inner_schema(source_type)
             ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "C:\WWW\DB-GPT-v0.7.0\.venv\Lib\site-packages\pydantic\_internal\_schema_generation_shared.py", line 84, in __call__
    schema = self._handler(source_type)
             ^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "C:\WWW\DB-GPT-v0.7.0\.venv\Lib\site-packages\pydantic\_internal\_generate_schema.py", line 2037, in inner_handler
    schema = self._generate_schema_inner(obj)
             ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "C:\WWW\DB-GPT-v0.7.0\.venv\Lib\site-packages\pydantic\_internal\_generate_schema.py", line 884, in _generate_schema_inner
    return self.match_type(obj)
           ^^^^^^^^^^^^^^^^^^^^
  File "C:\WWW\DB-GPT-v0.7.0\.venv\Lib\site-packages\pydantic\_internal\_generate_schema.py", line 986, in match_type
    return self._match_generic_type(obj, origin)
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "C:\WWW\DB-GPT-v0.7.0\.venv\Lib\site-packages\pydantic\_internal\_generate_schema.py", line 1014, in _match_generic_type
    return self._union_schema(obj)
           ^^^^^^^^^^^^^^^^^^^^^^^
  File "C:\WWW\DB-GPT-v0.7.0\.venv\Lib\site-packages\pydantic\_internal\_generate_schema.py", line 1325, in _union_schema
    choices.append(self.generate_schema(arg))
                   ^^^^^^^^^^^^^^^^^^^^^^^^^
  File "C:\WWW\DB-GPT-v0.7.0\.venv\Lib\site-packages\pydantic\_internal\_generate_schema.py", line 610, in generate_schema
    schema = self._generate_schema_inner(obj)
             ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "C:\WWW\DB-GPT-v0.7.0\.venv\Lib\site-packages\pydantic\_internal\_generate_schema.py", line 884, in _generate_schema_inner
    return self.match_type(obj)
           ^^^^^^^^^^^^^^^^^^^^
  File "C:\WWW\DB-GPT-v0.7.0\.venv\Lib\site-packages\pydantic\_internal\_generate_schema.py", line 986, in match_type
    return self._match_generic_type(obj, origin)
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "C:\WWW\DB-GPT-v0.7.0\.venv\Lib\site-packages\pydantic\_internal\_generate_schema.py", line 1024, in _match_generic_type
    return self._dict_schema(*self._get_first_two_args_or_any(obj))
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "C:\WWW\DB-GPT-v0.7.0\.venv\Lib\site-packages\pydantic\_internal\_generate_schema.py", line 370, in _dict_schema
    return core_schema.dict_schema(self.generate_schema(keys_type), self.generate_schema(values_type))
                                   ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "C:\WWW\DB-GPT-v0.7.0\.venv\Lib\site-packages\pydantic\_internal\_generate_schema.py", line 610, in generate_schema
    schema = self._generate_schema_inner(obj)
             ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "C:\WWW\DB-GPT-v0.7.0\.venv\Lib\site-packages\pydantic\_internal\_generate_schema.py", line 884, in _generate_schema_inner
    return self.match_type(obj)
           ^^^^^^^^^^^^^^^^^^^^
  File "C:\WWW\DB-GPT-v0.7.0\.venv\Lib\site-packages\pydantic\_internal\_generate_schema.py", line 975, in match_type
    return self._call_schema(obj)
           ^^^^^^^^^^^^^^^^^^^^^^
  File "C:\WWW\DB-GPT-v0.7.0\.venv\Lib\site-packages\pydantic\_internal\_generate_schema.py", line 1818, in _call_schema
    type_hints = _typing_extra.get_function_type_hints(function, globalns=globalns, localns=localns)
                 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "C:\WWW\DB-GPT-v0.7.0\.venv\Lib\site-packages\pydantic\_internal\_typing_extra.py", line 730, in get_function_type_hints
    type_hints[name] = eval_type_backport(value, globalns, localns, type_params)
                       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "C:\WWW\DB-GPT-v0.7.0\.venv\Lib\site-packages\pydantic\_internal\_typing_extra.py", line 609, in eval_type_backport
    return _eval_type_backport(value, globalns, localns, type_params)
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "C:\WWW\DB-GPT-v0.7.0\.venv\Lib\site-packages\pydantic\_internal\_typing_extra.py", line 633, in _eval_type_backport
    return _eval_type(value, globalns, localns, type_params)
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "C:\WWW\DB-GPT-v0.7.0\.venv\Lib\site-packages\pydantic\_internal\_typing_extra.py", line 667, in _eval_type
    return typing._eval_type(  # type: ignore
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "C:\Users\12073\AppData\Roaming\uv\python\cpython-3.11.11-windows-x86_64-none\Lib\typing.py", line 395, in _eval_type
    return t._evaluate(globalns, localns, recursive_guard)
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "C:\Users\12073\AppData\Roaming\uv\python\cpython-3.11.11-windows-x86_64-none\Lib\typing.py", line 905, in _evaluate
    eval(self.forward_code, globalns, localns),
    ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "<string>", line 1, in <module>
NameError: name 'Callable' is not defined. Did you mean: 'callable'?

Process finished with exit code 1
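
The failure appears to be triggered by the modified DocumentModel below, where pydantic's Field function is used as a type argument (body: Optional[dict[Field, Field]]). Pydantic then tries to build a schema for Field as a callable and evaluates its signature's forward references, where Callable cannot be resolved, which matches the NameError above. A minimal sketch that should exercise the same code path (my reading of the traceback, not a verified root-cause analysis):

from typing import Optional

from pydantic import BaseModel, Field


class BrokenBody(BaseModel):
    # Using the `Field` *function* as a type annotation forces pydantic to
    # introspect it as a callable; a plain value type (str, int, ...) does not.
    body: Optional[dict[Field, Field]] = None  # same pattern as DocumentModel.body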

Files that need to be fixed first:

C:\WWW\DB-GPT-v0.7.0\packages\dbgpt-client\src\dbgpt_client\schema.py
"""this module contains the schemas for the dbgpt client."""

import json
from datetime import datetime
from enum import Enum
from typing import Any, Dict, List, Optional, Union

from fastapi import File, UploadFile

from dbgpt._private.pydantic import BaseModel, ConfigDict, Field
from dbgpt_ext.rag.chunk_manager import ChunkParameters

class ChatCompletionRequestBody(BaseModel):
    """ChatCompletion LLM http request body."""

    model: str = Field(
        ..., description="The model name", examples=["gpt-3.5-turbo", "proxyllm"]
    )
    messages: Union[str, List[str]] = Field(
        ..., description="User input messages", examples=["Hello", "How are you?"]
    )
    stream: bool = Field(default=True, description="Whether return stream")

    temperature: Optional[float] = Field(
        default=None,
        description="What sampling temperature to use, between 0 and 2. Higher values "
        "like 0.8 will make the output more random, "
        "while lower values like 0.2 will "
        "make it more focused and deterministic.",
    )
    max_new_tokens: Optional[int] = Field(
        default=None,
        description="The maximum number of tokens that can be generated in the chat "
        "completion.",
    )
    conv_uid: Optional[str] = Field(
        default=None, description="The conversation id of the model inference"
    )
    span_id: Optional[str] = Field(
        default=None, description="The span id of the model inference"
    )
    chat_mode: Optional[str] = Field(
        default="chat_normal",
        description="The chat mode",
        examples=["chat_awel_flow", "chat_normal"],
    )
    chat_param: Optional[str] = Field(
        default=None,
        description="The chat param of chat mode",
    )
    user_name: Optional[str] = Field(
        default=None, description="The user name of the model inference"
    )
    sys_code: Optional[str] = Field(
        default=None, description="The system code of the model inference"
    )
    incremental: bool = Field(
        default=True,
        description="Used to control whether the content is returned incrementally "
        "or in full each time. "
        "If this parameter is not provided, the default is full return.",
    )
    enable_vis: bool = Field(
        default=True, description="response content whether to output vis label"
    )

class ChatMode(Enum):
    """Chat mode."""

    CHAT_NORMAL = "chat_normal"
    CHAT_APP = "chat_app"
    CHAT_AWEL_FLOW = "chat_flow"
    CHAT_KNOWLEDGE = "chat_knowledge"
    CHAT_DATA = "chat_data"

class AWELTeamModel(BaseModel):
    """AWEL team model."""

    dag_id: str = Field(
        ...,
        description="The unique id of dag",
        examples=["flow_dag_testflow_66d8e9d6-f32e-4540-a5bd-ea0648145d0e"],
    )
    uid: str = Field(
        default=None,
        description="The unique id of flow",
        examples=["66d8e9d6-f32e-4540-a5bd-ea0648145d0e"],
    )
    name: Optional[str] = Field(
        default=None,
        description="The name of dag",
    )
    label: Optional[str] = Field(
        default=None,
        description="The label of dag",
    )
    version: Optional[str] = Field(
        default=None,
        description="The version of dag",
    )
    description: Optional[str] = Field(
        default=None,
        description="The description of dag",
    )
    editable: bool = Field(
        default=False,
        description="is the dag is editable",
        examples=[True, False],
    )
    state: Optional[str] = Field(
        default=None,
        description="The state of dag",
    )
    user_name: Optional[str] = Field(
        default=None,
        description="The owner of current dag",
    )
    sys_code: Optional[str] = Field(
        default=None,
        description="The system code of current dag",
    )
    flow_category: Optional[str] = Field(
        default="common",
        description="The flow category of current dag",
    )

class AgentResourceType(Enum):
    """Agent resource type."""

    DB = "database"
    Knowledge = "knowledge"
    Internet = "internet"
    Plugin = "plugin"
    TextFile = "text_file"
    ExcelFile = "excel_file"
    ImageFile = "image_file"
    AWELFlow = "awel_flow"

class AgentResourceModel(BaseModel):
    """Agent resource model."""

    type: str
    name: str
    value: str
    is_dynamic: bool = (
        False  # Is the current resource predefined or dynamically passed in?
    )

    @staticmethod
    def from_dict(d: Dict[str, Any]):
        """From dict."""
        if d is None:
            return None
        return AgentResourceModel(
            type=d.get("type"),
            name=d.get("name"),
            introduce=d.get("introduce"),
            value=d.get("value", None),
            is_dynamic=d.get("is_dynamic", False),
        )

    @staticmethod
    def from_json_list_str(d: Optional[str]):
        """From json list str."""
        if d is None:
            return None
        try:
            json_array = json.loads(d)
        except Exception as e:
            raise ValueError(f"Illegal AgentResource json string!{d},{e}")
        return [AgentResourceModel.from_dict(item) for item in json_array]

    def to_dict(self) -> Dict[str, Any]:
        """To dict."""
        temp = self.dict()
        for field, value in temp.items():
            if isinstance(value, Enum):
                temp[field] = value.value
        return temp

class AppDetailModel(BaseModel):
    """App detail model."""

    app_code: Optional[str] = Field(None, description="app code")
    app_name: Optional[str] = Field(None, description="app name")
    agent_name: Optional[str] = Field(None, description="agent name")
    node_id: Optional[str] = Field(None, description="node id")
    resources: Optional[list[AgentResourceModel]] = Field(None, description="resources")
    prompt_template: Optional[str] = Field(None, description="prompt template")
    llm_strategy: Optional[str] = Field(None, description="llm strategy")
    llm_strategy_value: Optional[str] = Field(None, description="llm strategy value")
    created_at: datetime = datetime.now()
    updated_at: datetime = datetime.now()

class AppModel(BaseModel):
    """App model."""

    app_code: Optional[str] = Field(None, title="app code")
    app_name: Optional[str] = Field(None, title="app name")
    app_describe: Optional[str] = Field(None, title="app describe")
    team_mode: Optional[str] = Field(None, title="team mode")
    language: Optional[str] = Field("en", title="language")
    team_context: Optional[Union[str, dict]] = Field(None, title="team context")
    user_code: Optional[str] = Field(None, title="user code")
    sys_code: Optional[str] = Field(None, title="sys code")
    is_collected: Optional[str] = Field(None, title="is collected")
    icon: Optional[str] = Field(None, title="icon")
    created_at: datetime = datetime.now()
    updated_at: datetime = datetime.now()
    details: List[AppDetailModel] = Field([], title="app details")

class SpaceModel(BaseModel):
    """Space model."""

    id: Optional[int] = Field(
        default=None,
        description="space id",
    )
    name: Optional[str] = Field(
        default=None,
        description="knowledge space name",
    )
    vector_type: Optional[str] = Field(
        default=None,
        description="vector type",
    )
    desc: Optional[str] = Field(
        default=None,
        description="space description",
    )
    owner: Optional[str] = Field(
        default=None,
        description="space owner",
    )
    context: Optional[str] = Field(
        default=None,
        description="space argument context",
    )

class SpaceModelNow(SpaceModel):
    domain_type: Optional[str] = Field(
        default='Normal',
        description="space argument domain_type",
    )

    user_ids: Optional[str] = Field(
        default=None,
        description="space argument user_ids",
    )

    user_id: Optional[str] = Field(
        default=None,
        description="space argument user_id",
    )

class DocumentModel(BaseModel):
    """Document model."""

    id: int = Field(None, description="The doc id")
    body: Optional[dict[Field, Field]] = Field(None, description="body name")

    doc_name: str = Field(None, description="doc name")
    """doc_type: document type"""
    doc_type: str = Field(None, description="The doc type")
    """content: description"""
    content: str = Field(None, description="content")
    """doc file"""
    doc_file: UploadFile = Field(File(None), description="doc file")
    """doc_source: doc source"""
    doc_source: str = Field(None, description="doc source")
    """doc_source: doc source"""
    space_id: str = Field(None, description="space_id")

class SyncModel(BaseModel):
    """Sync model."""

    model_config = ConfigDict(protected_namespaces=())

    """doc_id: doc id"""
    doc_id: str = Field(None, description="The doc id")

    """space id"""
    space_id: str = Field(None, description="The space id")

    """model_name: model name"""
    model_name: Optional[str] = Field(None, description="model name")

    """chunk_parameters: chunk parameters"""
    chunk_parameters: ChunkParameters = Field(None, description="chunk parameters")

class DatasourceModel(BaseModel):
    """Datasource model."""

    id: Optional[int] = Field(None, description="The datasource id")
    db_type: str = Field(..., description="Database type, e.g. sqlite, mysql, etc.")
    db_name: str = Field(..., description="Database name.")
    db_path: str = Field("", description="File path for file-based database.")
    db_host: str = Field("", description="Database host.")
    db_port: int = Field(0, description="Database port.")
    db_user: str = Field("", description="Database user.")
    db_pwd: str = Field("", description="Database password.")
    comment: str = Field("", description="Comment for the database.")
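
If the intent of the body field is only to carry extra form data, one possible workaround (an assumption on my side, not a confirmed project fix) is to annotate it with plain value types so pydantic never introspects the Field callable:

from typing import Any, Dict, Optional

from dbgpt._private.pydantic import BaseModel, Field


class DocumentModelSketch(BaseModel):
    """Hypothetical variant, shown only for the body annotation."""

    # Dict[str, Any] keeps arbitrary form fields but gives pydantic an ordinary
    # dict schema instead of a schema for the `Field` function itself.
    body: Optional[Dict[str, Any]] = Field(None, description="extra form fields")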

C:\WWW\DB-GPT-v0.7.0\packages\dbgpt-client\src\dbgpt_client\knowledge.py
"""Knowledge API client."""

import json
from typing import List

from dbgpt._private.pydantic import model_to_dict, model_to_json
from dbgpt.core.schema.api import Result

from .client import Client, ClientException
from .schema import DocumentModel, SpaceModelNow, SyncModel

async def create_space(client: Client, space_model: SpaceModelNow) -> SpaceModelNow:
    """Create a new space.

    Args:
        client (Client): The dbgpt client.
        space_model (SpaceModelNow): The space model.
    Returns:
        SpaceModelNow: The space model.
    Raises:
        ClientException: If the request failed.
    """
    try:
        res = await client.post("/knowledge/spaces", model_to_dict(space_model))
        result: Result = res.json()
        if result["success"]:
            return SpaceModelNow(**result["data"])
        else:
            raise ClientException(status=result["err_code"], reason=result)
    except Exception as e:
        raise ClientException(f"Failed to create space: {e}")

async def update_space(client: Client, space_model: SpaceModelNow) -> SpaceModelNow:
    """Update a space.

    Args:
        client (Client): The dbgpt client.
        space_model (SpaceModelNow): The space model.
    Returns:
        SpaceModelNow: The space model.
    Raises:
        ClientException: If the request failed.
    """
    try:
        res = await client.put("/knowledge/spaces", model_to_dict(space_model))
        result: Result = res.json()
        if result["success"]:
            print(result)
            return SpaceModelNow(**result["data"])
        else:
            raise ClientException(status=result["err_code"], reason=result)
    except Exception as e:
        raise ClientException(f"Failed to update space: {e}")

async def delete_space(client: Client, space_id: str) -> SpaceModelNow:
    """Delete a space.

    Args:
        client (Client): The dbgpt client.
        space_id (str): The space id.
    Returns:
        SpaceModelNow: The space model.
    Raises:
        ClientException: If the request failed.
    """
    try:
        res = await client.delete("/knowledge/spaces/" + space_id)
        result: Result = res.json()
        if result["success"]:
            return SpaceModelNow(**result["data"])
        else:
            raise ClientException(status=result["err_code"], reason=result)
    except Exception as e:
        raise ClientException(f"Failed to delete space: {e}")

async def get_space(client: Client, space_id: str) -> SpaceModelNow:
    """Get a space.

    Args:
        client (Client): The dbgpt client.
        space_id (str): The space id.
    Returns:
        SpaceModelNow: The space model.
    Raises:
        ClientException: If the request failed.
    """
    try:
        res = await client.get("/knowledge/spaces/" + space_id)
        result: Result = res.json()
        if result["success"]:
            return SpaceModelNow(**result["data"])
        else:
            raise ClientException(status=result["err_code"], reason=result)
    except Exception as e:
        raise ClientException(f"Failed to get space: {e}")

async def list_space(client: Client) -> List[SpaceModelNow]:
    """List spaces.

    Args:
        client (Client): The dbgpt client.
    Returns:
        List[SpaceModelNow]: The list of space models.
    Raises:
        ClientException: If the request failed.
    """
    try:
        res = await client.get("/knowledge/spaces")
        result: Result = res.json()
        if result["success"]:
            return [SpaceModelNow(**space) for space in result["data"]["items"]]
        else:
            raise ClientException(status=result["err_code"], reason=result)
    except Exception as e:
        raise ClientException(f"Failed to list spaces: {e}")

async def create_document(client: Client, doc_model: DocumentModel) -> DocumentModel:
    """Create a new document.

    Args:
        client (Client): The dbgpt client.
        doc_model (DocumentModel): The document model.

    """
    try:
        res = await client.post_param("/knowledge/documents", model_to_dict(doc_model))
        result: Result = res.json()
        if result["success"]:
            return DocumentModel(**result["data"])
        else:
            raise ClientException(status=result["err_code"], reason=result)
    except Exception as e:
        raise ClientException(f"Failed to create document: {e}")

async def delete_document(client: Client, document_id: str) -> DocumentModel:
    """Delete a document.

    Args:
        client (Client): The dbgpt client.
        document_id (str): The document id.
    Returns:
        DocumentModel: The document model.
    Raises:
        ClientException: If the request failed.
    """
    try:
        res = await client.delete("/knowledge/documents/" + document_id)
        result: Result = res.json()
        if result["success"]:
            return DocumentModel(**result["data"])
        else:
            raise ClientException(status=result["err_code"], reason=result)
    except Exception as e:
        raise ClientException(f"Failed to delete document: {e}")

async def get_document(client: Client, document_id: str) -> DocumentModel:
    """Get a document.

    Args:
        client (Client): The dbgpt client.
        document_id (str): The document id.
    Returns:
        DocumentModel: The document model.
    Raises:
        ClientException: If the request failed.
    """
    try:
        res = await client.get("/knowledge/documents/" + document_id)
        result: Result = res.json()
        if result["success"]:
            return DocumentModel(**result["data"])
        else:
            raise ClientException(status=result["err_code"], reason=result)
    except Exception as e:
        raise ClientException(f"Failed to get document: {e}")

async def list_document(client: Client) -> List[DocumentModel]:
    """List documents.

    Args:
        client (Client): The dbgpt client.
    """
    try:
        res = await client.get("/knowledge/documents")
        result: Result = res.json()
        if result["success"]:
            return [DocumentModel(**document) for document in result["data"]["items"]]
        else:
            raise ClientException(status=result["err_code"], reason=result)
    except Exception as e:
        raise ClientException(f"Failed to list documents: {e}")

async def sync_document(client: Client, sync_model: SyncModel) -> List:
    """Sync document.

    Args:
        client (Client): The dbgpt client.
        sync_model (SyncModel): The sync model.
    Returns:
        List: The list of document ids.
    Raises:
        ClientException: If the request failed.
    """
    try:
        res = await client.post(
            "/knowledge/documents/sync", [json.loads(model_to_json(sync_model))]
        )
        result: Result = res.json()
        if result["success"]:
            return result["data"]
        else:
            raise ClientException(status=result["err_code"], reason=result)
    except Exception as e:
        raise ClientException(f"Failed to sync document: {e}")

Are you willing to submit PR?

  • Yes I am willing to submit a PR!
15089677014 added the bug (Something isn't working) and Waiting for reply labels on Mar 6, 2025