You signed in with another tab or window. Reload to refresh your session.You signed out in another tab or window. Reload to refresh your session.You switched accounts on another tab or window. Reload to refresh your session.Dismiss alert
This example demonstrates how to use the dbgpt client to create, get, update, and
delete knowledge spaces and documents.
Example:
.. code-block:: python
DBGPT_API_KEY = "dbgpt"
client = Client(api_key=DBGPT_API_KEY)
# 1. Create a space
res = await create_space(
client,
SpaceModel(
name="test_space",
vector_type="Chroma",
desc="for client space",
owner="dbgpt",
),
)
# 2. Update a space
res = await update_space(
client,
SpaceModel(
name="test_space",
vector_type="Chroma",
desc="for client space333",
owner="dbgpt",
),
)
# 3. Delete a space
res = await delete_space(client, space_id="37")
# 4. Get a space
res = await get_space(client, space_id="5")
# 5. List all spaces
res = await list_space(client)
# 6. Create a document
res = await create_document(
client,
DocumentModel(
space_id="5",
doc_name="test_doc",
doc_type="TEXT",
doc_content="test content",
doc_source="",
),
)
# 7. Sync a document
res = await sync_document(
client,
sync_model=SyncModel(
doc_id="153",
space_id="40",
model_name="text2vec",
chunk_parameters=ChunkParameters(chunk_strategy="Automatic"),
),
)
# 8. Get a document
res = await get_document(client, "52")
# 9. List all documents
res = await list_document(client)
# 10. Delete a document
res = await delete_document(client, "150")
"""
import asyncio
from dbgpt_client import Client
from dbgpt_client.knowledge import *
from dbgpt_client.schema import SpaceModelNow
from fastapi import File, UploadFile
# # list all spaces
# res = await list_space(client)
# print(res)
# get space
# res = await get_space(client, space_id='5')
# create space
# res = await create_space(client, SpaceModel(name="test_space", vector_type="Chroma", desc="for client space", owner="dbgpt"))
# update space
# res = await update_space(client, SpaceModelNow(name="test_space_1", vector_type="Chroma", desc="for client space444", owner="dbgpt"))
# delete space
# res = await delete_space(client, space_id='31')
# print(res)
# list all documents
# res = await list_document(client)
# print(res)
# get document
# res = await get_document(client, "52")
# delete document
# res = await delete_document(client, "150")
# create document
from dbgpt._private.pydantic import Field
res = await create_document(
client,
DocumentModel(
id=1,
space_id="1",
body={
Field("space_id", description="body name"): Field('1', description="body name"),
Field("doc_type", description="body name"): Field('TEXT', description="body name"),
Field("doc_name", description="body name"): Field('test_doc', description="body name")
},
doc_name="test_doc",
doc_type="TEXT",
content="test content",
doc_file=UploadFile(
file=open('C:\\Users\\12073\\Desktop\\新建 文本文档 (3).txt', 'rb'),
filename="knowledge.txt"
),
doc_source="local"
)
)
# doc_file=(
# '知识表', open('C:\\Users\\12073\\Desktop\\新建 文本文档 (3).txt', 'rb')),
# )
# sync document
# res = await sync_document(client, sync_model=SyncModel(doc_id="157", space_id="49", model_name="text2vec", chunk_parameters=ChunkParameters(chunk_strategy="Automatic")))
finally:
await client.aclose() # 异步关闭
if __name__ == "__main__":
asyncio.run(main())
C:\WWW\DB-GPT-v0.7.0.venv\Scripts\python.exe C:\WWW\DB-GPT-v0.7.0\examples\client\knowledge_crud_example.py
Traceback (most recent call last):
File "C:\WWW\DB-GPT-v0.7.0\examples\client\knowledge_crud_example.py", line 68, in
from dbgpt_client import Client
File "C:\WWW\DB-GPT-v0.7.0\packages\dbgpt-client\src\dbgpt_client\__init__.py", line 3, in <module>
from .client import Client, ClientException # noqa: F401
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\WWW\DB-GPT-v0.7.0\packages\dbgpt-client\src\dbgpt_client\client.py", line 14, in
from .schema import ChatCompletionRequestBody
File "C:\WWW\DB-GPT-v0.7.0\packages\dbgpt-client\src\dbgpt_client\schema.py", line 265, in
class DocumentModel(BaseModel):
File "C:\WWW\DB-GPT-v0.7.0.venv\Lib\site-packages\pydantic_internal_model_construction.py", line 224, in new
complete_model_class(
File "C:\WWW\DB-GPT-v0.7.0.venv\Lib\site-packages\pydantic_internal_model_construction.py", line 602, in complete_model_class
schema = cls.get_pydantic_core_schema(cls, handler)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\WWW\DB-GPT-v0.7.0.venv\Lib\site-packages\pydantic\main.py", line 702, in get_pydantic_core_schema
return handler(source)
^^^^^^^^^^^^^^^
File "C:\WWW\DB-GPT-v0.7.0.venv\Lib\site-packages\pydantic_internal_schema_generation_shared.py", line 84, in call
schema = self._handler(source_type)
^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\WWW\DB-GPT-v0.7.0.venv\Lib\site-packages\pydantic_internal_generate_schema.py", line 610, in generate_schema
schema = self._generate_schema_inner(obj)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\WWW\DB-GPT-v0.7.0.venv\Lib\site-packages\pydantic_internal_generate_schema.py", line 879, in _generate_schema_inner
return self._model_schema(obj)
^^^^^^^^^^^^^^^^^^^^^^^
File "C:\WWW\DB-GPT-v0.7.0.venv\Lib\site-packages\pydantic_internal_generate_schema.py", line 691, in _model_schema
{k: self._generate_md_field_schema(k, v, decorators) for k, v in fields.items()},
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\WWW\DB-GPT-v0.7.0.venv\Lib\site-packages\pydantic_internal_generate_schema.py", line 691, in
{k: self._generate_md_field_schema(k, v, decorators) for k, v in fields.items()},
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\WWW\DB-GPT-v0.7.0.venv\Lib\site-packages\pydantic_internal_generate_schema.py", line 1071, in _generate_md_field_schema
common_field = self._common_field_schema(name, field_info, decorators)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\WWW\DB-GPT-v0.7.0.venv\Lib\site-packages\pydantic_internal_generate_schema.py", line 1263, in _common_field_schema
schema = self._apply_annotations(
^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\WWW\DB-GPT-v0.7.0.venv\Lib\site-packages\pydantic_internal_generate_schema.py", line 2056, in _apply_annotations
schema = get_inner_schema(source_type)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\WWW\DB-GPT-v0.7.0.venv\Lib\site-packages\pydantic_internal_schema_generation_shared.py", line 84, in call
schema = self._handler(source_type)
^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\WWW\DB-GPT-v0.7.0.venv\Lib\site-packages\pydantic_internal_generate_schema.py", line 2037, in inner_handler
schema = self._generate_schema_inner(obj)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\WWW\DB-GPT-v0.7.0.venv\Lib\site-packages\pydantic_internal_generate_schema.py", line 884, in _generate_schema_inner
return self.match_type(obj)
^^^^^^^^^^^^^^^^^^^^
File "C:\WWW\DB-GPT-v0.7.0.venv\Lib\site-packages\pydantic_internal_generate_schema.py", line 986, in match_type
return self._match_generic_type(obj, origin)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\WWW\DB-GPT-v0.7.0.venv\Lib\site-packages\pydantic_internal_generate_schema.py", line 1014, in _match_generic_type
return self._union_schema(obj)
^^^^^^^^^^^^^^^^^^^^^^^
File "C:\WWW\DB-GPT-v0.7.0.venv\Lib\site-packages\pydantic_internal_generate_schema.py", line 1325, in _union_schema
choices.append(self.generate_schema(arg))
^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\WWW\DB-GPT-v0.7.0.venv\Lib\site-packages\pydantic_internal_generate_schema.py", line 610, in generate_schema
schema = self._generate_schema_inner(obj)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\WWW\DB-GPT-v0.7.0.venv\Lib\site-packages\pydantic_internal_generate_schema.py", line 884, in _generate_schema_inner
return self.match_type(obj)
^^^^^^^^^^^^^^^^^^^^
File "C:\WWW\DB-GPT-v0.7.0.venv\Lib\site-packages\pydantic_internal_generate_schema.py", line 986, in match_type
return self._match_generic_type(obj, origin)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\WWW\DB-GPT-v0.7.0.venv\Lib\site-packages\pydantic_internal_generate_schema.py", line 1024, in _match_generic_type
return self._dict_schema(*self._get_first_two_args_or_any(obj))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\WWW\DB-GPT-v0.7.0.venv\Lib\site-packages\pydantic_internal_generate_schema.py", line 370, in _dict_schema
return core_schema.dict_schema(self.generate_schema(keys_type), self.generate_schema(values_type))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\WWW\DB-GPT-v0.7.0.venv\Lib\site-packages\pydantic_internal_generate_schema.py", line 610, in generate_schema
schema = self._generate_schema_inner(obj)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\WWW\DB-GPT-v0.7.0.venv\Lib\site-packages\pydantic_internal_generate_schema.py", line 884, in _generate_schema_inner
return self.match_type(obj)
^^^^^^^^^^^^^^^^^^^^
File "C:\WWW\DB-GPT-v0.7.0.venv\Lib\site-packages\pydantic_internal_generate_schema.py", line 975, in match_type
return self._call_schema(obj)
^^^^^^^^^^^^^^^^^^^^^^
File "C:\WWW\DB-GPT-v0.7.0.venv\Lib\site-packages\pydantic_internal_generate_schema.py", line 1818, in _call_schema
type_hints = _typing_extra.get_function_type_hints(function, globalns=globalns, localns=localns)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\WWW\DB-GPT-v0.7.0.venv\Lib\site-packages\pydantic_internal_typing_extra.py", line 730, in get_function_type_hints
type_hints[name] = eval_type_backport(value, globalns, localns, type_params)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\WWW\DB-GPT-v0.7.0.venv\Lib\site-packages\pydantic_internal_typing_extra.py", line 609, in eval_type_backport
return _eval_type_backport(value, globalns, localns, type_params)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\WWW\DB-GPT-v0.7.0.venv\Lib\site-packages\pydantic_internal_typing_extra.py", line 633, in _eval_type_backport
return _eval_type(value, globalns, localns, type_params)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\WWW\DB-GPT-v0.7.0.venv\Lib\site-packages\pydantic_internal_typing_extra.py", line 667, in _eval_type
return typing._eval_type( # type: ignore
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\12073\AppData\Roaming\uv\python\cpython-3.11.11-windows-x86_64-none\Lib\typing.py", line 395, in _eval_type
return t._evaluate(globalns, localns, recursive_guard)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\12073\AppData\Roaming\uv\python\cpython-3.11.11-windows-x86_64-none\Lib\typing.py", line 905, in _evaluate
eval(self.forward_code, globalns, localns),
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "<string>", line 1, in <module>
NameError: name 'Callable' is not defined. Did you mean: 'callable'?
进程已结束,退出代码为 1
需要先修复的文件
C:\WWW\DB-GPT-v0.7.0\packages\dbgpt-client\src\dbgpt_client\schema.py
"""this module contains the schemas for the dbgpt client."""
import json
from datetime import datetime
from enum import Enum
from typing import Any, Dict, List, Optional, Union
from fastapi import File, UploadFile
from dbgpt._private.pydantic import BaseModel, ConfigDict, Field
from dbgpt_ext.rag.chunk_manager import ChunkParameters
class ChatCompletionRequestBody(BaseModel):
    """ChatCompletion LLM http request body.

    Request payload sent to the DB-GPT chat-completion endpoint.  Fields left
    at their defaults are optional for callers.
    """

    # Target model to run inference against (required).
    model: str = Field(
        ..., description="The model name", examples=["gpt-3.5-turbo", "proxyllm"]
    )
    # Either a single message string or a list of message strings (required).
    messages: Union[str, List[str]] = Field(
        ..., description="User input messages", examples=["Hello", "How are you?"]
    )
    # Streaming responses are the default.
    stream: bool = Field(default=True, description="Whether return stream")
    temperature: Optional[float] = Field(
        default=None,
        description="What sampling temperature to use, between 0 and 2. Higher values "
        "like 0.8 will make the output more random, "
        "while lower values like 0.2 will "
        "make it more focused and deterministic.",
    )
    max_new_tokens: Optional[int] = Field(
        default=None,
        description="The maximum number of tokens that can be generated in the chat "
        "completion.",
    )
    # Conversation id; lets the server associate this request with a session.
    conv_uid: Optional[str] = Field(
        default=None, description="The conversation id of the model inference"
    )
    # Tracing span id for the model inference call.
    span_id: Optional[str] = Field(
        default=None, description="The span id of the model inference"
    )
    chat_mode: Optional[str] = Field(
        default="chat_normal",
        description="The chat mode",
        examples=["chat_awel_flow", "chat_normal"],
    )
    # Extra parameter for the selected chat mode; semantics depend on chat_mode.
    chat_param: Optional[str] = Field(
        default=None,
        description="The chat param of chat mode",
    )
    user_name: Optional[str] = Field(
        default=None, description="The user name of the model inference"
    )
    sys_code: Optional[str] = Field(
        default=None, description="The system code of the model inference"
    )
    # When True, each streamed chunk contains only the newly generated delta;
    # when False, each chunk carries the full content so far.
    incremental: bool = Field(
        default=True,
        description="Used to control whether the content is returned incrementally "
        "or in full each time. "
        "If this parameter is not provided, the default is full return.",
    )
    enable_vis: bool = Field(
        default=True, description="response content whether to output vis label"
    )
C:\WWW\DB-GPT-v0.7.0\packages\dbgpt-client\src\dbgpt_client\knowledge.py
"""Knowledge API client."""
import json
from typing import List
from dbgpt._private.pydantic import model_to_dict, model_to_json
from dbgpt.core.schema.api import Result
from .client import Client, ClientException
from .schema import DocumentModel, SpaceModelNow, SyncModel
async def create_space(client: Client, space_model: SpaceModelNow) -> SpaceModelNow:
    """Create a new space.

    Args:
        client (Client): The dbgpt client.
        space_model (SpaceModelNow): The space model.

    Returns:
        SpaceModelNow: The created space model.

    Raises:
        ClientException: If the request failed.
    """
    try:
        res = await client.post("/knowledge/spaces", model_to_dict(space_model))
        result: Result = res.json()
        if result["success"]:
            return SpaceModelNow(**result["data"])
        else:
            raise ClientException(status=result["err_code"], reason=result)
    except ClientException:
        # Re-raise untouched: the broad handler below would otherwise swallow
        # the structured error and discard its status/err_code.
        raise
    except Exception as e:
        # Chain the original cause so the traceback shows what actually failed.
        raise ClientException(f"Failed to create space: {e}") from e
Args:
client (Client): The dbgpt client.
space_model (SpaceModelNow): The space model.
Returns:
SpaceModelNow: The space model.
Raises:
ClientException: If the request failed.
"""
try:
res = await client.put("/knowledge/spaces", model_to_dict(space_model))
result: Result = res.json()
if result["success"]:
print(result)
return SpaceModelNow(**result["data"])
else:
raise ClientException(status=result["err_code"], reason=result)
except Exception as e:
raise ClientException(f"Failed to update space: {e}")
Args:
client (Client): The dbgpt client.
space_id (str): The space id.
Returns:
SpaceModelNow: The space model.
Raises:
ClientException: If the request failed.
"""
try:
res = await client.delete("/knowledge/spaces/" + space_id)
result: Result = res.json()
if result["success"]:
return SpaceModelNow(**result["data"])
else:
raise ClientException(status=result["err_code"], reason=result)
except Exception as e:
raise ClientException(f"Failed to delete space: {e}")
Args:
client (Client): The dbgpt client.
space_id (str): The space id.
Returns:
SpaceModelNow: The space model.
Raises:
ClientException: If the request failed.
"""
try:
res = await client.get("/knowledge/spaces/" + space_id)
result: Result = res.json()
if result["success"]:
return SpaceModelNow(**result["data"])
else:
raise ClientException(status=result["err_code"], reason=result)
except Exception as e:
raise ClientException(f"Failed to get space: {e}")
Args:
client (Client): The dbgpt client.
Returns:
List[SpaceModelNow]: The list of space models.
Raises:
ClientException: If the request failed.
"""
try:
res = await client.get("/knowledge/spaces")
result: Result = res.json()
if result["success"]:
return [SpaceModelNow(**space) for space in result["data"]["items"]]
else:
raise ClientException(status=result["err_code"], reason=result)
except Exception as e:
raise ClientException(f"Failed to list spaces: {e}")
async def create_document(client: Client, doc_model: DocumentModel) -> DocumentModel:
    """Create a new document.

    Args:
        client (Client): The dbgpt client.
        doc_model (DocumentModel): The document model.

    Returns:
        DocumentModel: The created document model.

    Raises:
        ClientException: If the request failed.
    """
    try:
        res = await client.post_param("/knowledge/documents", model_to_dict(doc_model))
        result: Result = res.json()
        if result["success"]:
            return DocumentModel(**result["data"])
        else:
            raise ClientException(status=result["err_code"], reason=result)
    except ClientException:
        # Re-raise untouched: the broad handler below would otherwise swallow
        # the structured error and discard its status/err_code.
        raise
    except Exception as e:
        # Chain the original cause so the traceback shows what actually failed.
        raise ClientException(f"Failed to create document: {e}") from e
Args:
client (Client): The dbgpt client.
document_id (str): The document id.
Returns:
DocumentModel: The document model.
Raises:
ClientException: If the request failed.
"""
try:
res = await client.get("/knowledge/documents/" + document_id)
result: Result = res.json()
if result["success"]:
return DocumentModel(**result["data"])
else:
raise ClientException(status=result["err_code"], reason=result)
except Exception as e:
raise ClientException(f"Failed to get document: {e}")
Args:
client (Client): The dbgpt client.
"""
try:
res = await client.get("/knowledge/documents")
result: Result = res.json()
if result["success"]:
return [DocumentModel(**document) for document in result["data"]["items"]]
else:
raise ClientException(status=result["err_code"], reason=result)
except Exception as e:
raise ClientException(f"Failed to list documents: {e}")
Args:
client (Client): The dbgpt client.
sync_model (SyncModel): The sync model.
Returns:
List: The list of document ids.
Raises:
ClientException: If the request failed.
"""
try:
res = await client.post(
"/knowledge/documents/sync", [json.loads(model_to_json(sync_model))]
)
result: Result = res.json()
if result["success"]:
return result["data"]
else:
raise ClientException(status=result["err_code"], reason=result)
except Exception as e:
raise ClientException(f"Failed to list documents: {e}")
Are you willing to submit PR?
Yes I am willing to submit a PR!
The text was updated successfully, but these errors were encountered:
Search before asking
Operating system information
Windows
Python version information
DB-GPT version
main
Related scenes
Installation Information
Installation From Source
Docker Installation
Docker Compose Installation
Cluster Installation
AutoDL Image
Other
Device information
have nothing to do with
Models information
have nothing to do with
What happened
have nothing to do with
What you expected to happen
have nothing to do with
How to reproduce
have nothing to do with
Additional context
执行以下代码遇到了报错
"""Client: Simple Knowledge CRUD example.
This example demonstrates how to use the dbgpt client to create, get, update, and
delete knowledge spaces and documents.
Example:
.. code-block:: python
"""
import asyncio
from dbgpt_client import Client
from dbgpt_client.knowledge import *
from dbgpt_client.schema import SpaceModelNow
from fastapi import File, UploadFile
async def main():
# initialize client
DBGPT_API_KEY = "dbgpt"
client = Client(api_key=DBGPT_API_KEY)
try:
# res = await create_space(
# client,
# SpaceModelNow(
# name="test_space_1",
# vector_type="Chroma",
# desc="for client space desc",
# owner="dbgpt",
# ),
# )
# print(res)
if __name__ == "__main__":
asyncio.run(main())
C:\WWW\DB-GPT-v0.7.0.venv\Scripts\python.exe C:\WWW\DB-GPT-v0.7.0\examples\client\knowledge_crud_example.py
Traceback (most recent call last):
File "C:\WWW\DB-GPT-v0.7.0\examples\client\knowledge_crud_example.py", line 68, in
from dbgpt_client import Client
File "C:\WWW\DB-GPT-v0.7.0\packages\dbgpt-client\src\dbgpt_client\__init__.py", line 3, in <module>
from .client import Client, ClientException # noqa: F401
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\WWW\DB-GPT-v0.7.0\packages\dbgpt-client\src\dbgpt_client\client.py", line 14, in
from .schema import ChatCompletionRequestBody
File "C:\WWW\DB-GPT-v0.7.0\packages\dbgpt-client\src\dbgpt_client\schema.py", line 265, in
class DocumentModel(BaseModel):
File "C:\WWW\DB-GPT-v0.7.0.venv\Lib\site-packages\pydantic_internal_model_construction.py", line 224, in new
complete_model_class(
File "C:\WWW\DB-GPT-v0.7.0.venv\Lib\site-packages\pydantic_internal_model_construction.py", line 602, in complete_model_class
schema = cls.get_pydantic_core_schema(cls, handler)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\WWW\DB-GPT-v0.7.0.venv\Lib\site-packages\pydantic\main.py", line 702, in get_pydantic_core_schema
return handler(source)
^^^^^^^^^^^^^^^
File "C:\WWW\DB-GPT-v0.7.0.venv\Lib\site-packages\pydantic_internal_schema_generation_shared.py", line 84, in call
schema = self._handler(source_type)
^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\WWW\DB-GPT-v0.7.0.venv\Lib\site-packages\pydantic_internal_generate_schema.py", line 610, in generate_schema
schema = self._generate_schema_inner(obj)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\WWW\DB-GPT-v0.7.0.venv\Lib\site-packages\pydantic_internal_generate_schema.py", line 879, in _generate_schema_inner
return self._model_schema(obj)
^^^^^^^^^^^^^^^^^^^^^^^
File "C:\WWW\DB-GPT-v0.7.0.venv\Lib\site-packages\pydantic_internal_generate_schema.py", line 691, in _model_schema
{k: self._generate_md_field_schema(k, v, decorators) for k, v in fields.items()},
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\WWW\DB-GPT-v0.7.0.venv\Lib\site-packages\pydantic_internal_generate_schema.py", line 691, in
{k: self._generate_md_field_schema(k, v, decorators) for k, v in fields.items()},
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\WWW\DB-GPT-v0.7.0.venv\Lib\site-packages\pydantic_internal_generate_schema.py", line 1071, in _generate_md_field_schema
common_field = self._common_field_schema(name, field_info, decorators)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\WWW\DB-GPT-v0.7.0.venv\Lib\site-packages\pydantic_internal_generate_schema.py", line 1263, in _common_field_schema
schema = self._apply_annotations(
^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\WWW\DB-GPT-v0.7.0.venv\Lib\site-packages\pydantic_internal_generate_schema.py", line 2056, in _apply_annotations
schema = get_inner_schema(source_type)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\WWW\DB-GPT-v0.7.0.venv\Lib\site-packages\pydantic_internal_schema_generation_shared.py", line 84, in call
schema = self._handler(source_type)
^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\WWW\DB-GPT-v0.7.0.venv\Lib\site-packages\pydantic_internal_generate_schema.py", line 2037, in inner_handler
schema = self._generate_schema_inner(obj)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\WWW\DB-GPT-v0.7.0.venv\Lib\site-packages\pydantic_internal_generate_schema.py", line 884, in _generate_schema_inner
return self.match_type(obj)
^^^^^^^^^^^^^^^^^^^^
File "C:\WWW\DB-GPT-v0.7.0.venv\Lib\site-packages\pydantic_internal_generate_schema.py", line 986, in match_type
return self._match_generic_type(obj, origin)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\WWW\DB-GPT-v0.7.0.venv\Lib\site-packages\pydantic_internal_generate_schema.py", line 1014, in _match_generic_type
return self._union_schema(obj)
^^^^^^^^^^^^^^^^^^^^^^^
File "C:\WWW\DB-GPT-v0.7.0.venv\Lib\site-packages\pydantic_internal_generate_schema.py", line 1325, in _union_schema
choices.append(self.generate_schema(arg))
^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\WWW\DB-GPT-v0.7.0.venv\Lib\site-packages\pydantic_internal_generate_schema.py", line 610, in generate_schema
schema = self._generate_schema_inner(obj)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\WWW\DB-GPT-v0.7.0.venv\Lib\site-packages\pydantic_internal_generate_schema.py", line 884, in _generate_schema_inner
return self.match_type(obj)
^^^^^^^^^^^^^^^^^^^^
File "C:\WWW\DB-GPT-v0.7.0.venv\Lib\site-packages\pydantic_internal_generate_schema.py", line 986, in match_type
return self._match_generic_type(obj, origin)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\WWW\DB-GPT-v0.7.0.venv\Lib\site-packages\pydantic_internal_generate_schema.py", line 1024, in _match_generic_type
return self._dict_schema(*self._get_first_two_args_or_any(obj))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\WWW\DB-GPT-v0.7.0.venv\Lib\site-packages\pydantic_internal_generate_schema.py", line 370, in _dict_schema
return core_schema.dict_schema(self.generate_schema(keys_type), self.generate_schema(values_type))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\WWW\DB-GPT-v0.7.0.venv\Lib\site-packages\pydantic_internal_generate_schema.py", line 610, in generate_schema
schema = self._generate_schema_inner(obj)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\WWW\DB-GPT-v0.7.0.venv\Lib\site-packages\pydantic_internal_generate_schema.py", line 884, in _generate_schema_inner
return self.match_type(obj)
^^^^^^^^^^^^^^^^^^^^
File "C:\WWW\DB-GPT-v0.7.0.venv\Lib\site-packages\pydantic_internal_generate_schema.py", line 975, in match_type
return self._call_schema(obj)
^^^^^^^^^^^^^^^^^^^^^^
File "C:\WWW\DB-GPT-v0.7.0.venv\Lib\site-packages\pydantic_internal_generate_schema.py", line 1818, in _call_schema
type_hints = _typing_extra.get_function_type_hints(function, globalns=globalns, localns=localns)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\WWW\DB-GPT-v0.7.0.venv\Lib\site-packages\pydantic_internal_typing_extra.py", line 730, in get_function_type_hints
type_hints[name] = eval_type_backport(value, globalns, localns, type_params)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\WWW\DB-GPT-v0.7.0.venv\Lib\site-packages\pydantic_internal_typing_extra.py", line 609, in eval_type_backport
return _eval_type_backport(value, globalns, localns, type_params)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\WWW\DB-GPT-v0.7.0.venv\Lib\site-packages\pydantic_internal_typing_extra.py", line 633, in _eval_type_backport
return _eval_type(value, globalns, localns, type_params)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\WWW\DB-GPT-v0.7.0.venv\Lib\site-packages\pydantic_internal_typing_extra.py", line 667, in _eval_type
return typing._eval_type( # type: ignore
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\12073\AppData\Roaming\uv\python\cpython-3.11.11-windows-x86_64-none\Lib\typing.py", line 395, in _eval_type
return t._evaluate(globalns, localns, recursive_guard)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\12073\AppData\Roaming\uv\python\cpython-3.11.11-windows-x86_64-none\Lib\typing.py", line 905, in _evaluate
eval(self.forward_code, globalns, localns),
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "<string>", line 1, in <module>
NameError: name 'Callable' is not defined. Did you mean: 'callable'?
进程已结束,退出代码为 1
需要先修复的文件
C:\WWW\DB-GPT-v0.7.0\packages\dbgpt-client\src\dbgpt_client\schema.py
"""this module contains the schemas for the dbgpt client."""
import json
from datetime import datetime
from enum import Enum
from typing import Any, Dict, List, Optional, Union
from fastapi import File, UploadFile
from dbgpt._private.pydantic import BaseModel, ConfigDict, Field
from dbgpt_ext.rag.chunk_manager import ChunkParameters
class ChatCompletionRequestBody(BaseModel):
"""ChatCompletion LLM http request body."""
class ChatMode(Enum):
"""Chat mode."""
class AWELTeamModel(BaseModel):
"""AWEL team model."""
class AgentResourceType(Enum):
"""Agent resource type."""
class AgentResourceModel(BaseModel):
"""Agent resource model."""
class AppDetailModel(BaseModel):
"""App detail model."""
class AppModel(BaseModel):
"""App model."""
class SpaceModel(BaseModel):
"""Space model."""
class SpaceModelNow(SpaceModel):
    """Space model extended with a ``domain_type`` field."""

    # Domain type of the space; server-side default appears to be 'Normal'
    # — TODO confirm the accepted values against the knowledge-space API.
    domain_type: Optional[str] = Field(
        default='Normal',
        description="space argument domain_type",
    )
class DocumentModel(BaseModel):
"""Document model."""
class SyncModel(BaseModel):
"""Sync model."""
class DatasourceModel(BaseModel):
"""Datasource model."""
C:\WWW\DB-GPT-v0.7.0\packages\dbgpt-client\src\dbgpt_client\knowledge.py
"""Knowledge API client."""
import json
from typing import List
from dbgpt._private.pydantic import model_to_dict, model_to_json
from dbgpt.core.schema.api import Result
from .client import Client, ClientException
from .schema import DocumentModel, SpaceModelNow, SyncModel
async def create_space(client: Client, space_model: SpaceModelNow) -> SpaceModelNow:
"""Create a new space.
async def update_space(client: Client, space_model: SpaceModelNow) -> SpaceModelNow:
"""Update a document.
async def delete_space(client: Client, space_id: str) -> SpaceModelNow:
"""Delete a space.
async def get_space(client: Client, space_id: str) -> SpaceModelNow:
"""Get a document.
async def list_space(client: Client) -> List[SpaceModelNow]:
"""List spaces.
async def create_document(client: Client, doc_model: DocumentModel) -> DocumentModel:
"""Create a new document.
async def delete_document(client: Client, document_id: str) -> DocumentModel:
"""Delete a document.
async def get_document(client: Client, document_id: str) -> DocumentModel:
"""Get a document.
async def list_document(client: Client) -> List[DocumentModel]:
"""List documents.
async def sync_document(client: Client, sync_model: SyncModel) -> List:
"""Sync document.
Are you willing to submit PR?
The text was updated successfully, but these errors were encountered: