Commit

fix format bug
Jant1L committed Dec 13, 2024
1 parent fdc0068 commit a2a759f
Showing 4 changed files with 16 additions and 19 deletions.
2 changes: 1 addition & 1 deletion dbgpt/rag/transformer/graph_embedder.py
@@ -3,8 +3,8 @@
 import logging
 from typing import List
 
-from dbgpt.storage.graph_store.graph import Graph, GraphElemType
 from dbgpt.rag.transformer.text2vector import Text2Vector
+from dbgpt.storage.graph_store.graph import Graph, GraphElemType
 
 logger = logging.getLogger(__name__)
7 changes: 4 additions & 3 deletions dbgpt/rag/transformer/text2vector.py
@@ -1,11 +1,12 @@
"""Text2Vector class."""

import logging
import dashscope
from http import HTTPStatus
from abc import ABC
from http import HTTPStatus
from typing import List

import dashscope

from dbgpt.rag.transformer.base import EmbedderBase

logger = logging.getLogger(__name__)
@@ -46,4 +47,4 @@ def truncate(self):
         """Do nothing by default."""
 
     def drop(self):
-        """Do nothing by default."""
+        """Do nothing by default."""
21 changes: 10 additions & 11 deletions dbgpt/storage/knowledge_graph/community/tugraph_store_adapter.py
@@ -74,7 +74,6 @@ async def get_community(self, community_id: str) -> Community:
         all_edge_graph = self.query(edge_query)
         all_graph = MemoryGraph()
         for vertex in all_vertex_graph.vertices():
-            vertex.del_prop("embedding")
             all_graph.upsert_vertex(vertex)
         for edge in all_edge_graph.edges():
             all_graph.append_edge(edge)
@@ -150,7 +149,7 @@ def upsert_entities(self, entities: Iterator[Vertex]) -> None:
                 "_document_id": "0",
                 "_chunk_id": "0",
                 "_community_id": "0",
-                "embedding": entity.get_prop("embedding"),
+                "_embedding": entity.get_prop("embedding"),
             }
             for entity in entities
         ]
@@ -161,7 +160,7 @@ def upsert_entities(self, entities: Iterator[Vertex]) -> None:
         )
         create_vector_index_query = (
             f"CALL db.addVertexVectorIndex("
-            f'"{GraphElemType.ENTITY.value}", "embedding", '
+            f'"{GraphElemType.ENTITY.value}", "_embedding", '
             "{dimension: 512})"
         )
         self.graph_store.conn.run(query=entity_query)
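For reference, a minimal sketch of the statement this index-creation f-string expands to at runtime; the label value "entity" is an illustrative assumption standing in for GraphElemType.ENTITY.value, not something stated in this diff:

    # Illustrative only: assumes GraphElemType.ENTITY.value == "entity".
    create_vector_index_query = (
        f"CALL db.addVertexVectorIndex("
        f'"entity", "_embedding", '
        "{dimension: 512})"
    )
    # -> 'CALL db.addVertexVectorIndex("entity", "_embedding", {dimension: 512})'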
@@ -204,7 +203,7 @@ def upsert_chunks(self, chunks: Iterator[Union[Vertex, ParagraphChunk]]) -> None
                 "id": self._escape_quotes(chunk.vid),
                 "name": self._escape_quotes(chunk.name),
                 "content": self._escape_quotes(chunk.get_prop("content")),
-                "embedding": chunk.get_prop("embedding"),
+                "_embedding": chunk.get_prop("embedding"),
             }
             for chunk in chunks
         ]
@@ -216,7 +215,7 @@ def upsert_chunks(self, chunks: Iterator[Union[Vertex, ParagraphChunk]]) -> None
         )
         create_vector_index_query = (
             f"CALL db.addVertexVectorIndex("
-            f'"{GraphElemType.CHUNK.value}", "embedding", '
+            f'"{GraphElemType.CHUNK.value}", "_embedding", '
             "{dimension: 512})"
         )
         self.graph_store.conn.run(query=chunk_query)
@@ -429,7 +428,7 @@ def _format_graph_property_schema(
             _format_graph_property_schema("name", "STRING", False),
             _format_graph_property_schema("_community_id", "STRING", True, True),
             _format_graph_property_schema("content", "STRING", True, True),
-            _format_graph_property_schema("embedding", "FLOAT_VECTOR", True, False),
+            _format_graph_property_schema("_embedding", "FLOAT_VECTOR", True, False),
         ]
         self.create_graph_label(
             graph_elem_type=GraphElemType.CHUNK, graph_properties=chunk_proerties
@@ -441,7 +440,7 @@ def _format_graph_property_schema(
             _format_graph_property_schema("name", "STRING", False),
             _format_graph_property_schema("_community_id", "STRING", True, True),
             _format_graph_property_schema("description", "STRING", True, True),
-            _format_graph_property_schema("embedding", "FLOAT_VECTOR", True, False),
+            _format_graph_property_schema("_embedding", "FLOAT_VECTOR", True, False),
         ]
         self.create_graph_label(
             graph_elem_type=GraphElemType.ENTITY, graph_properties=vertex_proerties
@@ -596,7 +595,7 @@ def explore(
             vector = str(sub);
             similarity_search = (
                 f"CALL db.vertexVectorKnnSearch("
-                f"'{GraphElemType.ENTITY.value}','embedding', {vector}, "
+                f"'{GraphElemType.ENTITY.value}','_embedding', {vector}, "
                 "{top_k:2, hnsw_ef_search:10})"
                 "YIELD node RETURN node.id AS id;"
             )
@@ -637,7 +636,7 @@ def explore(
             vector = str(sub);
             similarity_search = (
                 f"CALL db.vertexVectorKnnSearch("
-                f"'{GraphElemType.ENTITY.value}','embedding', {vector}, "
+                f"'{GraphElemType.ENTITY.value}','_embedding', {vector}, "
                 "{top_k:2, hnsw_ef_search:10})"
                 "YIELD node RETURN node.id AS id"
             )
@@ -660,7 +659,7 @@ def explore(
             vector = str(sub);
             similarity_search = (
                 f"CALL db.vertexVectorKnnSearch("
-                f"'{GraphElemType.ENTITY.value}','embedding', {vector}, "
+                f"'{GraphElemType.ENTITY.value}','_embedding', {vector}, "
                 "{top_k:2, hnsw_ef_search:10})"
                 "YIELD node RETURN node.name AS name"
             )
@@ -717,7 +716,7 @@ def explore(
             vector = str(sub);
             similarity_search = (
                 f"CALL db.vertexVectorKnnSearch("
-                f"'{GraphElemType.CHUNK.value}','embedding', {vector}, "
+                f"'{GraphElemType.CHUNK.value}','_embedding', {vector}, "
                 "{top_k:2, hnsw_ef_search:10})"
                 "YIELD node RETURN node.name AS name"
             )
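For reference, a rough sketch of how one of these similarity-search strings renders at runtime; the label value "chunk" and the short example vector are illustrative assumptions rather than values taken from this commit:

    # Illustrative only: assumes GraphElemType.CHUNK.value == "chunk" and that
    # `sub` holds the query embedding produced by Text2Vector (512 floats in
    # practice; truncated here).
    sub = [0.12, -0.03, 0.88]
    vector = str(sub)
    similarity_search = (
        f"CALL db.vertexVectorKnnSearch("
        f"'chunk','_embedding', {vector}, "
        "{top_k:2, hnsw_ef_search:10})"
        "YIELD node RETURN node.name AS name"
    )
    # The search strings above differ mainly in the vertex label and the
    # field returned by YIELD (node.id vs. node.name).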
5 changes: 1 addition & 4 deletions dbgpt/storage/knowledge_graph/community_summary.py
@@ -8,8 +8,8 @@
 from dbgpt._private.pydantic import ConfigDict, Field
 from dbgpt.core import Chunk
 from dbgpt.rag.transformer.community_summarizer import CommunitySummarizer
-from dbgpt.rag.transformer.graph_extractor import GraphExtractor
 from dbgpt.rag.transformer.graph_embedder import GraphEmbedder
+from dbgpt.rag.transformer.graph_extractor import GraphExtractor
 from dbgpt.storage.knowledge_graph.base import ParagraphChunk
 from dbgpt.storage.knowledge_graph.community.community_store import CommunityStore
 from dbgpt.storage.knowledge_graph.knowledge_graph import (
@@ -391,9 +391,6 @@ async def asimilar_search_with_scores(
                 limit=self._knowledge_graph_chunk_search_top_size,
                 search_scope="document_graph",
             )
-
-            for vertex in subgraph.vertices():
-                vertex.del_prop("embedding")
 
         knowledge_graph_str = subgraph.format() if subgraph else ""
         knowledge_graph_for_doc_str = (
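The dropped del_prop loop and the rename to _embedding appear to rely on a naming convention in the graph formatting code; a minimal sketch of that presumed behavior, assuming (not shown in this diff) that Graph.format() omits properties whose names start with an underscore:

    # Assumption for illustration only: underscore-prefixed properties are
    # treated as internal and skipped when the graph is formatted for prompts.
    def _visible_props(props: dict) -> dict:
        return {k: v for k, v in props.items() if not k.startswith("_")}

    _visible_props({"name": "entity-1", "_embedding": [0.1, 0.2]})
    # -> {"name": "entity-1"}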
