From 7dad3919faebc6d54c582471e9fa102068e00dc2 Mon Sep 17 00:00:00 2001
From: KingSkyLi <15566300566@163.com>
Date: Thu, 21 Nov 2024 14:46:11 +0800
Subject: [PATCH 01/61] Backend upgrade;

---
 osgraph-service-py/.env.template | 33 +
 osgraph-service-py/.gitignore | 19 +
 osgraph-service-py/README.md | 0
 osgraph-service-py/app/__init__.py | 89 +++
 osgraph-service-py/app/dal/__init__.py | 0
 osgraph-service-py/app/dal/graph/__init__.py | 0
 osgraph-service-py/app/dal/graph/tugraph.py | 360 ++++++++++
 osgraph-service-py/app/dal/search/es.py | 79 ++
 osgraph-service-py/app/llm/__init__.py | 0
 osgraph-service-py/app/llm/openai_client.py | 42 ++
 .../app/llm/prompt_templates/__init__.py | 0
 .../app/llm/prompt_templates/graph_summary.py | 18 +
 osgraph-service-py/app/managers/__init__.py | 0
 .../app/managers/develop_activities.py | 61 ++
 .../app/managers/fulltext_search.py | 12 +
 osgraph-service-py/app/managers/graph_list.py | 16 +
 .../app/managers/os_interest.py | 54 ++
 osgraph-service-py/app/managers/os_partner.py | 52 ++
 .../app/managers/project_community.py | 63 ++
 .../app/managers/project_contribution.py | 56 ++
 .../app/managers/project_ecology.py | 48 ++
 osgraph-service-py/app/models/__init__.py | 0
 .../app/models/default_graph.py | 393 ++++++++++
 osgraph-service-py/app/models/graph_view.py | 219 ++++++
 osgraph-service-py/app/models/system_graph.py | 27 +
 osgraph-service-py/app/routes/__init__.py | 0
 .../app/routes/develop_activities.py | 33 +
 .../app/routes/fulltext_search.py | 32 +
 osgraph-service-py/app/routes/graph_list.py | 31 +
 osgraph-service-py/app/routes/os_interest.py | 39 +
 osgraph-service-py/app/routes/os_partner.py | 33 +
 .../app/routes/project_community.py | 33 +
 .../app/routes/project_contribution.py | 33 +
 .../app/routes/project_ecology.py | 33 +
 osgraph-service-py/app/services/__init__.py | 26 +
 .../app/services/fulltext_search.py | 23 +
 osgraph-service-py/app/services/graph_list.py | 13 +
 .../app/services/graph_services/__init__.py | 4 +
 .../app/services/graph_services/base.py | 78 ++
 .../graph_services/develop_activities.py | 48 ++
 .../services/graph_services/os_interest.py | 50 ++
 .../app/services/graph_services/os_partner.py | 48 ++
 .../graph_services/project_community.py | 52 ++
 .../graph_services/project_contribution.py | 57 ++
 .../graph_services/project_ecology.py | 48 ++
 .../app/services/graph_summary.py | 8 +
 .../app/utils/custom_exceptions.py | 15 +
 osgraph-service-py/app/utils/logger.py | 26 +
 .../app/utils/response_handler.py | 25 +
 osgraph-service-py/config.py | 15 +
 osgraph-service-py/poetry.lock | 678 ++++++++++++++++++
 osgraph-service-py/pyproject.toml | 19 +
 osgraph-service-py/run.py | 20 +
 53 files changed, 3161 insertions(+)
 create mode 100644 osgraph-service-py/.env.template
 create mode 100644 osgraph-service-py/.gitignore
 create mode 100644 osgraph-service-py/README.md
 create mode 100644 osgraph-service-py/app/__init__.py
 create mode 100644 osgraph-service-py/app/dal/__init__.py
 create mode 100644 osgraph-service-py/app/dal/graph/__init__.py
 create mode 100644 osgraph-service-py/app/dal/graph/tugraph.py
 create mode 100644 osgraph-service-py/app/dal/search/es.py
 create mode 100644 osgraph-service-py/app/llm/__init__.py
 create mode 100644 osgraph-service-py/app/llm/openai_client.py
 create mode 100644 osgraph-service-py/app/llm/prompt_templates/__init__.py
 create mode 100644 osgraph-service-py/app/llm/prompt_templates/graph_summary.py
 create mode 100644 osgraph-service-py/app/managers/__init__.py
 create mode 100644 osgraph-service-py/app/managers/develop_activities.py
 create mode 100644 osgraph-service-py/app/managers/fulltext_search.py
 create mode 100644 osgraph-service-py/app/managers/graph_list.py
 create mode 100644 osgraph-service-py/app/managers/os_interest.py
 create mode 100644 osgraph-service-py/app/managers/os_partner.py
 create mode 100644 osgraph-service-py/app/managers/project_community.py
 create mode 100644 osgraph-service-py/app/managers/project_contribution.py
 create mode 100644 osgraph-service-py/app/managers/project_ecology.py
 create mode 100644 osgraph-service-py/app/models/__init__.py
 create mode 100644 osgraph-service-py/app/models/default_graph.py
 create mode 100644 osgraph-service-py/app/models/graph_view.py
 create mode 100644 osgraph-service-py/app/models/system_graph.py
 create mode 100644 osgraph-service-py/app/routes/__init__.py
 create mode 100644 osgraph-service-py/app/routes/develop_activities.py
 create mode 100644 osgraph-service-py/app/routes/fulltext_search.py
 create mode 100644 osgraph-service-py/app/routes/graph_list.py
 create mode 100644 osgraph-service-py/app/routes/os_interest.py
 create mode 100644 osgraph-service-py/app/routes/os_partner.py
 create mode 100644 osgraph-service-py/app/routes/project_community.py
 create mode 100644 osgraph-service-py/app/routes/project_contribution.py
 create mode 100644 osgraph-service-py/app/routes/project_ecology.py
 create mode 100644 osgraph-service-py/app/services/__init__.py
 create mode 100644 osgraph-service-py/app/services/fulltext_search.py
 create mode 100644 osgraph-service-py/app/services/graph_list.py
 create mode 100644 osgraph-service-py/app/services/graph_services/__init__.py
 create mode 100644 osgraph-service-py/app/services/graph_services/base.py
 create mode 100644 osgraph-service-py/app/services/graph_services/develop_activities.py
 create mode 100644 osgraph-service-py/app/services/graph_services/os_interest.py
 create mode 100644 osgraph-service-py/app/services/graph_services/os_partner.py
 create mode 100644 osgraph-service-py/app/services/graph_services/project_community.py
 create mode 100644 osgraph-service-py/app/services/graph_services/project_contribution.py
 create mode 100644 osgraph-service-py/app/services/graph_services/project_ecology.py
 create mode 100644 osgraph-service-py/app/services/graph_summary.py
 create mode 100644 osgraph-service-py/app/utils/custom_exceptions.py
 create mode 100644 osgraph-service-py/app/utils/logger.py
 create mode 100644 osgraph-service-py/app/utils/response_handler.py
 create mode 100644 osgraph-service-py/config.py
 create mode 100644 osgraph-service-py/poetry.lock
 create mode 100644 osgraph-service-py/pyproject.toml
 create mode 100644 osgraph-service-py/run.py

diff --git a/osgraph-service-py/.env.template b/osgraph-service-py/.env.template
new file mode 100644
index 0000000..8615cbd
--- /dev/null
+++ b/osgraph-service-py/.env.template
@@ -0,0 +1,33 @@
+# mysql config
+MYSQL_HOST=127.0.0.1
+MYSQL_PORT=3306
+MYSQL_USER=root
+MYSQL_PASSWORD=tuMaker0520@
+MYSQL_DB=osgraph
+
+# es config
+ES_HOST=127.0.0.1
+ES_PORT=9200
+ES_USERNAME=elastic
+ES_PASSWORD=es
+
+# tugraph config
+TUGRAPHDB_HOST=127.0.0.1
+TUGRAPHDB_PORT=7687
+TUGRAPHDB_USER=admin
+TUGRAPHDB_PASSWORD=73@TuGraph
+TUGRAPHDB_OSGRAPH_GITHUB_GRAPH_NAME=default
+TUGRAPHDB_OSGRAPH_SYSTEM_GRAPH_NAME=system
+
+# flask config
+FLASK_CONFIG=production
+
+# OpenAI key
+SUMMARY_GRAPH=off # on | off
+OPENAI_KEY=
+BASEURL=
+MODEL=
+
+
+
+
diff --git a/osgraph-service-py/.gitignore b/osgraph-service-py/.gitignore
new file mode 100644 index
0000000..67f0865 --- /dev/null +++ b/osgraph-service-py/.gitignore @@ -0,0 +1,19 @@ +# Virtual environments +.venv/ +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ +.env +logs/ +*.log + +# Python cache +__pycache__/ +*.pyc + +# IDE files +.vscode/ +.idea/ +.DS_Store diff --git a/osgraph-service-py/README.md b/osgraph-service-py/README.md new file mode 100644 index 0000000..e69de29 diff --git a/osgraph-service-py/app/__init__.py b/osgraph-service-py/app/__init__.py new file mode 100644 index 0000000..26e33c4 --- /dev/null +++ b/osgraph-service-py/app/__init__.py @@ -0,0 +1,89 @@ +# app/__init__.py +from flask import Flask, jsonify +import os +import importlib +from .utils.logger import setup_logger +from .utils.custom_exceptions import InvalidUsage +from app.services import register_all_services +from app.dal.graph.tugraph import GraphClient, GraphLabel, LabelProps +from app.models.system_graph import GraphService +from dotenv import load_dotenv +load_dotenv() + +def create_app(config_class: str = 'config.ProductionConfig') -> Flask: + app = Flask(__name__) + app.config.from_object(config_class) + setup_logger(app) + register_blueprints(app) + with app.app_context(): + initialize_system_graph(app) + register_all_services() + register_error_handlers(app) + return app + +def register_blueprints(app: Flask, blueprint_folder: str = "routes") -> None: + current_dir = os.path.dirname(os.path.abspath(__file__)) + blueprints_path = os.path.join(current_dir, blueprint_folder) + for filename in os.listdir(blueprints_path): + if filename.endswith(".py") and filename != "__init__.py": + module_name = filename[:-3] + module_path = f"app.{blueprint_folder}.{module_name}" + module = importlib.import_module(module_path) + for attr_name in dir(module): + if attr_name.endswith("_bp"): + blueprint = getattr(module, attr_name) + if hasattr(blueprint, "name") and hasattr(blueprint, "url_prefix"): + app.register_blueprint(blueprint) + + +def register_error_handlers(app: Flask) -> None: + @app.errorhandler(InvalidUsage) + def handle_invalid_usage(error): + response = jsonify(error.to_dict()) + response.status_code = error.status_code + app.logger.error(f"InvalidUsage: {error.message}") + return response + + @app.errorhandler(404) + def not_found(error): + app.logger.warning("404 Not Found") + return jsonify({"message": "Resource not found"}), 404 + + @app.errorhandler(500) + def internal_error(error): + app.logger.error("Internal Server Error") + return jsonify({"message": "Internal server error"}), 500 + +def initialize_system_graph(app: Flask): + """ + 初始化 system_graph 和 graph_service 的基础库表。 + """ + graph_name = os.getenv('TUGRAPHDB_OSGRAPH_SYSTEM_GRAPH_NAME') + client = GraphClient(graph_name) + + try: + system_graph = client.get_graph() + if system_graph: + app.logger.info("system_graph 图已存在") + else: + client.create_graph() + app.logger.info("system_graph 图已创建") + graph_service = client.get_label("vertex", "graph_service") + if graph_service: + app.logger.info("graph_service Label 已存在") + else: + label = GraphLabel( + label=GraphService.label, + primary=GraphService.primary, + type=GraphService.type, + properties=[ + LabelProps(name=key, type="string", optional=True) + for key in vars(GraphService.props).keys() + ] + ) + client.create_label(label) + app.logger.info("graph_service Label 已创建") + except Exception as e: + app.logger.error(f"初始化 system_graph 失败: {str(e)}") + finally: + client.close() diff --git a/osgraph-service-py/app/dal/__init__.py b/osgraph-service-py/app/dal/__init__.py new file mode 100644 index 
0000000..e69de29 diff --git a/osgraph-service-py/app/dal/graph/__init__.py b/osgraph-service-py/app/dal/graph/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/osgraph-service-py/app/dal/graph/tugraph.py b/osgraph-service-py/app/dal/graph/tugraph.py new file mode 100644 index 0000000..641a2ac --- /dev/null +++ b/osgraph-service-py/app/dal/graph/tugraph.py @@ -0,0 +1,360 @@ +# app/dal/graph/tugraph.py +import os +from neo4j import GraphDatabase +from dotenv import load_dotenv +from typing import Optional, List, Dict, Any +from flask import current_app +from dataclasses import dataclass, asdict, is_dataclass +from app.models.default_graph import Edge, Vertex +import json + +load_dotenv() + +@dataclass +class LabelProps: + name:str + type: str + optional: bool + index: Optional[bool] = None + +@dataclass +class GraphLabel: + label: str + primary: str + type: str + properties: Optional[List[LabelProps]] = None + + def to_dict(self) -> Dict[str, Any]: + # 遍历属性列表,找到与 primary 匹配的属性并设置 optional 和 index + if self.properties: + for prop in self.properties: + if prop.name == self.primary: + prop.optional = False + prop.index = True + else: + prop.optional = True + prop.index = False + prop.type = prop.type.upper() + return { + "label": self.label, + "primary": self.primary, + "type": self.type, + "properties": [prop.__dict__ for prop in self.properties] if self.properties else [] + } + +class GraphClient: + def __init__(self, graph_name): + TUGRAPHDB_HOST = os.getenv("TUGRAPHDB_HOST") + TUGRAPHDB_PORT = os.getenv("TUGRAPHDB_PORT") + TUGRAPHDB_USER = os.getenv("TUGRAPHDB_USER") + TUGRAPHDB_PASSWORD = os.getenv("TUGRAPHDB_PASSWORD") + self.driver = GraphDatabase.driver(f'bolt://{TUGRAPHDB_HOST}:{TUGRAPHDB_PORT}', auth=(TUGRAPHDB_USER, TUGRAPHDB_PASSWORD)) + self.graph_name = graph_name + + def close(self): + if self.driver: + self.driver.close() + current_app.logger.info("TuGraph connection closed.") + + # 创建 Label + def create_label(self, label: GraphLabel): + try: + with self.driver.session(database=self.graph_name) as session: + label_json = json.dumps(label.to_dict()) + query = f"""CALL db.createVertexLabelByJson('{label_json}')""" + session.run(query) + current_app.logger.info(f"Label '{label}' created.") + except Exception as e: + current_app.logger.info(f"Label '{label}' may already exist. {str(e)}") + + def get_label(self,label_type:str,label_name:str) -> Dict[str,any]: + try: + with self.driver.session(database=self.graph_name) as session: + if label_type == 'vertex': + query = f"""CALL db.getVertexSchema('{label_name}')""" + else: + query = f"""CALL db.getEdgeSchema('{label_name}')""" + result = session.run(query).data() + return json.dumps(result) + except Exception as e: + current_app.logger.info(f"Faild to get {label_type} {label_name} . Errormessage: {str(e)}") + + + # 创建节点 + def create_vertex(self, label:str, properties:Dict[str,any]): + try: + properties_str = self._convert_dict_to_str(properties) + query = f""" + CREATE (n:{label} {properties_str}) RETURN n + """ + with self.driver.session(database=self.graph_name) as session: + result = session.run(query) + current_app.logger.info(f"Vertex '{ json.dumps(properties)}' created success.") + return result.data() + except Exception as e: + current_app.logger.info(f"Vertex '{ json.dumps(properties)}' created faild. 
Error message : {str(e)}") + + # 创建边 + def create_relationship( + self, + src_label: str = "", + src_filter: Optional[Dict[str, Any]] = None, + dst_label: str = "", + dst_filter: Optional[Dict[str, Any]] = None, + relationship_type: str = "", + properties: Optional[Dict[str, Any]] = None + ) -> None: + try: + query = f""" + MATCH (n:{src_label}) + MATCH (m:{dst_label}) + """ + conditions = [] + if src_filter: + for key, value in src_filter.items(): + if isinstance(value, str): + conditions.append(f"n.{key} = '{value}'") + else: + conditions.append(f"n.{key} = {value}") + if dst_filter: + for key, value in dst_filter.items(): + if isinstance(value, str): + conditions.append(f"m.{key} = '{value}'") + else: + conditions.append(f"m.{key} = {value}") + if conditions: + query += " WHERE " + " AND ".join(conditions) + properties_str = self._convert_dict_to_str(properties) + query += f""" + CREATE (n)-[r:{relationship_type} {properties_str}]->(m) + RETURN r + """ + with self.driver.session(database=self.graph_name) as session: + result = session.run(query) + current_app.logger.info(f"Relationship '{json.dumps(properties)}' created.") + return result.data() + except Exception as e: + current_app.logger.error(f"Relationship '{json.dumps(properties)}' creation failed. Error message: {str(e)}") + return None + def delete_relationship( + self, + src_label: str = "", + src_filter: Optional[Dict[str, Any]] = None, + dst_label: str = "", + dst_filter: Optional[Dict[str, Any]] = None, + relationship_type: str = "", + relationship_filter: Optional[Dict[str, Any]] = None, + ) -> None: + query = f"MATCH (n:{src_label})-[r:{relationship_type}]-(m:{dst_label})" + conditions = [] + if src_filter: + for key, value in src_filter.items(): + if isinstance(value, str): + conditions.append(f'n.{key} = "{value}"') + else: + conditions.append(f"n.{key} = {value}") + if dst_filter: + for key, value in dst_filter.items(): + if isinstance(value, str): + conditions.append(f'm.{key} = "{value}"') + else: + conditions.append(f"m.{key} = {value}") + if relationship_filter: + for key, value in relationship_filter.items(): + if isinstance(value, str): + conditions.append(f'r.{key} = "{value}"') + else: + conditions.append(f"r.{key} = {value}") + if conditions: + query += " WHERE " + " AND ".join(conditions) + query += " DELETE r" + print(f"Generated Cypher Query: {query}") + try: + with self.driver.session(database=self.graph_name) as session: + session.run(query) + print("Relationship deleted successfully.") + except Exception as e: + print(f"Failed to delete relationship: {e}") + def upsert_vertex(self, label, properties): + try: + with self.driver.session(database=self.graph_name) as session: + query = ( + "CALL db.upsertVertex(" + f'"{label}", ' + f"[{self._convert_dict_to_str(properties)}])" + ) + result = session.run(query).data() + return result + except Exception as e: + print(f"Failed to update_node: {e}") + + def get_vertex(self, vertex_instance: Vertex, limit: Optional[int] = None): + if not isinstance(vertex_instance, Vertex): + raise ValueError("Input must be an instance of a Vertex-derived class.") + label = vertex_instance.label + filters = vertex_instance.props # Access props + query = f"MATCH (n:{label})" + if filters: + conditions = [ + f"n.{key} = '{value}'" if isinstance(value, str) else f"n.{key} = {value}" + for key, value in asdict(filters).items() if value is not None + ] + if conditions: + query += " WHERE " + " AND ".join(conditions) + query += " RETURN n" + if limit is not None: + query += f" LIMIT 
{limit}" + try: + with self.driver.session(database=self.graph_name) as session: + result = session.run(query) + return result.data() + except Exception as e: + print(f"Error fetching vertex: {e}") + return None + + def get_edge(self, edge_instance: Edge, deep: int = 3, limit: int = 50) -> Optional[list]: + if not isinstance(edge_instance, Edge): + raise ValueError("Input must be an instance of an Edge-derived class.") + + # Extract edge label and properties + label = edge_instance.label + source = edge_instance.source + target = edge_instance.target + props = edge_instance._props + + # Build the MATCH pattern + query = f"MATCH p=(n:{source.label})-[r:{label}*1..{deep}]-(m:{target.label})" + + # Add WHERE conditions for properties of n, r, and m + conditions = [] + + # Add source (n) filters + if hasattr(source, "props") and source.props: + conditions += [ + f"n.{key} = '{value}'" if isinstance(value, str) else f"n.{key} = {value}" + for key, value in asdict(source.props).items() if value is not None + ] + + # Add relationship (r) filters + if props: + conditions += [ + f"r.{key} = '{value}'" if isinstance(value, str) else f"r.{key} = {value}" + for key, value in asdict(props).items() if value is not None + ] + + # Add target (m) filters + if hasattr(target, "props") and target.props: + conditions += [ + f"m.{key} = '{value}'" if isinstance(value, str) else f"m.{key} = {value}" + for key, value in asdict(target.props).items() if value is not None + ] + + # Append WHERE clause if conditions exist + if conditions: + query += " WHERE " + " AND ".join(conditions) + + # Add RETURN and LIMIT clauses + query += f" RETURN p LIMIT {limit}" + + try: + with self.driver.session(database=self.graph_name) as session: + result = session.run(query) + return self._parse_edge_result(result) + except Exception as e: + print(f"Error fetching edge: {e}") + return None + + + def get_graph(self) -> Optional[dict]: + with self.driver.session(database='default') as session: + graph_list = session.run("CALL dbms.graph.listGraphs()").data() + result = next((graph for graph in graph_list if graph.get("graph_name") == self.graph_name), None) + return result + + def create_graph(self): + """Create a new graph in the database if it doesn't already exist.""" + try: + with self.driver.session(database="default") as session: + session.run(f"CALL dbms.graph.createGraph('{self.graph_name}', '', 2048)") + except Exception as e: + raise Exception(f"Failed to create graph '{self.graph_name}': {str(e)}") from e + + def run(self,cypher:str) -> Any: + try: + with self.driver.session(database=self.graph_name) as session: + result = session.run(cypher) + return result.data() + except Exception as e: + raise Exception(f"Error : {e}") + + + def _convert_dict_to_str(self, properties: Any) -> str: + if not properties: + return "" + + # 如果是 dataclass,则将其转换为字典 + if is_dataclass(properties): + properties = asdict(properties) + + def convert_value(value: Any) -> str: + if isinstance(value, str): + return f'"{value}"' + elif isinstance(value, (int, float, bool)): + return str(value).lower() if isinstance(value, bool) else str(value) + elif isinstance(value, list): + return "[" + ", ".join(convert_value(item) for item in value) + "]" + elif isinstance(value, dict): + return "{" + ", ".join(f"{k}: {convert_value(v)}" for k, v in value.items()) + "}" + elif callable(value): + return convert_value(value()) + else: + return f'"{str(value)}"' + properties_str = ( + "{" + + ", ".join(f"{k}: {convert_value(v)}" for k, v in properties.items()) + + 
"}" + ) + return properties_str + + + def _parse_edge_result(self, query_result: list) -> list: + parsed_results = [] + for record in query_result: + path = record.get('p') # Extract the Path object from the result + + if path: + # Extract nodes (start and end nodes are the first and last in the list) + nodes = path.nodes + start_node = { + "element_id": nodes[0].element_id, + "type": list(nodes[0].labels), + "properties": nodes[0]._properties, + } + end_node = { + "element_id": nodes[-1].element_id, + "type": list(nodes[-1].labels), + "properties": nodes[-1]._properties, + } + + # Extract relationships along the path + relationships = [] + for relationship in path.relationships: + relationships.append({ + "type": relationship.type, + "element_id": relationship.element_id, + "properties": relationship._properties, + }) + + parsed_results.append({ + "start": start_node, + "relationships": relationships, + "end": end_node, + }) + + return parsed_results + + + + + + diff --git a/osgraph-service-py/app/dal/search/es.py b/osgraph-service-py/app/dal/search/es.py new file mode 100644 index 0000000..7ae7cab --- /dev/null +++ b/osgraph-service-py/app/dal/search/es.py @@ -0,0 +1,79 @@ +# # app/dal/graph/es.py +from elasticsearch import Elasticsearch +from elasticsearch.exceptions import NotFoundError, ConnectionError, RequestError +from typing import Dict, List, Optional, Any +import logging +from dotenv import load_dotenv +import os +load_dotenv() +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + +class ElasticsearchClient: + def __init__(self,): + """初始化 Elasticsearch 连接""" + ES_HOST = os.getenv("ES_HOST") + ES_PORT = os.getenv("ES_PORT") + ES_USERNAME = os.getenv("ES_USERNAME") + ES_PASSWORD = os.getenv("ES_PASSWORD") + try: + self.es = Elasticsearch( + hosts=[f"http://{ES_HOST}:{ES_PORT}"], + basic_auth=(ES_USERNAME, ES_PASSWORD) if ES_USERNAME and ES_PASSWORD else None, + verify_certs=False + ) + if self.es.ping(): + logger.info("Connected to Elasticsearch successfully.") + else: + logger.error("Failed to connect to Elasticsearch.") + except ConnectionError as e: + logger.error(f"Error connecting to Elasticsearch: {e}") + raise + + def search(self, index: str, query: Dict[str, Any], size: int = 10) -> List[Dict[str, Any]]: + try: + response = self.es.search(index=index, query=query, size=size) + return [hit["_source"] for hit in response['hits']['hits']] + except NotFoundError: + logger.error(f"Index '{index}' not found.") + return [] + except RequestError as e: + logger.error(f"Search error: {e}") + return [] + + def insert(self, index: str, document: Dict[str, Any]) -> Optional[str]: + try: + response = self.es.index(index=index, document=document) + return response["_id"] + except RequestError as e: + logger.error(f"Insert error: {e}") + return None + + def update(self, index: str, doc_id: str, update_fields: Dict[str, Any]) -> bool: + try: + self.es.update(index=index, id=doc_id, body={"doc": update_fields}) + return True + except NotFoundError: + logger.error(f"Document '{doc_id}' not found in index '{index}'.") + return False + except RequestError as e: + logger.error(f"Update error: {e}") + return False + + def delete(self, index: str, doc_id: str) -> bool: + try: + self.es.delete(index=index, id=doc_id) + return True + except NotFoundError: + logger.error(f"Document '{doc_id}' not found in index '{index}'.") + return False + except RequestError as e: + logger.error(f"Delete error: {e}") + return False + + def close(self): + self.es.close() + 
logger.info("Elasticsearch connection closed.") + + + diff --git a/osgraph-service-py/app/llm/__init__.py b/osgraph-service-py/app/llm/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/osgraph-service-py/app/llm/openai_client.py b/osgraph-service-py/app/llm/openai_client.py new file mode 100644 index 0000000..7eeee17 --- /dev/null +++ b/osgraph-service-py/app/llm/openai_client.py @@ -0,0 +1,42 @@ +from openai import OpenAI +import os +from dotenv import load_dotenv +from app.llm.prompt_templates.graph_summary import get_graph_summary_prompt +load_dotenv() +class OpenAIClient: + def __init__(self): + self.client = OpenAI( + api_key=os.getenv('OPENAI_KEY'), + base_url=os.getenv('BASEURL'), + ) + + def ask_question(self, question: str, context: str = "", temperature: float = 0.7): + try: + model=os.getenv('MODEL') + response = self.client.chat.completions.create( + model=model, + messages=[ + {"role": "system", "content": "You are a helpful assistant."}, + {"role": "user", "content": context}, + {"role": "user", "content": question} + ], + stream=False, + temperature=temperature + ) + return response.choices[0].message.content + except Exception as e: + return f"Error occurred: {str(e)}" + + def summary_graph(self, graph_data: dict, ): + prompt = get_graph_summary_prompt(graph_data = graph_data) + return self.ask_question(prompt) + +# 示例用法 +if __name__ == "__main__": + api_key = "your-openai-api-key" + client = OpenAIClient(api_key) + + question = "什么是Python的主要用途?" + context = "请简要回答关于编程语言的问题。" + answer = client.ask_question(question, context) + print("回答:", answer) diff --git a/osgraph-service-py/app/llm/prompt_templates/__init__.py b/osgraph-service-py/app/llm/prompt_templates/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/osgraph-service-py/app/llm/prompt_templates/graph_summary.py b/osgraph-service-py/app/llm/prompt_templates/graph_summary.py new file mode 100644 index 0000000..d4c37f3 --- /dev/null +++ b/osgraph-service-py/app/llm/prompt_templates/graph_summary.py @@ -0,0 +1,18 @@ +def get_graph_summary_prompt(graph_data: dict) -> str: + """ + 构建用于知识图谱分析的提示词。 + + :param graph_data: 图数据,格式为包含 'vertices' 和 'edges' 的字典。 + :return: 构造好的提示词。 + """ + template = ( + "你是一个图数据分析专家,请根据这张知识图谱进行总结:\n\n" + "知识图数据如下:\n'{graph}'\n\n" + "请你分析以下内容:\n" + "1. 图的整体结构是什么?\n" + "2. 图中最重要的节点是什么?为什么?\n" + "3. 图中是否有明显的社区或分组?如果有,简要描述。\n" + "4. 
总结图的主要特征和潜在意义。" + ) + graph_string = f"vertices: {graph_data.get('vertices', [])}, edges: {graph_data.get('edges', [])}" + return template.replace("{graph}", graph_string) \ No newline at end of file diff --git a/osgraph-service-py/app/managers/__init__.py b/osgraph-service-py/app/managers/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/osgraph-service-py/app/managers/develop_activities.py b/osgraph-service-py/app/managers/develop_activities.py new file mode 100644 index 0000000..e656540 --- /dev/null +++ b/osgraph-service-py/app/managers/develop_activities.py @@ -0,0 +1,61 @@ +# app/manager/develop_activities.py +from app.models.graph_view import Graph, User, Repo, Push, CreatePR, CodeReviewAction, CreateIssue, CommentIssue +from typing import Dict, Any +from app.services.graph_services.develop_activities import DevelopActivitiesService +import json +import os + +class DevelopActivitiesManager: + def __init__(self) -> None: + pass + + def get_graph(self, data:Dict[str, Any]) -> Dict | None: + service = DevelopActivitiesService() + graph = Graph() + result = service.execute(data=data) + if result: + for data in result: + start_node = json.loads(data['start_node']) + relationship = json.loads(data['relationship']) + end_node = json.loads(data['end_node']) + if start_node["type"] == 'github_user': + user = User(id=start_node["id"],name=start_node["properties"]["name"]) + graph.insert_entity(user) + if start_node["type"] == 'github_repo': + repo = Repo(id=start_node["id"],name=start_node["properties"]["name"]) + graph.insert_entity(repo) + + if end_node["type"] == 'github_user': + user = User(id=end_node["id"],name=end_node["properties"]["name"]) + graph.insert_entity(user) + if end_node["type"] == 'github_repo': + repo = Repo(id=end_node["id"],name=end_node["properties"]["name"]) + graph.insert_entity(repo) + + if relationship["type"] == "push": + push = Push(sid=relationship["src"],tid=relationship["dst"],id=relationship["id"],count=relationship["properties"]["count"]) + graph.insert_relationship(push) + + if relationship["type"] == "open_pr": + open_pr = Push(sid=relationship["src"],tid=relationship["dst"],id=relationship["id"],count=relationship["properties"]["count"]) + graph.insert_relationship(open_pr) + + if relationship["type"] == "code_review": + code_review = Push(sid=relationship["src"],tid=relationship["dst"],id=relationship["id"],count=relationship["properties"]["count"]) + graph.insert_relationship(code_review) + + if relationship["type"] == "open_issue": + open_issue = Push(sid=relationship["src"],tid=relationship["dst"],id=relationship["id"],count=relationship["properties"]["count"]) + graph.insert_relationship(open_issue) + + if relationship["type"] == "comment_issue": + comment_issue = Push(sid=relationship["src"],tid=relationship["dst"],id=relationship["id"],count=relationship["properties"]["count"]) + graph.insert_relationship(comment_issue) + + if os.getenv('SUMMARY_GRAPH') == 'on': + from app.services.graph_summary import GraphSummaryService + summary_service = GraphSummaryService() + summary = summary_service.execute(data = graph.to_dict()) + if summary: + graph.update_summary(summary=summary) + return graph.to_dict() \ No newline at end of file diff --git a/osgraph-service-py/app/managers/fulltext_search.py b/osgraph-service-py/app/managers/fulltext_search.py new file mode 100644 index 0000000..5cdb5d2 --- /dev/null +++ b/osgraph-service-py/app/managers/fulltext_search.py @@ -0,0 +1,12 @@ +# app/manager/fulltext_search.py +from typing import List 
+from app.services.fulltext_search import FulltextSearchService + +class FulltextSearchManager: + def __init__(self) -> None: + pass + def search(self,data) -> List | None: + service = FulltextSearchService() + result = service.execute(data=data) + if result: + return result \ No newline at end of file diff --git a/osgraph-service-py/app/managers/graph_list.py b/osgraph-service-py/app/managers/graph_list.py new file mode 100644 index 0000000..9077587 --- /dev/null +++ b/osgraph-service-py/app/managers/graph_list.py @@ -0,0 +1,16 @@ +# app/manager/graph_list.py +from typing import List +from app.services.graph_list import GraphListService + +class GraphListManager: + def __init__(self) -> None: + pass + + def get_graph_list(self) -> List | None: + service = GraphListService() + graph_list:List = [] + result = service.execute() + if result: + for item in result: + graph_list.append(item["n"]) + return graph_list \ No newline at end of file diff --git a/osgraph-service-py/app/managers/os_interest.py b/osgraph-service-py/app/managers/os_interest.py new file mode 100644 index 0000000..793b444 --- /dev/null +++ b/osgraph-service-py/app/managers/os_interest.py @@ -0,0 +1,54 @@ +# app/manager/os_interest.py +from app.models.graph_view import Graph, User, Repo, Topic, Belong, ContributeRepo +from typing import Dict, Any +from app.services.graph_services.os_interest import OSInterestService +import json +import os + + +class OSInterestManager: + def __init__(self) -> None: + pass + + def get_graph(self, data:Dict[str, Any]) -> Dict | None: + service = OSInterestService() + graph = Graph() + result = service.execute(data=data) + if result: + for data in result: + start_node = json.loads(data['start_node']) + relationship = json.loads(data['relationship']) + end_node = json.loads(data['end_node']) + if start_node["type"] == 'github_user': + user = User(id=start_node["id"],name=start_node["properties"]["name"]) + graph.insert_entity(user) + if start_node["type"] == 'github_repo': + repo = Repo(id=start_node["id"],name=start_node["properties"]["name"]) + graph.insert_entity(repo) + if start_node["type"] == 'topic': + topic = Topic(id=start_node["id"],name=start_node["properties"]["name"]) + graph.insert_entity(topic) + + if end_node["type"] == 'github_user': + user = User(id=end_node["id"],name=end_node["properties"]["name"]) + graph.insert_entity(user) + if end_node["type"] == 'github_repo': + repo = Repo(id=end_node["id"],name=end_node["properties"]["name"]) + graph.insert_entity(repo) + if end_node["type"] == 'topic': + topic = Topic(id=end_node["id"],name=end_node["properties"]["name"]) + graph.insert_entity(topic) + + if relationship["type"] == "belong_to": + belong_to = Belong(sid=relationship["src"],tid=relationship["dst"],id=relationship["id"]) + graph.insert_relationship(belong_to) + if relationship["type"] == "repo": + contribute_repo = ContributeRepo(sid=relationship["src"],tid=relationship["dst"],id=relationship["id"], count=relationship["properties"]["count"]) + graph.insert_relationship(contribute_repo) + if os.getenv('SUMMARY_GRAPH') == 'on': + from app.services.graph_summary import GraphSummaryService + summary_service = GraphSummaryService() + summary = summary_service.execute(data = graph.to_dict()) + if summary: + graph.update_summary(summary=summary) + return graph.to_dict() \ No newline at end of file diff --git a/osgraph-service-py/app/managers/os_partner.py b/osgraph-service-py/app/managers/os_partner.py new file mode 100644 index 0000000..58a875f --- /dev/null +++ 
b/osgraph-service-py/app/managers/os_partner.py @@ -0,0 +1,52 @@ +# app/manager/os_partner.py +from app.models.graph_view import Graph, User, CommonIssue, CommonPR, CommonStar, CommonRepo +from typing import Dict, Any +from app.services.graph_services.os_partner import OSPartnerService +import json +import os + +class OSPartnerManager: + def __init__(self) -> None: + pass + + def get_graph(self, data:Dict[str, Any]) -> Dict | None: + service = OSPartnerService() + graph = Graph() + result = service.execute(data=data) + if result: + if result: + for data in result: + start_node = json.loads(data['start_node']) + relationship = json.loads(data['relationship']) + end_node = json.loads(data['end_node']) + if start_node["type"] == 'github_user': + src_user = User(id=start_node["id"],name=start_node["properties"]["name"]) + graph.insert_entity(src_user) + + if end_node["type"] == 'github_user': + tar_user = User(id=end_node["id"],name=end_node["properties"]["name"]) + graph.insert_entity(tar_user) + + if end_node["type"] == 'common_issue': + common_issue = CommonIssue(sid=relationship["src"],tid=relationship["dst"],id=relationship["id"],count=relationship["properties"]["count"]) + graph.insert_entity(common_issue) + + if relationship["type"] == "common_pr": + common_pr = CommonPR(sid=relationship["src"],tid=relationship["dst"],id=relationship["id"],count=relationship["properties"]["count"]) + graph.insert_relationship(common_pr) + + if relationship["type"] == "common_star": + common_star = CommonStar(sid=relationship["src"],tid=relationship["dst"],id=relationship["id"], count=relationship["properties"]["count"]) + graph.insert_relationship(common_star) + + if relationship["type"] == "common_repo": + common_repo = CommonRepo(sid=relationship["src"],tid=relationship["dst"],id=relationship["id"], count=relationship["properties"]["count"]) + graph.insert_relationship(common_repo) + + if os.getenv('SUMMARY_GRAPH') == 'on': + from app.services.graph_summary import GraphSummaryService + summary_service = GraphSummaryService() + summary = summary_service.execute(data = graph.to_dict()) + if summary: + graph.update_summary(summary=summary) + return graph.to_dict() \ No newline at end of file diff --git a/osgraph-service-py/app/managers/project_community.py b/osgraph-service-py/app/managers/project_community.py new file mode 100644 index 0000000..c0f5661 --- /dev/null +++ b/osgraph-service-py/app/managers/project_community.py @@ -0,0 +1,63 @@ +# app/manager/project_community.py +from app.models.graph_view import Graph, Repo, Company, Country, User, PullRequestAction, Star, Belong +from typing import Dict, Any +from app.services.graph_services.project_community import ProjectCommunityService +import json +import os + + +class ProjectCommunityManager: + def __init__(self) -> None: + pass + + def get_graph(self, data:Dict[str, Any]) -> Dict | None: + service = ProjectCommunityService() + graph = Graph() + result = service.execute(data=data) + if result: + for data in result: + start_node = json.loads(data['start_node']) + relationship = json.loads(data['relationship']) + end_node = json.loads(data['end_node']) + if start_node["type"] == 'github_user': + user = User(id=start_node["id"],name=start_node["properties"]["name"]) + graph.insert_entity(user) + if start_node["type"] == 'github_repo': + repo = Repo(id=start_node["id"],name=start_node["properties"]["name"]) + graph.insert_entity(repo) + if start_node["type"] == 'country': + country = Country(id=start_node["id"],name=start_node["properties"]["name"]) + 
graph.insert_entity(country) + if start_node["type"] == 'company': + company = Repo(id=start_node["id"],name=start_node["properties"]["name"]) + graph.insert_entity(company) + + if end_node["type"] == 'github_user': + user = User(id=end_node["id"],name=end_node["properties"]["name"]) + graph.insert_entity(user) + if end_node["type"] == 'github_repo': + repo = Repo(id=end_node["id"],name=end_node["properties"]["name"]) + graph.insert_entity(repo) + if start_node["type"] == 'country': + country = Country(id=end_node["id"],name=end_node["properties"]["name"]) + graph.insert_entity(country) + if start_node["type"] == 'company': + company = Repo(id=end_node["id"],name=end_node["properties"]["name"]) + graph.insert_entity(company) + + if relationship["type"] == "PR": + pr = PullRequestAction(sid=relationship["src"],tid=relationship["dst"],id=relationship["id"],count=relationship["properties"]["count"]) + graph.insert_relationship(pr) + if relationship["type"] == "Star": + star = Star(sid=relationship["src"],tid=relationship["dst"],id=relationship["id"],count=relationship["properties"]["count"]) + graph.insert_relationship(star) + if relationship["type"] == "belong_to": + belong = Belong(sid=relationship["src"],tid=relationship["dst"],id=relationship["id"]) + graph.insert_relationship(belong) + if os.getenv('SUMMARY_GRAPH') == 'on': + from app.services.graph_summary import GraphSummaryService + summary_service = GraphSummaryService() + summary = summary_service.execute(data = graph.to_dict()) + if summary: + graph.update_summary(summary=summary) + return graph.to_dict() \ No newline at end of file diff --git a/osgraph-service-py/app/managers/project_contribution.py b/osgraph-service-py/app/managers/project_contribution.py new file mode 100644 index 0000000..1b80dc0 --- /dev/null +++ b/osgraph-service-py/app/managers/project_contribution.py @@ -0,0 +1,56 @@ +# app/manager/project_contribution.py +from app.models.graph_view import Graph, User, Repo, CodeReviewAction, CreateIssue, CommitAction, CommentIssue, CreatePR +from typing import Dict, Any +from app.services.graph_services.project_contribution import ProjectContributionService +import json +import os + + +class ProjectContributionManager: + def __init__(self) -> None: + pass + + def get_graph(self, data:Dict[str, Any]) -> Dict | None: + service = ProjectContributionService() + graph = Graph() + result = service.execute(data=data) + if result: + for data in result: + start_node = json.loads(data['start_node']) + relationship = json.loads(data['relationship']) + end_node = json.loads(data['end_node']) + if start_node["type"] == 'github_user': + user = User(id=start_node["id"],name=start_node["properties"]["name"]) + graph.insert_entity(user) + if start_node["type"] == 'github_repo': + repo = Repo(id=start_node["id"],name=start_node["properties"]["name"]) + graph.insert_entity(repo) + if end_node["type"] == 'github_user': + user = User(id=end_node["id"],name=end_node["properties"]["name"]) + graph.insert_entity(user) + if end_node["type"] == 'github_repo': + repo = Repo(id=end_node["id"],name=end_node["properties"]["name"]) + graph.insert_entity(repo) + + if relationship["type"] == "open_issue": + create_issue = CreateIssue(sid=relationship["src"],tid=relationship["dst"],id=relationship["id"],count=relationship["properties"]["count"]) + graph.insert_relationship(create_issue) + if relationship["type"] == "push": + commit = 
CommitAction(sid=relationship["src"],tid=relationship["dst"],id=relationship["id"],count=relationship["properties"]["count"]) + graph.insert_relationship(commit) + if relationship["type"] == "open_pr": + create_pr = CreatePR(sid=relationship["src"],tid=relationship["dst"],id=relationship["id"],count=relationship["properties"]["count"]) + graph.insert_relationship(create_pr) + if relationship["type"] == "code_review": + cr = CodeReviewAction(sid=relationship["src"],tid=relationship["dst"],id=relationship["id"],count=relationship["properties"]["count"]) + graph.insert_relationship(cr) + if relationship["type"] == "comment_issue": + comment_issue = CommentIssue(sid=relationship["src"],tid=relationship["dst"],id=relationship["id"],count=relationship["properties"]["count"]) + graph.insert_relationship(comment_issue) + if os.getenv('SUMMARY_GRAPH') == 'on': + from app.services.graph_summary import GraphSummaryService + summary_service = GraphSummaryService() + summary = summary_service.execute(data = graph.to_dict()) + if summary: + graph.update_summary(summary=summary) + return graph.to_dict() \ No newline at end of file diff --git a/osgraph-service-py/app/managers/project_ecology.py b/osgraph-service-py/app/managers/project_ecology.py new file mode 100644 index 0000000..167e1f9 --- /dev/null +++ b/osgraph-service-py/app/managers/project_ecology.py @@ -0,0 +1,48 @@ +# app/manager/project_ecology.py +from app.models.graph_view import Graph, User, Repo, Orgnization, Belong, CommonDevelop +from typing import Dict, Any +from app.services.graph_services.project_ecology import ProjectEcologyService +import json +import os + + +class ProjectEcologyManager: + def __init__(self) -> None: + pass + + def get_graph(self, data:Dict[str, Any]) -> Dict | None: + service = ProjectEcologyService() + graph = Graph() + result = service.execute(data=data) + if result: + for data in result: + start_node = json.loads(data['start_node']) + relationship = json.loads(data['relationship']) + end_node = json.loads(data['end_node']) + if start_node["type"] == 'github_repo': + user = Repo(id=start_node["id"],name=start_node["properties"]["name"]) + graph.insert_entity(user) + if start_node["type"] == 'github_organization': + organization = Orgnization(id=start_node["id"],name=start_node["properties"]["name"]) + graph.insert_entity(organization) + if end_node["type"] == 'github_repo': + user = Repo(id=end_node["id"],name=end_node["properties"]["name"]) + graph.insert_entity(user) + if end_node["type"] == 'github_organization': + organization = Orgnization(id=end_node["id"],name=end_node["properties"]["name"]) + graph.insert_entity(organization) + + if relationship["type"] == "belong_to": + belong_to = Belong(sid=relationship["src"],tid=relationship["dst"],id=relationship["id"]) + graph.insert_relationship(belong_to) + + if relationship["type"] == "common_developer": + common_developer = CommonDevelop(sid=relationship["src"],tid=relationship["dst"],id=relationship["id"], count=relationship["properties"]["count"]) + graph.insert_relationship(common_developer) + if os.getenv('SUMMARY_GRAPH') == 'on': + from app.services.graph_summary import GraphSummaryService + summary_service = GraphSummaryService() + summary = summary_service.execute(data = graph.to_dict()) + if summary: + graph.update_summary(summary=summary) + return graph.to_dict() \ No newline at end of file diff --git a/osgraph-service-py/app/models/__init__.py b/osgraph-service-py/app/models/__init__.py new file mode 100644 index 0000000..e69de29 diff --git 
a/osgraph-service-py/app/models/default_graph.py b/osgraph-service-py/app/models/default_graph.py new file mode 100644 index 0000000..31add8e --- /dev/null +++ b/osgraph-service-py/app/models/default_graph.py @@ -0,0 +1,393 @@ +from dataclasses import dataclass, asdict +from typing import Any, Optional +import os +from dotenv import load_dotenv +load_dotenv() + +graph_name = os.getenv('TUGRAPHDB_OSGRAPH_GITHUB_GRAPH_NAME') +class Vertex: + label: str + primary: str + type: str = 'vertex' + _props: Optional[Any] = None + + def __init__(self, label: str, primary: str): + self.label = label + self.primary = primary + + @property + def props(self) -> Any: + return self._props + + def __repr__(self): + return (f"{self.__class__.__name__}(label={self.label}, primary={self.primary}, " + f"type={self.type}, props={self.props})") + +class Edge: + label: str + type: str = 'edge' + source: Any + target: Any + _props: Optional[Any] = None + + def __init__(self, label: str, source: Any, target: Any): + self.label = label + self.source = source + self.target = target + + @property + def props(self) -> Any: + return self._props + + def __repr__(self): + return (f"{self.__class__.__name__}(label={self.label}, primary={self.primary}, " + f"type={self.type}, source={self.source}, target={self.target}, props={self.props})") + +@dataclass +class GitHubUserProps: + id: Optional[int] = None + name: Optional[str] = None + company: Optional[str] = None + country: Optional[str] = None + +class GitHubUser(Vertex): + def __init__(self, props: GitHubUserProps): + if not isinstance(props, GitHubUserProps): + raise ValueError("props must be an instance of GitHubUserProps.") + super().__init__(label="github_user", primary="id") + self._props = props + + def __repr__(self): + return f"{self.__class__.__name__}(label={self.label}, primary={self.primary}, props={self._props})" + + +from dataclasses import dataclass +from typing import Optional + + +@dataclass +class IssueProps: + id: Optional[int] = None + state: Optional[str] = None + created_at: Optional[int] = None + closed_at: Optional[int] = None + + +class Issue(Vertex): + def __init__(self, props: Optional[IssueProps] = None): + if props is None: + props = IssueProps() + if not isinstance(props, IssueProps): + raise ValueError("props must be an instance of IssueProps.") + super().__init__(label="issue", primary="id") + self._props = props + + +@dataclass +class PullRequestProps: + id: Optional[int] = None + merged: Optional[bool] = None + created_at: Optional[int] = None + closed_at: Optional[int] = None + deletions: Optional[int] = None + changed_files: Optional[int] = None + additions: Optional[int] = None + + +class PullRequest(Vertex): + def __init__(self, props: Optional[PullRequestProps] = None): + if props is None: + props = PullRequestProps() + if not isinstance(props, PullRequestProps): + raise ValueError("props must be an instance of PullRequestProps.") + super().__init__(label="pr", primary="id") + self._props = props + + +@dataclass +class LanguageProps: + name: Optional[str] = None + + +class Language(Vertex): + def __init__(self, props: Optional[LanguageProps] = None): + if props is None: + props = LanguageProps() + if not isinstance(props, LanguageProps): + raise ValueError("props must be an instance of LanguageProps.") + super().__init__(label="language", primary="name") + self._props = props + + +@dataclass +class GitHubRepoProps: + id: Optional[int] = None + name: Optional[str] = None + star: Optional[int] = None + opened_pr: Optional[int] = None + 
opened_issue: Optional[int] = None + merged_pr: Optional[int] = None + fork: Optional[int] = None + commits: Optional[int] = None + comments: Optional[int] = None + code_deletions: Optional[int] = None + code_changed_files: Optional[int] = None + code_additions: Optional[int] = None + closed_issue: Optional[int] = None + + +class GitHubRepo(Vertex): + def __init__(self, props: Optional[GitHubRepoProps] = None): + if props is None: + props = GitHubRepoProps() + if not isinstance(props, GitHubRepoProps): + raise ValueError("props must be an instance of GitHubRepoProps.") + super().__init__(label="github_repo", primary="id") + self._props = props + + +@dataclass +class LicenseProps: + name: Optional[str] = None + + +class License(Vertex): + def __init__(self, props: Optional[LicenseProps] = None): + if props is None: + props = LicenseProps() + if not isinstance(props, LicenseProps): + raise ValueError("props must be an instance of LicenseProps.") + super().__init__(label="license", primary="name") + self._props = props + + +@dataclass +class TopicProps: + name: Optional[str] = None + + +class Topic(Vertex): + def __init__(self, props: Optional[TopicProps] = None): + if props is None: + props = TopicProps() + if not isinstance(props, TopicProps): + raise ValueError("props must be an instance of TopicProps.") + super().__init__(label="topic", primary="name") + self._props = props + + +@dataclass +class GitHubOrganizationProps: + id: Optional[int] = None + name: Optional[str] = None + + +class GitHubOrganization(Vertex): + def __init__(self, props: Optional[GitHubOrganizationProps] = None): + if props is None: + props = GitHubOrganizationProps() + if not isinstance(props, GitHubOrganizationProps): + raise ValueError("props must be an instance of GitHubOrganizationProps.") + super().__init__(label="github_organization", primary="id") + self._props = props + +@dataclass +class PushProps: + commits: Optional[int] = None + created_at: Optional[int] = None + + +class Push(Edge): + def __init__(self, source, target, props: Optional[PushProps] = None): + if props is None: + props = PushProps() + if not isinstance(props, PushProps): + raise ValueError("props must be an instance of PushProps.") + super().__init__(label="push", source=source, target=target) + self._props = props + + +@dataclass +class ForkProps: + created_at: Optional[int] = None + + +class Fork(Edge): + def __init__(self, source, target, props: Optional[ForkProps] = None): + if props is None: + props = ForkProps() + if not isinstance(props, ForkProps): + raise ValueError("props must be an instance of ForkProps.") + super().__init__(label="fork", source=source, target=target) + self._props = props + + +@dataclass +class StarProps: + created_at: Optional[int] = None + + +class Star(Edge): + def __init__(self, source, target, props: Optional[StarProps] = None): + if props is None: + props = StarProps() + if not isinstance(props, StarProps): + raise ValueError("props must be an instance of StarProps.") + super().__init__(label="star", source=source, target=target) + self._props = props + + +@dataclass +class ReviewPrProps: + created_at: Optional[int] = None + + +class ReviewPr(Edge): + def __init__(self, source, target, props: Optional[ReviewPrProps] = None): + if props is None: + props = ReviewPrProps() + if not isinstance(props, ReviewPrProps): + raise ValueError("props must be an instance of ReviewPrProps.") + super().__init__(label="review_pr", source=source, target=target) + self._props = props + + +@dataclass +class CommentPrProps: + 
created_at: Optional[int] = None + + +class CommentPr(Edge): + def __init__(self, source, target, props: Optional[CommentPrProps] = None): + if props is None: + props = CommentPrProps() + if not isinstance(props, CommentPrProps): + raise ValueError("props must be an instance of CommentPrProps.") + super().__init__(label="comment_pr", source=source, target=target) + self._props = props + + +@dataclass +class ClosePrProps: + created_at: Optional[int] = None + + +class ClosePr(Edge): + def __init__(self, source, target, props: Optional[ClosePrProps] = None): + if props is None: + props = ClosePrProps() + if not isinstance(props, ClosePrProps): + raise ValueError("props must be an instance of ClosePrProps.") + super().__init__(label="close_pr", source=source, target=target) + self._props = props + + +@dataclass +class OpenPrProps: + created_at: Optional[int] = None + + +class OpenPr(Edge): + def __init__(self, source, target, props: Optional[OpenPrProps] = None): + if props is None: + props = OpenPrProps() + if not isinstance(props, OpenPrProps): + raise ValueError("props must be an instance of OpenPrProps.") + super().__init__(label="open_pr", source=source, target=target) + self._props = props + + +@dataclass +class CommentIssueProps: + created_at: Optional[int] = None + + +class CommentIssue(Edge): + def __init__(self, source, target, props: Optional[CommentIssueProps] = None): + if props is None: + props = CommentIssueProps() + if not isinstance(props, CommentIssueProps): + raise ValueError("props must be an instance of CommentIssueProps.") + super().__init__(label="comment_issue", source=source, target=target) + self._props = props + + +@dataclass +class CloseIssueProps: + created_at: Optional[int] = None + + +class CloseIssue(Edge): + def __init__(self, source, target, props: Optional[CloseIssueProps] = None): + if props is None: + props = CloseIssueProps() + if not isinstance(props, CloseIssueProps): + raise ValueError("props must be an instance of CloseIssueProps.") + super().__init__(label="close_issue", source=source, target=target) + self._props = props + + +@dataclass +class OpenIssueProps: + created_at: Optional[int] = None + + +class OpenIssue(Edge): + def __init__(self, source, target, props: Optional[OpenIssueProps] = None): + if props is None: + props = OpenIssueProps() + if not isinstance(props, OpenIssueProps): + raise ValueError("props must be an instance of OpenIssueProps.") + super().__init__(label="open_issue", source=source, target=target) + self._props = props + + +@dataclass +class HasPrProps: + created_at: Optional[int] = None + + +class HasPr(Edge): + def __init__(self, source, target, props: Optional[HasPrProps] = None): + if props is None: + props = HasPrProps() + if not isinstance(props, HasPrProps): + raise ValueError("props must be an instance of HasPrProps.") + super().__init__(label="has_pr", source=source, target=target) + self._props = props + + +@dataclass +class HasIssueProps: + created_at: Optional[int] = None + + +class HasIssue(Edge): + def __init__(self, source, target, props: Optional[HasIssueProps] = None): + if props is None: + props = HasIssueProps() + if not isinstance(props, HasIssueProps): + raise ValueError("props must be an instance of HasIssueProps.") + super().__init__(label="has_issue", source=source, target=target) + self._props = props + + +class UseLang(Edge): + def __init__(self, source, target): + super().__init__(label="use_lang", source=source, target=target) + self._props = None + + +class HasTopic(Edge): + def __init__(self, 
source, target): + super().__init__(label="has_topic", source=source, target=target) + self._props = None + + +class UseLicense(Edge): + def __init__(self, source, target): + super().__init__(label="use_license", source=source, target=target) + self._props = None + + + diff --git a/osgraph-service-py/app/models/graph_view.py b/osgraph-service-py/app/models/graph_view.py new file mode 100644 index 0000000..a29b7b0 --- /dev/null +++ b/osgraph-service-py/app/models/graph_view.py @@ -0,0 +1,219 @@ +# app/models/graph_view.py + +from dataclasses import dataclass, field, asdict +from typing import List, Optional, Literal +import json +@dataclass +class Vertex: + id: int | str + name: str + comment: Optional[str] = None + source: Optional[str] = None + size: Optional[int] = None + @property + def vertex_type(self): + return self.__class__.__name__ + + def to_dict(self) -> dict: + data = asdict(self) + data['type'] = self.vertex_type + return data + +@dataclass +class Edge: + sid: str | int + tid: str | int + id: str | int + name: str + direction: Literal['both','out','in'] = 'out' + comment: Optional[int] = None + weight: Optional[int] = None + count: Optional[int] = None + + @property + def edge_type(self): + return self.__class__.__name__ + + def to_dict(self) -> dict: + data = asdict(self) + data['type'] = self.edge_type + return data + +@dataclass +class Graph: + vertices: List[Vertex] = field(default_factory=list) + edges: List[Edge] = field(default_factory=list) + summary:str = '' + + def update_summary(self,summary): + self.summary = summary + + def insert_entity(self, new_vertex: Vertex): + for i, vertex in enumerate(self.vertices): + if vertex.id == new_vertex.id: + # Update existing entity + self.vertices[i] = new_vertex + return + # Add new entity if not found + self.vertices.append(new_vertex) + + def insert_relationship(self, new_edge: Edge): + for i, edge in enumerate(self.edges): + if (edge.sid == new_edge.sid and + edge.tid == new_edge.tid and + edge.edge_type == new_edge.edge_type): + # Update existing relationship + self.edges[i] = new_edge + return + # Add new relationship if not found + self.edges.append(new_edge) + + def filter_entities(self, **criteria): + """Filter entities based on provided keyword arguments.""" + return [ + vertex for vertex in self.vertices + if all(getattr(vertex, key) == value for key, value in criteria.items()) + ] + + def filter_relationships(self, **criteria): + """Filter relationships based on provided keyword arguments.""" + return [ + edge for edge in self.edges + if all(getattr(edge, key) == value for key, value in criteria.items()) + ] + + def to_dict(self) -> str: + graph_dict = { + "vertices": [v.to_dict() for v in self.vertices], + "edges": [e.to_dict() for e in self.edges], + "summary": self.summary + } + return graph_dict + +# vertex class +class User(Vertex): + pass + +class Repo(Vertex): + pass + +class Orgnization(Vertex): + pass + +class Country(Vertex): + pass + +class Company(Vertex): + pass + +class Topic(Vertex): + pass + +class Contibution(Vertex): + pass + +class PR(Contibution): + pass + +class Issue(Contibution): + pass + +class Comment(Contibution): + pass + +class CodeReview(Comment): + pass + +class Commit(Contibution): + pass + +# edge class +@dataclass +class Create(Edge): + name: Literal['创建'] = field(default='创建', init=False) + +@dataclass +class CreatePR(Create): + name: Literal['创建 PR'] = field(default='创建 PR', init=False) + +@dataclass +class CreateIssue(Create): + name: Literal['创建 Issue'] = field(default='创建 Issue', 
init=False) + +@dataclass +class CreateCommit(Create): + name: Literal['创建 Commit'] = field(default='创建 Commit', init=False) + +@dataclass +class CreateComment(Create): + name: Literal['创建 Comment'] = field(default='创建 Comment', init=False) + +@dataclass +class CreateCR(Create): + name: Literal['创建 CR'] = field(default='创建 CR', init=False) + +@dataclass +class CodeReviewAction(Edge): + name: Literal['CR'] = field(default='CR', init=False) + +@dataclass +class Belong(Edge): + name: Literal['属于'] = field(default='属于', init=False) + +@dataclass +class Star(Edge): + name: Literal['Star'] = field(default='Star', init=False) + +@dataclass +class PullRequestAction(Edge): + name: Literal['PR'] = field(default='PR', init=False) + +@dataclass +class Push(PullRequestAction): + name: Literal['推送'] = field(default='推送', init=False) + +@dataclass +class CommitAction(PullRequestAction): + name: Literal['提交'] = field(default='提交', init=False) + +@dataclass +class CommentIssue(Edge): + name: Literal['评论 Issue'] = field(default='评论 Issue', init=False) + +@dataclass +class CommonIssue(Edge): + name: Literal['合作 Issue'] = field(default='合作 Issue', init=False) + +@dataclass +class CommonPR(Edge): + name: Literal['合作 PR'] = field(default='合作 PR', init=False) + +@dataclass +class CommonStar(Edge): + name: Literal['共同关注'] = field(default='共同关注', init=False) + +@dataclass +class CommonRepo(Edge): + name: Literal['合作项目'] = field(default='合作项目', init=False) + +@dataclass +class CommonDevelop(Edge): + name: Literal['共建'] = field(default='共建', init=False) + +@dataclass +class ContributeRepo(Edge): + name: Literal['贡献项目'] = field(default='贡献项目', init=False) + + + + + + + + + + + + + + diff --git a/osgraph-service-py/app/models/system_graph.py b/osgraph-service-py/app/models/system_graph.py new file mode 100644 index 0000000..8081107 --- /dev/null +++ b/osgraph-service-py/app/models/system_graph.py @@ -0,0 +1,27 @@ +# app/models/system_graph.py + +from dataclasses import dataclass + +@dataclass +class GraphServiceProps: + name: str = "" + comment: str = "" + input_types: str = "" + filter_keys: str = "" + + +class GraphService: + label: str = "graph_service" + primary: str = "name" + type: str = 'VERTEX' + props: GraphServiceProps = None + def __init__(self, name: str, comment: str, input_types: str, filter_keys: str): + self.props = GraphServiceProps( + name=name, + comment=comment, + input_types=input_types, + filter_keys=filter_keys + ) + + def __repr__(self): + return (f"GitHubUser(label={self.label}, pk={self.primary}, type={self.type}, props={self.props})") \ No newline at end of file diff --git a/osgraph-service-py/app/routes/__init__.py b/osgraph-service-py/app/routes/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/osgraph-service-py/app/routes/develop_activities.py b/osgraph-service-py/app/routes/develop_activities.py new file mode 100644 index 0000000..4fee01a --- /dev/null +++ b/osgraph-service-py/app/routes/develop_activities.py @@ -0,0 +1,33 @@ +from flask import Blueprint, request, abort +from app.managers.develop_activities import DevelopActivitiesManager +from app.utils.custom_exceptions import InvalidUsage +from app.utils.response_handler import ResponseHandler +from typing import Dict, Any +import logging +from dataclasses import asdict + +develop_activities_bp = Blueprint('project_activities', __name__, url_prefix='/api/graph') +logger = logging.getLogger(__name__) + +class DevelopActivitiesController: + def __init__(self): + self.manager = DevelopActivitiesManager() + + def 
get_activities_graph(self, data: Dict[str, Any]) -> Dict[str, Any]: + try: + graph = self.manager.get_graph(data) + return ResponseHandler.success(graph) + except InvalidUsage as e: + logger.error(f"Invalid usage: {str(e)}") + return ResponseHandler.error(str(e.message), e.status_code) + except Exception as e: + logger.exception("Internal server error") + return ResponseHandler.error("Internal server error", 500) + +controller = DevelopActivitiesController() + +@develop_activities_bp.route('/develop-activities', methods=['GET']) +def get_project_activities(): + data = request.args.to_dict() + response = controller.get_activities_graph(data) + return ResponseHandler.jsonify_response(response) diff --git a/osgraph-service-py/app/routes/fulltext_search.py b/osgraph-service-py/app/routes/fulltext_search.py new file mode 100644 index 0000000..1e46c76 --- /dev/null +++ b/osgraph-service-py/app/routes/fulltext_search.py @@ -0,0 +1,32 @@ +from flask import Blueprint, request +from app.managers.fulltext_search import FulltextSearchManager +from app.utils.custom_exceptions import InvalidUsage +from app.utils.response_handler import ResponseHandler +from typing import Dict, Any +import logging +from dataclasses import asdict + +fulltext_search_bp = Blueprint('fulltext_search', __name__, url_prefix='/api/graph') +logger = logging.getLogger(__name__) + +class FulltextSearchController: + def __init__(self): + self.manager = FulltextSearchManager() + def search(self,data) -> Dict[str, Any]: + try: + result = self.manager.search(data=data) + return ResponseHandler.success(result) + except InvalidUsage as e: + logger.error(f"Invalid usage: {str(e)}") + return ResponseHandler.error(str(e.message), e.status_code) + except Exception as e: + logger.exception("Internal server error") + return ResponseHandler.error("Internal server error", 500) + +controller = FulltextSearchController() + +@fulltext_search_bp.route('/fulltext-search', methods=['GET']) +def get_os_interest(): + data = request.args.to_dict() + response = controller.search(data) + return ResponseHandler.jsonify_response(response) diff --git a/osgraph-service-py/app/routes/graph_list.py b/osgraph-service-py/app/routes/graph_list.py new file mode 100644 index 0000000..8efb950 --- /dev/null +++ b/osgraph-service-py/app/routes/graph_list.py @@ -0,0 +1,31 @@ +from flask import Blueprint +from app.managers.graph_list import GraphListManager +from app.utils.custom_exceptions import InvalidUsage +from app.utils.response_handler import ResponseHandler +from typing import Dict, Any +import logging +from dataclasses import asdict + +graph_list_bp = Blueprint('graph_list', __name__, url_prefix='/api/graph') +logger = logging.getLogger(__name__) + +class OSInterestController: + def __init__(self): + self.manager = GraphListManager() + def get_graph_list(self) -> Dict[str, Any]: + try: + result = self.manager.get_graph_list() + return ResponseHandler.success(result) + except InvalidUsage as e: + logger.error(f"Invalid usage: {str(e)}") + return ResponseHandler.error(str(e.message), e.status_code) + except Exception as e: + logger.exception("Internal server error") + return ResponseHandler.error("Internal server error", 500) + +controller = OSInterestController() + +@graph_list_bp.route('/list', methods=['GET']) +def get_os_interest(): + response = controller.get_graph_list() + return ResponseHandler.jsonify_response(response) diff --git a/osgraph-service-py/app/routes/os_interest.py b/osgraph-service-py/app/routes/os_interest.py new file mode 100644 index 
0000000..7bcb850 --- /dev/null +++ b/osgraph-service-py/app/routes/os_interest.py @@ -0,0 +1,39 @@ +from flask import Blueprint, request, abort +from app.managers.os_interest import OSInterestManager +from app.utils.custom_exceptions import InvalidUsage +from app.utils.response_handler import ResponseHandler +from typing import Dict, Any +import logging +from dataclasses import asdict + +os_interest_bp = Blueprint('os_interest', __name__, url_prefix='/api/graph') +logger = logging.getLogger(__name__) + +class OSInterestController: + def __init__(self): + self.manager = OSInterestManager() + + def get_interest_graph(self, data: Dict[str, Any]) -> Dict[str, Any]: + """ + 获取项目贡献的图数据 + """ + try: + graph = self.manager.get_graph(data) + return ResponseHandler.success(graph) + except InvalidUsage as e: + logger.error(f"Invalid usage: {str(e)}") + return ResponseHandler.error(str(e.message), e.status_code) + except Exception as e: + logger.exception("Internal server error") + return ResponseHandler.error("Internal server error", 500) + +controller = OSInterestController() + +@os_interest_bp.route('/os-interest', methods=['GET']) +def get_os_interest(): + """ + 获取项目贡献的图谱 + """ + data = request.args.to_dict() + response = controller.get_interest_graph(data) + return ResponseHandler.jsonify_response(response) diff --git a/osgraph-service-py/app/routes/os_partner.py b/osgraph-service-py/app/routes/os_partner.py new file mode 100644 index 0000000..fa3f528 --- /dev/null +++ b/osgraph-service-py/app/routes/os_partner.py @@ -0,0 +1,33 @@ +from flask import Blueprint, request, abort +from app.managers.os_partner import OSPartnerManager +from app.utils.custom_exceptions import InvalidUsage +from app.utils.response_handler import ResponseHandler +from typing import Dict, Any +import logging +from dataclasses import asdict + +os_partner_bp = Blueprint('os_partner', __name__, url_prefix='/api/graph') +logger = logging.getLogger(__name__) + +class OSPartnerController: + def __init__(self): + self.manager = OSPartnerManager() + + def get_partner_graph(self, data: Dict[str, Any]) -> Dict[str, Any]: + try: + graph = self.manager.get_graph(data) + return ResponseHandler.success(graph) + except InvalidUsage as e: + logger.error(f"Invalid usage: {str(e)}") + return ResponseHandler.error(str(e.message), e.status_code) + except Exception as e: + logger.exception("Internal server error") + return ResponseHandler.error("Internal server error", 500) + +controller = OSPartnerController() + +@os_partner_bp.route('/os-partner', methods=['GET']) +def get_os_partner(): + data = request.args.to_dict() + response = controller.get_partner_graph(data) + return ResponseHandler.jsonify_response(response) diff --git a/osgraph-service-py/app/routes/project_community.py b/osgraph-service-py/app/routes/project_community.py new file mode 100644 index 0000000..e598f28 --- /dev/null +++ b/osgraph-service-py/app/routes/project_community.py @@ -0,0 +1,33 @@ +from flask import Blueprint, request, abort +from app.managers.project_community import ProjectCommunityManager +from app.utils.custom_exceptions import InvalidUsage +from app.utils.response_handler import ResponseHandler +from typing import Dict, Any +import logging +from dataclasses import asdict + +project_community_bp = Blueprint('project_community', __name__, url_prefix='/api/graph') +logger = logging.getLogger(__name__) + +class ProjectCommunityController: + def __init__(self): + self.manager = ProjectCommunityManager() + + def get_community_graph(self, data: Dict[str, Any]) -> 
Dict[str, Any]: + try: + graph = self.manager.get_graph(data) + return ResponseHandler.success(graph) + except InvalidUsage as e: + logger.error(f"Invalid usage: {str(e)}") + return ResponseHandler.error(str(e.message), e.status_code) + except Exception as e: + logger.exception("Internal server error") + return ResponseHandler.error("Internal server error", 500) + +controller = ProjectCommunityController() + +@project_community_bp.route('/project-community', methods=['GET']) +def get_project_community(): + data = request.args.to_dict() + response = controller.get_community_graph(data) + return ResponseHandler.jsonify_response(response) diff --git a/osgraph-service-py/app/routes/project_contribution.py b/osgraph-service-py/app/routes/project_contribution.py new file mode 100644 index 0000000..1599987 --- /dev/null +++ b/osgraph-service-py/app/routes/project_contribution.py @@ -0,0 +1,33 @@ +from flask import Blueprint, request, abort +from app.managers.project_contribution import ProjectContributionManager +from app.utils.custom_exceptions import InvalidUsage +from app.utils.response_handler import ResponseHandler +from typing import Dict, Any +import logging +from dataclasses import asdict + +project_contribution_bp = Blueprint('project_contribution', __name__, url_prefix='/api/graph') +logger = logging.getLogger(__name__) + +class ProjectContributionController: + def __init__(self): + self.manager = ProjectContributionManager() + + def get_contribution_graph(self, data: Dict[str, Any]) -> Dict[str, Any]: + try: + graph = self.manager.get_graph(data) + return ResponseHandler.success(graph) + except InvalidUsage as e: + logger.error(f"Invalid usage: {str(e)}") + return ResponseHandler.error(str(e.message), e.status_code) + except Exception as e: + logger.exception("Internal server error") + return ResponseHandler.error("Internal server error", 500) + +controller = ProjectContributionController() + +@project_contribution_bp.route('/project-contribution', methods=['GET']) +def get_project_contribution(): + data = request.args.to_dict() + response = controller.get_contribution_graph(data) + return ResponseHandler.jsonify_response(response) diff --git a/osgraph-service-py/app/routes/project_ecology.py b/osgraph-service-py/app/routes/project_ecology.py new file mode 100644 index 0000000..9b7cc4c --- /dev/null +++ b/osgraph-service-py/app/routes/project_ecology.py @@ -0,0 +1,33 @@ +from flask import Blueprint, request, abort +from app.managers.project_ecology import ProjectEcologyManager +from app.utils.custom_exceptions import InvalidUsage +from app.utils.response_handler import ResponseHandler +from typing import Dict, Any +import logging +from dataclasses import asdict + +project_ecology_bp = Blueprint('project_ecology', __name__, url_prefix='/api/graph') +logger = logging.getLogger(__name__) + +class ProjectEcologyController: + def __init__(self): + self.manager = ProjectEcologyManager() + + def get_ecology_graph(self, data: Dict[str, Any]) -> Dict[str, Any]: + try: + graph = self.manager.get_graph(data) + return ResponseHandler.success(graph) + except InvalidUsage as e: + logger.error(f"Invalid usage: {str(e)}") + return ResponseHandler.error(str(e.message), e.status_code) + except Exception as e: + logger.exception("Internal server error") + return ResponseHandler.error("Internal server error", 500) + +controller = ProjectEcologyController() + +@project_ecology_bp.route('/project-ecology', methods=['GET']) +def get_project_ecology(): + data = request.args.to_dict() + response = 
controller.get_ecology_graph(data) + return ResponseHandler.jsonify_response(response) diff --git a/osgraph-service-py/app/services/__init__.py b/osgraph-service-py/app/services/__init__.py new file mode 100644 index 0000000..ca4ad9a --- /dev/null +++ b/osgraph-service-py/app/services/__init__.py @@ -0,0 +1,26 @@ +# app/services/__init__.py + +from flask import current_app +from app.services.graph_services.project_contribution import ProjectContributionServiceConfig +from app.services.graph_services.project_ecology import ProjectEcologyServiceConfig +from app.services.graph_services.project_community import ProjectCommunityServiceConfig +from app.services.graph_services.develop_activities import DevelopActivitiesServiceConfig +from app.services.graph_services.os_partner import OSPartnerServiceConfig +from app.services.graph_services.os_interest import OSInterestServiceConfig + +SERVICE_CONFIGS = [ + ProjectContributionServiceConfig(), + ProjectEcologyServiceConfig(), + ProjectCommunityServiceConfig(), + DevelopActivitiesServiceConfig(), + OSPartnerServiceConfig(), + OSInterestServiceConfig() +] + +def register_all_services(): + """ + 遍历所有的服务配置,并在应用启动时自动注册。 + """ + with current_app.app_context(): + for config in SERVICE_CONFIGS: + config.register_service() \ No newline at end of file diff --git a/osgraph-service-py/app/services/fulltext_search.py b/osgraph-service-py/app/services/fulltext_search.py new file mode 100644 index 0000000..38f9c10 --- /dev/null +++ b/osgraph-service-py/app/services/fulltext_search.py @@ -0,0 +1,23 @@ +from typing import Any +from app.dal.search.es import ElasticsearchClient +from app.utils.custom_exceptions import InvalidUsage +import os +from dotenv import load_dotenv +load_dotenv() + +class FulltextSearchService: + def execute(self, data) -> Any: + if "index-name" not in data: + raise InvalidUsage("Missing necessary parameter: index-name") + index_name = data["index-name"] + keyword = data["keyword"] + if not keyword: + return [] + query = { + "match": { + "name": keyword + } + } + client = ElasticsearchClient() + result = client.search(index = index_name, query = query, size=10) + return result diff --git a/osgraph-service-py/app/services/graph_list.py b/osgraph-service-py/app/services/graph_list.py new file mode 100644 index 0000000..db23484 --- /dev/null +++ b/osgraph-service-py/app/services/graph_list.py @@ -0,0 +1,13 @@ +from typing import Any +from app.dal.graph.tugraph import GraphClient +import os +from dotenv import load_dotenv +load_dotenv() + +class GraphListService: + def execute(self) -> Any: + graph_name = os.getenv("TUGRAPHDB_OSGRAPH_SYSTEM_GRAPH_NAME") + client = GraphClient(graph_name) + cypher = f'''MATCH (n:graph_service) RETURN n''' + result = client.run(cypher) + return result diff --git a/osgraph-service-py/app/services/graph_services/__init__.py b/osgraph-service-py/app/services/graph_services/__init__.py new file mode 100644 index 0000000..5f6aac3 --- /dev/null +++ b/osgraph-service-py/app/services/graph_services/__init__.py @@ -0,0 +1,4 @@ +# app/services/graph_services/__init__.py + + + diff --git a/osgraph-service-py/app/services/graph_services/base.py b/osgraph-service-py/app/services/graph_services/base.py new file mode 100644 index 0000000..8d33eec --- /dev/null +++ b/osgraph-service-py/app/services/graph_services/base.py @@ -0,0 +1,78 @@ +from abc import ABC, abstractmethod +from typing import List, Dict, Any, Callable, Union +from dataclasses import dataclass +from app.utils.custom_exceptions import InvalidUsage +from dotenv 
import load_dotenv +from app.dal.graph.tugraph import GraphClient +from app.models.system_graph import GraphService +import os + +load_dotenv() + +@dataclass +class FilterKey: + key: str + type: str + default: Union[Callable[[], Any], Any] + required: bool = False + +@dataclass +class ServiceConfig: + name: str + comment: str + inputTypes: List[str] + filterKeys: List[FilterKey] + + def register_service(self) -> None: + graph_name = os.getenv("TUGRAPHDB_OSGRAPH_SYSTEM_GRAPH_NAME") + client = GraphClient(graph_name) + properties = { + "name": self.name, + "comment": self.comment, + "input_types": ','.join(self.inputTypes), + "filter_keys": ','.join([f"{key.key}:{key.default}" for key in self.filterKeys]) + } + service = GraphService( + name=self.name, + comment=self.comment, + input_types=properties["input_types"], + filter_keys=properties["filter_keys"] + ) + client.upsert_vertex(GraphService.label,service.props) + + +class BaseService(ABC): + def __init__(self, config: ServiceConfig): + self.name: str = config.name + self.comment: str = config.comment + self.inputTypes: List[str] = config.inputTypes + self.filterKeys: List[FilterKey] = config.filterKeys + + def validate_params(self, data: Dict[str, Any]) -> Dict[str, Any]: + validated_filters: Dict[str, Any] = {} + for input_type in self.inputTypes: + if input_type not in data: + raise InvalidUsage(f"Missing necessary parameter: {input_type}") + validated_filters[input_type] = data[input_type] + for filter_key in self.filterKeys: + if filter_key.key not in data: + if filter_key.required: + raise InvalidUsage(f"Missing required filter key: {filter_key.key}") + value = filter_key.default() if callable(filter_key.default) else filter_key.default + else: + value = data[filter_key.key] + if filter_key.type == "int": + try: + validated_filters[filter_key.key] = int(value) + except ValueError: + raise InvalidUsage(f"Invalid value for {filter_key.key}: must be an integer.") + elif filter_key.type == "str": + validated_filters[filter_key.key] = str(value) + else: + validated_filters[filter_key.key] = value + + return validated_filters + + @abstractmethod + def execute(self, data: Dict[str, Any]): + pass diff --git a/osgraph-service-py/app/services/graph_services/develop_activities.py b/osgraph-service-py/app/services/graph_services/develop_activities.py new file mode 100644 index 0000000..0b5c15e --- /dev/null +++ b/osgraph-service-py/app/services/graph_services/develop_activities.py @@ -0,0 +1,48 @@ +from datetime import datetime, timedelta +from typing import Dict, Any +from app.services.graph_services.base import BaseService, ServiceConfig, FilterKey +from app.dal.graph.tugraph import GraphClient +from app.dal.search.es import ElasticsearchClient +import os +from dotenv import load_dotenv +load_dotenv() + +def get_default_start_time() -> int: + return int((datetime.now() - timedelta(days=30)).timestamp() * 1000) + +def get_default_end_time() -> int: + return int(datetime.now().timestamp() * 1000) + +class DevelopActivitiesServiceConfig(ServiceConfig): + def __init__(self): + super().__init__( + name="开发活动", + comment="这是一个开发活动图谱", + inputTypes=["GitHubUser"], + filterKeys=[ + FilterKey(key="topn", type="int", default=50, required=False), + ] + ) + +class DevelopActivitiesService(BaseService): + def __init__(self): + super().__init__(DevelopActivitiesServiceConfig()) + + def execute(self, data: Dict[str, Any]) -> Any: + validated_data = self.validate_params(data) + github_user: str = validated_data["GitHubUser"] + topn: int = 
validated_data["topn"] + es = ElasticsearchClient() + query = { + "term": { + "name.keyword": github_user + } + } + res = es.search(index='github_user',query=query) + if len(res): + develop_id = res[0]["id"] + graph_name = os.getenv("TUGRAPHDB_OSGRAPH_GITHUB_GRAPH_NAME") + client = GraphClient(graph_name) + cypher = f'''CALL osgraph.get_developer_contribution('{{"developer_id":{develop_id},"top_n":{topn}}}') YIELD start_node, relationship, end_node return start_node, relationship, end_node''' + result = client.run(cypher) + return result diff --git a/osgraph-service-py/app/services/graph_services/os_interest.py b/osgraph-service-py/app/services/graph_services/os_interest.py new file mode 100644 index 0000000..b8d8fba --- /dev/null +++ b/osgraph-service-py/app/services/graph_services/os_interest.py @@ -0,0 +1,50 @@ +from datetime import datetime, timedelta +from typing import Dict, Any +from app.services.graph_services.base import BaseService, ServiceConfig, FilterKey +from app.dal.graph.tugraph import GraphClient +from app.dal.search.es import ElasticsearchClient +import os +from dotenv import load_dotenv +load_dotenv() + +def get_default_start_time() -> int: + return int((datetime.now() - timedelta(days=30)).timestamp() * 1000) + +def get_default_end_time() -> int: + return int(datetime.now().timestamp() * 1000) + +class OSInterestServiceConfig(ServiceConfig): + def __init__(self): + super().__init__( + name="开源兴趣", + comment="这是一张开源兴趣图谱", + inputTypes=["GitHubUser"], + filterKeys=[ + FilterKey(key="topic-topn", type="int", default=50, required=False), + FilterKey(key="githubrepo-topn", type="int", default=50, required=False), + ] + ) + +class OSInterestService(BaseService): + def __init__(self): + super().__init__(OSInterestServiceConfig()) + + def execute(self, data: Dict[str, Any]) -> Any: + validated_data = self.validate_params(data) + user_name: str = validated_data["GitHubUser"] + topic_topn: int = validated_data["topic-topn"] + repo_topn: int = validated_data["githubrepo-topn"] + es = ElasticsearchClient() + query = { + "term": { + "name.keyword": user_name + } + } + res = es.search(index='github_user',query=query) + if len(res): + user_id = res[0]["id"] + graph_name = os.getenv("TUGRAPHDB_OSGRAPH_GITHUB_GRAPH_NAME") + client = GraphClient(graph_name) + cypher = f'''CALL osgraph.get_developer_repos_profile('{{"developer_id":{user_id},"topic_topn":{topic_topn},"repo_topn":{repo_topn}}}') YIELD start_node, relationship, end_node return start_node, relationship, end_node''' + result = client.run(cypher) + return result diff --git a/osgraph-service-py/app/services/graph_services/os_partner.py b/osgraph-service-py/app/services/graph_services/os_partner.py new file mode 100644 index 0000000..48a5a0e --- /dev/null +++ b/osgraph-service-py/app/services/graph_services/os_partner.py @@ -0,0 +1,48 @@ +from datetime import datetime, timedelta +from typing import Dict, Any +from app.services.graph_services.base import BaseService, ServiceConfig, FilterKey +from app.dal.graph.tugraph import GraphClient +from app.dal.search.es import ElasticsearchClient +import os +from dotenv import load_dotenv +load_dotenv() + +def get_default_start_time() -> int: + return int((datetime.now() - timedelta(days=30)).timestamp() * 1000) + +def get_default_end_time() -> int: + return int(datetime.now().timestamp() * 1000) + +class OSPartnerServiceConfig(ServiceConfig): + def __init__(self): + super().__init__( + name="开源伙伴", + comment="这是一个获取开源伙伴的图谱", + inputTypes=["GitHubUser"], + filterKeys=[ +
FilterKey(key="topn", type="int", default=50, required=False), + ] + ) + +class OSPartnerService(BaseService): + def __init__(self): + super().__init__(OSPartnerServiceConfig()) + + def execute(self, data: Dict[str, Any]) -> Any: + validated_data = self.validate_params(data) + github_user: str = validated_data["GitHubUser"] + topn: int = validated_data["topn"] + es = ElasticsearchClient() + query = { + "term": { + "name.keyword": github_user + } + } + res = es.search(index='github_user',query=query) + if len(res): + user_id = res[0]["id"] + graph_name = os.getenv("TUGRAPHDB_OSGRAPH_GITHUB_GRAPH_NAME") + client = GraphClient(graph_name) + cypher = f'''CALL osgraph.get_developer_by_developer('{{"developer_id":{user_id},"top_n":{topn}}}') YIELD start_node, relationship, end_node return start_node, relationship, end_node''' + result = client.run(cypher) + return result diff --git a/osgraph-service-py/app/services/graph_services/project_community.py b/osgraph-service-py/app/services/graph_services/project_community.py new file mode 100644 index 0000000..6f3da87 --- /dev/null +++ b/osgraph-service-py/app/services/graph_services/project_community.py @@ -0,0 +1,52 @@ +from datetime import datetime, timedelta +from typing import Dict, Any +from app.services.graph_services.base import BaseService, ServiceConfig, FilterKey +from app.dal.graph.tugraph import GraphClient +from app.dal.search.es import ElasticsearchClient +import os +from dotenv import load_dotenv +load_dotenv() + +def get_default_start_time() -> int: + return int((datetime.now() - timedelta(days=30)).timestamp() * 1000) + +def get_default_end_time() -> int: + return int(datetime.now().timestamp() * 1000) + +class ProjectCommunityServiceConfig(ServiceConfig): + def __init__(self): + super().__init__( + name="项目社区", + comment="这是一个项目社区图谱", + inputTypes=["GitHubRepo"], + filterKeys=[ + FilterKey(key="company-topn", type="int", default=50, required=False), + FilterKey(key="country-topn", type="int", default=50, required=False), + FilterKey(key="developer-topn", type="int", default=50, required=False), + ] + ) + +class ProjectCommunityService(BaseService): + def __init__(self): + super().__init__(ProjectCommunityServiceConfig()) + + def execute(self, data: Dict[str, Any]) -> Any: + validated_data = self.validate_params(data) + github_repo: str = validated_data["GitHubRepo"] + company_topn: int = validated_data["company-topn"] + country_topn: int = validated_data["country-topn"] + developer_topn: int = validated_data["developer-topn"] + es = ElasticsearchClient() + query = { + "term": { + "name.keyword": github_repo + } + } + res = es.search(index='github_repo',query=query) + if len(res): + repo_id = res[0]["id"] + graph_name = os.getenv("TUGRAPHDB_OSGRAPH_GITHUB_GRAPH_NAME") + client = GraphClient(graph_name) + cypher = f'''CALL osgraph.get_repo_developers_profile('{{"repo_id":{repo_id},"company_topn":{company_topn},"country_topn":{country_topn},"developer_topn":{developer_topn}}}') YIELD start_node, relationship, end_node return start_node, relationship, end_node''' + result = client.run(cypher) + return result diff --git a/osgraph-service-py/app/services/graph_services/project_contribution.py b/osgraph-service-py/app/services/graph_services/project_contribution.py new file mode 100644 index 0000000..00c5655 --- /dev/null +++ b/osgraph-service-py/app/services/graph_services/project_contribution.py @@ -0,0 +1,57 @@ +from datetime import datetime, timedelta +from typing import Dict, Any +from app.services.graph_services.base import 
BaseService, ServiceConfig, FilterKey +from app.dal.graph.tugraph import GraphClient +from app.dal.search.es import ElasticsearchClient +import os +from dotenv import load_dotenv +load_dotenv() + +def get_default_start_time() -> int: + return int((datetime.now() - timedelta(days=30)).timestamp() * 1000) + +def get_default_end_time() -> int: + return int(datetime.now().timestamp() * 1000) + +class ProjectContributionServiceConfig(ServiceConfig): + def __init__(self): + super().__init__( + name="项目贡献", + comment="这是一个获取项目贡献的图谱", + inputTypes=["GitHubRepo"], + filterKeys=[ + FilterKey(key="start-time", type="int", default=get_default_start_time(), required=False), + FilterKey(key="end-time", type="int", default=get_default_end_time(), required=False), + FilterKey(key="contribution-limit", type="int", default=50, required=False) + ] + ) + +class ProjectContributionService(BaseService): + def __init__(self): + super().__init__(ProjectContributionServiceConfig()) + + def execute(self, data: Dict[str, Any]) -> Any: + validated_data = self.validate_params(data) + github_repo: str = validated_data["GitHubRepo"] + start_time: int = validated_data["start-time"] or get_default_start_time() + end_time: int = validated_data["end-time"] or get_default_end_time() + config_name = os.getenv('FLASK_CONFIG') + # in development, ignore the start-time window and include full history + if config_name == 'development': + start_time = 0 + contribution_limit: int = validated_data["contribution-limit"] + es = ElasticsearchClient() + query = { + "term": { + "name.keyword": github_repo + } + } + res = es.search(index='github_repo',query=query) + if len(res): + repo_id = res[0]["id"] + graph_name = os.getenv("TUGRAPHDB_OSGRAPH_GITHUB_GRAPH_NAME") + client = GraphClient(graph_name) + cypher = f'''CALL osgraph.get_repo_contribution('{{"repo_id":{repo_id},"start_timestamp":{start_time},"end_timestamp":{end_time},"top_n":{contribution_limit}}}') YIELD start_node, relationship, end_node return start_node, relationship, end_node''' + result = client.run(cypher) + return result + diff --git a/osgraph-service-py/app/services/graph_services/project_ecology.py b/osgraph-service-py/app/services/graph_services/project_ecology.py new file mode 100644 index 0000000..324c37e --- /dev/null +++ b/osgraph-service-py/app/services/graph_services/project_ecology.py @@ -0,0 +1,48 @@ +from datetime import datetime, timedelta +from typing import Dict, Any +from app.services.graph_services.base import BaseService, ServiceConfig, FilterKey +from app.dal.graph.tugraph import GraphClient +from app.dal.search.es import ElasticsearchClient +import os +from dotenv import load_dotenv +load_dotenv() + +def get_default_start_time() -> int: + return int((datetime.now() - timedelta(days=30)).timestamp() * 1000) + +def get_default_end_time() -> int: + return int(datetime.now().timestamp() * 1000) + +class ProjectEcologyServiceConfig(ServiceConfig): + def __init__(self): + super().__init__( + name="项目生态", + comment="这是一个获取项目生态的图谱", + inputTypes=["GitHubRepo"], + filterKeys=[ + FilterKey(key="topn", type="int", default=50, required=False), + ] + ) + +class ProjectEcologyService(BaseService): + def __init__(self): + super().__init__(ProjectEcologyServiceConfig()) + + def execute(self, data: Dict[str, Any]) -> Any: + validated_data = self.validate_params(data) + github_repo: str = validated_data["GitHubRepo"] + top_n: int = validated_data["topn"] + es = ElasticsearchClient() + query = { + "term": { + "name.keyword": github_repo + } + } + res = es.search(index='github_repo',query=query) + if len(res): +
repo_id = res[0]["id"] + graph_name = os.getenv("TUGRAPHDB_OSGRAPH_GITHUB_GRAPH_NAME") + client = GraphClient(graph_name) + cypher = f'''CALL osgraph.get_repo_by_repo('{{"repo_id":{repo_id}, "top_n":{top_n}}}') YIELD start_node, relationship, end_node return start_node, relationship, end_node''' + result = client.run(cypher) + return result diff --git a/osgraph-service-py/app/services/graph_summary.py b/osgraph-service-py/app/services/graph_summary.py new file mode 100644 index 0000000..ba71054 --- /dev/null +++ b/osgraph-service-py/app/services/graph_summary.py @@ -0,0 +1,8 @@ +from typing import Any +from app.llm.openai_client import OpenAIClient + +class GraphSummaryService: + def execute(self, data:dict) -> Any: + llm = OpenAIClient() + result = llm.summary_graph(graph_data=data) + return result diff --git a/osgraph-service-py/app/utils/custom_exceptions.py b/osgraph-service-py/app/utils/custom_exceptions.py new file mode 100644 index 0000000..7e9f34c --- /dev/null +++ b/osgraph-service-py/app/utils/custom_exceptions.py @@ -0,0 +1,15 @@ +# app/utils/custom_exceptions.py +class InvalidUsage(Exception): + status_code = 400 + + def __init__(self, message, status_code=None, payload=None): + super().__init__() + self.message = message + if status_code: + self.status_code = status_code + self.payload = payload + + def to_dict(self): + rv = dict(self.payload or ()) + rv['message'] = self.message + return rv diff --git a/osgraph-service-py/app/utils/logger.py b/osgraph-service-py/app/utils/logger.py new file mode 100644 index 0000000..5cd1123 --- /dev/null +++ b/osgraph-service-py/app/utils/logger.py @@ -0,0 +1,26 @@ +import logging +from logging.handlers import RotatingFileHandler +import os + +def setup_logger(app): + log_dir = os.path.join(app.root_path, 'logs') + if not os.path.exists(log_dir): + os.makedirs(log_dir) + + handler = RotatingFileHandler( + os.path.join(log_dir, 'app.log'), + maxBytes=10 * 1024 * 1024, # 10MB + backupCount=5 + ) + + formatter = logging.Formatter( + '%(asctime)s %(levelname)s: %(message)s [in %(pathname)s:%(lineno)d]' + ) + handler.setFormatter(formatter) + root_logger = logging.getLogger() + root_logger.setLevel(app.config['LOG_LEVEL']) + if not any(isinstance(h, RotatingFileHandler) for h in root_logger.handlers): + root_logger.addHandler(handler) + logging.getLogger('werkzeug').setLevel(logging.WARNING) + logging.getLogger('flask').setLevel(logging.WARNING) + root_logger.info('The logger has been started, log level: %s', logging.getLevelName(app.config['LOG_LEVEL'])) diff --git a/osgraph-service-py/app/utils/response_handler.py b/osgraph-service-py/app/utils/response_handler.py new file mode 100644 index 0000000..8880c6c --- /dev/null +++ b/osgraph-service-py/app/utils/response_handler.py @@ -0,0 +1,25 @@ +from flask import jsonify +from typing import Any, Dict, Optional + +class ResponseHandler: + @staticmethod + def success(data: Any = None, message: str = "Success") -> Dict[str, Any]: + return { + "status": 0, + "data": data, + "message": message, + "error": None + } + + @staticmethod + def error(message: str, status_code: int = 500, error_details: Optional[Dict[str, Any]] = None) -> Dict[str, Any]: + return { + "status": 1, + "data": None, + "message": message, + "error": error_details + } + + @staticmethod + def jsonify_response(response: Dict[str, Any], status_code: int = 200): + return jsonify(response), status_code diff --git a/osgraph-service-py/config.py b/osgraph-service-py/config.py new file mode 100644 index 0000000..44a358c --- /dev/null +++ 
b/osgraph-service-py/config.py @@ -0,0 +1,15 @@ +import logging +from dotenv import load_dotenv + +load_dotenv() + +class Config: + LOG_LEVEL = logging.INFO + +class DevelopmentConfig(Config): + DEBUG = True + LOG_LEVEL = logging.DEBUG + +class ProductionConfig(Config): + DEBUG = False + LOG_LEVEL = logging.INFO diff --git a/osgraph-service-py/poetry.lock b/osgraph-service-py/poetry.lock new file mode 100644 index 0000000..cb083d7 --- /dev/null +++ b/osgraph-service-py/poetry.lock @@ -0,0 +1,678 @@ +# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand. + +[[package]] +name = "annotated-types" +version = "0.7.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +files = [ + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, +] + +[[package]] +name = "anyio" +version = "4.6.2.post1" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +optional = false +python-versions = ">=3.9" +files = [ + {file = "anyio-4.6.2.post1-py3-none-any.whl", hash = "sha256:6d170c36fba3bdd840c73d3868c1e777e33676a69c3a72cf0a0d5d6d8009b61d"}, + {file = "anyio-4.6.2.post1.tar.gz", hash = "sha256:4c8bc31ccdb51c7f7bd251f51c609e038d63e34219b44aa86e47576389880b4c"}, +] + +[package.dependencies] +idna = ">=2.8" +sniffio = ">=1.1" + +[package.extras] +doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21.0b1)"] +trio = ["trio (>=0.26.1)"] + +[[package]] +name = "blinker" +version = "1.8.2" +description = "Fast, simple object-to-object and broadcast signaling" +optional = false +python-versions = ">=3.8" +files = [ + {file = "blinker-1.8.2-py3-none-any.whl", hash = "sha256:1779309f71bf239144b9399d06ae925637cf6634cf6bd131104184531bf67c01"}, + {file = "blinker-1.8.2.tar.gz", hash = "sha256:8f77b09d3bf7c795e969e9486f39c2c5e9c39d4ee07424be2bc594ece9642d83"}, +] + +[[package]] +name = "certifi" +version = "2024.8.30" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, + {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, +] + +[[package]] +name = "click" +version = "8.1.7" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." 
+optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "distro" +version = "1.9.0" +description = "Distro - an OS platform information API" +optional = false +python-versions = ">=3.6" +files = [ + {file = "distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2"}, + {file = "distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed"}, +] + +[[package]] +name = "elastic-transport" +version = "8.15.1" +description = "Transport classes and utilities shared among Python Elastic client libraries" +optional = false +python-versions = ">=3.8" +files = [ + {file = "elastic_transport-8.15.1-py3-none-any.whl", hash = "sha256:b5e82ff1679d8c7705a03fd85c7f6ef85d6689721762d41228dd312e34f331fc"}, + {file = "elastic_transport-8.15.1.tar.gz", hash = "sha256:9cac4ab5cf9402668cf305ae0b7d93ddc0c7b61461d6d1027850db6da9cc5742"}, +] + +[package.dependencies] +certifi = "*" +urllib3 = ">=1.26.2,<3" + +[package.extras] +develop = ["aiohttp", "furo", "httpcore (<1.0.6)", "httpx", "opentelemetry-api", "opentelemetry-sdk", "orjson", "pytest", "pytest-asyncio", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests", "respx", "sphinx (>2)", "sphinx-autodoc-typehints", "trustme"] + +[[package]] +name = "elasticsearch" +version = "8.16.0" +description = "Python client for Elasticsearch" +optional = false +python-versions = ">=3.8" +files = [ + {file = "elasticsearch-8.16.0-py3-none-any.whl", hash = "sha256:83d9fe09e8e95880559da43e44976c1e11cc63fe96bc0c0592f3d64f371772bf"}, + {file = "elasticsearch-8.16.0.tar.gz", hash = "sha256:d2aaa92f44ebea3c4147389aeba038c0b42a017f8c52ff35b1e7ebc34c49adb7"}, +] + +[package.dependencies] +elastic-transport = ">=8.15.1,<9" + +[package.extras] +async = ["aiohttp (>=3,<4)"] +dev = ["aiohttp", "black", "build", "coverage", "isort", "jinja2", "mapbox-vector-tile", "nox", "numpy", "orjson", "pandas", "pyarrow", "pytest", "pytest-asyncio", "pytest-cov", "python-dateutil", "pyyaml (>=5.4)", "requests (>=2,<3)", "simsimd", "twine", "unasync"] +docs = ["sphinx", "sphinx-autodoc-typehints", "sphinx-rtd-theme (>=2.0)"] +orjson = ["orjson (>=3)"] +pyarrow = ["pyarrow (>=1)"] +requests = ["requests (>=2.4.0,!=2.32.2,<3.0.0)"] +vectorstore-mmr = ["numpy (>=1)", "simsimd (>=3)"] + +[[package]] +name = "flask" +version = "3.0.3" +description = "A simple framework for building complex web applications." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "flask-3.0.3-py3-none-any.whl", hash = "sha256:34e815dfaa43340d1d15a5c3a02b8476004037eb4840b34910c6e21679d288f3"}, + {file = "flask-3.0.3.tar.gz", hash = "sha256:ceb27b0af3823ea2737928a4d99d125a06175b8512c445cbd9a9ce200ef76842"}, +] + +[package.dependencies] +blinker = ">=1.6.2" +click = ">=8.1.3" +itsdangerous = ">=2.1.2" +Jinja2 = ">=3.1.2" +Werkzeug = ">=3.0.0" + +[package.extras] +async = ["asgiref (>=3.2)"] +dotenv = ["python-dotenv"] + +[[package]] +name = "h11" +version = "0.14.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +optional = false +python-versions = ">=3.7" +files = [ + {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, +] + +[[package]] +name = "httpcore" +version = "1.0.7" +description = "A minimal low-level HTTP client." +optional = false +python-versions = ">=3.8" +files = [ + {file = "httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd"}, + {file = "httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c"}, +] + +[package.dependencies] +certifi = "*" +h11 = ">=0.13,<0.15" + +[package.extras] +asyncio = ["anyio (>=4.0,<5.0)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +trio = ["trio (>=0.22.0,<1.0)"] + +[[package]] +name = "httpx" +version = "0.27.2" +description = "The next generation HTTP client." +optional = false +python-versions = ">=3.8" +files = [ + {file = "httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0"}, + {file = "httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2"}, +] + +[package.dependencies] +anyio = "*" +certifi = "*" +httpcore = "==1.*" +idna = "*" +sniffio = "*" + +[package.extras] +brotli = ["brotli", "brotlicffi"] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "idna" +version = "3.10" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.6" +files = [ + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, +] + +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + +[[package]] +name = "itsdangerous" +version = "2.2.0" +description = "Safely pass data to untrusted environments and back." +optional = false +python-versions = ">=3.8" +files = [ + {file = "itsdangerous-2.2.0-py3-none-any.whl", hash = "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef"}, + {file = "itsdangerous-2.2.0.tar.gz", hash = "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173"}, +] + +[[package]] +name = "jinja2" +version = "3.1.4" +description = "A very fast and expressive template engine." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, + {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "jiter" +version = "0.7.1" +description = "Fast iterable JSON parser." +optional = false +python-versions = ">=3.8" +files = [ + {file = "jiter-0.7.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:262e96d06696b673fad6f257e6a0abb6e873dc22818ca0e0600f4a1189eb334f"}, + {file = "jiter-0.7.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:be6de02939aac5be97eb437f45cfd279b1dc9de358b13ea6e040e63a3221c40d"}, + {file = "jiter-0.7.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:935f10b802bc1ce2b2f61843e498c7720aa7f4e4bb7797aa8121eab017293c3d"}, + {file = "jiter-0.7.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9cd3cccccabf5064e4bb3099c87bf67db94f805c1e62d1aefd2b7476e90e0ee2"}, + {file = "jiter-0.7.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4aa919ebfc5f7b027cc368fe3964c0015e1963b92e1db382419dadb098a05192"}, + {file = "jiter-0.7.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ae2d01e82c94491ce4d6f461a837f63b6c4e6dd5bb082553a70c509034ff3d4"}, + {file = "jiter-0.7.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f9568cd66dbbdab67ae1b4c99f3f7da1228c5682d65913e3f5f95586b3cb9a9"}, + {file = "jiter-0.7.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9ecbf4e20ec2c26512736284dc1a3f8ed79b6ca7188e3b99032757ad48db97dc"}, + {file = "jiter-0.7.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b1a0508fddc70ce00b872e463b387d49308ef02b0787992ca471c8d4ba1c0fa1"}, + {file = "jiter-0.7.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f84c9996664c460f24213ff1e5881530abd8fafd82058d39af3682d5fd2d6316"}, + {file = "jiter-0.7.1-cp310-none-win32.whl", hash = "sha256:c915e1a1960976ba4dfe06551ea87063b2d5b4d30759012210099e712a414d9f"}, + {file = "jiter-0.7.1-cp310-none-win_amd64.whl", hash = "sha256:75bf3b7fdc5c0faa6ffffcf8028a1f974d126bac86d96490d1b51b3210aa0f3f"}, + {file = "jiter-0.7.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:ad04a23a91f3d10d69d6c87a5f4471b61c2c5cd6e112e85136594a02043f462c"}, + {file = "jiter-0.7.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e47a554de88dff701226bb5722b7f1b6bccd0b98f1748459b7e56acac2707a5"}, + {file = "jiter-0.7.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e44fff69c814a2e96a20b4ecee3e2365e9b15cf5fe4e00869d18396daa91dab"}, + {file = "jiter-0.7.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:df0a1d05081541b45743c965436f8b5a1048d6fd726e4a030113a2699a6046ea"}, + {file = "jiter-0.7.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f22cf8f236a645cb6d8ffe2a64edb5d2b66fb148bf7c75eea0cb36d17014a7bc"}, + {file = "jiter-0.7.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da8589f50b728ea4bf22e0632eefa125c8aa9c38ed202a5ee6ca371f05eeb3ff"}, + {file = "jiter-0.7.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f20de711224f2ca2dbb166a8d512f6ff48c9c38cc06b51f796520eb4722cc2ce"}, + {file = 
"jiter-0.7.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8a9803396032117b85ec8cbf008a54590644a062fedd0425cbdb95e4b2b60479"}, + {file = "jiter-0.7.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3d8bae77c82741032e9d89a4026479061aba6e646de3bf5f2fc1ae2bbd9d06e0"}, + {file = "jiter-0.7.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3dc9939e576bbc68c813fc82f6620353ed68c194c7bcf3d58dc822591ec12490"}, + {file = "jiter-0.7.1-cp311-none-win32.whl", hash = "sha256:f7605d24cd6fab156ec89e7924578e21604feee9c4f1e9da34d8b67f63e54892"}, + {file = "jiter-0.7.1-cp311-none-win_amd64.whl", hash = "sha256:f3ea649e7751a1a29ea5ecc03c4ada0a833846c59c6da75d747899f9b48b7282"}, + {file = "jiter-0.7.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:ad36a1155cbd92e7a084a568f7dc6023497df781adf2390c345dd77a120905ca"}, + {file = "jiter-0.7.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7ba52e6aaed2dc5c81a3d9b5e4ab95b039c4592c66ac973879ba57c3506492bb"}, + {file = "jiter-0.7.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2b7de0b6f6728b678540c7927587e23f715284596724be203af952418acb8a2d"}, + {file = "jiter-0.7.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9463b62bd53c2fb85529c700c6a3beb2ee54fde8bef714b150601616dcb184a6"}, + {file = "jiter-0.7.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:627164ec01d28af56e1f549da84caf0fe06da3880ebc7b7ee1ca15df106ae172"}, + {file = "jiter-0.7.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:25d0e5bf64e368b0aa9e0a559c3ab2f9b67e35fe7269e8a0d81f48bbd10e8963"}, + {file = "jiter-0.7.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c244261306f08f8008b3087059601997016549cb8bb23cf4317a4827f07b7d74"}, + {file = "jiter-0.7.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7ded4e4b75b68b843b7cea5cd7c55f738c20e1394c68c2cb10adb655526c5f1b"}, + {file = "jiter-0.7.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:80dae4f1889b9d09e5f4de6b58c490d9c8ce7730e35e0b8643ab62b1538f095c"}, + {file = "jiter-0.7.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5970cf8ec943b51bce7f4b98d2e1ed3ada170c2a789e2db3cb484486591a176a"}, + {file = "jiter-0.7.1-cp312-none-win32.whl", hash = "sha256:701d90220d6ecb3125d46853c8ca8a5bc158de8c49af60fd706475a49fee157e"}, + {file = "jiter-0.7.1-cp312-none-win_amd64.whl", hash = "sha256:7824c3ecf9ecf3321c37f4e4d4411aad49c666ee5bc2a937071bdd80917e4533"}, + {file = "jiter-0.7.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:097676a37778ba3c80cb53f34abd6943ceb0848263c21bf423ae98b090f6c6ba"}, + {file = "jiter-0.7.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3298af506d4271257c0a8f48668b0f47048d69351675dd8500f22420d4eec378"}, + {file = "jiter-0.7.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12fd88cfe6067e2199964839c19bd2b422ca3fd792949b8f44bb8a4e7d21946a"}, + {file = "jiter-0.7.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dacca921efcd21939123c8ea8883a54b9fa7f6545c8019ffcf4f762985b6d0c8"}, + {file = "jiter-0.7.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de3674a5fe1f6713a746d25ad9c32cd32fadc824e64b9d6159b3b34fd9134143"}, + {file = "jiter-0.7.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65df9dbae6d67e0788a05b4bad5706ad40f6f911e0137eb416b9eead6ba6f044"}, + {file = 
"jiter-0.7.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ba9a358d59a0a55cccaa4957e6ae10b1a25ffdabda863c0343c51817610501d"}, + {file = "jiter-0.7.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:576eb0f0c6207e9ede2b11ec01d9c2182973986514f9c60bc3b3b5d5798c8f50"}, + {file = "jiter-0.7.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:e550e29cdf3577d2c970a18f3959e6b8646fd60ef1b0507e5947dc73703b5627"}, + {file = "jiter-0.7.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:81d968dbf3ce0db2e0e4dec6b0a0d5d94f846ee84caf779b07cab49f5325ae43"}, + {file = "jiter-0.7.1-cp313-none-win32.whl", hash = "sha256:f892e547e6e79a1506eb571a676cf2f480a4533675f834e9ae98de84f9b941ac"}, + {file = "jiter-0.7.1-cp313-none-win_amd64.whl", hash = "sha256:0302f0940b1455b2a7fb0409b8d5b31183db70d2b07fd177906d83bf941385d1"}, + {file = "jiter-0.7.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:c65a3ce72b679958b79d556473f192a4dfc5895e8cc1030c9f4e434690906076"}, + {file = "jiter-0.7.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e80052d3db39f9bb8eb86d207a1be3d9ecee5e05fdec31380817f9609ad38e60"}, + {file = "jiter-0.7.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70a497859c4f3f7acd71c8bd89a6f9cf753ebacacf5e3e799138b8e1843084e3"}, + {file = "jiter-0.7.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c1288bc22b9e36854a0536ba83666c3b1fb066b811019d7b682c9cf0269cdf9f"}, + {file = "jiter-0.7.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b096ca72dd38ef35675e1d3b01785874315182243ef7aea9752cb62266ad516f"}, + {file = "jiter-0.7.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8dbbd52c50b605af13dbee1a08373c520e6fcc6b5d32f17738875847fea4e2cd"}, + {file = "jiter-0.7.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af29c5c6eb2517e71ffa15c7ae9509fa5e833ec2a99319ac88cc271eca865519"}, + {file = "jiter-0.7.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f114a4df1e40c03c0efbf974b376ed57756a1141eb27d04baee0680c5af3d424"}, + {file = "jiter-0.7.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:191fbaee7cf46a9dd9b817547bf556facde50f83199d07fc48ebeff4082f9df4"}, + {file = "jiter-0.7.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0e2b445e5ee627fb4ee6bbceeb486251e60a0c881a8e12398dfdff47c56f0723"}, + {file = "jiter-0.7.1-cp38-none-win32.whl", hash = "sha256:47ac4c3cf8135c83e64755b7276339b26cd3c7ddadf9e67306ace4832b283edf"}, + {file = "jiter-0.7.1-cp38-none-win_amd64.whl", hash = "sha256:60b49c245cd90cde4794f5c30f123ee06ccf42fb8730a019a2870cd005653ebd"}, + {file = "jiter-0.7.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:8f212eeacc7203256f526f550d105d8efa24605828382cd7d296b703181ff11d"}, + {file = "jiter-0.7.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d9e247079d88c00e75e297e6cb3a18a039ebcd79fefc43be9ba4eb7fb43eb726"}, + {file = "jiter-0.7.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0aacaa56360139c53dcf352992b0331f4057a0373bbffd43f64ba0c32d2d155"}, + {file = "jiter-0.7.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bc1b55314ca97dbb6c48d9144323896e9c1a25d41c65bcb9550b3e0c270ca560"}, + {file = "jiter-0.7.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f281aae41b47e90deb70e7386558e877a8e62e1693e0086f37d015fa1c102289"}, + {file = "jiter-0.7.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:93c20d2730a84d43f7c0b6fb2579dc54335db742a59cf9776d0b80e99d587382"}, + {file = "jiter-0.7.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e81ccccd8069110e150613496deafa10da2f6ff322a707cbec2b0d52a87b9671"}, + {file = "jiter-0.7.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0a7d5e85766eff4c9be481d77e2226b4c259999cb6862ccac5ef6621d3c8dcce"}, + {file = "jiter-0.7.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f52ce5799df5b6975439ecb16b1e879d7655e1685b6e3758c9b1b97696313bfb"}, + {file = "jiter-0.7.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e0c91a0304373fdf97d56f88356a010bba442e6d995eb7773cbe32885b71cdd8"}, + {file = "jiter-0.7.1-cp39-none-win32.whl", hash = "sha256:5c08adf93e41ce2755970e8aa95262298afe2bf58897fb9653c47cd93c3c6cdc"}, + {file = "jiter-0.7.1-cp39-none-win_amd64.whl", hash = "sha256:6592f4067c74176e5f369228fb2995ed01400c9e8e1225fb73417183a5e635f0"}, + {file = "jiter-0.7.1.tar.gz", hash = "sha256:448cf4f74f7363c34cdef26214da527e8eeffd88ba06d0b80b485ad0667baf5d"}, +] + +[[package]] +name = "markupsafe" +version = "3.0.2" +description = "Safely add untrusted strings to HTML/XML markup." +optional = false +python-versions = ">=3.9" +files = [ + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84"}, + {file = 
"MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", 
hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f"}, + {file = 
"MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a"}, + {file = "markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0"}, +] + +[[package]] +name = "neo4j" +version = "5.26.0" +description = "Neo4j Bolt driver for Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "neo4j-5.26.0-py3-none-any.whl", hash = "sha256:511a6a9468ca89b521bf686f885a2070acc462b1d09821d43710bd477acdf11e"}, + {file = "neo4j-5.26.0.tar.gz", hash = "sha256:51b25ba127b7b9fdae1ddf48ae697ddfab331e60f4b6d8488d1fc1f74ec60dcc"}, +] + +[package.dependencies] +pytz = "*" + +[package.extras] +numpy = ["numpy (>=1.7.0,<2.0.0)"] +pandas = ["numpy (>=1.7.0,<2.0.0)", "pandas (>=1.1.0,<3.0.0)"] +pyarrow = ["pyarrow (>=1.0.0)"] + +[[package]] +name = "openai" +version = "1.54.5" +description = "The official Python library for the openai API" +optional = false +python-versions = ">=3.8" +files = [ + {file = "openai-1.54.5-py3-none-any.whl", hash = "sha256:f55a4450f38501814b53e76311ed7845a6f7f35bab46d0fb2a3728035d7a72d8"}, + {file = "openai-1.54.5.tar.gz", hash = "sha256:2aab4f9755a3e1e04d8a45ac1f4ce7b6948bab76646020c6386256d7e5cbb7e0"}, +] + +[package.dependencies] +anyio = ">=3.5.0,<5" +distro = ">=1.7.0,<2" +httpx = ">=0.23.0,<1" +jiter = ">=0.4.0,<1" +pydantic = ">=1.9.0,<3" +sniffio = "*" +tqdm = ">4" +typing-extensions = ">=4.11,<5" + +[package.extras] +datalib = ["numpy (>=1)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"] + +[[package]] +name = "pydantic" +version = "2.9.2" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic-2.9.2-py3-none-any.whl", hash = "sha256:f048cec7b26778210e28a0459867920654d48e5e62db0958433636cde4254f12"}, + {file = "pydantic-2.9.2.tar.gz", hash = "sha256:d155cef71265d1e9807ed1c32b4c8deec042a44a50a4188b25ac67ecd81a9c0f"}, +] + +[package.dependencies] +annotated-types = ">=0.6.0" +pydantic-core = "2.23.4" +typing-extensions = [ + {version = ">=4.12.2", markers = "python_version >= \"3.13\""}, + {version = ">=4.6.1", markers = "python_version < \"3.13\""}, +] + +[package.extras] +email = ["email-validator (>=2.0.0)"] +timezone = ["tzdata"] + +[[package]] +name = "pydantic-core" +version = "2.23.4" +description = "Core functionality for Pydantic validation and serialization" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic_core-2.23.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:b10bd51f823d891193d4717448fab065733958bdb6a6b351967bd349d48d5c9b"}, + {file = "pydantic_core-2.23.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4fc714bdbfb534f94034efaa6eadd74e5b93c8fa6315565a222f7b6f42ca1166"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63e46b3169866bd62849936de036f901a9356e36376079b05efa83caeaa02ceb"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed1a53de42fbe34853ba90513cea21673481cd81ed1be739f7f2efb931b24916"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cfdd16ab5e59fc31b5e906d1a3f666571abc367598e3e02c83403acabc092e07"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:255a8ef062cbf6674450e668482456abac99a5583bbafb73f9ad469540a3a232"}, + {file = 
"pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a7cd62e831afe623fbb7aabbb4fe583212115b3ef38a9f6b71869ba644624a2"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f09e2ff1f17c2b51f2bc76d1cc33da96298f0a036a137f5440ab3ec5360b624f"}, + {file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e38e63e6f3d1cec5a27e0afe90a085af8b6806ee208b33030e65b6516353f1a3"}, + {file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0dbd8dbed2085ed23b5c04afa29d8fd2771674223135dc9bc937f3c09284d071"}, + {file = "pydantic_core-2.23.4-cp310-none-win32.whl", hash = "sha256:6531b7ca5f951d663c339002e91aaebda765ec7d61b7d1e3991051906ddde119"}, + {file = "pydantic_core-2.23.4-cp310-none-win_amd64.whl", hash = "sha256:7c9129eb40958b3d4500fa2467e6a83356b3b61bfff1b414c7361d9220f9ae8f"}, + {file = "pydantic_core-2.23.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:77733e3892bb0a7fa797826361ce8a9184d25c8dffaec60b7ffe928153680ba8"}, + {file = "pydantic_core-2.23.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b84d168f6c48fabd1f2027a3d1bdfe62f92cade1fb273a5d68e621da0e44e6d"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df49e7a0861a8c36d089c1ed57d308623d60416dab2647a4a17fe050ba85de0e"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ff02b6d461a6de369f07ec15e465a88895f3223eb75073ffea56b84d9331f607"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:996a38a83508c54c78a5f41456b0103c30508fed9abcad0a59b876d7398f25fd"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d97683ddee4723ae8c95d1eddac7c192e8c552da0c73a925a89fa8649bf13eea"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:216f9b2d7713eb98cb83c80b9c794de1f6b7e3145eef40400c62e86cee5f4e1e"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6f783e0ec4803c787bcea93e13e9932edab72068f68ecffdf86a99fd5918878b"}, + {file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d0776dea117cf5272382634bd2a5c1b6eb16767c223c6a5317cd3e2a757c61a0"}, + {file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d5f7a395a8cf1621939692dba2a6b6a830efa6b3cee787d82c7de1ad2930de64"}, + {file = "pydantic_core-2.23.4-cp311-none-win32.whl", hash = "sha256:74b9127ffea03643e998e0c5ad9bd3811d3dac8c676e47db17b0ee7c3c3bf35f"}, + {file = "pydantic_core-2.23.4-cp311-none-win_amd64.whl", hash = "sha256:98d134c954828488b153d88ba1f34e14259284f256180ce659e8d83e9c05eaa3"}, + {file = "pydantic_core-2.23.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f3e0da4ebaef65158d4dfd7d3678aad692f7666877df0002b8a522cdf088f231"}, + {file = "pydantic_core-2.23.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f69a8e0b033b747bb3e36a44e7732f0c99f7edd5cea723d45bc0d6e95377ffee"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:723314c1d51722ab28bfcd5240d858512ffd3116449c557a1336cbe3919beb87"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bb2802e667b7051a1bebbfe93684841cc9351004e2badbd6411bf357ab8d5ac8"}, + {file = 
"pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d18ca8148bebe1b0a382a27a8ee60350091a6ddaf475fa05ef50dc35b5df6327"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33e3d65a85a2a4a0dc3b092b938a4062b1a05f3a9abde65ea93b233bca0e03f2"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:128585782e5bfa515c590ccee4b727fb76925dd04a98864182b22e89a4e6ed36"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:68665f4c17edcceecc112dfed5dbe6f92261fb9d6054b47d01bf6371a6196126"}, + {file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:20152074317d9bed6b7a95ade3b7d6054845d70584216160860425f4fbd5ee9e"}, + {file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9261d3ce84fa1d38ed649c3638feefeae23d32ba9182963e465d58d62203bd24"}, + {file = "pydantic_core-2.23.4-cp312-none-win32.whl", hash = "sha256:4ba762ed58e8d68657fc1281e9bb72e1c3e79cc5d464be146e260c541ec12d84"}, + {file = "pydantic_core-2.23.4-cp312-none-win_amd64.whl", hash = "sha256:97df63000f4fea395b2824da80e169731088656d1818a11b95f3b173747b6cd9"}, + {file = "pydantic_core-2.23.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7530e201d10d7d14abce4fb54cfe5b94a0aefc87da539d0346a484ead376c3cc"}, + {file = "pydantic_core-2.23.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:df933278128ea1cd77772673c73954e53a1c95a4fdf41eef97c2b779271bd0bd"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cb3da3fd1b6a5d0279a01877713dbda118a2a4fc6f0d821a57da2e464793f05"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c6dcb030aefb668a2b7009c85b27f90e51e6a3b4d5c9bc4c57631292015b0d"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:696dd8d674d6ce621ab9d45b205df149399e4bb9aa34102c970b721554828510"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2971bb5ffe72cc0f555c13e19b23c85b654dd2a8f7ab493c262071377bfce9f6"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8394d940e5d400d04cad4f75c0598665cbb81aecefaca82ca85bd28264af7f9b"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0dff76e0602ca7d4cdaacc1ac4c005e0ce0dcfe095d5b5259163a80d3a10d327"}, + {file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7d32706badfe136888bdea71c0def994644e09fff0bfe47441deaed8e96fdbc6"}, + {file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ed541d70698978a20eb63d8c5d72f2cc6d7079d9d90f6b50bad07826f1320f5f"}, + {file = "pydantic_core-2.23.4-cp313-none-win32.whl", hash = "sha256:3d5639516376dce1940ea36edf408c554475369f5da2abd45d44621cb616f769"}, + {file = "pydantic_core-2.23.4-cp313-none-win_amd64.whl", hash = "sha256:5a1504ad17ba4210df3a045132a7baeeba5a200e930f57512ee02909fc5c4cb5"}, + {file = "pydantic_core-2.23.4-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d4488a93b071c04dc20f5cecc3631fc78b9789dd72483ba15d423b5b3689b555"}, + {file = "pydantic_core-2.23.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:81965a16b675b35e1d09dd14df53f190f9129c0202356ed44ab2728b1c905658"}, + {file = 
"pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ffa2ebd4c8530079140dd2d7f794a9d9a73cbb8e9d59ffe24c63436efa8f271"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:61817945f2fe7d166e75fbfb28004034b48e44878177fc54d81688e7b85a3665"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:29d2c342c4bc01b88402d60189f3df065fb0dda3654744d5a165a5288a657368"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5e11661ce0fd30a6790e8bcdf263b9ec5988e95e63cf901972107efc49218b13"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d18368b137c6295db49ce7218b1a9ba15c5bc254c96d7c9f9e924a9bc7825ad"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ec4e55f79b1c4ffb2eecd8a0cfba9955a2588497d96851f4c8f99aa4a1d39b12"}, + {file = "pydantic_core-2.23.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:374a5e5049eda9e0a44c696c7ade3ff355f06b1fe0bb945ea3cac2bc336478a2"}, + {file = "pydantic_core-2.23.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5c364564d17da23db1106787675fc7af45f2f7b58b4173bfdd105564e132e6fb"}, + {file = "pydantic_core-2.23.4-cp38-none-win32.whl", hash = "sha256:d7a80d21d613eec45e3d41eb22f8f94ddc758a6c4720842dc74c0581f54993d6"}, + {file = "pydantic_core-2.23.4-cp38-none-win_amd64.whl", hash = "sha256:5f5ff8d839f4566a474a969508fe1c5e59c31c80d9e140566f9a37bba7b8d556"}, + {file = "pydantic_core-2.23.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a4fa4fc04dff799089689f4fd502ce7d59de529fc2f40a2c8836886c03e0175a"}, + {file = "pydantic_core-2.23.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0a7df63886be5e270da67e0966cf4afbae86069501d35c8c1b3b6c168f42cb36"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcedcd19a557e182628afa1d553c3895a9f825b936415d0dbd3cd0bbcfd29b4b"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f54b118ce5de9ac21c363d9b3caa6c800341e8c47a508787e5868c6b79c9323"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86d2f57d3e1379a9525c5ab067b27dbb8a0642fb5d454e17a9ac434f9ce523e3"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de6d1d1b9e5101508cb37ab0d972357cac5235f5c6533d1071964c47139257df"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1278e0d324f6908e872730c9102b0112477a7f7cf88b308e4fc36ce1bdb6d58c"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9a6b5099eeec78827553827f4c6b8615978bb4b6a88e5d9b93eddf8bb6790f55"}, + {file = "pydantic_core-2.23.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e55541f756f9b3ee346b840103f32779c695a19826a4c442b7954550a0972040"}, + {file = "pydantic_core-2.23.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a5c7ba8ffb6d6f8f2ab08743be203654bb1aaa8c9dcb09f82ddd34eadb695605"}, + {file = "pydantic_core-2.23.4-cp39-none-win32.whl", hash = "sha256:37b0fe330e4a58d3c58b24d91d1eb102aeec675a3db4c292ec3928ecd892a9a6"}, + {file = "pydantic_core-2.23.4-cp39-none-win_amd64.whl", hash = "sha256:1498bec4c05c9c787bde9125cfdcc63a41004ff167f495063191b863399b1a29"}, + {file = 
"pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f455ee30a9d61d3e1a15abd5068827773d6e4dc513e795f380cdd59932c782d5"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1e90d2e3bd2c3863d48525d297cd143fe541be8bbf6f579504b9712cb6b643ec"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e203fdf807ac7e12ab59ca2bfcabb38c7cf0b33c41efeb00f8e5da1d86af480"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e08277a400de01bc72436a0ccd02bdf596631411f592ad985dcee21445bd0068"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f220b0eea5965dec25480b6333c788fb72ce5f9129e8759ef876a1d805d00801"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d06b0c8da4f16d1d1e352134427cb194a0a6e19ad5db9161bf32b2113409e728"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ba1a0996f6c2773bd83e63f18914c1de3c9dd26d55f4ac302a7efe93fb8e7433"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:9a5bce9d23aac8f0cf0836ecfc033896aa8443b501c58d0602dbfd5bd5b37753"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:78ddaaa81421a29574a682b3179d4cf9e6d405a09b99d93ddcf7e5239c742e21"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:883a91b5dd7d26492ff2f04f40fbb652de40fcc0afe07e8129e8ae779c2110eb"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88ad334a15b32a791ea935af224b9de1bf99bcd62fabf745d5f3442199d86d59"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:233710f069d251feb12a56da21e14cca67994eab08362207785cf8c598e74577"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:19442362866a753485ba5e4be408964644dd6a09123d9416c54cd49171f50744"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:624e278a7d29b6445e4e813af92af37820fafb6dcc55c012c834f9e26f9aaaef"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f5ef8f42bec47f21d07668a043f077d507e5bf4e668d5c6dfe6aaba89de1a5b8"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:aea443fffa9fbe3af1a9ba721a87f926fe548d32cab71d188a6ede77d0ff244e"}, + {file = "pydantic_core-2.23.4.tar.gz", hash = "sha256:2584f7cf844ac4d970fba483a717dbe10c1c1c96a969bf65d61ffe94df1b2863"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + +[[package]] +name = "python-dotenv" +version = "1.0.1" +description = "Read key-value pairs from a .env file and set them as environment variables" +optional = false +python-versions = ">=3.8" +files = [ + {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, + {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, +] + +[package.extras] +cli = ["click (>=5.0)"] + +[[package]] +name = "pytz" +version = "2024.2" +description = "World timezone definitions, modern and historical" +optional = false +python-versions = "*" +files = [ + {file = 
"pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725"}, + {file = "pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a"}, +] + +[[package]] +name = "sniffio" +version = "1.3.1" +description = "Sniff out which async library your code is running under" +optional = false +python-versions = ">=3.7" +files = [ + {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, + {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, +] + +[[package]] +name = "tqdm" +version = "4.67.0" +description = "Fast, Extensible Progress Meter" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tqdm-4.67.0-py3-none-any.whl", hash = "sha256:0cd8af9d56911acab92182e88d763100d4788bdf421d251616040cc4d44863be"}, + {file = "tqdm-4.67.0.tar.gz", hash = "sha256:fe5a6f95e6fe0b9755e9469b77b9c3cf850048224ecaa8293d7d2d31f97d869a"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"] +discord = ["requests"] +notebook = ["ipywidgets (>=6)"] +slack = ["slack-sdk"] +telegram = ["requests"] + +[[package]] +name = "typing-extensions" +version = "4.12.2" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, +] + +[[package]] +name = "urllib3" +version = "2.2.3" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, + {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "werkzeug" +version = "3.1.2" +description = "The comprehensive WSGI web application library." 
+optional = false +python-versions = ">=3.9" +files = [ + {file = "werkzeug-3.1.2-py3-none-any.whl", hash = "sha256:4f7d1a5de312c810a8a2c6f0b47e9f6a7cffb7c8322def35e4d4d9841ff85597"}, + {file = "werkzeug-3.1.2.tar.gz", hash = "sha256:f471a4cd167233077e9d2a8190c3471c5bc520c636a9e3c1e9300c33bced03bc"}, +] + +[package.dependencies] +MarkupSafe = ">=2.1.1" + +[package.extras] +watchdog = ["watchdog (>=2.3)"] + +[metadata] +lock-version = "2.0" +python-versions = "^3.12" +content-hash = "ab5f656fadab68e9f4628db597dc01518d7fdbfc62a84ff032fcabcd6428ddba" diff --git a/osgraph-service-py/pyproject.toml b/osgraph-service-py/pyproject.toml new file mode 100644 index 0000000..3499e48 --- /dev/null +++ b/osgraph-service-py/pyproject.toml @@ -0,0 +1,19 @@ +[tool.poetry] +name = "osgraph-service" +version = "0.1.0" +description = "" +authors = ["KingSkyLi <15566300566@163.com>"] +readme = "README.md" + +[tool.poetry.dependencies] +python = "^3.12" +flask = "^3.0.3" +python-dotenv = "^1.0.1" +neo4j = "^5.26.0" +elasticsearch = "^8.16.0" +openai = "^1.54.5" + + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" diff --git a/osgraph-service-py/run.py b/osgraph-service-py/run.py new file mode 100644 index 0000000..75cfee7 --- /dev/null +++ b/osgraph-service-py/run.py @@ -0,0 +1,20 @@ +# run.py +import os +from app import create_app +from dotenv import load_dotenv +from config import DevelopmentConfig, ProductionConfig + +load_dotenv() + +config_mapping = { + 'development': DevelopmentConfig, + 'production': ProductionConfig +} + +config_name = os.getenv('FLASK_CONFIG', 'production') +config_class = config_mapping.get(config_name.lower(), ProductionConfig) + +app = create_app(config_class=config_class) + +if __name__ == '__main__': + app.run(port=8000, debug=app.config.get('DEBUG', False)) From caeae38edb16ca9b0d838aab128a5068ccfd07cf Mon Sep 17 00:00:00 2001 From: KingSkyLi <15566300566@163.com> Date: Thu, 21 Nov 2024 15:01:37 +0800 Subject: [PATCH 02/61] run black .; --- osgraph-service-py/app/__init__.py | 16 +- osgraph-service-py/app/dal/graph/tugraph.py | 164 +++++--- osgraph-service-py/app/dal/search/es.py | 21 +- osgraph-service-py/app/llm/openai_client.py | 25 +- .../app/llm/prompt_templates/graph_summary.py | 2 +- .../app/managers/develop_activities.py | 82 +++- .../app/managers/fulltext_search.py | 6 +- osgraph-service-py/app/managers/graph_list.py | 5 +- .../app/managers/os_interest.py | 60 ++- osgraph-service-py/app/managers/os_partner.py | 71 +++- .../app/managers/project_community.py | 90 ++-- .../app/managers/project_contribution.py | 81 +++- .../app/managers/project_ecology.py | 50 ++- .../app/models/default_graph.py | 30 +- osgraph-service-py/app/models/graph_view.py | 124 +++--- osgraph-service-py/app/models/system_graph.py | 13 +- .../app/routes/develop_activities.py | 9 +- .../app/routes/fulltext_search.py | 10 +- osgraph-service-py/app/routes/graph_list.py | 8 +- osgraph-service-py/app/routes/os_interest.py | 7 +- osgraph-service-py/app/routes/os_partner.py | 7 +- .../app/routes/project_community.py | 7 +- .../app/routes/project_contribution.py | 9 +- .../app/routes/project_ecology.py | 7 +- osgraph-service-py/app/services/__init__.py | 13 +- .../app/services/fulltext_search.py | 12 +- osgraph-service-py/app/services/graph_list.py | 6 +- .../app/services/graph_services/__init__.py | 3 - .../app/services/graph_services/base.py | 24 +- .../graph_services/develop_activities.py | 21 +- .../services/graph_services/os_interest.py | 23 +- 
.../app/services/graph_services/os_partner.py | 19 +- .../graph_services/project_community.py | 19 +- .../graph_services/project_contribution.py | 44 +- .../graph_services/project_ecology.py | 19 +- .../app/services/graph_summary.py | 3 +- .../app/utils/custom_exceptions.py | 2 +- osgraph-service-py/app/utils/logger.py | 24 +- .../app/utils/response_handler.py | 21 +- osgraph-service-py/config.py | 5 +- osgraph-service-py/poetry.lock | 388 +++++++++++++++++- osgraph-service-py/pyproject.toml | 23 ++ osgraph-service-py/run.py | 11 +- 43 files changed, 1170 insertions(+), 414 deletions(-) diff --git a/osgraph-service-py/app/__init__.py b/osgraph-service-py/app/__init__.py index 26e33c4..19d1610 100644 --- a/osgraph-service-py/app/__init__.py +++ b/osgraph-service-py/app/__init__.py @@ -8,9 +8,11 @@ from app.dal.graph.tugraph import GraphClient, GraphLabel, LabelProps from app.models.system_graph import GraphService from dotenv import load_dotenv + load_dotenv() -def create_app(config_class: str = 'config.ProductionConfig') -> Flask: + +def create_app(config_class: str = "config.ProductionConfig") -> Flask: app = Flask(__name__) app.config.from_object(config_class) setup_logger(app) @@ -21,6 +23,7 @@ def create_app(config_class: str = 'config.ProductionConfig') -> Flask: register_error_handlers(app) return app + def register_blueprints(app: Flask, blueprint_folder: str = "routes") -> None: current_dir = os.path.dirname(os.path.abspath(__file__)) blueprints_path = os.path.join(current_dir, blueprint_folder) @@ -53,14 +56,15 @@ def not_found(error): def internal_error(error): app.logger.error("Internal Server Error") return jsonify({"message": "Internal server error"}), 500 - + + def initialize_system_graph(app: Flask): """ 初始化 system_graph 和 graph_service 的基础库表。 """ - graph_name = os.getenv('TUGRAPHDB_OSGRAPH_SYSTEM_GRAPH_NAME') + graph_name = os.getenv("TUGRAPHDB_OSGRAPH_SYSTEM_GRAPH_NAME") client = GraphClient(graph_name) - + try: system_graph = client.get_graph() if system_graph: @@ -77,9 +81,9 @@ def initialize_system_graph(app: Flask): primary=GraphService.primary, type=GraphService.type, properties=[ - LabelProps(name=key, type="string", optional=True) + LabelProps(name=key, type="string", optional=True) for key in vars(GraphService.props).keys() - ] + ], ) client.create_label(label) app.logger.info("graph_service Label 已创建") diff --git a/osgraph-service-py/app/dal/graph/tugraph.py b/osgraph-service-py/app/dal/graph/tugraph.py index 641a2ac..bf1b33c 100644 --- a/osgraph-service-py/app/dal/graph/tugraph.py +++ b/osgraph-service-py/app/dal/graph/tugraph.py @@ -10,14 +10,16 @@ load_dotenv() + @dataclass class LabelProps: - name:str + name: str type: str optional: bool index: Optional[bool] = None -@dataclass + +@dataclass class GraphLabel: label: str primary: str @@ -39,16 +41,22 @@ def to_dict(self) -> Dict[str, Any]: "label": self.label, "primary": self.primary, "type": self.type, - "properties": [prop.__dict__ for prop in self.properties] if self.properties else [] + "properties": ( + [prop.__dict__ for prop in self.properties] if self.properties else [] + ), } + class GraphClient: def __init__(self, graph_name): TUGRAPHDB_HOST = os.getenv("TUGRAPHDB_HOST") TUGRAPHDB_PORT = os.getenv("TUGRAPHDB_PORT") TUGRAPHDB_USER = os.getenv("TUGRAPHDB_USER") TUGRAPHDB_PASSWORD = os.getenv("TUGRAPHDB_PASSWORD") - self.driver = GraphDatabase.driver(f'bolt://{TUGRAPHDB_HOST}:{TUGRAPHDB_PORT}', auth=(TUGRAPHDB_USER, TUGRAPHDB_PASSWORD)) + self.driver = GraphDatabase.driver( + 
f"bolt://{TUGRAPHDB_HOST}:{TUGRAPHDB_PORT}", + auth=(TUGRAPHDB_USER, TUGRAPHDB_PASSWORD), + ) self.graph_name = graph_name def close(self): @@ -67,21 +75,22 @@ def create_label(self, label: GraphLabel): except Exception as e: current_app.logger.info(f"Label '{label}' may already exist. {str(e)}") - def get_label(self,label_type:str,label_name:str) -> Dict[str,any]: + def get_label(self, label_type: str, label_name: str) -> Dict[str, any]: try: with self.driver.session(database=self.graph_name) as session: - if label_type == 'vertex': + if label_type == "vertex": query = f"""CALL db.getVertexSchema('{label_name}')""" else: query = f"""CALL db.getEdgeSchema('{label_name}')""" result = session.run(query).data() return json.dumps(result) except Exception as e: - current_app.logger.info(f"Faild to get {label_type} {label_name} . Errormessage: {str(e)}") - + current_app.logger.info( + f"Faild to get {label_type} {label_name} . Errormessage: {str(e)}" + ) # 创建节点 - def create_vertex(self, label:str, properties:Dict[str,any]): + def create_vertex(self, label: str, properties: Dict[str, any]): try: properties_str = self._convert_dict_to_str(properties) query = f""" @@ -89,11 +98,15 @@ def create_vertex(self, label:str, properties:Dict[str,any]): """ with self.driver.session(database=self.graph_name) as session: result = session.run(query) - current_app.logger.info(f"Vertex '{ json.dumps(properties)}' created success.") + current_app.logger.info( + f"Vertex '{ json.dumps(properties)}' created success." + ) return result.data() except Exception as e: - current_app.logger.info(f"Vertex '{ json.dumps(properties)}' created faild. Error message : {str(e)}") - + current_app.logger.info( + f"Vertex '{ json.dumps(properties)}' created faild. Error message : {str(e)}" + ) + # 创建边 def create_relationship( self, @@ -102,7 +115,7 @@ def create_relationship( dst_label: str = "", dst_filter: Optional[Dict[str, Any]] = None, relationship_type: str = "", - properties: Optional[Dict[str, Any]] = None + properties: Optional[Dict[str, Any]] = None, ) -> None: try: query = f""" @@ -131,11 +144,16 @@ def create_relationship( """ with self.driver.session(database=self.graph_name) as session: result = session.run(query) - current_app.logger.info(f"Relationship '{json.dumps(properties)}' created.") + current_app.logger.info( + f"Relationship '{json.dumps(properties)}' created." + ) return result.data() except Exception as e: - current_app.logger.error(f"Relationship '{json.dumps(properties)}' creation failed. Error message: {str(e)}") - return None + current_app.logger.error( + f"Relationship '{json.dumps(properties)}' creation failed. 
Error message: {str(e)}" + ) + return None + def delete_relationship( self, src_label: str = "", @@ -175,6 +193,7 @@ def delete_relationship( print("Relationship deleted successfully.") except Exception as e: print(f"Failed to delete relationship: {e}") + def upsert_vertex(self, label, properties): try: with self.driver.session(database=self.graph_name) as session: @@ -196,8 +215,13 @@ def get_vertex(self, vertex_instance: Vertex, limit: Optional[int] = None): query = f"MATCH (n:{label})" if filters: conditions = [ - f"n.{key} = '{value}'" if isinstance(value, str) else f"n.{key} = {value}" - for key, value in asdict(filters).items() if value is not None + ( + f"n.{key} = '{value}'" + if isinstance(value, str) + else f"n.{key} = {value}" + ) + for key, value in asdict(filters).items() + if value is not None ] if conditions: query += " WHERE " + " AND ".join(conditions) @@ -212,7 +236,9 @@ def get_vertex(self, vertex_instance: Vertex, limit: Optional[int] = None): print(f"Error fetching vertex: {e}") return None - def get_edge(self, edge_instance: Edge, deep: int = 3, limit: int = 50) -> Optional[list]: + def get_edge( + self, edge_instance: Edge, deep: int = 3, limit: int = 50 + ) -> Optional[list]: if not isinstance(edge_instance, Edge): raise ValueError("Input must be an instance of an Edge-derived class.") @@ -231,22 +257,37 @@ def get_edge(self, edge_instance: Edge, deep: int = 3, limit: int = 50) -> Optio # Add source (n) filters if hasattr(source, "props") and source.props: conditions += [ - f"n.{key} = '{value}'" if isinstance(value, str) else f"n.{key} = {value}" - for key, value in asdict(source.props).items() if value is not None + ( + f"n.{key} = '{value}'" + if isinstance(value, str) + else f"n.{key} = {value}" + ) + for key, value in asdict(source.props).items() + if value is not None ] # Add relationship (r) filters if props: conditions += [ - f"r.{key} = '{value}'" if isinstance(value, str) else f"r.{key} = {value}" - for key, value in asdict(props).items() if value is not None + ( + f"r.{key} = '{value}'" + if isinstance(value, str) + else f"r.{key} = {value}" + ) + for key, value in asdict(props).items() + if value is not None ] # Add target (m) filters if hasattr(target, "props") and target.props: conditions += [ - f"m.{key} = '{value}'" if isinstance(value, str) else f"m.{key} = {value}" - for key, value in asdict(target.props).items() if value is not None + ( + f"m.{key} = '{value}'" + if isinstance(value, str) + else f"m.{key} = {value}" + ) + for key, value in asdict(target.props).items() + if value is not None ] # Append WHERE clause if conditions exist @@ -264,29 +305,38 @@ def get_edge(self, edge_instance: Edge, deep: int = 3, limit: int = 50) -> Optio print(f"Error fetching edge: {e}") return None - def get_graph(self) -> Optional[dict]: - with self.driver.session(database='default') as session: + with self.driver.session(database="default") as session: graph_list = session.run("CALL dbms.graph.listGraphs()").data() - result = next((graph for graph in graph_list if graph.get("graph_name") == self.graph_name), None) + result = next( + ( + graph + for graph in graph_list + if graph.get("graph_name") == self.graph_name + ), + None, + ) return result def create_graph(self): """Create a new graph in the database if it doesn't already exist.""" try: with self.driver.session(database="default") as session: - session.run(f"CALL dbms.graph.createGraph('{self.graph_name}', '', 2048)") + session.run( + f"CALL dbms.graph.createGraph('{self.graph_name}', '', 2048)" + ) except 
Exception as e: - raise Exception(f"Failed to create graph '{self.graph_name}': {str(e)}") from e - - def run(self,cypher:str) -> Any: + raise Exception( + f"Failed to create graph '{self.graph_name}': {str(e)}" + ) from e + + def run(self, cypher: str) -> Any: try: with self.driver.session(database=self.graph_name) as session: - result = session.run(cypher) - return result.data() + result = session.run(cypher) + return result.data() except Exception as e: raise Exception(f"Error : {e}") - def _convert_dict_to_str(self, properties: Any) -> str: if not properties: @@ -295,7 +345,7 @@ def _convert_dict_to_str(self, properties: Any) -> str: # 如果是 dataclass,则将其转换为字典 if is_dataclass(properties): properties = asdict(properties) - + def convert_value(value: Any) -> str: if isinstance(value, str): return f'"{value}"' @@ -304,23 +354,27 @@ def convert_value(value: Any) -> str: elif isinstance(value, list): return "[" + ", ".join(convert_value(item) for item in value) + "]" elif isinstance(value, dict): - return "{" + ", ".join(f"{k}: {convert_value(v)}" for k, v in value.items()) + "}" + return ( + "{" + + ", ".join(f"{k}: {convert_value(v)}" for k, v in value.items()) + + "}" + ) elif callable(value): return convert_value(value()) else: return f'"{str(value)}"' + properties_str = ( "{" + ", ".join(f"{k}: {convert_value(v)}" for k, v in properties.items()) + "}" ) return properties_str - def _parse_edge_result(self, query_result: list) -> list: parsed_results = [] for record in query_result: - path = record.get('p') # Extract the Path object from the result + path = record.get("p") # Extract the Path object from the result if path: # Extract nodes (start and end nodes are the first and last in the list) @@ -339,22 +393,20 @@ def _parse_edge_result(self, query_result: list) -> list: # Extract relationships along the path relationships = [] for relationship in path.relationships: - relationships.append({ - "type": relationship.type, - "element_id": relationship.element_id, - "properties": relationship._properties, - }) - - parsed_results.append({ - "start": start_node, - "relationships": relationships, - "end": end_node, - }) + relationships.append( + { + "type": relationship.type, + "element_id": relationship.element_id, + "properties": relationship._properties, + } + ) + + parsed_results.append( + { + "start": start_node, + "relationships": relationships, + "end": end_node, + } + ) return parsed_results - - - - - - diff --git a/osgraph-service-py/app/dal/search/es.py b/osgraph-service-py/app/dal/search/es.py index 7ae7cab..a9511ef 100644 --- a/osgraph-service-py/app/dal/search/es.py +++ b/osgraph-service-py/app/dal/search/es.py @@ -5,12 +5,16 @@ import logging from dotenv import load_dotenv import os + load_dotenv() logging.basicConfig(level=logging.INFO) logger = logging.getLogger(__name__) + class ElasticsearchClient: - def __init__(self,): + def __init__( + self, + ): """初始化 Elasticsearch 连接""" ES_HOST = os.getenv("ES_HOST") ES_PORT = os.getenv("ES_PORT") @@ -19,8 +23,10 @@ def __init__(self,): try: self.es = Elasticsearch( hosts=[f"http://{ES_HOST}:{ES_PORT}"], - basic_auth=(ES_USERNAME, ES_PASSWORD) if ES_USERNAME and ES_PASSWORD else None, - verify_certs=False + basic_auth=( + (ES_USERNAME, ES_PASSWORD) if ES_USERNAME and ES_PASSWORD else None + ), + verify_certs=False, ) if self.es.ping(): logger.info("Connected to Elasticsearch successfully.") @@ -30,10 +36,12 @@ def __init__(self,): logger.error(f"Error connecting to Elasticsearch: {e}") raise - def search(self, index: str, query: 
Dict[str, Any], size: int = 10) -> List[Dict[str, Any]]: + def search( + self, index: str, query: Dict[str, Any], size: int = 10 + ) -> List[Dict[str, Any]]: try: response = self.es.search(index=index, query=query, size=size) - return [hit["_source"] for hit in response['hits']['hits']] + return [hit["_source"] for hit in response["hits"]["hits"]] except NotFoundError: logger.error(f"Index '{index}' not found.") return [] @@ -74,6 +82,3 @@ def delete(self, index: str, doc_id: str) -> bool: def close(self): self.es.close() logger.info("Elasticsearch connection closed.") - - - diff --git a/osgraph-service-py/app/llm/openai_client.py b/osgraph-service-py/app/llm/openai_client.py index 7eeee17..dffd1cf 100644 --- a/osgraph-service-py/app/llm/openai_client.py +++ b/osgraph-service-py/app/llm/openai_client.py @@ -2,34 +2,41 @@ import os from dotenv import load_dotenv from app.llm.prompt_templates.graph_summary import get_graph_summary_prompt + load_dotenv() + + class OpenAIClient: def __init__(self): self.client = OpenAI( - api_key=os.getenv('OPENAI_KEY'), - base_url=os.getenv('BASEURL'), + api_key=os.getenv("OPENAI_KEY"), + base_url=os.getenv("BASEURL"), ) def ask_question(self, question: str, context: str = "", temperature: float = 0.7): try: - model=os.getenv('MODEL') + model = os.getenv("MODEL") response = self.client.chat.completions.create( model=model, messages=[ {"role": "system", "content": "You are a helpful assistant."}, {"role": "user", "content": context}, - {"role": "user", "content": question} + {"role": "user", "content": question}, ], stream=False, - temperature=temperature + temperature=temperature, ) return response.choices[0].message.content except Exception as e: return f"Error occurred: {str(e)}" - - def summary_graph(self, graph_data: dict, ): - prompt = get_graph_summary_prompt(graph_data = graph_data) - return self.ask_question(prompt) + + def summary_graph( + self, + graph_data: dict, + ): + prompt = get_graph_summary_prompt(graph_data=graph_data) + return self.ask_question(prompt) + # 示例用法 if __name__ == "__main__": diff --git a/osgraph-service-py/app/llm/prompt_templates/graph_summary.py b/osgraph-service-py/app/llm/prompt_templates/graph_summary.py index d4c37f3..5521869 100644 --- a/osgraph-service-py/app/llm/prompt_templates/graph_summary.py +++ b/osgraph-service-py/app/llm/prompt_templates/graph_summary.py @@ -15,4 +15,4 @@ def get_graph_summary_prompt(graph_data: dict) -> str: "4. 
总结图的主要特征和潜在意义。" ) graph_string = f"vertices: {graph_data.get('vertices', [])}, edges: {graph_data.get('edges', [])}" - return template.replace("{graph}", graph_string) \ No newline at end of file + return template.replace("{graph}", graph_string) diff --git a/osgraph-service-py/app/managers/develop_activities.py b/osgraph-service-py/app/managers/develop_activities.py index e656540..3ac661d 100644 --- a/osgraph-service-py/app/managers/develop_activities.py +++ b/osgraph-service-py/app/managers/develop_activities.py @@ -1,61 +1,101 @@ # app/manager/develop_activities.py -from app.models.graph_view import Graph, User, Repo, Push, CreatePR, CodeReviewAction, CreateIssue, CommentIssue +from app.models.graph_view import ( + Graph, + User, + Repo, + Push, + CreatePR, + CodeReviewAction, + CreateIssue, + CommentIssue, +) from typing import Dict, Any from app.services.graph_services.develop_activities import DevelopActivitiesService import json import os + class DevelopActivitiesManager: def __init__(self) -> None: pass - def get_graph(self, data:Dict[str, Any]) -> Dict | None: + def get_graph(self, data: Dict[str, Any]) -> Dict | None: service = DevelopActivitiesService() graph = Graph() result = service.execute(data=data) if result: for data in result: - start_node = json.loads(data['start_node']) - relationship = json.loads(data['relationship']) - end_node = json.loads(data['end_node']) - if start_node["type"] == 'github_user': - user = User(id=start_node["id"],name=start_node["properties"]["name"]) + start_node = json.loads(data["start_node"]) + relationship = json.loads(data["relationship"]) + end_node = json.loads(data["end_node"]) + if start_node["type"] == "github_user": + user = User( + id=start_node["id"], name=start_node["properties"]["name"] + ) graph.insert_entity(user) - if start_node["type"] == 'github_repo': - repo = Repo(id=start_node["id"],name=start_node["properties"]["name"]) + if start_node["type"] == "github_repo": + repo = Repo( + id=start_node["id"], name=start_node["properties"]["name"] + ) graph.insert_entity(repo) - if end_node["type"] == 'github_user': - user = User(id=end_node["id"],name=end_node["properties"]["name"]) + if end_node["type"] == "github_user": + user = User(id=end_node["id"], name=end_node["properties"]["name"]) graph.insert_entity(user) - if end_node["type"] == 'github_repo': - repo = Repo(id=end_node["id"],name=end_node["properties"]["name"]) + if end_node["type"] == "github_repo": + repo = Repo(id=end_node["id"], name=end_node["properties"]["name"]) graph.insert_entity(repo) if relationship["type"] == "push": - push = Push(sid=relationship["src"],tid=relationship["dst"],id=relationship["id"],count=relationship["properties"]["count"]) + push = Push( + sid=relationship["src"], + tid=relationship["dst"], + id=relationship["id"], + count=relationship["properties"]["count"], + ) graph.insert_relationship(push) if relationship["type"] == "open_pr": - open_pr = Push(sid=relationship["src"],tid=relationship["dst"],id=relationship["id"],count=relationship["properties"]["count"]) + open_pr = Push( + sid=relationship["src"], + tid=relationship["dst"], + id=relationship["id"], + count=relationship["properties"]["count"], + ) graph.insert_relationship(open_pr) if relationship["type"] == "code_review": - code_review = Push(sid=relationship["src"],tid=relationship["dst"],id=relationship["id"],count=relationship["properties"]["count"]) + code_review = Push( + sid=relationship["src"], + tid=relationship["dst"], + id=relationship["id"], + 
count=relationship["properties"]["count"], + ) graph.insert_relationship(code_review) if relationship["type"] == "open_issue": - open_issue = Push(sid=relationship["src"],tid=relationship["dst"],id=relationship["id"],count=relationship["properties"]["count"]) + open_issue = Push( + sid=relationship["src"], + tid=relationship["dst"], + id=relationship["id"], + count=relationship["properties"]["count"], + ) graph.insert_relationship(open_issue) if relationship["type"] == "comment_issue": - comment_issue = Push(sid=relationship["src"],tid=relationship["dst"],id=relationship["id"],count=relationship["properties"]["count"]) + comment_issue = Push( + sid=relationship["src"], + tid=relationship["dst"], + id=relationship["id"], + count=relationship["properties"]["count"], + ) graph.insert_relationship(comment_issue) - if os.getenv('SUMMARY_GRAPH') == 'on': + if os.getenv("SUMMARY_GRAPH") == "on": from app.services.graph_summary import GraphSummaryService + summary_service = GraphSummaryService() - summary = summary_service.execute(data = graph.to_dict()) + summary = summary_service.execute(data=graph.to_dict()) if summary: graph.update_summary(summary=summary) - return graph.to_dict() \ No newline at end of file + return graph.to_dict() diff --git a/osgraph-service-py/app/managers/fulltext_search.py b/osgraph-service-py/app/managers/fulltext_search.py index 5cdb5d2..9f606a2 100644 --- a/osgraph-service-py/app/managers/fulltext_search.py +++ b/osgraph-service-py/app/managers/fulltext_search.py @@ -2,11 +2,13 @@ from typing import List from app.services.fulltext_search import FulltextSearchService + class FulltextSearchManager: def __init__(self) -> None: pass - def search(self,data) -> List | None: + + def search(self, data) -> List | None: service = FulltextSearchService() result = service.execute(data=data) if result: - return result \ No newline at end of file + return result diff --git a/osgraph-service-py/app/managers/graph_list.py b/osgraph-service-py/app/managers/graph_list.py index 9077587..1b6fe4f 100644 --- a/osgraph-service-py/app/managers/graph_list.py +++ b/osgraph-service-py/app/managers/graph_list.py @@ -2,15 +2,16 @@ from typing import List from app.services.graph_list import GraphListService + class GraphListManager: def __init__(self) -> None: pass def get_graph_list(self) -> List | None: service = GraphListService() - graph_list:List = [] + graph_list: List = [] result = service.execute() if result: for item in result: graph_list.append(item["n"]) - return graph_list \ No newline at end of file + return graph_list diff --git a/osgraph-service-py/app/managers/os_interest.py b/osgraph-service-py/app/managers/os_interest.py index 793b444..9c21ac6 100644 --- a/osgraph-service-py/app/managers/os_interest.py +++ b/osgraph-service-py/app/managers/os_interest.py @@ -10,45 +10,63 @@ class OSInterestManager: def __init__(self) -> None: pass - def get_graph(self, data:Dict[str, Any]) -> Dict | None: + def get_graph(self, data: Dict[str, Any]) -> Dict | None: service = OSInterestService() graph = Graph() result = service.execute(data=data) if result: for data in result: - start_node = json.loads(data['start_node']) - relationship = json.loads(data['relationship']) - end_node = json.loads(data['end_node']) - if start_node["type"] == 'github_user': - user = User(id=start_node["id"],name=start_node["properties"]["name"]) + start_node = json.loads(data["start_node"]) + relationship = json.loads(data["relationship"]) + end_node = json.loads(data["end_node"]) + if start_node["type"] == 
"github_user": + user = User( + id=start_node["id"], name=start_node["properties"]["name"] + ) graph.insert_entity(user) - if start_node["type"] == 'github_repo': - repo = Repo(id=start_node["id"],name=start_node["properties"]["name"]) + if start_node["type"] == "github_repo": + repo = Repo( + id=start_node["id"], name=start_node["properties"]["name"] + ) graph.insert_entity(repo) - if start_node["type"] == 'topic': - topic = Topic(id=start_node["id"],name=start_node["properties"]["name"]) + if start_node["type"] == "topic": + topic = Topic( + id=start_node["id"], name=start_node["properties"]["name"] + ) graph.insert_entity(topic) - if end_node["type"] == 'github_user': - user = User(id=end_node["id"],name=end_node["properties"]["name"]) + if end_node["type"] == "github_user": + user = User(id=end_node["id"], name=end_node["properties"]["name"]) graph.insert_entity(user) - if end_node["type"] == 'github_repo': - repo = Repo(id=end_node["id"],name=end_node["properties"]["name"]) + if end_node["type"] == "github_repo": + repo = Repo(id=end_node["id"], name=end_node["properties"]["name"]) graph.insert_entity(repo) - if end_node["type"] == 'topic': - topic = Topic(id=end_node["id"],name=end_node["properties"]["name"]) + if end_node["type"] == "topic": + topic = Topic( + id=end_node["id"], name=end_node["properties"]["name"] + ) graph.insert_entity(topic) if relationship["type"] == "belong_to": - belong_to = Belong(sid=relationship["src"],tid=relationship["dst"],id=relationship["id"]) + belong_to = Belong( + sid=relationship["src"], + tid=relationship["dst"], + id=relationship["id"], + ) graph.insert_relationship(belong_to) if relationship["type"] == "repo": - contribute_repo = ContributeRepo(sid=relationship["src"],tid=relationship["dst"],id=relationship["id"], count=relationship["properties"]["count"]) + contribute_repo = ContributeRepo( + sid=relationship["src"], + tid=relationship["dst"], + id=relationship["id"], + count=relationship["properties"]["count"], + ) graph.insert_relationship(contribute_repo) - if os.getenv('SUMMARY_GRAPH') == 'on': + if os.getenv("SUMMARY_GRAPH") == "on": from app.services.graph_summary import GraphSummaryService + summary_service = GraphSummaryService() - summary = summary_service.execute(data = graph.to_dict()) + summary = summary_service.execute(data=graph.to_dict()) if summary: graph.update_summary(summary=summary) - return graph.to_dict() \ No newline at end of file + return graph.to_dict() diff --git a/osgraph-service-py/app/managers/os_partner.py b/osgraph-service-py/app/managers/os_partner.py index 58a875f..efae566 100644 --- a/osgraph-service-py/app/managers/os_partner.py +++ b/osgraph-service-py/app/managers/os_partner.py @@ -1,52 +1,85 @@ # app/manager/os_partner.py -from app.models.graph_view import Graph, User, CommonIssue, CommonPR, CommonStar, CommonRepo +from app.models.graph_view import ( + Graph, + User, + CommonIssue, + CommonPR, + CommonStar, + CommonRepo, +) from typing import Dict, Any from app.services.graph_services.os_partner import OSPartnerService import json import os + class OSPartnerManager: def __init__(self) -> None: pass - def get_graph(self, data:Dict[str, Any]) -> Dict | None: + def get_graph(self, data: Dict[str, Any]) -> Dict | None: service = OSPartnerService() graph = Graph() result = service.execute(data=data) if result: if result: for data in result: - start_node = json.loads(data['start_node']) - relationship = json.loads(data['relationship']) - end_node = json.loads(data['end_node']) - if start_node["type"] == 
'github_user': - src_user = User(id=start_node["id"],name=start_node["properties"]["name"]) + start_node = json.loads(data["start_node"]) + relationship = json.loads(data["relationship"]) + end_node = json.loads(data["end_node"]) + if start_node["type"] == "github_user": + src_user = User( + id=start_node["id"], name=start_node["properties"]["name"] + ) graph.insert_entity(src_user) - - if end_node["type"] == 'github_user': - tar_user = User(id=end_node["id"],name=end_node["properties"]["name"]) + + if end_node["type"] == "github_user": + tar_user = User( + id=end_node["id"], name=end_node["properties"]["name"] + ) graph.insert_entity(tar_user) - - if end_node["type"] == 'common_issue': - common_issue = CommonIssue(sid=relationship["src"],tid=relationship["dst"],id=relationship["id"],count=relationship["properties"]["count"]) + + if end_node["type"] == "common_issue": + common_issue = CommonIssue( + sid=relationship["src"], + tid=relationship["dst"], + id=relationship["id"], + count=relationship["properties"]["count"], + ) graph.insert_entity(common_issue) if relationship["type"] == "common_pr": - common_pr = CommonPR(sid=relationship["src"],tid=relationship["dst"],id=relationship["id"],count=relationship["properties"]["count"]) + common_pr = CommonPR( + sid=relationship["src"], + tid=relationship["dst"], + id=relationship["id"], + count=relationship["properties"]["count"], + ) graph.insert_relationship(common_pr) if relationship["type"] == "common_star": - common_star = CommonStar(sid=relationship["src"],tid=relationship["dst"],id=relationship["id"], count=relationship["properties"]["count"]) + common_star = CommonStar( + sid=relationship["src"], + tid=relationship["dst"], + id=relationship["id"], + count=relationship["properties"]["count"], + ) graph.insert_relationship(common_star) if relationship["type"] == "common_repo": - common_repo = CommonRepo(sid=relationship["src"],tid=relationship["dst"],id=relationship["id"], count=relationship["properties"]["count"]) + common_repo = CommonRepo( + sid=relationship["src"], + tid=relationship["dst"], + id=relationship["id"], + count=relationship["properties"]["count"], + ) graph.insert_relationship(common_repo) - if os.getenv('SUMMARY_GRAPH') == 'on': + if os.getenv("SUMMARY_GRAPH") == "on": from app.services.graph_summary import GraphSummaryService + summary_service = GraphSummaryService() - summary = summary_service.execute(data = graph.to_dict()) + summary = summary_service.execute(data=graph.to_dict()) if summary: graph.update_summary(summary=summary) - return graph.to_dict() \ No newline at end of file + return graph.to_dict() diff --git a/osgraph-service-py/app/managers/project_community.py b/osgraph-service-py/app/managers/project_community.py index c0f5661..5e30a5a 100644 --- a/osgraph-service-py/app/managers/project_community.py +++ b/osgraph-service-py/app/managers/project_community.py @@ -1,5 +1,14 @@ # app/manager/project_community.py -from app.models.graph_view import Graph, Repo, Company, Country, User, PullRequestAction, Star, Belong +from app.models.graph_view import ( + Graph, + Repo, + Company, + Country, + User, + PullRequestAction, + Star, + Belong, +) from typing import Dict, Any from app.services.graph_services.project_community import ProjectCommunityService import json @@ -10,54 +19,81 @@ class ProjectCommunityManager: def __init__(self) -> None: pass - def get_graph(self, data:Dict[str, Any]) -> Dict | None: + def get_graph(self, data: Dict[str, Any]) -> Dict | None: service = ProjectCommunityService() graph = Graph() 
result = service.execute(data=data) if result: for data in result: - start_node = json.loads(data['start_node']) - relationship = json.loads(data['relationship']) - end_node = json.loads(data['end_node']) - if start_node["type"] == 'github_user': - user = User(id=start_node["id"],name=start_node["properties"]["name"]) + start_node = json.loads(data["start_node"]) + relationship = json.loads(data["relationship"]) + end_node = json.loads(data["end_node"]) + if start_node["type"] == "github_user": + user = User( + id=start_node["id"], name=start_node["properties"]["name"] + ) graph.insert_entity(user) - if start_node["type"] == 'github_repo': - repo = Repo(id=start_node["id"],name=start_node["properties"]["name"]) + if start_node["type"] == "github_repo": + repo = Repo( + id=start_node["id"], name=start_node["properties"]["name"] + ) graph.insert_entity(repo) - if start_node["type"] == 'country': - country = Country(id=start_node["id"],name=start_node["properties"]["name"]) + if start_node["type"] == "country": + country = Country( + id=start_node["id"], name=start_node["properties"]["name"] + ) graph.insert_entity(country) - if start_node["type"] == 'company': - company = Repo(id=start_node["id"],name=start_node["properties"]["name"]) + if start_node["type"] == "company": + company = Company( + id=start_node["id"], name=start_node["properties"]["name"] + ) graph.insert_entity(company) - if end_node["type"] == 'github_user': - user = User(id=end_node["id"],name=end_node["properties"]["name"]) + if end_node["type"] == "github_user": + user = User(id=end_node["id"], name=end_node["properties"]["name"]) graph.insert_entity(user) - if end_node["type"] == 'github_repo': - repo = Repo(id=end_node["id"],name=end_node["properties"]["name"]) + if end_node["type"] == "github_repo": + repo = Repo(id=end_node["id"], name=end_node["properties"]["name"]) graph.insert_entity(repo) - if start_node["type"] == 'country': - country = Country(id=end_node["id"],name=end_node["properties"]["name"]) + if end_node["type"] == "country": + country = Country( + id=end_node["id"], name=end_node["properties"]["name"] + ) graph.insert_entity(country) - if start_node["type"] == 'company': - company = Repo(id=end_node["id"],name=end_node["properties"]["name"]) + if end_node["type"] == "company": + company = Company( + id=end_node["id"], name=end_node["properties"]["name"] + ) graph.insert_entity(company) if relationship["type"] == "PR": - pr = PullRequestAction(sid=relationship["src"],tid=relationship["dst"],id=relationship["id"],count=relationship["properties"]["count"]) + pr = PullRequestAction( + sid=relationship["src"], + tid=relationship["dst"], + id=relationship["id"], + count=relationship["properties"]["count"], + ) graph.insert_relationship(pr) if relationship["type"] == "Star": - star = Star(sid=relationship["src"],tid=relationship["dst"],id=relationship["id"],count=relationship["properties"]["count"]) + star = Star( + sid=relationship["src"], + tid=relationship["dst"], + id=relationship["id"], + count=relationship["properties"]["count"], + ) graph.insert_relationship(star) if relationship["type"] == "belong_to": - belong = Belong(sid=relationship["src"],tid=relationship["dst"],id=relationship["id"]) + belong = Belong( + sid=relationship["src"], + tid=relationship["dst"], + id=relationship["id"], + ) graph.insert_relationship(belong) - if os.getenv('SUMMARY_GRAPH') == 'on': + if os.getenv("SUMMARY_GRAPH") == "on": from app.services.graph_summary import GraphSummaryService + summary_service = GraphSummaryService() -
summary = summary_service.execute(data = graph.to_dict()) + summary = summary_service.execute(data=graph.to_dict()) if summary: graph.update_summary(summary=summary) - return graph.to_dict() \ No newline at end of file + return graph.to_dict() diff --git a/osgraph-service-py/app/managers/project_contribution.py b/osgraph-service-py/app/managers/project_contribution.py index 1b80dc0..daf5616 100644 --- a/osgraph-service-py/app/managers/project_contribution.py +++ b/osgraph-service-py/app/managers/project_contribution.py @@ -1,5 +1,14 @@ # app/manager/project_contribution.py -from app.models.graph_view import Graph, User, Repo, CodeReviewAction, CreateIssue, CommitAction, CommentIssue, CreatePR +from app.models.graph_view import ( + Graph, + User, + Repo, + CodeReviewAction, + CreateIssue, + CommitAction, + CommentIssue, + CreatePR, +) from typing import Dict, Any from app.services.graph_services.project_contribution import ProjectContributionService import json @@ -10,47 +19,77 @@ class ProjectContributionManager: def __init__(self) -> None: pass - def get_graph(self, data:Dict[str, Any]) -> Dict | None: + def get_graph(self, data: Dict[str, Any]) -> Dict | None: service = ProjectContributionService() graph = Graph() result = service.execute(data=data) if result: for data in result: - start_node = json.loads(data['start_node']) - relationship = json.loads(data['relationship']) - end_node = json.loads(data['end_node']) - if start_node["type"] == 'github_user': - user = User(id=start_node["id"],name=start_node["properties"]["name"]) + start_node = json.loads(data["start_node"]) + relationship = json.loads(data["relationship"]) + end_node = json.loads(data["end_node"]) + if start_node["type"] == "github_user": + user = User( + id=start_node["id"], name=start_node["properties"]["name"] + ) graph.insert_entity(user) - if start_node["type"] == 'github_repo': - repo = Repo(id=start_node["id"],name=start_node["properties"]["name"]) + if start_node["type"] == "github_repo": + repo = Repo( + id=start_node["id"], name=start_node["properties"]["name"] + ) graph.insert_entity(repo) - if end_node["type"] == 'github_user': - user = User(id=end_node["id"],name=end_node["properties"]["name"]) + if end_node["type"] == "github_user": + user = User(id=end_node["id"], name=end_node["properties"]["name"]) graph.insert_entity(user) - if end_node["type"] == 'github_repo': - repo = Repo(id=end_node["id"],name=end_node["properties"]["name"]) + if end_node["type"] == "github_repo": + repo = Repo(id=end_node["id"], name=end_node["properties"]["name"]) graph.insert_entity(repo) if relationship["type"] == "open_issue": - create_issue = CreateIssue(sid=relationship["src"],tid=relationship["dst"],id=relationship["id"],count=relationship["properties"]["count"]) + create_issue = CreateIssue( + sid=relationship["src"], + tid=relationship["dst"], + id=relationship["id"], + count=relationship["properties"]["count"], + ) graph.insert_relationship(create_issue) if relationship["type"] == "push": - commit = CommitAction(sid=relationship["src"],tid=relationship["dst"],id=relationship["id"],count=relationship["properties"]["count"]) + commit = CommitAction( + sid=relationship["src"], + tid=relationship["dst"], + id=relationship["id"], + count=relationship["properties"]["count"], + ) graph.insert_relationship(commit) if relationship["type"] == "open_pr": - create_pr = CreatePR(sid=relationship["src"],tid=relationship["dst"],id=relationship["id"],count=relationship["properties"]["count"]) + create_pr = CreatePR( + 
sid=relationship["src"], + tid=relationship["dst"], + id=relationship["id"], + count=relationship["properties"]["count"], + ) graph.insert_relationship(create_pr) if relationship["type"] == "code_review": - cr = CodeReviewAction(sid=relationship["src"],tid=relationship["dst"],id=relationship["id"],count=relationship["properties"]["count"]) + cr = CodeReviewAction( + sid=relationship["src"], + tid=relationship["dst"], + id=relationship["id"], + count=relationship["properties"]["count"], + ) graph.insert_relationship(cr) if relationship["type"] == "comment_issue": - comment_issue = CommentIssue(sid=relationship["src"],tid=relationship["dst"],id=relationship["id"],count=relationship["properties"]["count"]) + comment_issue = CommentIssue( + sid=relationship["src"], + tid=relationship["dst"], + id=relationship["id"], + count=relationship["properties"]["count"], + ) graph.insert_relationship(comment_issue) - if os.getenv('SUMMARY_GRAPH') == 'on': + if os.getenv("SUMMARY_GRAPH") == "on": from app.services.graph_summary import GraphSummaryService + summary_service = GraphSummaryService() - summary = summary_service.execute(data = graph.to_dict()) + summary = summary_service.execute(data=graph.to_dict()) if summary: graph.update_summary(summary=summary) - return graph.to_dict() \ No newline at end of file + return graph.to_dict() diff --git a/osgraph-service-py/app/managers/project_ecology.py b/osgraph-service-py/app/managers/project_ecology.py index 167e1f9..25d52d1 100644 --- a/osgraph-service-py/app/managers/project_ecology.py +++ b/osgraph-service-py/app/managers/project_ecology.py @@ -10,39 +10,55 @@ class ProjectEcologyManager: def __init__(self) -> None: pass - def get_graph(self, data:Dict[str, Any]) -> Dict | None: + def get_graph(self, data: Dict[str, Any]) -> Dict | None: service = ProjectEcologyService() graph = Graph() result = service.execute(data=data) if result: for data in result: - start_node = json.loads(data['start_node']) - relationship = json.loads(data['relationship']) - end_node = json.loads(data['end_node']) - if start_node["type"] == 'github_repo': - user = Repo(id=start_node["id"],name=start_node["properties"]["name"]) + start_node = json.loads(data["start_node"]) + relationship = json.loads(data["relationship"]) + end_node = json.loads(data["end_node"]) + if start_node["type"] == "github_repo": + user = Repo( + id=start_node["id"], name=start_node["properties"]["name"] + ) graph.insert_entity(user) - if start_node["type"] == 'github_organization': - organization = Orgnization(id=start_node["id"],name=start_node["properties"]["name"]) + if start_node["type"] == "github_organization": + organization = Orgnization( + id=start_node["id"], name=start_node["properties"]["name"] + ) graph.insert_entity(organization) - if end_node["type"] == 'github_repo': - user = Repo(id=end_node["id"],name=end_node["properties"]["name"]) + if end_node["type"] == "github_repo": + user = Repo(id=end_node["id"], name=end_node["properties"]["name"]) graph.insert_entity(user) - if end_node["type"] == 'github_organization': - organization = Orgnization(id=end_node["id"],name=end_node["properties"]["name"]) + if end_node["type"] == "github_organization": + organization = Orgnization( + id=end_node["id"], name=end_node["properties"]["name"] + ) graph.insert_entity(organization) if relationship["type"] == "belong_to": - belong_to = Belong(sid=relationship["src"],tid=relationship["dst"],id=relationship["id"]) + belong_to = Belong( + sid=relationship["src"], + tid=relationship["dst"], + 
id=relationship["id"], + ) graph.insert_relationship(belong_to) if relationship["type"] == "common_developer": - common_developer = CommonDevelop(sid=relationship["src"],tid=relationship["dst"],id=relationship["id"], count=relationship["properties"]["count"]) + common_developer = CommonDevelop( + sid=relationship["src"], + tid=relationship["dst"], + id=relationship["id"], + count=relationship["properties"]["count"], + ) graph.insert_relationship(common_developer) - if os.getenv('SUMMARY_GRAPH') == 'on': + if os.getenv("SUMMARY_GRAPH") == "on": from app.services.graph_summary import GraphSummaryService + summary_service = GraphSummaryService() - summary = summary_service.execute(data = graph.to_dict()) + summary = summary_service.execute(data=graph.to_dict()) if summary: graph.update_summary(summary=summary) - return graph.to_dict() \ No newline at end of file + return graph.to_dict() diff --git a/osgraph-service-py/app/models/default_graph.py b/osgraph-service-py/app/models/default_graph.py index 31add8e..5a44c0b 100644 --- a/osgraph-service-py/app/models/default_graph.py +++ b/osgraph-service-py/app/models/default_graph.py @@ -1,14 +1,17 @@ from dataclasses import dataclass, asdict from typing import Any, Optional -import os +import os from dotenv import load_dotenv + load_dotenv() -graph_name = os.getenv('TUGRAPHDB_OSGRAPH_GITHUB_GRAPH_NAME') +graph_name = os.getenv("TUGRAPHDB_OSGRAPH_GITHUB_GRAPH_NAME") + + class Vertex: label: str primary: str - type: str = 'vertex' + type: str = "vertex" _props: Optional[Any] = None def __init__(self, label: str, primary: str): @@ -20,12 +23,15 @@ def props(self) -> Any: return self._props def __repr__(self): - return (f"{self.__class__.__name__}(label={self.label}, primary={self.primary}, " - f"type={self.type}, props={self.props})") + return ( + f"{self.__class__.__name__}(label={self.label}, primary={self.primary}, " + f"type={self.type}, props={self.props})" + ) + class Edge: label: str - type: str = 'edge' + type: str = "edge" source: Any target: Any _props: Optional[Any] = None @@ -40,8 +46,11 @@ def props(self) -> Any: return self._props def __repr__(self): - return (f"{self.__class__.__name__}(label={self.label}, primary={self.primary}, " - f"type={self.type}, source={self.source}, target={self.target}, props={self.props})") + return ( + f"{self.__class__.__name__}(label={self.label}, primary={self.primary}, " + f"type={self.type}, source={self.source}, target={self.target}, props={self.props})" + ) + @dataclass class GitHubUserProps: @@ -50,6 +59,7 @@ class GitHubUserProps: company: Optional[str] = None country: Optional[str] = None + class GitHubUser(Vertex): def __init__(self, props: GitHubUserProps): if not isinstance(props, GitHubUserProps): @@ -191,6 +201,7 @@ def __init__(self, props: Optional[GitHubOrganizationProps] = None): super().__init__(label="github_organization", primary="id") self._props = props + @dataclass class PushProps: commits: Optional[int] = None @@ -388,6 +399,3 @@ class UseLicense(Edge): def __init__(self, source, target): super().__init__(label="use_license", source=source, target=target) self._props = None - - - diff --git a/osgraph-service-py/app/models/graph_view.py b/osgraph-service-py/app/models/graph_view.py index a29b7b0..0cb5e1f 100644 --- a/osgraph-service-py/app/models/graph_view.py +++ b/osgraph-service-py/app/models/graph_view.py @@ -3,6 +3,8 @@ from dataclasses import dataclass, field, asdict from typing import List, Optional, Literal import json + + @dataclass class Vertex: id: int | str @@ -10,22 
+12,24 @@ class Vertex: comment: Optional[str] = None source: Optional[str] = None size: Optional[int] = None + @property def vertex_type(self): return self.__class__.__name__ def to_dict(self) -> dict: data = asdict(self) - data['type'] = self.vertex_type + data["type"] = self.vertex_type return data - + + @dataclass class Edge: sid: str | int tid: str | int id: str | int name: str - direction: Literal['both','out','in'] = 'out' + direction: Literal["both", "out", "in"] = "out" comment: Optional[int] = None weight: Optional[int] = None count: Optional[int] = None @@ -36,16 +40,17 @@ def edge_type(self): def to_dict(self) -> dict: data = asdict(self) - data['type'] = self.edge_type + data["type"] = self.edge_type return data + @dataclass class Graph: vertices: List[Vertex] = field(default_factory=list) edges: List[Edge] = field(default_factory=list) - summary:str = '' + summary: str = "" - def update_summary(self,summary): + def update_summary(self, summary): self.summary = summary def insert_entity(self, new_vertex: Vertex): @@ -59,9 +64,11 @@ def insert_entity(self, new_vertex: Vertex): def insert_relationship(self, new_edge: Edge): for i, edge in enumerate(self.edges): - if (edge.sid == new_edge.sid and - edge.tid == new_edge.tid and - edge.edge_type == new_edge.edge_type): + if ( + edge.sid == new_edge.sid + and edge.tid == new_edge.tid + and edge.edge_type == new_edge.edge_type + ): # Update existing relationship self.edges[i] = new_edge return @@ -71,149 +78,168 @@ def insert_relationship(self, new_edge: Edge): def filter_entities(self, **criteria): """Filter entities based on provided keyword arguments.""" return [ - vertex for vertex in self.vertices + vertex + for vertex in self.vertices if all(getattr(vertex, key) == value for key, value in criteria.items()) ] def filter_relationships(self, **criteria): """Filter relationships based on provided keyword arguments.""" return [ - edge for edge in self.edges + edge + for edge in self.edges if all(getattr(edge, key) == value for key, value in criteria.items()) ] - + def to_dict(self) -> str: graph_dict = { "vertices": [v.to_dict() for v in self.vertices], "edges": [e.to_dict() for e in self.edges], - "summary": self.summary + "summary": self.summary, } return graph_dict - + + # vertex class class User(Vertex): pass + class Repo(Vertex): pass + class Orgnization(Vertex): pass + class Country(Vertex): pass - + + class Company(Vertex): pass + class Topic(Vertex): pass + class Contibution(Vertex): pass + class PR(Contibution): pass + class Issue(Contibution): pass + class Comment(Contibution): pass + class CodeReview(Comment): pass + class Commit(Contibution): pass + # edge class @dataclass class Create(Edge): - name: Literal['创建'] = field(default='创建', init=False) + name: Literal["创建"] = field(default="创建", init=False) + @dataclass class CreatePR(Create): - name: Literal['创建 PR'] = field(default='创建 PR', init=False) + name: Literal["创建 PR"] = field(default="创建 PR", init=False) + @dataclass class CreateIssue(Create): - name: Literal['创建 Issue'] = field(default='创建 Issue', init=False) + name: Literal["创建 Issue"] = field(default="创建 Issue", init=False) + @dataclass class CreateCommit(Create): - name: Literal['创建 Commit'] = field(default='创建 Commit', init=False) + name: Literal["创建 Commit"] = field(default="创建 Commit", init=False) + @dataclass class CreateComment(Create): - name: Literal['创建 Comment'] = field(default='创建 Comment', init=False) + name: Literal["创建 Comment"] = field(default="创建 Comment", init=False) + -@dataclass +@dataclass 
class CreateCR(Create): - name: Literal['创建 CR'] = field(default='创建 CR', init=False) + name: Literal["创建 CR"] = field(default="创建 CR", init=False) + @dataclass class CodeReviewAction(Edge): - name: Literal['CR'] = field(default='CR', init=False) + name: Literal["CR"] = field(default="CR", init=False) + @dataclass class Belong(Edge): - name: Literal['属于'] = field(default='属于', init=False) + name: Literal["属于"] = field(default="属于", init=False) + @dataclass class Star(Edge): - name: Literal['Star'] = field(default='Star', init=False) + name: Literal["Star"] = field(default="Star", init=False) + @dataclass class PullRequestAction(Edge): - name: Literal['PR'] = field(default='PR', init=False) + name: Literal["PR"] = field(default="PR", init=False) + @dataclass class Push(PullRequestAction): - name: Literal['推送'] = field(default='推送', init=False) + name: Literal["推送"] = field(default="推送", init=False) + @dataclass class CommitAction(PullRequestAction): - name: Literal['提交'] = field(default='提交', init=False) + name: Literal["提交"] = field(default="提交", init=False) + @dataclass class CommentIssue(Edge): - name: Literal['评论 Issue'] = field(default='评论 Issue', init=False) + name: Literal["评论 Issue"] = field(default="评论 Issue", init=False) + @dataclass class CommonIssue(Edge): - name: Literal['合作 Issue'] = field(default='合作 Issue', init=False) + name: Literal["合作 Issue"] = field(default="合作 Issue", init=False) + @dataclass class CommonPR(Edge): - name: Literal['合作 PR'] = field(default='合作 PR', init=False) + name: Literal["合作 PR"] = field(default="合作 PR", init=False) + @dataclass class CommonStar(Edge): - name: Literal['共同关注'] = field(default='共同关注', init=False) + name: Literal["共同关注"] = field(default="共同关注", init=False) + @dataclass class CommonRepo(Edge): - name: Literal['合作项目'] = field(default='合作项目', init=False) + name: Literal["合作项目"] = field(default="合作项目", init=False) + @dataclass class CommonDevelop(Edge): - name: Literal['共建'] = field(default='共建', init=False) + name: Literal["共建"] = field(default="共建", init=False) + @dataclass class ContributeRepo(Edge): - name: Literal['贡献项目'] = field(default='贡献项目', init=False) - - - - - - - - - - - - - - + name: Literal["贡献项目"] = field(default="贡献项目", init=False) diff --git a/osgraph-service-py/app/models/system_graph.py b/osgraph-service-py/app/models/system_graph.py index 8081107..3de99dc 100644 --- a/osgraph-service-py/app/models/system_graph.py +++ b/osgraph-service-py/app/models/system_graph.py @@ -2,6 +2,7 @@ from dataclasses import dataclass + @dataclass class GraphServiceProps: name: str = "" @@ -13,15 +14,13 @@ class GraphServiceProps: class GraphService: label: str = "graph_service" primary: str = "name" - type: str = 'VERTEX' + type: str = "VERTEX" props: GraphServiceProps = None + def __init__(self, name: str, comment: str, input_types: str, filter_keys: str): self.props = GraphServiceProps( - name=name, - comment=comment, - input_types=input_types, - filter_keys=filter_keys + name=name, comment=comment, input_types=input_types, filter_keys=filter_keys ) - + def __repr__(self): - return (f"GitHubUser(label={self.label}, pk={self.primary}, type={self.type}, props={self.props})") \ No newline at end of file + return f"GitHubUser(label={self.label}, pk={self.primary}, type={self.type}, props={self.props})" diff --git a/osgraph-service-py/app/routes/develop_activities.py b/osgraph-service-py/app/routes/develop_activities.py index 4fee01a..92fa517 100644 --- a/osgraph-service-py/app/routes/develop_activities.py +++ 
b/osgraph-service-py/app/routes/develop_activities.py @@ -6,9 +6,12 @@ import logging from dataclasses import asdict -develop_activities_bp = Blueprint('project_activities', __name__, url_prefix='/api/graph') +develop_activities_bp = Blueprint( + "project_activities", __name__, url_prefix="/api/graph" +) logger = logging.getLogger(__name__) + class DevelopActivitiesController: def __init__(self): self.manager = DevelopActivitiesManager() @@ -24,9 +27,11 @@ def get_activities_graph(self, data: Dict[str, Any]) -> Dict[str, Any]: logger.exception("Internal server error") return ResponseHandler.error("Internal server error", 500) + controller = DevelopActivitiesController() -@develop_activities_bp.route('/develop-activities', methods=['GET']) + +@develop_activities_bp.route("/develop-activities", methods=["GET"]) def get_project_activities(): data = request.args.to_dict() response = controller.get_activities_graph(data) diff --git a/osgraph-service-py/app/routes/fulltext_search.py b/osgraph-service-py/app/routes/fulltext_search.py index 1e46c76..d26c8af 100644 --- a/osgraph-service-py/app/routes/fulltext_search.py +++ b/osgraph-service-py/app/routes/fulltext_search.py @@ -6,13 +6,15 @@ import logging from dataclasses import asdict -fulltext_search_bp = Blueprint('fulltext_search', __name__, url_prefix='/api/graph') +fulltext_search_bp = Blueprint("fulltext_search", __name__, url_prefix="/api/graph") logger = logging.getLogger(__name__) + class FulltextSearchController: def __init__(self): self.manager = FulltextSearchManager() - def search(self,data) -> Dict[str, Any]: + + def search(self, data) -> Dict[str, Any]: try: result = self.manager.search(data=data) return ResponseHandler.success(result) @@ -23,9 +25,11 @@ def search(self,data) -> Dict[str, Any]: logger.exception("Internal server error") return ResponseHandler.error("Internal server error", 500) + controller = FulltextSearchController() -@fulltext_search_bp.route('/fulltext-search', methods=['GET']) + +@fulltext_search_bp.route("/fulltext-search", methods=["GET"]) def get_os_interest(): data = request.args.to_dict() response = controller.search(data) diff --git a/osgraph-service-py/app/routes/graph_list.py b/osgraph-service-py/app/routes/graph_list.py index 8efb950..0eba253 100644 --- a/osgraph-service-py/app/routes/graph_list.py +++ b/osgraph-service-py/app/routes/graph_list.py @@ -6,12 +6,14 @@ import logging from dataclasses import asdict -graph_list_bp = Blueprint('graph_list', __name__, url_prefix='/api/graph') +graph_list_bp = Blueprint("graph_list", __name__, url_prefix="/api/graph") logger = logging.getLogger(__name__) + class OSInterestController: def __init__(self): self.manager = GraphListManager() + def get_graph_list(self) -> Dict[str, Any]: try: result = self.manager.get_graph_list() @@ -23,9 +25,11 @@ def get_graph_list(self) -> Dict[str, Any]: logger.exception("Internal server error") return ResponseHandler.error("Internal server error", 500) + controller = OSInterestController() -@graph_list_bp.route('/list', methods=['GET']) + +@graph_list_bp.route("/list", methods=["GET"]) def get_os_interest(): response = controller.get_graph_list() return ResponseHandler.jsonify_response(response) diff --git a/osgraph-service-py/app/routes/os_interest.py b/osgraph-service-py/app/routes/os_interest.py index 7bcb850..32a721f 100644 --- a/osgraph-service-py/app/routes/os_interest.py +++ b/osgraph-service-py/app/routes/os_interest.py @@ -6,9 +6,10 @@ import logging from dataclasses import asdict -os_interest_bp = 
Blueprint('os_interest', __name__, url_prefix='/api/graph') +os_interest_bp = Blueprint("os_interest", __name__, url_prefix="/api/graph") logger = logging.getLogger(__name__) + class OSInterestController: def __init__(self): self.manager = OSInterestManager() @@ -27,9 +28,11 @@ def get_interest_graph(self, data: Dict[str, Any]) -> Dict[str, Any]: logger.exception("Internal server error") return ResponseHandler.error("Internal server error", 500) + controller = OSInterestController() -@os_interest_bp.route('/os-interest', methods=['GET']) + +@os_interest_bp.route("/os-interest", methods=["GET"]) def get_os_interest(): """ 获取项目贡献的图谱 diff --git a/osgraph-service-py/app/routes/os_partner.py b/osgraph-service-py/app/routes/os_partner.py index fa3f528..2330095 100644 --- a/osgraph-service-py/app/routes/os_partner.py +++ b/osgraph-service-py/app/routes/os_partner.py @@ -6,9 +6,10 @@ import logging from dataclasses import asdict -os_partner_bp = Blueprint('os_partner', __name__, url_prefix='/api/graph') +os_partner_bp = Blueprint("os_partner", __name__, url_prefix="/api/graph") logger = logging.getLogger(__name__) + class OSPartnerController: def __init__(self): self.manager = OSPartnerManager() @@ -24,9 +25,11 @@ def get_partner_graph(self, data: Dict[str, Any]) -> Dict[str, Any]: logger.exception("Internal server error") return ResponseHandler.error("Internal server error", 500) + controller = OSPartnerController() -@os_partner_bp.route('/os-partner', methods=['GET']) + +@os_partner_bp.route("/os-partner", methods=["GET"]) def get_os_partner(): data = request.args.to_dict() response = controller.get_partner_graph(data) diff --git a/osgraph-service-py/app/routes/project_community.py b/osgraph-service-py/app/routes/project_community.py index e598f28..87e5b7a 100644 --- a/osgraph-service-py/app/routes/project_community.py +++ b/osgraph-service-py/app/routes/project_community.py @@ -6,9 +6,10 @@ import logging from dataclasses import asdict -project_community_bp = Blueprint('project_community', __name__, url_prefix='/api/graph') +project_community_bp = Blueprint("project_community", __name__, url_prefix="/api/graph") logger = logging.getLogger(__name__) + class ProjectCommunityController: def __init__(self): self.manager = ProjectCommunityManager() @@ -24,9 +25,11 @@ def get_community_graph(self, data: Dict[str, Any]) -> Dict[str, Any]: logger.exception("Internal server error") return ResponseHandler.error("Internal server error", 500) + controller = ProjectCommunityController() -@project_community_bp.route('/project-community', methods=['GET']) + +@project_community_bp.route("/project-community", methods=["GET"]) def get_project_community(): data = request.args.to_dict() response = controller.get_community_graph(data) diff --git a/osgraph-service-py/app/routes/project_contribution.py b/osgraph-service-py/app/routes/project_contribution.py index 1599987..3032cc6 100644 --- a/osgraph-service-py/app/routes/project_contribution.py +++ b/osgraph-service-py/app/routes/project_contribution.py @@ -6,9 +6,12 @@ import logging from dataclasses import asdict -project_contribution_bp = Blueprint('project_contribution', __name__, url_prefix='/api/graph') +project_contribution_bp = Blueprint( + "project_contribution", __name__, url_prefix="/api/graph" +) logger = logging.getLogger(__name__) + class ProjectContributionController: def __init__(self): self.manager = ProjectContributionManager() @@ -24,9 +27,11 @@ def get_contribution_graph(self, data: Dict[str, Any]) -> Dict[str, Any]: logger.exception("Internal 
server error") return ResponseHandler.error("Internal server error", 500) + controller = ProjectContributionController() -@project_contribution_bp.route('/project-contribution', methods=['GET']) + +@project_contribution_bp.route("/project-contribution", methods=["GET"]) def get_project_contribution(): data = request.args.to_dict() response = controller.get_contribution_graph(data) diff --git a/osgraph-service-py/app/routes/project_ecology.py b/osgraph-service-py/app/routes/project_ecology.py index 9b7cc4c..bdcc1df 100644 --- a/osgraph-service-py/app/routes/project_ecology.py +++ b/osgraph-service-py/app/routes/project_ecology.py @@ -6,9 +6,10 @@ import logging from dataclasses import asdict -project_ecology_bp = Blueprint('project_ecology', __name__, url_prefix='/api/graph') +project_ecology_bp = Blueprint("project_ecology", __name__, url_prefix="/api/graph") logger = logging.getLogger(__name__) + class ProjectEcologyController: def __init__(self): self.manager = ProjectEcologyManager() @@ -24,9 +25,11 @@ def get_ecology_graph(self, data: Dict[str, Any]) -> Dict[str, Any]: logger.exception("Internal server error") return ResponseHandler.error("Internal server error", 500) + controller = ProjectEcologyController() -@project_ecology_bp.route('/project-ecology', methods=['GET']) + +@project_ecology_bp.route("/project-ecology", methods=["GET"]) def get_project_ecology(): data = request.args.to_dict() response = controller.get_ecology_graph(data) diff --git a/osgraph-service-py/app/services/__init__.py b/osgraph-service-py/app/services/__init__.py index ca4ad9a..3d2d2b5 100644 --- a/osgraph-service-py/app/services/__init__.py +++ b/osgraph-service-py/app/services/__init__.py @@ -1,10 +1,14 @@ # app/services/__init__.py from flask import current_app -from app.services.graph_services.project_contribution import ProjectContributionServiceConfig +from app.services.graph_services.project_contribution import ( + ProjectContributionServiceConfig, +) from app.services.graph_services.project_ecology import ProjectEcologyServiceConfig from app.services.graph_services.project_community import ProjectCommunityServiceConfig -from app.services.graph_services.develop_activities import DevelopActivitiesServiceConfig +from app.services.graph_services.develop_activities import ( + DevelopActivitiesServiceConfig, +) from app.services.graph_services.os_partner import OSPartnerServiceConfig from app.services.graph_services.os_interest import OSInterestServiceConfig @@ -14,13 +18,14 @@ ProjectCommunityServiceConfig(), DevelopActivitiesServiceConfig(), OSPartnerServiceConfig(), - OSInterestServiceConfig() + OSInterestServiceConfig(), ] + def register_all_services(): """ 遍历所有的服务配置,并在应用启动时自动注册。 """ with current_app.app_context(): for config in SERVICE_CONFIGS: - config.register_service() \ No newline at end of file + config.register_service() diff --git a/osgraph-service-py/app/services/fulltext_search.py b/osgraph-service-py/app/services/fulltext_search.py index 38f9c10..dfa4d90 100644 --- a/osgraph-service-py/app/services/fulltext_search.py +++ b/osgraph-service-py/app/services/fulltext_search.py @@ -1,10 +1,12 @@ from typing import Any from app.dal.search.es import ElasticsearchClient from app.utils.custom_exceptions import InvalidUsage -import os +import os from dotenv import load_dotenv + load_dotenv() + class FulltextSearchService: def execute(self, data) -> Any: if "index-name" not in data: @@ -13,11 +15,7 @@ def execute(self, data) -> Any: keyword = data["keyword"] if not keyword: return [] - query = { - 
"match": { - "name": keyword - } - } + query = {"match": {"name": keyword}} client = ElasticsearchClient() - result = client.search(index = index_name, query = query, size=10) + result = client.search(index=index_name, query=query, size=10) return result diff --git a/osgraph-service-py/app/services/graph_list.py b/osgraph-service-py/app/services/graph_list.py index db23484..48ee853 100644 --- a/osgraph-service-py/app/services/graph_list.py +++ b/osgraph-service-py/app/services/graph_list.py @@ -1,13 +1,15 @@ from typing import Any from app.dal.graph.tugraph import GraphClient -import os +import os from dotenv import load_dotenv + load_dotenv() + class GraphListService: def execute(self) -> Any: graph_name = os.getenv("TUGRAPHDB_OSGRAPH_SYSTEM_GRAPH_NAME") client = GraphClient(graph_name) - cypher = f'''MATCH (n:graph_service) RETURN n''' + cypher = f"""MATCH (n:graph_service) RETURN n""" result = client.run(cypher) return result diff --git a/osgraph-service-py/app/services/graph_services/__init__.py b/osgraph-service-py/app/services/graph_services/__init__.py index 5f6aac3..66257bc 100644 --- a/osgraph-service-py/app/services/graph_services/__init__.py +++ b/osgraph-service-py/app/services/graph_services/__init__.py @@ -1,4 +1 @@ # app/services/graph_services/__init__.py - - - diff --git a/osgraph-service-py/app/services/graph_services/base.py b/osgraph-service-py/app/services/graph_services/base.py index 8d33eec..f456a8b 100644 --- a/osgraph-service-py/app/services/graph_services/base.py +++ b/osgraph-service-py/app/services/graph_services/base.py @@ -9,6 +9,7 @@ load_dotenv() + @dataclass class FilterKey: key: str @@ -16,6 +17,7 @@ class FilterKey: default: Union[Callable[[], Any], Any] required: bool = False + @dataclass class ServiceConfig: name: str @@ -29,17 +31,19 @@ def register_service(self) -> None: properties = { "name": self.name, "comment": self.comment, - "input_types": ','.join(self.inputTypes), - "filter_keys": ','.join([f"{key.key}:{key.default}" for key in self.filterKeys]) + "input_types": ",".join(self.inputTypes), + "filter_keys": ",".join( + [f"{key.key}:{key.default}" for key in self.filterKeys] + ), } service = GraphService( name=self.name, comment=self.comment, input_types=properties["input_types"], - filter_keys=properties["filter_keys"] + filter_keys=properties["filter_keys"], ) - client.upsert_vertex(GraphService.label,service.props) - + client.upsert_vertex(GraphService.label, service.props) + class BaseService(ABC): def __init__(self, config: ServiceConfig): @@ -58,14 +62,20 @@ def validate_params(self, data: Dict[str, Any]) -> Dict[str, Any]: if filter_key.key not in data: if filter_key.required: raise InvalidUsage(f"Missing required filter key: {filter_key.key}") - value = filter_key.default() if callable(filter_key.default) else filter_key.default + value = ( + filter_key.default() + if callable(filter_key.default) + else filter_key.default + ) else: value = data[filter_key.key] if filter_key.type == "int": try: validated_filters[filter_key.key] = int(value) except ValueError: - raise InvalidUsage(f"Invalid value for {filter_key.key}: must be an integer.") + raise InvalidUsage( + f"Invalid value for {filter_key.key}: must be an integer." 
+ ) elif filter_key.type == "str": validated_filters[filter_key.key] = str(value) else: diff --git a/osgraph-service-py/app/services/graph_services/develop_activities.py b/osgraph-service-py/app/services/graph_services/develop_activities.py index 0b5c15e..4c221f8 100644 --- a/osgraph-service-py/app/services/graph_services/develop_activities.py +++ b/osgraph-service-py/app/services/graph_services/develop_activities.py @@ -3,16 +3,20 @@ from app.services.graph_services.base import BaseService, ServiceConfig, FilterKey from app.dal.graph.tugraph import GraphClient from app.dal.search.es import ElasticsearchClient -import os +import os from dotenv import load_dotenv + load_dotenv() + def get_default_start_time() -> int: return int((datetime.now() - timedelta(days=30)).timestamp() * 1000) + def get_default_end_time() -> int: return int(datetime.now().timestamp() * 1000) + class DevelopActivitiesServiceConfig(ServiceConfig): def __init__(self): super().__init__( @@ -20,10 +24,11 @@ def __init__(self): comment="这是一个开发活动图谱", inputTypes=["GitHubUser"], filterKeys=[ - FilterKey(key="topn", type="int", default=50, required=False), - ] + FilterKey(key="topn", type="int", default=50, required=False), + ], ) + class DevelopActivitiesService(BaseService): def __init__(self): super().__init__(DevelopActivitiesServiceConfig()) @@ -33,16 +38,12 @@ def execute(self, data: Dict[str, Any]) -> Any: github_user: str = validated_data["GitHubUser"] topn: int = validated_data["topn"] | 50 es = ElasticsearchClient() - query = { - "term": { - "name.keyword": github_user - } - } - res = es.search(index='github_user',query=query) + query = {"term": {"name.keyword": github_user}} + res = es.search(index="github_user", query=query) if len(res): develop_id = res[0]["id"] graph_name = os.getenv("TUGRAPHDB_OSGRAPH_GITHUB_GRAPH_NAME") client = GraphClient(graph_name) - cypher = f'''CALL osgraph.get_developer_contribution('{{"developer_id":{develop_id},"top_n":{topn}}}') YIELD start_node, relationship, end_node return start_node, relationship, end_node''' + cypher = f"""CALL osgraph.get_developer_contribution('{{"developer_id":{develop_id},"top_n":{topn}}}') YIELD start_node, relationship, end_node return start_node, relationship, end_node""" result = client.run(cypher) return result diff --git a/osgraph-service-py/app/services/graph_services/os_interest.py b/osgraph-service-py/app/services/graph_services/os_interest.py index b8d8fba..e5c55e4 100644 --- a/osgraph-service-py/app/services/graph_services/os_interest.py +++ b/osgraph-service-py/app/services/graph_services/os_interest.py @@ -3,16 +3,20 @@ from app.services.graph_services.base import BaseService, ServiceConfig, FilterKey from app.dal.graph.tugraph import GraphClient from app.dal.search.es import ElasticsearchClient -import os +import os from dotenv import load_dotenv + load_dotenv() + def get_default_start_time() -> int: return int((datetime.now() - timedelta(days=30)).timestamp() * 1000) + def get_default_end_time() -> int: return int(datetime.now().timestamp() * 1000) + class OSInterestServiceConfig(ServiceConfig): def __init__(self): super().__init__( @@ -21,10 +25,13 @@ def __init__(self): inputTypes=["GitHubUser"], filterKeys=[ FilterKey(key="topic-topn", type="int", default=50, required=False), - FilterKey(key="githubrepo-topn", type="int", default=50, required=False), - ] + FilterKey( + key="githubrepo-topn", type="int", default=50, required=False + ), + ], ) + class OSInterestService(BaseService): def __init__(self): 
super().__init__(OSInterestServiceConfig()) @@ -35,16 +42,12 @@ def execute(self, data: Dict[str, Any]) -> Any: topic_topn: int = validated_data["topic-topn"] repo_topn: int = validated_data["githubrepo-topn"] es = ElasticsearchClient() - query = { - "term": { - "name.keyword": user_name - } - } - res = es.search(index='github_user',query=query) + query = {"term": {"name.keyword": user_name}} + res = es.search(index="github_user", query=query) if len(res): user_id = res[0]["id"] graph_name = os.getenv("TUGRAPHDB_OSGRAPH_GITHUB_GRAPH_NAME") client = GraphClient(graph_name) - cypher = f'''CALL osgraph.get_developer_repos_profile('{{"developer_id":{user_id},"topic_topn":{topic_topn},"repo_topn":{repo_topn}}}') YIELD start_node, relationship, end_node return start_node, relationship, end_node''' + cypher = f"""CALL osgraph.get_developer_repos_profile('{{"developer_id":{user_id},"topic_topn":{topic_topn},"repo_topn":{repo_topn}}}') YIELD start_node, relationship, end_node return start_node, relationship, end_node""" result = client.run(cypher) return result diff --git a/osgraph-service-py/app/services/graph_services/os_partner.py b/osgraph-service-py/app/services/graph_services/os_partner.py index 48a5a0e..ef460a4 100644 --- a/osgraph-service-py/app/services/graph_services/os_partner.py +++ b/osgraph-service-py/app/services/graph_services/os_partner.py @@ -3,16 +3,20 @@ from app.services.graph_services.base import BaseService, ServiceConfig, FilterKey from app.dal.graph.tugraph import GraphClient from app.dal.search.es import ElasticsearchClient -import os +import os from dotenv import load_dotenv + load_dotenv() + def get_default_start_time() -> int: return int((datetime.now() - timedelta(days=30)).timestamp() * 1000) + def get_default_end_time() -> int: return int(datetime.now().timestamp() * 1000) + class OSPartnerServiceConfig(ServiceConfig): def __init__(self): super().__init__( @@ -21,9 +25,10 @@ def __init__(self): inputTypes=["GitHubUser"], filterKeys=[ FilterKey(key="topn", type="int", default=50, required=False), - ] + ], ) + class OSPartnerService(BaseService): def __init__(self): super().__init__(OSPartnerServiceConfig()) @@ -33,16 +38,12 @@ def execute(self, data: Dict[str, Any]) -> Any: github_user: str = validated_data["GitHubUser"] topn: int = validated_data["topn"] es = ElasticsearchClient() - query = { - "term": { - "name.keyword": github_user - } - } - res = es.search(index='github_user',query=query) + query = {"term": {"name.keyword": github_user}} + res = es.search(index="github_user", query=query) if len(res): user_id = res[0]["id"] graph_name = os.getenv("TUGRAPHDB_OSGRAPH_GITHUB_GRAPH_NAME") client = GraphClient(graph_name) - cypher = f'''CALL osgraph.get_developer_by_developer('{{"developer_id":{user_id},"top_n":{topn}}}') YIELD start_node, relationship, end_node return start_node, relationship, end_node''' + cypher = f"""CALL osgraph.get_developer_by_developer('{{"developer_id":{user_id},"top_n":{topn}}}') YIELD start_node, relationship, end_node return start_node, relationship, end_node""" result = client.run(cypher) return result diff --git a/osgraph-service-py/app/services/graph_services/project_community.py b/osgraph-service-py/app/services/graph_services/project_community.py index 6f3da87..58f54e3 100644 --- a/osgraph-service-py/app/services/graph_services/project_community.py +++ b/osgraph-service-py/app/services/graph_services/project_community.py @@ -3,16 +3,20 @@ from app.services.graph_services.base import BaseService, ServiceConfig, FilterKey from 
app.dal.graph.tugraph import GraphClient from app.dal.search.es import ElasticsearchClient -import os +import os from dotenv import load_dotenv + load_dotenv() + def get_default_start_time() -> int: return int((datetime.now() - timedelta(days=30)).timestamp() * 1000) + def get_default_end_time() -> int: return int(datetime.now().timestamp() * 1000) + class ProjectCommunityServiceConfig(ServiceConfig): def __init__(self): super().__init__( @@ -23,9 +27,10 @@ def __init__(self): FilterKey(key="company-topn", type="int", default=50, required=False), FilterKey(key="country-topn", type="int", default=50, required=False), FilterKey(key="developer-topn", type="int", default=50, required=False), - ] + ], ) + class ProjectCommunityService(BaseService): def __init__(self): super().__init__(ProjectCommunityServiceConfig()) @@ -37,16 +42,12 @@ def execute(self, data: Dict[str, Any]) -> Any: country_topn: int = validated_data["country-topn"] developer_topn: int = validated_data["developer-topn"] es = ElasticsearchClient() - query = { - "term": { - "name.keyword": github_repo - } - } - res = es.search(index='github_repo',query=query) + query = {"term": {"name.keyword": github_repo}} + res = es.search(index="github_repo", query=query) if len(res): repo_id = res[0]["id"] graph_name = os.getenv("TUGRAPHDB_OSGRAPH_GITHUB_GRAPH_NAME") client = GraphClient(graph_name) - cypher = f'''CALL osgraph.get_repo_developers_profile('{{"repo_id":{repo_id},"company_topn":{company_topn},"country_topn":{country_topn},"developer_topn":{developer_topn}}}') YIELD start_node, relationship, end_node return start_node, relationship, end_node''' + cypher = f"""CALL osgraph.get_repo_developers_profile('{{"repo_id":{repo_id},"company_topn":{company_topn},"country_topn":{country_topn},"developer_topn":{developer_topn}}}') YIELD start_node, relationship, end_node return start_node, relationship, end_node""" result = client.run(cypher) return result diff --git a/osgraph-service-py/app/services/graph_services/project_contribution.py b/osgraph-service-py/app/services/graph_services/project_contribution.py index 00c5655..2e70c7f 100644 --- a/osgraph-service-py/app/services/graph_services/project_contribution.py +++ b/osgraph-service-py/app/services/graph_services/project_contribution.py @@ -3,16 +3,20 @@ from app.services.graph_services.base import BaseService, ServiceConfig, FilterKey from app.dal.graph.tugraph import GraphClient from app.dal.search.es import ElasticsearchClient -import os +import os from dotenv import load_dotenv + load_dotenv() + def get_default_start_time() -> int: - return int((datetime.now() - timedelta(days = 30)).timestamp() * 1000) + return int((datetime.now() - timedelta(days=30)).timestamp() * 1000) + def get_default_end_time() -> int: return int(datetime.now().timestamp() * 1000) + class ProjectContributionServiceConfig(ServiceConfig): def __init__(self): super().__init__( @@ -20,12 +24,25 @@ def __init__(self): comment="这是一个获取项目贡献的图谱", inputTypes=["GitHubRepo"], filterKeys=[ - FilterKey(key="start-time", type="int", default=get_default_start_time(), required=False), - FilterKey(key="end-time", type="int", default=get_default_end_time(), required=False), - FilterKey(key="contribution-limit", type="int", default=50, required=False) - ] + FilterKey( + key="start-time", + type="int", + default=get_default_start_time(), + required=False, + ), + FilterKey( + key="end-time", + type="int", + default=get_default_end_time(), + required=False, + ), + FilterKey( + key="contribution-limit", type="int", default=50, 
required=False + ), + ], ) + class ProjectContributionService(BaseService): def __init__(self): super().__init__(ProjectContributionServiceConfig()) @@ -35,23 +52,18 @@ def execute(self, data: Dict[str, Any]) -> Any: github_repo: str = validated_data["GitHubRepo"] start_time: int = validated_data["start-time"] or get_default_start_time() end_time: int = validated_data["end-time"] or get_default_end_time() - config_name = os.getenv('FLASK_CONFIG') + config_name = os.getenv("FLASK_CONFIG") # if config_name == 'development': - # start_time = 0 + # start_time = 0 start_time = 0 contribution_limit: int = validated_data["contribution-limit"] es = ElasticsearchClient() - query = { - "term": { - "name.keyword": github_repo - } - } - res = es.search(index='github_repo',query=query) + query = {"term": {"name.keyword": github_repo}} + res = es.search(index="github_repo", query=query) if len(res): repo_id = res[0]["id"] graph_name = os.getenv("TUGRAPHDB_OSGRAPH_GITHUB_GRAPH_NAME") client = GraphClient(graph_name) - cypher = f'''CALL osgraph.get_repo_contribution('{{"repo_id":{repo_id},"start_timestamp":{start_time},"end_timestamp":{end_time},"top_n":{contribution_limit}}}') YIELD start_node, relationship, end_node return start_node, relationship, end_node''' + cypher = f"""CALL osgraph.get_repo_contribution('{{"repo_id":{repo_id},"start_timestamp":{start_time},"end_timestamp":{end_time},"top_n":{contribution_limit}}}') YIELD start_node, relationship, end_node return start_node, relationship, end_node""" result = client.run(cypher) return result - diff --git a/osgraph-service-py/app/services/graph_services/project_ecology.py b/osgraph-service-py/app/services/graph_services/project_ecology.py index 324c37e..8721a28 100644 --- a/osgraph-service-py/app/services/graph_services/project_ecology.py +++ b/osgraph-service-py/app/services/graph_services/project_ecology.py @@ -3,16 +3,20 @@ from app.services.graph_services.base import BaseService, ServiceConfig, FilterKey from app.dal.graph.tugraph import GraphClient from app.dal.search.es import ElasticsearchClient -import os +import os from dotenv import load_dotenv + load_dotenv() + def get_default_start_time() -> int: return int((datetime.now() - timedelta(days=30)).timestamp() * 1000) + def get_default_end_time() -> int: return int(datetime.now().timestamp() * 1000) + class ProjectEcologyServiceConfig(ServiceConfig): def __init__(self): super().__init__( @@ -21,9 +25,10 @@ def __init__(self): inputTypes=["GitHubRepo"], filterKeys=[ FilterKey(key="topn", type="int", default=50, required=False), - ] + ], ) + class ProjectEcologyService(BaseService): def __init__(self): super().__init__(ProjectEcologyServiceConfig()) @@ -33,16 +38,12 @@ def execute(self, data: Dict[str, Any]) -> Any: github_repo: str = validated_data["GitHubRepo"] top_n: int = validated_data["topn"] es = ElasticsearchClient() - query = { - "term": { - "name.keyword": github_repo - } - } - res = es.search(index='github_repo',query=query) + query = {"term": {"name.keyword": github_repo}} + res = es.search(index="github_repo", query=query) if len(res): repo_id = res[0]["id"] graph_name = os.getenv("TUGRAPHDB_OSGRAPH_GITHUB_GRAPH_NAME") client = GraphClient(graph_name) - cypher = f'''CALL osgraph.get_repo_by_repo('{{"repo_id":{repo_id}, "top_n":{top_n}}}') YIELD start_node, relationship, end_node return start_node, relationship, end_node''' + cypher = f"""CALL osgraph.get_repo_by_repo('{{"repo_id":{repo_id}, "top_n":{top_n}}}') YIELD start_node, relationship, end_node return start_node, 
relationship, end_node""" result = client.run(cypher) return result diff --git a/osgraph-service-py/app/services/graph_summary.py b/osgraph-service-py/app/services/graph_summary.py index ba71054..214a3c5 100644 --- a/osgraph-service-py/app/services/graph_summary.py +++ b/osgraph-service-py/app/services/graph_summary.py @@ -1,8 +1,9 @@ from typing import Any from app.llm.openai_client import OpenAIClient + class GraphSummaryService: - def execute(self, data:dict) -> Any: + def execute(self, data: dict) -> Any: llm = OpenAIClient() result = llm.summary_graph(graph_data=data) return result diff --git a/osgraph-service-py/app/utils/custom_exceptions.py b/osgraph-service-py/app/utils/custom_exceptions.py index 7e9f34c..4efc538 100644 --- a/osgraph-service-py/app/utils/custom_exceptions.py +++ b/osgraph-service-py/app/utils/custom_exceptions.py @@ -11,5 +11,5 @@ def __init__(self, message, status_code=None, payload=None): def to_dict(self): rv = dict(self.payload or ()) - rv['message'] = self.message + rv["message"] = self.message return rv diff --git a/osgraph-service-py/app/utils/logger.py b/osgraph-service-py/app/utils/logger.py index 5cd1123..77bea19 100644 --- a/osgraph-service-py/app/utils/logger.py +++ b/osgraph-service-py/app/utils/logger.py @@ -2,25 +2,29 @@ from logging.handlers import RotatingFileHandler import os + def setup_logger(app): - log_dir = os.path.join(app.root_path, 'logs') + log_dir = os.path.join(app.root_path, "logs") if not os.path.exists(log_dir): os.makedirs(log_dir) - + handler = RotatingFileHandler( - os.path.join(log_dir, 'app.log'), + os.path.join(log_dir, "app.log"), maxBytes=10 * 1024 * 1024, # 10MB - backupCount=5 + backupCount=5, ) - + formatter = logging.Formatter( - '%(asctime)s %(levelname)s: %(message)s [in %(pathname)s:%(lineno)d]' + "%(asctime)s %(levelname)s: %(message)s [in %(pathname)s:%(lineno)d]" ) handler.setFormatter(formatter) root_logger = logging.getLogger() - root_logger.setLevel(app.config['LOG_LEVEL']) + root_logger.setLevel(app.config["LOG_LEVEL"]) if not any(isinstance(h, RotatingFileHandler) for h in root_logger.handlers): root_logger.addHandler(handler) - logging.getLogger('werkzeug').setLevel(logging.WARNING) - logging.getLogger('flask').setLevel(logging.WARNING) - root_logger.info('The logger has been started, log level: %s', logging.getLevelName(app.config['LOG_LEVEL'])) + logging.getLogger("werkzeug").setLevel(logging.WARNING) + logging.getLogger("flask").setLevel(logging.WARNING) + root_logger.info( + "The logger has been started, log level: %s", + logging.getLevelName(app.config["LOG_LEVEL"]), + ) diff --git a/osgraph-service-py/app/utils/response_handler.py b/osgraph-service-py/app/utils/response_handler.py index 8880c6c..9444e7d 100644 --- a/osgraph-service-py/app/utils/response_handler.py +++ b/osgraph-service-py/app/utils/response_handler.py @@ -1,24 +1,19 @@ from flask import jsonify from typing import Any, Dict, Optional + class ResponseHandler: @staticmethod def success(data: Any = None, message: str = "Success") -> Dict[str, Any]: - return { - "status": 0, - "data": data, - "message": message, - "error": None - } + return {"status": 0, "data": data, "message": message, "error": None} @staticmethod - def error(message: str, status_code: int = 500, error_details: Optional[Dict[str, Any]] = None) -> Dict[str, Any]: - return { - "status": 1, - "data": None, - "message": message, - "error": error_details - } + def error( + message: str, + status_code: int = 500, + error_details: Optional[Dict[str, Any]] = None, + ) -> Dict[str, 
Any]: + return {"status": 1, "data": None, "message": message, "error": error_details} @staticmethod def jsonify_response(response: Dict[str, Any], status_code: int = 200): diff --git a/osgraph-service-py/config.py b/osgraph-service-py/config.py index 44a358c..2f921c2 100644 --- a/osgraph-service-py/config.py +++ b/osgraph-service-py/config.py @@ -3,12 +3,15 @@ load_dotenv() + class Config: LOG_LEVEL = logging.INFO + class DevelopmentConfig(Config): DEBUG = True - LOG_LEVEL = logging.DEBUG + LOG_LEVEL = logging.DEBUG + class ProductionConfig(Config): DEBUG = False diff --git a/osgraph-service-py/poetry.lock b/osgraph-service-py/poetry.lock index cb083d7..5ff5859 100644 --- a/osgraph-service-py/poetry.lock +++ b/osgraph-service-py/poetry.lock @@ -31,6 +31,74 @@ doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21.0b1)"] trio = ["trio (>=0.26.1)"] +[[package]] +name = "bandit" +version = "1.7.10" +description = "Security oriented static analyser for python code." +optional = false +python-versions = ">=3.8" +files = [ + {file = "bandit-1.7.10-py3-none-any.whl", hash = "sha256:665721d7bebbb4485a339c55161ac0eedde27d51e638000d91c8c2d68343ad02"}, + {file = "bandit-1.7.10.tar.gz", hash = "sha256:59ed5caf5d92b6ada4bf65bc6437feea4a9da1093384445fed4d472acc6cff7b"}, +] + +[package.dependencies] +colorama = {version = ">=0.3.9", markers = "platform_system == \"Windows\""} +PyYAML = ">=5.3.1" +rich = "*" +stevedore = ">=1.20.0" + +[package.extras] +baseline = ["GitPython (>=3.1.30)"] +sarif = ["jschema-to-python (>=1.2.3)", "sarif-om (>=1.0.4)"] +test = ["beautifulsoup4 (>=4.8.0)", "coverage (>=4.5.4)", "fixtures (>=3.0.0)", "flake8 (>=4.0.0)", "pylint (==1.9.4)", "stestr (>=2.5.0)", "testscenarios (>=0.5.0)", "testtools (>=2.3.0)"] +toml = ["tomli (>=1.1.0)"] +yaml = ["PyYAML"] + +[[package]] +name = "black" +version = "24.10.0" +description = "The uncompromising code formatter." 
+optional = false +python-versions = ">=3.9" +files = [ + {file = "black-24.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e6668650ea4b685440857138e5fe40cde4d652633b1bdffc62933d0db4ed9812"}, + {file = "black-24.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1c536fcf674217e87b8cc3657b81809d3c085d7bf3ef262ead700da345bfa6ea"}, + {file = "black-24.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:649fff99a20bd06c6f727d2a27f401331dc0cc861fb69cde910fe95b01b5928f"}, + {file = "black-24.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:fe4d6476887de70546212c99ac9bd803d90b42fc4767f058a0baa895013fbb3e"}, + {file = "black-24.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5a2221696a8224e335c28816a9d331a6c2ae15a2ee34ec857dcf3e45dbfa99ad"}, + {file = "black-24.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f9da3333530dbcecc1be13e69c250ed8dfa67f43c4005fb537bb426e19200d50"}, + {file = "black-24.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4007b1393d902b48b36958a216c20c4482f601569d19ed1df294a496eb366392"}, + {file = "black-24.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:394d4ddc64782e51153eadcaaca95144ac4c35e27ef9b0a42e121ae7e57a9175"}, + {file = "black-24.10.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b5e39e0fae001df40f95bd8cc36b9165c5e2ea88900167bddf258bacef9bbdc3"}, + {file = "black-24.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d37d422772111794b26757c5b55a3eade028aa3fde43121ab7b673d050949d65"}, + {file = "black-24.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:14b3502784f09ce2443830e3133dacf2c0110d45191ed470ecb04d0f5f6fcb0f"}, + {file = "black-24.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:30d2c30dc5139211dda799758559d1b049f7f14c580c409d6ad925b74a4208a8"}, + {file = "black-24.10.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1cbacacb19e922a1d75ef2b6ccaefcd6e93a2c05ede32f06a21386a04cedb981"}, + {file = "black-24.10.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1f93102e0c5bb3907451063e08b9876dbeac810e7da5a8bfb7aeb5a9ef89066b"}, + {file = "black-24.10.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ddacb691cdcdf77b96f549cf9591701d8db36b2f19519373d60d31746068dbf2"}, + {file = "black-24.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:680359d932801c76d2e9c9068d05c6b107f2584b2a5b88831c83962eb9984c1b"}, + {file = "black-24.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:17374989640fbca88b6a448129cd1745c5eb8d9547b464f281b251dd00155ccd"}, + {file = "black-24.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:63f626344343083322233f175aaf372d326de8436f5928c042639a4afbbf1d3f"}, + {file = "black-24.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfa1d0cb6200857f1923b602f978386a3a2758a65b52e0950299ea014be6800"}, + {file = "black-24.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:2cd9c95431d94adc56600710f8813ee27eea544dd118d45896bb734e9d7a0dc7"}, + {file = "black-24.10.0-py3-none-any.whl", hash = "sha256:3bb2b7a1f7b685f85b11fed1ef10f8a9148bceb49853e47a294a3dd963c1dd7d"}, + {file = "black-24.10.0.tar.gz", hash = "sha256:846ea64c97afe3bc677b761787993be4991810ecc7a4a937816dd6bddedc4875"}, +] + +[package.dependencies] +click = ">=8.0.0" +mypy-extensions = ">=0.4.3" +packaging = ">=22.0" +pathspec = ">=0.9.0" +platformdirs = ">=2" + +[package.extras] +colorama = 
["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.10)"] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +uvloop = ["uvloop (>=0.15.2)"] + [[package]] name = "blinker" version = "1.8.2" @@ -130,6 +198,22 @@ pyarrow = ["pyarrow (>=1)"] requests = ["requests (>=2.4.0,!=2.32.2,<3.0.0)"] vectorstore-mmr = ["numpy (>=1)", "simsimd (>=3)"] +[[package]] +name = "flake8" +version = "7.1.1" +description = "the modular source code checker: pep8 pyflakes and co" +optional = false +python-versions = ">=3.8.1" +files = [ + {file = "flake8-7.1.1-py2.py3-none-any.whl", hash = "sha256:597477df7860daa5aa0fdd84bf5208a043ab96b8e96ab708770ae0364dd03213"}, + {file = "flake8-7.1.1.tar.gz", hash = "sha256:049d058491e228e03e67b390f311bbf88fce2dbaa8fa673e7aea87b7198b8d38"}, +] + +[package.dependencies] +mccabe = ">=0.7.0,<0.8.0" +pycodestyle = ">=2.12.0,<2.13.0" +pyflakes = ">=3.2.0,<3.3.0" + [[package]] name = "flask" version = "3.0.3" @@ -223,6 +307,20 @@ files = [ [package.extras] all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] +[[package]] +name = "isort" +version = "5.13.2" +description = "A Python utility / library to sort Python imports." +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, + {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, +] + +[package.extras] +colors = ["colorama (>=0.4.6)"] + [[package]] name = "itsdangerous" version = "2.2.0" @@ -333,6 +431,30 @@ files = [ {file = "jiter-0.7.1.tar.gz", hash = "sha256:448cf4f74f7363c34cdef26214da527e8eeffd88ba06d0b80b485ad0667baf5d"}, ] +[[package]] +name = "markdown-it-py" +version = "3.0.0" +description = "Python port of markdown-it. Markdown parsing, done right!" 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, + {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, +] + +[package.dependencies] +mdurl = ">=0.1,<1.0" + +[package.extras] +benchmarking = ["psutil", "pytest", "pytest-benchmark"] +code-style = ["pre-commit (>=3.0,<4.0)"] +compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] +linkify = ["linkify-it-py (>=1,<3)"] +plugins = ["mdit-py-plugins"] +profiling = ["gprof2dot"] +rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + [[package]] name = "markupsafe" version = "3.0.2" @@ -403,6 +525,91 @@ files = [ {file = "markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0"}, ] +[[package]] +name = "mccabe" +version = "0.7.0" +description = "McCabe checker, plugin for flake8" +optional = false +python-versions = ">=3.6" +files = [ + {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, + {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +description = "Markdown URL utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, +] + +[[package]] +name = "mypy" +version = "1.13.0" +description = "Optional static typing for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mypy-1.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6607e0f1dd1fb7f0aca14d936d13fd19eba5e17e1cd2a14f808fa5f8f6d8f60a"}, + {file = "mypy-1.13.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8a21be69bd26fa81b1f80a61ee7ab05b076c674d9b18fb56239d72e21d9f4c80"}, + {file = "mypy-1.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7b2353a44d2179846a096e25691d54d59904559f4232519d420d64da6828a3a7"}, + {file = "mypy-1.13.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0730d1c6a2739d4511dc4253f8274cdd140c55c32dfb0a4cf8b7a43f40abfa6f"}, + {file = "mypy-1.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:c5fc54dbb712ff5e5a0fca797e6e0aa25726c7e72c6a5850cfd2adbc1eb0a372"}, + {file = "mypy-1.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:581665e6f3a8a9078f28d5502f4c334c0c8d802ef55ea0e7276a6e409bc0d82d"}, + {file = "mypy-1.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3ddb5b9bf82e05cc9a627e84707b528e5c7caaa1c55c69e175abb15a761cec2d"}, + {file = "mypy-1.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:20c7ee0bc0d5a9595c46f38beb04201f2620065a93755704e141fcac9f59db2b"}, + {file = "mypy-1.13.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3790ded76f0b34bc9c8ba4def8f919dd6a46db0f5a6610fb994fe8efdd447f73"}, + {file = "mypy-1.13.0-cp311-cp311-win_amd64.whl", hash = 
"sha256:51f869f4b6b538229c1d1bcc1dd7d119817206e2bc54e8e374b3dfa202defcca"}, + {file = "mypy-1.13.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5c7051a3461ae84dfb5dd15eff5094640c61c5f22257c8b766794e6dd85e72d5"}, + {file = "mypy-1.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:39bb21c69a5d6342f4ce526e4584bc5c197fd20a60d14a8624d8743fffb9472e"}, + {file = "mypy-1.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:164f28cb9d6367439031f4c81e84d3ccaa1e19232d9d05d37cb0bd880d3f93c2"}, + {file = "mypy-1.13.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a4c1bfcdbce96ff5d96fc9b08e3831acb30dc44ab02671eca5953eadad07d6d0"}, + {file = "mypy-1.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:a0affb3a79a256b4183ba09811e3577c5163ed06685e4d4b46429a271ba174d2"}, + {file = "mypy-1.13.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a7b44178c9760ce1a43f544e595d35ed61ac2c3de306599fa59b38a6048e1aa7"}, + {file = "mypy-1.13.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5d5092efb8516d08440e36626f0153b5006d4088c1d663d88bf79625af3d1d62"}, + {file = "mypy-1.13.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:de2904956dac40ced10931ac967ae63c5089bd498542194b436eb097a9f77bc8"}, + {file = "mypy-1.13.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:7bfd8836970d33c2105562650656b6846149374dc8ed77d98424b40b09340ba7"}, + {file = "mypy-1.13.0-cp313-cp313-win_amd64.whl", hash = "sha256:9f73dba9ec77acb86457a8fc04b5239822df0c14a082564737833d2963677dbc"}, + {file = "mypy-1.13.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:100fac22ce82925f676a734af0db922ecfea991e1d7ec0ceb1e115ebe501301a"}, + {file = "mypy-1.13.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7bcb0bb7f42a978bb323a7c88f1081d1b5dee77ca86f4100735a6f541299d8fb"}, + {file = "mypy-1.13.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bde31fc887c213e223bbfc34328070996061b0833b0a4cfec53745ed61f3519b"}, + {file = "mypy-1.13.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:07de989f89786f62b937851295ed62e51774722e5444a27cecca993fc3f9cd74"}, + {file = "mypy-1.13.0-cp38-cp38-win_amd64.whl", hash = "sha256:4bde84334fbe19bad704b3f5b78c4abd35ff1026f8ba72b29de70dda0916beb6"}, + {file = "mypy-1.13.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0246bcb1b5de7f08f2826451abd947bf656945209b140d16ed317f65a17dc7dc"}, + {file = "mypy-1.13.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7f5b7deae912cf8b77e990b9280f170381fdfbddf61b4ef80927edd813163732"}, + {file = "mypy-1.13.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7029881ec6ffb8bc233a4fa364736789582c738217b133f1b55967115288a2bc"}, + {file = "mypy-1.13.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3e38b980e5681f28f033f3be86b099a247b13c491f14bb8b1e1e134d23bb599d"}, + {file = "mypy-1.13.0-cp39-cp39-win_amd64.whl", hash = "sha256:a6789be98a2017c912ae6ccb77ea553bbaf13d27605d2ca20a76dfbced631b24"}, + {file = "mypy-1.13.0-py3-none-any.whl", hash = "sha256:9c250883f9fd81d212e0952c92dbfcc96fc237f4b7c92f56ac81fd48460b3e5a"}, + {file = "mypy-1.13.0.tar.gz", hash = "sha256:0291a61b6fbf3e6673e3405cfcc0e7650bebc7939659fdca2702958038bd835e"}, +] + +[package.dependencies] +mypy-extensions = ">=1.0.0" +typing-extensions = ">=4.6.0" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +faster-cache = ["orjson"] +install-types = ["pip"] +mypyc = ["setuptools (>=50)"] +reports = ["lxml"] + 
+[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." +optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + [[package]] name = "neo4j" version = "5.26.0" @@ -446,6 +653,66 @@ typing-extensions = ">=4.11,<5" [package.extras] datalib = ["numpy (>=1)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"] +[[package]] +name = "packaging" +version = "24.2" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, + {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, +] + +[[package]] +name = "pathspec" +version = "0.12.1" +description = "Utility library for gitignore style pattern matching of file paths." +optional = false +python-versions = ">=3.8" +files = [ + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, +] + +[[package]] +name = "pbr" +version = "6.1.0" +description = "Python Build Reasonableness" +optional = false +python-versions = ">=2.6" +files = [ + {file = "pbr-6.1.0-py2.py3-none-any.whl", hash = "sha256:a776ae228892d8013649c0aeccbb3d5f99ee15e005a4cbb7e61d55a067b28a2a"}, + {file = "pbr-6.1.0.tar.gz", hash = "sha256:788183e382e3d1d7707db08978239965e8b9e4e5ed42669bf4758186734d5f24"}, +] + +[[package]] +name = "platformdirs" +version = "4.3.6" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, + {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, +] + +[package.extras] +docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"] +type = ["mypy (>=1.11.2)"] + +[[package]] +name = "pycodestyle" +version = "2.12.1" +description = "Python style guide checker" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pycodestyle-2.12.1-py2.py3-none-any.whl", hash = "sha256:46f0fb92069a7c28ab7bb558f05bfc0110dac69a0cd23c61ea0040283a9d78b3"}, + {file = "pycodestyle-2.12.1.tar.gz", hash = "sha256:6838eae08bbce4f6accd5d5572075c63626a15ee3e6f842df996bf62f6d73521"}, +] + [[package]] name = "pydantic" version = "2.9.2" @@ -570,6 +837,31 @@ files = [ [package.dependencies] typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" +[[package]] +name = "pyflakes" +version = "3.2.0" +description = "passive checker of Python programs" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyflakes-3.2.0-py2.py3-none-any.whl", hash = "sha256:84b5be138a2dfbb40689ca07e2152deb896a65c3a3e24c251c5c62489568074a"}, + {file = "pyflakes-3.2.0.tar.gz", hash = "sha256:1c61603ff154621fb2a9172037d84dca3500def8c8b630657d1701f026f8af3f"}, +] + +[[package]] +name = "pygments" +version = "2.18.0" +description = "Pygments is a syntax highlighting package written in Python." +optional = false +python-versions = ">=3.8" +files = [ + {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, + {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"}, +] + +[package.extras] +windows-terminal = ["colorama (>=0.4.6)"] + [[package]] name = "python-dotenv" version = "1.0.1" @@ -595,6 +887,86 @@ files = [ {file = "pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a"}, ] +[[package]] +name = "pyyaml" +version = "6.0.2" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = 
"PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, + {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, + {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, + {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, + {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, + {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, + {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, + {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, + {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, + {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, + {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, +] + +[[package]] +name = "rich" +version = "13.9.4" +description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = 
"rich-13.9.4-py3-none-any.whl", hash = "sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90"}, + {file = "rich-13.9.4.tar.gz", hash = "sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098"}, +] + +[package.dependencies] +markdown-it-py = ">=2.2.0" +pygments = ">=2.13.0,<3.0.0" + +[package.extras] +jupyter = ["ipywidgets (>=7.5.1,<9)"] + [[package]] name = "sniffio" version = "1.3.1" @@ -606,6 +978,20 @@ files = [ {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, ] +[[package]] +name = "stevedore" +version = "5.4.0" +description = "Manage dynamic plugins for Python applications" +optional = false +python-versions = ">=3.9" +files = [ + {file = "stevedore-5.4.0-py3-none-any.whl", hash = "sha256:b0be3c4748b3ea7b854b265dcb4caa891015e442416422be16f8b31756107857"}, + {file = "stevedore-5.4.0.tar.gz", hash = "sha256:79e92235ecb828fe952b6b8b0c6c87863248631922c8e8e0fa5b17b232c4514d"}, +] + +[package.dependencies] +pbr = ">=2.0.0" + [[package]] name = "tqdm" version = "4.67.0" @@ -675,4 +1061,4 @@ watchdog = ["watchdog (>=2.3)"] [metadata] lock-version = "2.0" python-versions = "^3.12" -content-hash = "ab5f656fadab68e9f4628db597dc01518d7fdbfc62a84ff032fcabcd6428ddba" +content-hash = "691bd32ef56ade12daf36c737bd933a9cd9cbb4af22adf07effa6529197ff9b7" diff --git a/osgraph-service-py/pyproject.toml b/osgraph-service-py/pyproject.toml index 3499e48..9301362 100644 --- a/osgraph-service-py/pyproject.toml +++ b/osgraph-service-py/pyproject.toml @@ -14,6 +14,29 @@ elasticsearch = "^8.16.0" openai = "^1.54.5" +[tool.poetry.group.dev.dependencies] +black = "^24.10.0" +isort = "^5.13.2" +flake8 = "^7.1.1" +mypy = "^1.13.0" +bandit = "^1.7.10" + [build-system] requires = ["poetry-core"] build-backend = "poetry.core.masonry.api" + +[tool.black] +line-length = 88 +target-version = ["py311"] + +[tool.isort] +profile = "black" +line_length = 88 +known_first_party = ["osgraph-service-py"] +default_section = "THIRDPARTY" + +[flake8] +max-line-length = 88 +exclude = ".venv,__pycache__,build,dist" + + diff --git a/osgraph-service-py/run.py b/osgraph-service-py/run.py index 75cfee7..339d810 100644 --- a/osgraph-service-py/run.py +++ b/osgraph-service-py/run.py @@ -6,15 +6,12 @@ load_dotenv() -config_mapping = { - 'development': DevelopmentConfig, - 'production': ProductionConfig -} +config_mapping = {"development": DevelopmentConfig, "production": ProductionConfig} -config_name = os.getenv('FLASK_CONFIG', 'production') +config_name = os.getenv("FLASK_CONFIG", "production") config_class = config_mapping.get(config_name.lower(), ProductionConfig) app = create_app(config_class=config_class) -if __name__ == '__main__': - app.run(port=8000, debug=app.config.get('DEBUG', False)) +if __name__ == "__main__": + app.run(port=8000, debug=app.config.get("DEBUG", False)) From 6b34e49fbf1bb4156b1fd51103285c4133dc0a16 Mon Sep 17 00:00:00 2001 From: KingSkyLi <15566300566@163.com> Date: Thu, 21 Nov 2024 15:03:13 +0800 Subject: [PATCH 03/61] run isort; --- osgraph-service-py/app/__init__.py | 15 +++++++++------ osgraph-service-py/app/dal/graph/tugraph.py | 10 ++++++---- osgraph-service-py/app/dal/search/es.py | 9 +++++---- osgraph-service-py/app/llm/openai_client.py | 4 +++- .../app/managers/develop_activities.py | 19 ++++++++++--------- .../app/managers/fulltext_search.py | 1 + osgraph-service-py/app/managers/graph_list.py | 1 + .../app/managers/os_interest.py | 7 ++++--- osgraph-service-py/app/managers/os_partner.py | 13 
+++++++------ .../app/managers/project_community.py | 15 ++++++++------- .../app/managers/project_contribution.py | 17 +++++++++-------- .../app/managers/project_ecology.py | 7 ++++--- .../app/models/default_graph.py | 5 +++-- osgraph-service-py/app/models/graph_view.py | 4 ++-- .../app/routes/develop_activities.py | 10 ++++++---- .../app/routes/fulltext_search.py | 8 +++++--- osgraph-service-py/app/routes/graph_list.py | 8 +++++--- osgraph-service-py/app/routes/os_interest.py | 10 ++++++---- osgraph-service-py/app/routes/os_partner.py | 10 ++++++---- .../app/routes/project_community.py | 10 ++++++---- .../app/routes/project_contribution.py | 10 ++++++---- .../app/routes/project_ecology.py | 10 ++++++---- osgraph-service-py/app/services/__init__.py | 13 +++++++------ .../app/services/fulltext_search.py | 6 ++++-- osgraph-service-py/app/services/graph_list.py | 6 ++++-- .../app/services/graph_services/base.py | 8 +++++--- .../graph_services/develop_activities.py | 10 ++++++---- .../services/graph_services/os_interest.py | 10 ++++++---- .../app/services/graph_services/os_partner.py | 10 ++++++---- .../graph_services/project_community.py | 10 ++++++---- .../graph_services/project_contribution.py | 10 ++++++---- .../graph_services/project_ecology.py | 10 ++++++---- .../app/services/graph_summary.py | 1 + osgraph-service-py/app/utils/logger.py | 2 +- .../app/utils/response_handler.py | 3 ++- osgraph-service-py/config.py | 1 + osgraph-service-py/run.py | 4 +++- 37 files changed, 182 insertions(+), 125 deletions(-) diff --git a/osgraph-service-py/app/__init__.py b/osgraph-service-py/app/__init__.py index 19d1610..491096b 100644 --- a/osgraph-service-py/app/__init__.py +++ b/osgraph-service-py/app/__init__.py @@ -1,13 +1,16 @@ # app/__init__.py -from flask import Flask, jsonify -import os import importlib -from .utils.logger import setup_logger -from .utils.custom_exceptions import InvalidUsage -from app.services import register_all_services +import os + +from dotenv import load_dotenv +from flask import Flask, jsonify + from app.dal.graph.tugraph import GraphClient, GraphLabel, LabelProps from app.models.system_graph import GraphService -from dotenv import load_dotenv +from app.services import register_all_services + +from .utils.custom_exceptions import InvalidUsage +from .utils.logger import setup_logger load_dotenv() diff --git a/osgraph-service-py/app/dal/graph/tugraph.py b/osgraph-service-py/app/dal/graph/tugraph.py index bf1b33c..408305e 100644 --- a/osgraph-service-py/app/dal/graph/tugraph.py +++ b/osgraph-service-py/app/dal/graph/tugraph.py @@ -1,12 +1,14 @@ # app/dal/graph/tugraph.py +import json import os -from neo4j import GraphDatabase +from dataclasses import asdict, dataclass, is_dataclass +from typing import Any, Dict, List, Optional + from dotenv import load_dotenv -from typing import Optional, List, Dict, Any from flask import current_app -from dataclasses import dataclass, asdict, is_dataclass +from neo4j import GraphDatabase + from app.models.default_graph import Edge, Vertex -import json load_dotenv() diff --git a/osgraph-service-py/app/dal/search/es.py b/osgraph-service-py/app/dal/search/es.py index a9511ef..bbb9c6d 100644 --- a/osgraph-service-py/app/dal/search/es.py +++ b/osgraph-service-py/app/dal/search/es.py @@ -1,10 +1,11 @@ # # app/dal/graph/es.py -from elasticsearch import Elasticsearch -from elasticsearch.exceptions import NotFoundError, ConnectionError, RequestError -from typing import Dict, List, Optional, Any import logging -from dotenv import load_dotenv import 
os +from typing import Any, Dict, List, Optional + +from dotenv import load_dotenv +from elasticsearch import Elasticsearch +from elasticsearch.exceptions import ConnectionError, NotFoundError, RequestError load_dotenv() logging.basicConfig(level=logging.INFO) diff --git a/osgraph-service-py/app/llm/openai_client.py b/osgraph-service-py/app/llm/openai_client.py index dffd1cf..cb30624 100644 --- a/osgraph-service-py/app/llm/openai_client.py +++ b/osgraph-service-py/app/llm/openai_client.py @@ -1,6 +1,8 @@ -from openai import OpenAI import os + from dotenv import load_dotenv +from openai import OpenAI + from app.llm.prompt_templates.graph_summary import get_graph_summary_prompt load_dotenv() diff --git a/osgraph-service-py/app/managers/develop_activities.py b/osgraph-service-py/app/managers/develop_activities.py index 3ac661d..94f98c5 100644 --- a/osgraph-service-py/app/managers/develop_activities.py +++ b/osgraph-service-py/app/managers/develop_activities.py @@ -1,18 +1,19 @@ # app/manager/develop_activities.py +import json +import os +from typing import Any, Dict + from app.models.graph_view import ( - Graph, - User, - Repo, - Push, - CreatePR, CodeReviewAction, - CreateIssue, CommentIssue, + CreateIssue, + CreatePR, + Graph, + Push, + Repo, + User, ) -from typing import Dict, Any from app.services.graph_services.develop_activities import DevelopActivitiesService -import json -import os class DevelopActivitiesManager: diff --git a/osgraph-service-py/app/managers/fulltext_search.py b/osgraph-service-py/app/managers/fulltext_search.py index 9f606a2..3c602c3 100644 --- a/osgraph-service-py/app/managers/fulltext_search.py +++ b/osgraph-service-py/app/managers/fulltext_search.py @@ -1,5 +1,6 @@ # app/manager/fulltext_search.py from typing import List + from app.services.fulltext_search import FulltextSearchService diff --git a/osgraph-service-py/app/managers/graph_list.py b/osgraph-service-py/app/managers/graph_list.py index 1b6fe4f..36d0cd4 100644 --- a/osgraph-service-py/app/managers/graph_list.py +++ b/osgraph-service-py/app/managers/graph_list.py @@ -1,5 +1,6 @@ # app/manager/graph_list.py from typing import List + from app.services.graph_list import GraphListService diff --git a/osgraph-service-py/app/managers/os_interest.py b/osgraph-service-py/app/managers/os_interest.py index 9c21ac6..3375c3b 100644 --- a/osgraph-service-py/app/managers/os_interest.py +++ b/osgraph-service-py/app/managers/os_interest.py @@ -1,9 +1,10 @@ # app/manager/os_interest.py -from app.models.graph_view import Graph, User, Repo, Topic, Belong, ContributeRepo -from typing import Dict, Any -from app.services.graph_services.os_interest import OSInterestService import json import os +from typing import Any, Dict + +from app.models.graph_view import Belong, ContributeRepo, Graph, Repo, Topic, User +from app.services.graph_services.os_interest import OSInterestService class OSInterestManager: diff --git a/osgraph-service-py/app/managers/os_partner.py b/osgraph-service-py/app/managers/os_partner.py index efae566..9e83ae2 100644 --- a/osgraph-service-py/app/managers/os_partner.py +++ b/osgraph-service-py/app/managers/os_partner.py @@ -1,16 +1,17 @@ # app/manager/os_partner.py +import json +import os +from typing import Any, Dict + from app.models.graph_view import ( - Graph, - User, CommonIssue, CommonPR, - CommonStar, CommonRepo, + CommonStar, + Graph, + User, ) -from typing import Dict, Any from app.services.graph_services.os_partner import OSPartnerService -import json -import os class OSPartnerManager: diff --git 
a/osgraph-service-py/app/managers/project_community.py b/osgraph-service-py/app/managers/project_community.py index 5e30a5a..58873a7 100644 --- a/osgraph-service-py/app/managers/project_community.py +++ b/osgraph-service-py/app/managers/project_community.py @@ -1,18 +1,19 @@ # app/manager/project_community.py +import json +import os +from typing import Any, Dict + from app.models.graph_view import ( - Graph, - Repo, + Belong, Company, Country, - User, + Graph, PullRequestAction, + Repo, Star, - Belong, + User, ) -from typing import Dict, Any from app.services.graph_services.project_community import ProjectCommunityService -import json -import os class ProjectCommunityManager: diff --git a/osgraph-service-py/app/managers/project_contribution.py b/osgraph-service-py/app/managers/project_contribution.py index daf5616..4fd20c5 100644 --- a/osgraph-service-py/app/managers/project_contribution.py +++ b/osgraph-service-py/app/managers/project_contribution.py @@ -1,18 +1,19 @@ # app/manager/project_contribution.py +import json +import os +from typing import Any, Dict + from app.models.graph_view import ( - Graph, - User, - Repo, CodeReviewAction, - CreateIssue, - CommitAction, CommentIssue, + CommitAction, + CreateIssue, CreatePR, + Graph, + Repo, + User, ) -from typing import Dict, Any from app.services.graph_services.project_contribution import ProjectContributionService -import json -import os class ProjectContributionManager: diff --git a/osgraph-service-py/app/managers/project_ecology.py b/osgraph-service-py/app/managers/project_ecology.py index 25d52d1..aec5ba5 100644 --- a/osgraph-service-py/app/managers/project_ecology.py +++ b/osgraph-service-py/app/managers/project_ecology.py @@ -1,9 +1,10 @@ # app/manager/project_ecology.py -from app.models.graph_view import Graph, User, Repo, Orgnization, Belong, CommonDevelop -from typing import Dict, Any -from app.services.graph_services.project_ecology import ProjectEcologyService import json import os +from typing import Any, Dict + +from app.models.graph_view import Belong, CommonDevelop, Graph, Orgnization, Repo, User +from app.services.graph_services.project_ecology import ProjectEcologyService class ProjectEcologyManager: diff --git a/osgraph-service-py/app/models/default_graph.py b/osgraph-service-py/app/models/default_graph.py index 5a44c0b..1083876 100644 --- a/osgraph-service-py/app/models/default_graph.py +++ b/osgraph-service-py/app/models/default_graph.py @@ -1,6 +1,7 @@ -from dataclasses import dataclass, asdict -from typing import Any, Optional import os +from dataclasses import asdict, dataclass +from typing import Any, Optional + from dotenv import load_dotenv load_dotenv() diff --git a/osgraph-service-py/app/models/graph_view.py b/osgraph-service-py/app/models/graph_view.py index 0cb5e1f..6908e80 100644 --- a/osgraph-service-py/app/models/graph_view.py +++ b/osgraph-service-py/app/models/graph_view.py @@ -1,8 +1,8 @@ # app/models/graph_view.py -from dataclasses import dataclass, field, asdict -from typing import List, Optional, Literal import json +from dataclasses import asdict, dataclass, field +from typing import List, Literal, Optional @dataclass diff --git a/osgraph-service-py/app/routes/develop_activities.py b/osgraph-service-py/app/routes/develop_activities.py index 92fa517..dff0f6c 100644 --- a/osgraph-service-py/app/routes/develop_activities.py +++ b/osgraph-service-py/app/routes/develop_activities.py @@ -1,10 +1,12 @@ -from flask import Blueprint, request, abort +import logging +from dataclasses import asdict +from typing 
import Any, Dict + +from flask import Blueprint, abort, request + from app.managers.develop_activities import DevelopActivitiesManager from app.utils.custom_exceptions import InvalidUsage from app.utils.response_handler import ResponseHandler -from typing import Dict, Any -import logging -from dataclasses import asdict develop_activities_bp = Blueprint( "project_activities", __name__, url_prefix="/api/graph" diff --git a/osgraph-service-py/app/routes/fulltext_search.py b/osgraph-service-py/app/routes/fulltext_search.py index d26c8af..be24122 100644 --- a/osgraph-service-py/app/routes/fulltext_search.py +++ b/osgraph-service-py/app/routes/fulltext_search.py @@ -1,10 +1,12 @@ +import logging +from dataclasses import asdict +from typing import Any, Dict + from flask import Blueprint, request + from app.managers.fulltext_search import FulltextSearchManager from app.utils.custom_exceptions import InvalidUsage from app.utils.response_handler import ResponseHandler -from typing import Dict, Any -import logging -from dataclasses import asdict fulltext_search_bp = Blueprint("fulltext_search", __name__, url_prefix="/api/graph") logger = logging.getLogger(__name__) diff --git a/osgraph-service-py/app/routes/graph_list.py b/osgraph-service-py/app/routes/graph_list.py index 0eba253..70cdefa 100644 --- a/osgraph-service-py/app/routes/graph_list.py +++ b/osgraph-service-py/app/routes/graph_list.py @@ -1,10 +1,12 @@ +import logging +from dataclasses import asdict +from typing import Any, Dict + from flask import Blueprint + from app.managers.graph_list import GraphListManager from app.utils.custom_exceptions import InvalidUsage from app.utils.response_handler import ResponseHandler -from typing import Dict, Any -import logging -from dataclasses import asdict graph_list_bp = Blueprint("graph_list", __name__, url_prefix="/api/graph") logger = logging.getLogger(__name__) diff --git a/osgraph-service-py/app/routes/os_interest.py b/osgraph-service-py/app/routes/os_interest.py index 32a721f..773ce8e 100644 --- a/osgraph-service-py/app/routes/os_interest.py +++ b/osgraph-service-py/app/routes/os_interest.py @@ -1,10 +1,12 @@ -from flask import Blueprint, request, abort +import logging +from dataclasses import asdict +from typing import Any, Dict + +from flask import Blueprint, abort, request + from app.managers.os_interest import OSInterestManager from app.utils.custom_exceptions import InvalidUsage from app.utils.response_handler import ResponseHandler -from typing import Dict, Any -import logging -from dataclasses import asdict os_interest_bp = Blueprint("os_interest", __name__, url_prefix="/api/graph") logger = logging.getLogger(__name__) diff --git a/osgraph-service-py/app/routes/os_partner.py b/osgraph-service-py/app/routes/os_partner.py index 2330095..8c9c7cd 100644 --- a/osgraph-service-py/app/routes/os_partner.py +++ b/osgraph-service-py/app/routes/os_partner.py @@ -1,10 +1,12 @@ -from flask import Blueprint, request, abort +import logging +from dataclasses import asdict +from typing import Any, Dict + +from flask import Blueprint, abort, request + from app.managers.os_partner import OSPartnerManager from app.utils.custom_exceptions import InvalidUsage from app.utils.response_handler import ResponseHandler -from typing import Dict, Any -import logging -from dataclasses import asdict os_partner_bp = Blueprint("os_partner", __name__, url_prefix="/api/graph") logger = logging.getLogger(__name__) diff --git a/osgraph-service-py/app/routes/project_community.py 
b/osgraph-service-py/app/routes/project_community.py index 87e5b7a..bb0bb4a 100644 --- a/osgraph-service-py/app/routes/project_community.py +++ b/osgraph-service-py/app/routes/project_community.py @@ -1,10 +1,12 @@ -from flask import Blueprint, request, abort +import logging +from dataclasses import asdict +from typing import Any, Dict + +from flask import Blueprint, abort, request + from app.managers.project_community import ProjectCommunityManager from app.utils.custom_exceptions import InvalidUsage from app.utils.response_handler import ResponseHandler -from typing import Dict, Any -import logging -from dataclasses import asdict project_community_bp = Blueprint("project_community", __name__, url_prefix="/api/graph") logger = logging.getLogger(__name__) diff --git a/osgraph-service-py/app/routes/project_contribution.py b/osgraph-service-py/app/routes/project_contribution.py index 3032cc6..6c8aa0d 100644 --- a/osgraph-service-py/app/routes/project_contribution.py +++ b/osgraph-service-py/app/routes/project_contribution.py @@ -1,10 +1,12 @@ -from flask import Blueprint, request, abort +import logging +from dataclasses import asdict +from typing import Any, Dict + +from flask import Blueprint, abort, request + from app.managers.project_contribution import ProjectContributionManager from app.utils.custom_exceptions import InvalidUsage from app.utils.response_handler import ResponseHandler -from typing import Dict, Any -import logging -from dataclasses import asdict project_contribution_bp = Blueprint( "project_contribution", __name__, url_prefix="/api/graph" diff --git a/osgraph-service-py/app/routes/project_ecology.py b/osgraph-service-py/app/routes/project_ecology.py index bdcc1df..0594127 100644 --- a/osgraph-service-py/app/routes/project_ecology.py +++ b/osgraph-service-py/app/routes/project_ecology.py @@ -1,10 +1,12 @@ -from flask import Blueprint, request, abort +import logging +from dataclasses import asdict +from typing import Any, Dict + +from flask import Blueprint, abort, request + from app.managers.project_ecology import ProjectEcologyManager from app.utils.custom_exceptions import InvalidUsage from app.utils.response_handler import ResponseHandler -from typing import Dict, Any -import logging -from dataclasses import asdict project_ecology_bp = Blueprint("project_ecology", __name__, url_prefix="/api/graph") logger = logging.getLogger(__name__) diff --git a/osgraph-service-py/app/services/__init__.py b/osgraph-service-py/app/services/__init__.py index 3d2d2b5..6ab275a 100644 --- a/osgraph-service-py/app/services/__init__.py +++ b/osgraph-service-py/app/services/__init__.py @@ -1,16 +1,17 @@ # app/services/__init__.py from flask import current_app -from app.services.graph_services.project_contribution import ( - ProjectContributionServiceConfig, -) -from app.services.graph_services.project_ecology import ProjectEcologyServiceConfig -from app.services.graph_services.project_community import ProjectCommunityServiceConfig + from app.services.graph_services.develop_activities import ( DevelopActivitiesServiceConfig, ) -from app.services.graph_services.os_partner import OSPartnerServiceConfig from app.services.graph_services.os_interest import OSInterestServiceConfig +from app.services.graph_services.os_partner import OSPartnerServiceConfig +from app.services.graph_services.project_community import ProjectCommunityServiceConfig +from app.services.graph_services.project_contribution import ( + ProjectContributionServiceConfig, +) +from app.services.graph_services.project_ecology import 
ProjectEcologyServiceConfig SERVICE_CONFIGS = [ ProjectContributionServiceConfig(), diff --git a/osgraph-service-py/app/services/fulltext_search.py b/osgraph-service-py/app/services/fulltext_search.py index dfa4d90..ed86f81 100644 --- a/osgraph-service-py/app/services/fulltext_search.py +++ b/osgraph-service-py/app/services/fulltext_search.py @@ -1,8 +1,10 @@ +import os from typing import Any + +from dotenv import load_dotenv + from app.dal.search.es import ElasticsearchClient from app.utils.custom_exceptions import InvalidUsage -import os -from dotenv import load_dotenv load_dotenv() diff --git a/osgraph-service-py/app/services/graph_list.py b/osgraph-service-py/app/services/graph_list.py index 48ee853..64fff24 100644 --- a/osgraph-service-py/app/services/graph_list.py +++ b/osgraph-service-py/app/services/graph_list.py @@ -1,8 +1,10 @@ -from typing import Any -from app.dal.graph.tugraph import GraphClient import os +from typing import Any + from dotenv import load_dotenv +from app.dal.graph.tugraph import GraphClient + load_dotenv() diff --git a/osgraph-service-py/app/services/graph_services/base.py b/osgraph-service-py/app/services/graph_services/base.py index f456a8b..3568001 100644 --- a/osgraph-service-py/app/services/graph_services/base.py +++ b/osgraph-service-py/app/services/graph_services/base.py @@ -1,11 +1,13 @@ +import os from abc import ABC, abstractmethod -from typing import List, Dict, Any, Callable, Union from dataclasses import dataclass -from app.utils.custom_exceptions import InvalidUsage +from typing import Any, Callable, Dict, List, Union + from dotenv import load_dotenv + from app.dal.graph.tugraph import GraphClient from app.models.system_graph import GraphService -import os +from app.utils.custom_exceptions import InvalidUsage load_dotenv() diff --git a/osgraph-service-py/app/services/graph_services/develop_activities.py b/osgraph-service-py/app/services/graph_services/develop_activities.py index 4c221f8..8aadb7b 100644 --- a/osgraph-service-py/app/services/graph_services/develop_activities.py +++ b/osgraph-service-py/app/services/graph_services/develop_activities.py @@ -1,10 +1,12 @@ +import os from datetime import datetime, timedelta -from typing import Dict, Any -from app.services.graph_services.base import BaseService, ServiceConfig, FilterKey +from typing import Any, Dict + +from dotenv import load_dotenv + from app.dal.graph.tugraph import GraphClient from app.dal.search.es import ElasticsearchClient -import os -from dotenv import load_dotenv +from app.services.graph_services.base import BaseService, FilterKey, ServiceConfig load_dotenv() diff --git a/osgraph-service-py/app/services/graph_services/os_interest.py b/osgraph-service-py/app/services/graph_services/os_interest.py index e5c55e4..8c54e57 100644 --- a/osgraph-service-py/app/services/graph_services/os_interest.py +++ b/osgraph-service-py/app/services/graph_services/os_interest.py @@ -1,10 +1,12 @@ +import os from datetime import datetime, timedelta -from typing import Dict, Any -from app.services.graph_services.base import BaseService, ServiceConfig, FilterKey +from typing import Any, Dict + +from dotenv import load_dotenv + from app.dal.graph.tugraph import GraphClient from app.dal.search.es import ElasticsearchClient -import os -from dotenv import load_dotenv +from app.services.graph_services.base import BaseService, FilterKey, ServiceConfig load_dotenv() diff --git a/osgraph-service-py/app/services/graph_services/os_partner.py b/osgraph-service-py/app/services/graph_services/os_partner.py index 
ef460a4..eaa208a 100644 --- a/osgraph-service-py/app/services/graph_services/os_partner.py +++ b/osgraph-service-py/app/services/graph_services/os_partner.py @@ -1,10 +1,12 @@ +import os from datetime import datetime, timedelta -from typing import Dict, Any -from app.services.graph_services.base import BaseService, ServiceConfig, FilterKey +from typing import Any, Dict + +from dotenv import load_dotenv + from app.dal.graph.tugraph import GraphClient from app.dal.search.es import ElasticsearchClient -import os -from dotenv import load_dotenv +from app.services.graph_services.base import BaseService, FilterKey, ServiceConfig load_dotenv() diff --git a/osgraph-service-py/app/services/graph_services/project_community.py b/osgraph-service-py/app/services/graph_services/project_community.py index 58f54e3..5ddbd48 100644 --- a/osgraph-service-py/app/services/graph_services/project_community.py +++ b/osgraph-service-py/app/services/graph_services/project_community.py @@ -1,10 +1,12 @@ +import os from datetime import datetime, timedelta -from typing import Dict, Any -from app.services.graph_services.base import BaseService, ServiceConfig, FilterKey +from typing import Any, Dict + +from dotenv import load_dotenv + from app.dal.graph.tugraph import GraphClient from app.dal.search.es import ElasticsearchClient -import os -from dotenv import load_dotenv +from app.services.graph_services.base import BaseService, FilterKey, ServiceConfig load_dotenv() diff --git a/osgraph-service-py/app/services/graph_services/project_contribution.py b/osgraph-service-py/app/services/graph_services/project_contribution.py index 2e70c7f..efa9ab2 100644 --- a/osgraph-service-py/app/services/graph_services/project_contribution.py +++ b/osgraph-service-py/app/services/graph_services/project_contribution.py @@ -1,10 +1,12 @@ +import os from datetime import datetime, timedelta -from typing import Dict, Any -from app.services.graph_services.base import BaseService, ServiceConfig, FilterKey +from typing import Any, Dict + +from dotenv import load_dotenv + from app.dal.graph.tugraph import GraphClient from app.dal.search.es import ElasticsearchClient -import os -from dotenv import load_dotenv +from app.services.graph_services.base import BaseService, FilterKey, ServiceConfig load_dotenv() diff --git a/osgraph-service-py/app/services/graph_services/project_ecology.py b/osgraph-service-py/app/services/graph_services/project_ecology.py index 8721a28..b1483a4 100644 --- a/osgraph-service-py/app/services/graph_services/project_ecology.py +++ b/osgraph-service-py/app/services/graph_services/project_ecology.py @@ -1,10 +1,12 @@ +import os from datetime import datetime, timedelta -from typing import Dict, Any -from app.services.graph_services.base import BaseService, ServiceConfig, FilterKey +from typing import Any, Dict + +from dotenv import load_dotenv + from app.dal.graph.tugraph import GraphClient from app.dal.search.es import ElasticsearchClient -import os -from dotenv import load_dotenv +from app.services.graph_services.base import BaseService, FilterKey, ServiceConfig load_dotenv() diff --git a/osgraph-service-py/app/services/graph_summary.py b/osgraph-service-py/app/services/graph_summary.py index 214a3c5..9e76444 100644 --- a/osgraph-service-py/app/services/graph_summary.py +++ b/osgraph-service-py/app/services/graph_summary.py @@ -1,4 +1,5 @@ from typing import Any + from app.llm.openai_client import OpenAIClient diff --git a/osgraph-service-py/app/utils/logger.py b/osgraph-service-py/app/utils/logger.py index 77bea19..6a8473d 
100644 --- a/osgraph-service-py/app/utils/logger.py +++ b/osgraph-service-py/app/utils/logger.py @@ -1,6 +1,6 @@ import logging -from logging.handlers import RotatingFileHandler import os +from logging.handlers import RotatingFileHandler def setup_logger(app): diff --git a/osgraph-service-py/app/utils/response_handler.py b/osgraph-service-py/app/utils/response_handler.py index 9444e7d..c444f01 100644 --- a/osgraph-service-py/app/utils/response_handler.py +++ b/osgraph-service-py/app/utils/response_handler.py @@ -1,6 +1,7 @@ -from flask import jsonify from typing import Any, Dict, Optional +from flask import jsonify + class ResponseHandler: @staticmethod diff --git a/osgraph-service-py/config.py b/osgraph-service-py/config.py index 2f921c2..d65e629 100644 --- a/osgraph-service-py/config.py +++ b/osgraph-service-py/config.py @@ -1,4 +1,5 @@ import logging + from dotenv import load_dotenv load_dotenv() diff --git a/osgraph-service-py/run.py b/osgraph-service-py/run.py index 339d810..c24f7b0 100644 --- a/osgraph-service-py/run.py +++ b/osgraph-service-py/run.py @@ -1,7 +1,9 @@ # run.py import os -from app import create_app + from dotenv import load_dotenv + +from app import create_app from config import DevelopmentConfig, ProductionConfig load_dotenv() From aaf2cecc5819cc14e72df4e2588ae5c9d7e2aa97 Mon Sep 17 00:00:00 2001 From: KingSkyLi <15566300566@163.com> Date: Thu, 21 Nov 2024 15:37:12 +0800 Subject: [PATCH 04/61] fixed; --- osgraph-service-py/.flake8 | 3 +++ osgraph-service-py/app/dal/graph/tugraph.py | 8 +++++--- .../app/llm/prompt_templates/graph_summary.py | 5 ++++- .../app/managers/develop_activities.py | 11 +---------- .../app/managers/project_community.py | 1 - .../app/managers/project_ecology.py | 2 +- osgraph-service-py/app/models/default_graph.py | 16 +++++++++------- osgraph-service-py/app/models/graph_view.py | 1 - osgraph-service-py/app/models/system_graph.py | 7 ++++++- .../app/routes/develop_activities.py | 5 ++--- osgraph-service-py/app/routes/fulltext_search.py | 3 +-- osgraph-service-py/app/routes/graph_list.py | 3 +-- osgraph-service-py/app/routes/os_interest.py | 5 ++--- osgraph-service-py/app/routes/os_partner.py | 5 ++--- .../app/routes/project_community.py | 5 ++--- .../app/routes/project_contribution.py | 5 ++--- osgraph-service-py/app/routes/project_ecology.py | 5 ++--- .../app/services/fulltext_search.py | 1 - osgraph-service-py/app/services/graph_list.py | 2 +- .../graph_services/develop_activities.py | 7 ++++++- .../app/services/graph_services/os_interest.py | 13 ++++++++++++- .../app/services/graph_services/os_partner.py | 7 ++++++- .../services/graph_services/project_community.py | 8 +++++++- .../graph_services/project_contribution.py | 10 ++++++++-- .../services/graph_services/project_ecology.py | 7 ++++++- osgraph-service-py/poetry.lock | 16 +++++++++++++++- osgraph-service-py/pyproject.toml | 3 ++- 27 files changed, 106 insertions(+), 58 deletions(-) create mode 100644 osgraph-service-py/.flake8 diff --git a/osgraph-service-py/.flake8 b/osgraph-service-py/.flake8 new file mode 100644 index 0000000..7fb6806 --- /dev/null +++ b/osgraph-service-py/.flake8 @@ -0,0 +1,3 @@ +[flake8] +max-line-length = 88 +exclude = .venv,__pycache__,build,dist,static \ No newline at end of file diff --git a/osgraph-service-py/app/dal/graph/tugraph.py b/osgraph-service-py/app/dal/graph/tugraph.py index 408305e..8e2b377 100644 --- a/osgraph-service-py/app/dal/graph/tugraph.py +++ b/osgraph-service-py/app/dal/graph/tugraph.py @@ -101,12 +101,13 @@ def create_vertex(self, label: 
str, properties: Dict[str, any]): with self.driver.session(database=self.graph_name) as session: result = session.run(query) current_app.logger.info( - f"Vertex '{ json.dumps(properties)}' created success." + f"Vertex '{json.dumps(properties)}' created success." ) return result.data() except Exception as e: current_app.logger.info( - f"Vertex '{ json.dumps(properties)}' created faild. Error message : {str(e)}" + f"Vertex '{json.dumps(properties)}' created faild. " + f"Error message : {str(e)}" ) # 创建边 @@ -152,7 +153,8 @@ def create_relationship( return result.data() except Exception as e: current_app.logger.error( - f"Relationship '{json.dumps(properties)}' creation failed. Error message: {str(e)}" + f"Relationship '{json.dumps(properties)}' creation failed. " + f"Error message: {str(e)}" ) return None diff --git a/osgraph-service-py/app/llm/prompt_templates/graph_summary.py b/osgraph-service-py/app/llm/prompt_templates/graph_summary.py index 5521869..efb604d 100644 --- a/osgraph-service-py/app/llm/prompt_templates/graph_summary.py +++ b/osgraph-service-py/app/llm/prompt_templates/graph_summary.py @@ -14,5 +14,8 @@ def get_graph_summary_prompt(graph_data: dict) -> str: "3. 图中是否有明显的社区或分组?如果有,简要描述。\n" "4. 总结图的主要特征和潜在意义。" ) - graph_string = f"vertices: {graph_data.get('vertices', [])}, edges: {graph_data.get('edges', [])}" + graph_string = ( + f"vertices: {graph_data.get('vertices', [])}, " + f"edges: {graph_data.get('edges', [])}" + ) return template.replace("{graph}", graph_string) diff --git a/osgraph-service-py/app/managers/develop_activities.py b/osgraph-service-py/app/managers/develop_activities.py index 94f98c5..f015f14 100644 --- a/osgraph-service-py/app/managers/develop_activities.py +++ b/osgraph-service-py/app/managers/develop_activities.py @@ -3,16 +3,7 @@ import os from typing import Any, Dict -from app.models.graph_view import ( - CodeReviewAction, - CommentIssue, - CreateIssue, - CreatePR, - Graph, - Push, - Repo, - User, -) +from app.models.graph_view import Graph, Push, Repo, User from app.services.graph_services.develop_activities import DevelopActivitiesService diff --git a/osgraph-service-py/app/managers/project_community.py b/osgraph-service-py/app/managers/project_community.py index 58873a7..27f73bf 100644 --- a/osgraph-service-py/app/managers/project_community.py +++ b/osgraph-service-py/app/managers/project_community.py @@ -5,7 +5,6 @@ from app.models.graph_view import ( Belong, - Company, Country, Graph, PullRequestAction, diff --git a/osgraph-service-py/app/managers/project_ecology.py b/osgraph-service-py/app/managers/project_ecology.py index aec5ba5..5c4356c 100644 --- a/osgraph-service-py/app/managers/project_ecology.py +++ b/osgraph-service-py/app/managers/project_ecology.py @@ -3,7 +3,7 @@ import os from typing import Any, Dict -from app.models.graph_view import Belong, CommonDevelop, Graph, Orgnization, Repo, User +from app.models.graph_view import Belong, CommonDevelop, Graph, Orgnization, Repo from app.services.graph_services.project_ecology import ProjectEcologyService diff --git a/osgraph-service-py/app/models/default_graph.py b/osgraph-service-py/app/models/default_graph.py index 1083876..d322f0e 100644 --- a/osgraph-service-py/app/models/default_graph.py +++ b/osgraph-service-py/app/models/default_graph.py @@ -1,5 +1,5 @@ import os -from dataclasses import asdict, dataclass +from dataclasses import dataclass from typing import Any, Optional from dotenv import load_dotenv @@ -49,7 +49,8 @@ def props(self) -> Any: def __repr__(self): return ( 
f"{self.__class__.__name__}(label={self.label}, primary={self.primary}, " - f"type={self.type}, source={self.source}, target={self.target}, props={self.props})" + f"type={self.type}, source={self.source}, " + f"target={self.target}, props={self.props})" ) @@ -69,11 +70,12 @@ def __init__(self, props: GitHubUserProps): self._props = props def __repr__(self): - return f"{self.__class__.__name__}(label={self.label}, primary={self.primary}, props={self._props})" - - -from dataclasses import dataclass -from typing import Optional + return ( + f"{self.__class__.__name__}(" + f"label={self.label}, " + f"primary={self.primary}, " + f"props={self._props})" + ) @dataclass diff --git a/osgraph-service-py/app/models/graph_view.py b/osgraph-service-py/app/models/graph_view.py index 6908e80..2124442 100644 --- a/osgraph-service-py/app/models/graph_view.py +++ b/osgraph-service-py/app/models/graph_view.py @@ -1,6 +1,5 @@ # app/models/graph_view.py -import json from dataclasses import asdict, dataclass, field from typing import List, Literal, Optional diff --git a/osgraph-service-py/app/models/system_graph.py b/osgraph-service-py/app/models/system_graph.py index 3de99dc..99536af 100644 --- a/osgraph-service-py/app/models/system_graph.py +++ b/osgraph-service-py/app/models/system_graph.py @@ -23,4 +23,9 @@ def __init__(self, name: str, comment: str, input_types: str, filter_keys: str): ) def __repr__(self): - return f"GitHubUser(label={self.label}, pk={self.primary}, type={self.type}, props={self.props})" + return ( + f"{self.__class__.__name__}(" + f"label={self.label}, " + f"primary={self.primary}, " + f"props={self.props})" + ) diff --git a/osgraph-service-py/app/routes/develop_activities.py b/osgraph-service-py/app/routes/develop_activities.py index dff0f6c..bd56aa3 100644 --- a/osgraph-service-py/app/routes/develop_activities.py +++ b/osgraph-service-py/app/routes/develop_activities.py @@ -1,8 +1,7 @@ import logging -from dataclasses import asdict from typing import Any, Dict -from flask import Blueprint, abort, request +from flask import Blueprint, request from app.managers.develop_activities import DevelopActivitiesManager from app.utils.custom_exceptions import InvalidUsage @@ -25,7 +24,7 @@ def get_activities_graph(self, data: Dict[str, Any]) -> Dict[str, Any]: except InvalidUsage as e: logger.error(f"Invalid usage: {str(e)}") return ResponseHandler.error(str(e.message), e.status_code) - except Exception as e: + except Exception: logger.exception("Internal server error") return ResponseHandler.error("Internal server error", 500) diff --git a/osgraph-service-py/app/routes/fulltext_search.py b/osgraph-service-py/app/routes/fulltext_search.py index be24122..d02ae9c 100644 --- a/osgraph-service-py/app/routes/fulltext_search.py +++ b/osgraph-service-py/app/routes/fulltext_search.py @@ -1,5 +1,4 @@ import logging -from dataclasses import asdict from typing import Any, Dict from flask import Blueprint, request @@ -23,7 +22,7 @@ def search(self, data) -> Dict[str, Any]: except InvalidUsage as e: logger.error(f"Invalid usage: {str(e)}") return ResponseHandler.error(str(e.message), e.status_code) - except Exception as e: + except Exception: logger.exception("Internal server error") return ResponseHandler.error("Internal server error", 500) diff --git a/osgraph-service-py/app/routes/graph_list.py b/osgraph-service-py/app/routes/graph_list.py index 70cdefa..5c0b77c 100644 --- a/osgraph-service-py/app/routes/graph_list.py +++ b/osgraph-service-py/app/routes/graph_list.py @@ -1,5 +1,4 @@ import logging -from 
dataclasses import asdict from typing import Any, Dict from flask import Blueprint @@ -23,7 +22,7 @@ def get_graph_list(self) -> Dict[str, Any]: except InvalidUsage as e: logger.error(f"Invalid usage: {str(e)}") return ResponseHandler.error(str(e.message), e.status_code) - except Exception as e: + except Exception: logger.exception("Internal server error") return ResponseHandler.error("Internal server error", 500) diff --git a/osgraph-service-py/app/routes/os_interest.py b/osgraph-service-py/app/routes/os_interest.py index 773ce8e..f742780 100644 --- a/osgraph-service-py/app/routes/os_interest.py +++ b/osgraph-service-py/app/routes/os_interest.py @@ -1,8 +1,7 @@ import logging -from dataclasses import asdict from typing import Any, Dict -from flask import Blueprint, abort, request +from flask import Blueprint, request from app.managers.os_interest import OSInterestManager from app.utils.custom_exceptions import InvalidUsage @@ -26,7 +25,7 @@ def get_interest_graph(self, data: Dict[str, Any]) -> Dict[str, Any]: except InvalidUsage as e: logger.error(f"Invalid usage: {str(e)}") return ResponseHandler.error(str(e.message), e.status_code) - except Exception as e: + except Exception: logger.exception("Internal server error") return ResponseHandler.error("Internal server error", 500) diff --git a/osgraph-service-py/app/routes/os_partner.py b/osgraph-service-py/app/routes/os_partner.py index 8c9c7cd..d55c1b5 100644 --- a/osgraph-service-py/app/routes/os_partner.py +++ b/osgraph-service-py/app/routes/os_partner.py @@ -1,8 +1,7 @@ import logging -from dataclasses import asdict from typing import Any, Dict -from flask import Blueprint, abort, request +from flask import Blueprint, request from app.managers.os_partner import OSPartnerManager from app.utils.custom_exceptions import InvalidUsage @@ -23,7 +22,7 @@ def get_partner_graph(self, data: Dict[str, Any]) -> Dict[str, Any]: except InvalidUsage as e: logger.error(f"Invalid usage: {str(e)}") return ResponseHandler.error(str(e.message), e.status_code) - except Exception as e: + except Exception: logger.exception("Internal server error") return ResponseHandler.error("Internal server error", 500) diff --git a/osgraph-service-py/app/routes/project_community.py b/osgraph-service-py/app/routes/project_community.py index bb0bb4a..02fe63c 100644 --- a/osgraph-service-py/app/routes/project_community.py +++ b/osgraph-service-py/app/routes/project_community.py @@ -1,8 +1,7 @@ import logging -from dataclasses import asdict from typing import Any, Dict -from flask import Blueprint, abort, request +from flask import Blueprint, request from app.managers.project_community import ProjectCommunityManager from app.utils.custom_exceptions import InvalidUsage @@ -23,7 +22,7 @@ def get_community_graph(self, data: Dict[str, Any]) -> Dict[str, Any]: except InvalidUsage as e: logger.error(f"Invalid usage: {str(e)}") return ResponseHandler.error(str(e.message), e.status_code) - except Exception as e: + except Exception: logger.exception("Internal server error") return ResponseHandler.error("Internal server error", 500) diff --git a/osgraph-service-py/app/routes/project_contribution.py b/osgraph-service-py/app/routes/project_contribution.py index 6c8aa0d..603cd16 100644 --- a/osgraph-service-py/app/routes/project_contribution.py +++ b/osgraph-service-py/app/routes/project_contribution.py @@ -1,8 +1,7 @@ import logging -from dataclasses import asdict from typing import Any, Dict -from flask import Blueprint, abort, request +from flask import Blueprint, request from 
app.managers.project_contribution import ProjectContributionManager from app.utils.custom_exceptions import InvalidUsage @@ -25,7 +24,7 @@ def get_contribution_graph(self, data: Dict[str, Any]) -> Dict[str, Any]: except InvalidUsage as e: logger.error(f"Invalid usage: {str(e)}") return ResponseHandler.error(str(e.message), e.status_code) - except Exception as e: + except Exception: logger.exception("Internal server error") return ResponseHandler.error("Internal server error", 500) diff --git a/osgraph-service-py/app/routes/project_ecology.py b/osgraph-service-py/app/routes/project_ecology.py index 0594127..01cb2ff 100644 --- a/osgraph-service-py/app/routes/project_ecology.py +++ b/osgraph-service-py/app/routes/project_ecology.py @@ -1,8 +1,7 @@ import logging -from dataclasses import asdict from typing import Any, Dict -from flask import Blueprint, abort, request +from flask import Blueprint, request from app.managers.project_ecology import ProjectEcologyManager from app.utils.custom_exceptions import InvalidUsage @@ -23,7 +22,7 @@ def get_ecology_graph(self, data: Dict[str, Any]) -> Dict[str, Any]: except InvalidUsage as e: logger.error(f"Invalid usage: {str(e)}") return ResponseHandler.error(str(e.message), e.status_code) - except Exception as e: + except Exception: logger.exception("Internal server error") return ResponseHandler.error("Internal server error", 500) diff --git a/osgraph-service-py/app/services/fulltext_search.py b/osgraph-service-py/app/services/fulltext_search.py index ed86f81..b92d4b1 100644 --- a/osgraph-service-py/app/services/fulltext_search.py +++ b/osgraph-service-py/app/services/fulltext_search.py @@ -1,4 +1,3 @@ -import os from typing import Any from dotenv import load_dotenv diff --git a/osgraph-service-py/app/services/graph_list.py b/osgraph-service-py/app/services/graph_list.py index 64fff24..59e6b0a 100644 --- a/osgraph-service-py/app/services/graph_list.py +++ b/osgraph-service-py/app/services/graph_list.py @@ -12,6 +12,6 @@ class GraphListService: def execute(self) -> Any: graph_name = os.getenv("TUGRAPHDB_OSGRAPH_SYSTEM_GRAPH_NAME") client = GraphClient(graph_name) - cypher = f"""MATCH (n:graph_service) RETURN n""" + cypher = """MATCH (n:graph_service) RETURN n""" result = client.run(cypher) return result diff --git a/osgraph-service-py/app/services/graph_services/develop_activities.py b/osgraph-service-py/app/services/graph_services/develop_activities.py index 8aadb7b..b1c6280 100644 --- a/osgraph-service-py/app/services/graph_services/develop_activities.py +++ b/osgraph-service-py/app/services/graph_services/develop_activities.py @@ -46,6 +46,11 @@ def execute(self, data: Dict[str, Any]) -> Any: develop_id = res[0]["id"] graph_name = os.getenv("TUGRAPHDB_OSGRAPH_GITHUB_GRAPH_NAME") client = GraphClient(graph_name) - cypher = f"""CALL osgraph.get_developer_contribution('{{"developer_id":{develop_id},"top_n":{topn}}}') YIELD start_node, relationship, end_node return start_node, relationship, end_node""" + cypher = ( + f"CALL osgraph.get_developer_contribution('{{" + f'"developer_id":{develop_id},"top_n":{topn}' + f"}}') YIELD start_node, relationship, end_node " + "return start_node, relationship, end_node" + ) result = client.run(cypher) return result diff --git a/osgraph-service-py/app/services/graph_services/os_interest.py b/osgraph-service-py/app/services/graph_services/os_interest.py index 8c54e57..831a544 100644 --- a/osgraph-service-py/app/services/graph_services/os_interest.py +++ b/osgraph-service-py/app/services/graph_services/os_interest.py @@ 
-1,3 +1,4 @@ +import json import os from datetime import datetime, timedelta from typing import Any, Dict @@ -50,6 +51,16 @@ def execute(self, data: Dict[str, Any]) -> Any: user_id = res[0]["id"] graph_name = os.getenv("TUGRAPHDB_OSGRAPH_GITHUB_GRAPH_NAME") client = GraphClient(graph_name) - cypher = f"""CALL osgraph.get_developer_repos_profile('{{"developer_id":{user_id},"topic_topn":{topic_topn},"repo_topn":{repo_topn}}}') YIELD start_node, relationship, end_node return start_node, relationship, end_node""" + params_dict = { + "developer_id": user_id, + "topic_topn": topic_topn, + "repo_topn": repo_topn, + } + params = json.dumps(params_dict) + cypher = ( + f"CALL osgraph.get_developer_repos_profile('{params}') " + "YIELD start_node, relationship, end_node " + "return start_node, relationship, end_node" + ) result = client.run(cypher) return result diff --git a/osgraph-service-py/app/services/graph_services/os_partner.py b/osgraph-service-py/app/services/graph_services/os_partner.py index eaa208a..d5c6055 100644 --- a/osgraph-service-py/app/services/graph_services/os_partner.py +++ b/osgraph-service-py/app/services/graph_services/os_partner.py @@ -46,6 +46,11 @@ def execute(self, data: Dict[str, Any]) -> Any: user_id = res[0]["id"] graph_name = os.getenv("TUGRAPHDB_OSGRAPH_GITHUB_GRAPH_NAME") client = GraphClient(graph_name) - cypher = f"""CALL osgraph.get_developer_by_developer('{{"developer_id":{user_id},"top_n":{topn}}}') YIELD start_node, relationship, end_node return start_node, relationship, end_node""" + cypher = ( + f"CALL osgraph.get_developer_by_developer('{{" + f'"developer_id":{user_id},"top_n":{topn}' + f"}}') YIELD start_node, relationship, end_node " + "return start_node, relationship, end_node" + ) result = client.run(cypher) return result diff --git a/osgraph-service-py/app/services/graph_services/project_community.py b/osgraph-service-py/app/services/graph_services/project_community.py index 5ddbd48..e9d2a40 100644 --- a/osgraph-service-py/app/services/graph_services/project_community.py +++ b/osgraph-service-py/app/services/graph_services/project_community.py @@ -50,6 +50,12 @@ def execute(self, data: Dict[str, Any]) -> Any: repo_id = res[0]["id"] graph_name = os.getenv("TUGRAPHDB_OSGRAPH_GITHUB_GRAPH_NAME") client = GraphClient(graph_name) - cypher = f"""CALL osgraph.get_repo_developers_profile('{{"repo_id":{repo_id},"company_topn":{company_topn},"country_topn":{country_topn},"developer_topn":{developer_topn}}}') YIELD start_node, relationship, end_node return start_node, relationship, end_node""" + cypher = ( + f"CALL osgraph.get_repo_developers_profile('{{" + f'"repo_id":{repo_id},"company_topn":{company_topn},' + f'"country_topn":{country_topn},"developer_topn":{developer_topn}' + f"}}') YIELD start_node, relationship, end_node " + "return start_node, relationship, end_node" + ) result = client.run(cypher) return result diff --git a/osgraph-service-py/app/services/graph_services/project_contribution.py b/osgraph-service-py/app/services/graph_services/project_contribution.py index efa9ab2..6ef9fd8 100644 --- a/osgraph-service-py/app/services/graph_services/project_contribution.py +++ b/osgraph-service-py/app/services/graph_services/project_contribution.py @@ -54,7 +54,7 @@ def execute(self, data: Dict[str, Any]) -> Any: github_repo: str = validated_data["GitHubRepo"] start_time: int = validated_data["start-time"] or get_default_start_time() end_time: int = validated_data["end-time"] or get_default_end_time() - config_name = os.getenv("FLASK_CONFIG") + 
os.getenv("FLASK_CONFIG") # if config_name == 'development': # start_time = 0 start_time = 0 @@ -66,6 +66,12 @@ def execute(self, data: Dict[str, Any]) -> Any: repo_id = res[0]["id"] graph_name = os.getenv("TUGRAPHDB_OSGRAPH_GITHUB_GRAPH_NAME") client = GraphClient(graph_name) - cypher = f"""CALL osgraph.get_repo_contribution('{{"repo_id":{repo_id},"start_timestamp":{start_time},"end_timestamp":{end_time},"top_n":{contribution_limit}}}') YIELD start_node, relationship, end_node return start_node, relationship, end_node""" + cypher = ( + f"CALL osgraph.get_repo_contribution('{{" + f'"repo_id":{repo_id},"start_timestamp":{start_time},' + f'"end_timestamp":{end_time},"top_n":{contribution_limit}' + f"}}') YIELD start_node, relationship, end_node " + "return start_node, relationship, end_node" + ) result = client.run(cypher) return result diff --git a/osgraph-service-py/app/services/graph_services/project_ecology.py b/osgraph-service-py/app/services/graph_services/project_ecology.py index b1483a4..e81982a 100644 --- a/osgraph-service-py/app/services/graph_services/project_ecology.py +++ b/osgraph-service-py/app/services/graph_services/project_ecology.py @@ -46,6 +46,11 @@ def execute(self, data: Dict[str, Any]) -> Any: repo_id = res[0]["id"] graph_name = os.getenv("TUGRAPHDB_OSGRAPH_GITHUB_GRAPH_NAME") client = GraphClient(graph_name) - cypher = f"""CALL osgraph.get_repo_by_repo('{{"repo_id":{repo_id}, "top_n":{top_n}}}') YIELD start_node, relationship, end_node return start_node, relationship, end_node""" + cypher = ( + f"CALL osgraph.get_repo_by_repo('{{" + f'"repo_id":{repo_id}, "top_n":{top_n}' + f"}}') YIELD start_node, relationship, end_node " + "return start_node, relationship, end_node" + ) result = client.run(cypher) return result diff --git a/osgraph-service-py/poetry.lock b/osgraph-service-py/poetry.lock index 5ff5859..9a2ff86 100644 --- a/osgraph-service-py/poetry.lock +++ b/osgraph-service-py/poetry.lock @@ -31,6 +31,20 @@ doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21.0b1)"] trio = ["trio (>=0.26.1)"] +[[package]] +name = "autoflake" +version = "2.3.1" +description = "Removes unused imports and unused variables" +optional = false +python-versions = ">=3.8" +files = [ + {file = "autoflake-2.3.1-py3-none-any.whl", hash = "sha256:3ae7495db9084b7b32818b4140e6dc4fc280b712fb414f5b8fe57b0a8e85a840"}, + {file = "autoflake-2.3.1.tar.gz", hash = "sha256:c98b75dc5b0a86459c4f01a1d32ac7eb4338ec4317a4469515ff1e687ecd909e"}, +] + +[package.dependencies] +pyflakes = ">=3.0.0" + [[package]] name = "bandit" version = "1.7.10" @@ -1061,4 +1075,4 @@ watchdog = ["watchdog (>=2.3)"] [metadata] lock-version = "2.0" python-versions = "^3.12" -content-hash = "691bd32ef56ade12daf36c737bd933a9cd9cbb4af22adf07effa6529197ff9b7" +content-hash = "da2fffeb806d7892673f03aeeb69ba55e17cb9ee490516e790c5488e7dcdd7fd" diff --git a/osgraph-service-py/pyproject.toml b/osgraph-service-py/pyproject.toml index 9301362..114fe2f 100644 --- a/osgraph-service-py/pyproject.toml +++ b/osgraph-service-py/pyproject.toml @@ -20,6 +20,7 @@ isort = "^5.13.2" flake8 = "^7.1.1" mypy = "^1.13.0" bandit = "^1.7.10" +autoflake = "^2.3.1" [build-system] requires = ["poetry-core"] @@ -37,6 +38,6 @@ default_section = "THIRDPARTY" [flake8] max-line-length = 88 -exclude = ".venv,__pycache__,build,dist" 
+exclude = [".venv", "__pycache__", "build", "dist", "static"] From acfe5a6c17cedbdeed4ed6beaf100b27fc23fb0e Mon Sep 17 00:00:00 2001 From: KingSkyLi <15566300566@163.com> Date: Thu, 21 Nov 2024 15:38:28 +0800 Subject: [PATCH 05/61] fixed; --- osgraph-service-py/pyproject.toml | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/osgraph-service-py/pyproject.toml b/osgraph-service-py/pyproject.toml index 114fe2f..e8a6be2 100644 --- a/osgraph-service-py/pyproject.toml +++ b/osgraph-service-py/pyproject.toml @@ -36,8 +36,6 @@ line_length = 88 known_first_party = ["osgraph-service-py"] default_section = "THIRDPARTY" -[flake8] -max-line-length = 88 -exclude = [".venv", "__pycache__", "build", "dist", "static"] + From ab47b730907fc8f2e232ff4c082a678fd75e325c Mon Sep 17 00:00:00 2001 From: KingSkyLi <15566300566@163.com> Date: Thu, 21 Nov 2024 16:17:07 +0800 Subject: [PATCH 06/61] fixed; --- osgraph-service-py/app/__init__.py | 5 +- osgraph-service-py/app/dal/graph/tugraph.py | 9 +- osgraph-service-py/app/llm/openai_client.py | 11 -- .../app/managers/develop_activities.py | 5 +- .../app/managers/fulltext_search.py | 5 +- osgraph-service-py/app/managers/graph_list.py | 5 +- .../app/managers/os_interest.py | 5 +- osgraph-service-py/app/managers/os_partner.py | 112 +++++++++--------- .../app/managers/project_community.py | 5 +- .../app/managers/project_contribution.py | 5 +- .../app/managers/project_ecology.py | 5 +- osgraph-service-py/app/models/graph_view.py | 18 +-- osgraph-service-py/app/models/system_graph.py | 4 +- 13 files changed, 97 insertions(+), 97 deletions(-) diff --git a/osgraph-service-py/app/__init__.py b/osgraph-service-py/app/__init__.py index 491096b..3267803 100644 --- a/osgraph-service-py/app/__init__.py +++ b/osgraph-service-py/app/__init__.py @@ -1,6 +1,7 @@ # app/__init__.py import importlib import os +from typing import Type, Union from dotenv import load_dotenv from flask import Flask, jsonify @@ -15,7 +16,9 @@ load_dotenv() -def create_app(config_class: str = "config.ProductionConfig") -> Flask: +def create_app( + config_class: Union[str, Type[object]] = "config.ProductionConfig" +) -> Flask: app = Flask(__name__) app.config.from_object(config_class) setup_logger(app) diff --git a/osgraph-service-py/app/dal/graph/tugraph.py b/osgraph-service-py/app/dal/graph/tugraph.py index 8e2b377..66344f5 100644 --- a/osgraph-service-py/app/dal/graph/tugraph.py +++ b/osgraph-service-py/app/dal/graph/tugraph.py @@ -2,7 +2,7 @@ import json import os from dataclasses import asdict, dataclass, is_dataclass -from typing import Any, Dict, List, Optional +from typing import Any, Dict, List, Optional, Union from dotenv import load_dotenv from flask import current_app @@ -77,7 +77,7 @@ def create_label(self, label: GraphLabel): except Exception as e: current_app.logger.info(f"Label '{label}' may already exist. {str(e)}") - def get_label(self, label_type: str, label_name: str) -> Dict[str, any]: + def get_label(self, label_type: str, label_name: str) -> Union[str, None]: try: with self.driver.session(database=self.graph_name) as session: if label_type == "vertex": @@ -90,9 +90,10 @@ def get_label(self, label_type: str, label_name: str) -> Dict[str, any]: current_app.logger.info( f"Faild to get {label_type} {label_name} . 
Errormessage: {str(e)}" ) + return None # 创建节点 - def create_vertex(self, label: str, properties: Dict[str, any]): + def create_vertex(self, label: str, properties: Dict[str, Any]): try: properties_str = self._convert_dict_to_str(properties) query = f""" @@ -347,7 +348,7 @@ def _convert_dict_to_str(self, properties: Any) -> str: return "" # 如果是 dataclass,则将其转换为字典 - if is_dataclass(properties): + if is_dataclass(properties) and not isinstance(properties, type): properties = asdict(properties) def convert_value(value: Any) -> str: diff --git a/osgraph-service-py/app/llm/openai_client.py b/osgraph-service-py/app/llm/openai_client.py index cb30624..e0c1411 100644 --- a/osgraph-service-py/app/llm/openai_client.py +++ b/osgraph-service-py/app/llm/openai_client.py @@ -38,14 +38,3 @@ def summary_graph( ): prompt = get_graph_summary_prompt(graph_data=graph_data) return self.ask_question(prompt) - - -# 示例用法 -if __name__ == "__main__": - api_key = "your-openai-api-key" - client = OpenAIClient(api_key) - - question = "什么是Python的主要用途?" - context = "请简要回答关于编程语言的问题。" - answer = client.ask_question(question, context) - print("回答:", answer) diff --git a/osgraph-service-py/app/managers/develop_activities.py b/osgraph-service-py/app/managers/develop_activities.py index f015f14..f191d34 100644 --- a/osgraph-service-py/app/managers/develop_activities.py +++ b/osgraph-service-py/app/managers/develop_activities.py @@ -1,7 +1,7 @@ # app/manager/develop_activities.py import json import os -from typing import Any, Dict +from typing import Any, Dict, Union from app.models.graph_view import Graph, Push, Repo, User from app.services.graph_services.develop_activities import DevelopActivitiesService @@ -11,7 +11,7 @@ class DevelopActivitiesManager: def __init__(self) -> None: pass - def get_graph(self, data: Dict[str, Any]) -> Dict | None: + def get_graph(self, data: Dict[str, Any]) -> Union[Dict, None]: service = DevelopActivitiesService() graph = Graph() result = service.execute(data=data) @@ -91,3 +91,4 @@ def get_graph(self, data: Dict[str, Any]) -> Dict | None: if summary: graph.update_summary(summary=summary) return graph.to_dict() + return None diff --git a/osgraph-service-py/app/managers/fulltext_search.py b/osgraph-service-py/app/managers/fulltext_search.py index 3c602c3..dabd88b 100644 --- a/osgraph-service-py/app/managers/fulltext_search.py +++ b/osgraph-service-py/app/managers/fulltext_search.py @@ -1,5 +1,5 @@ # app/manager/fulltext_search.py -from typing import List +from typing import List, Union from app.services.fulltext_search import FulltextSearchService @@ -8,8 +8,9 @@ class FulltextSearchManager: def __init__(self) -> None: pass - def search(self, data) -> List | None: + def search(self, data) -> Union[List, None]: service = FulltextSearchService() result = service.execute(data=data) if result: return result + return None diff --git a/osgraph-service-py/app/managers/graph_list.py b/osgraph-service-py/app/managers/graph_list.py index 36d0cd4..6abfc0c 100644 --- a/osgraph-service-py/app/managers/graph_list.py +++ b/osgraph-service-py/app/managers/graph_list.py @@ -1,5 +1,5 @@ # app/manager/graph_list.py -from typing import List +from typing import List, Union from app.services.graph_list import GraphListService @@ -8,7 +8,7 @@ class GraphListManager: def __init__(self) -> None: pass - def get_graph_list(self) -> List | None: + def get_graph_list(self) -> Union[List, None]: service = GraphListService() graph_list: List = [] result = service.execute() @@ -16,3 +16,4 @@ def get_graph_list(self) -> List | 
None: for item in result: graph_list.append(item["n"]) return graph_list + return None diff --git a/osgraph-service-py/app/managers/os_interest.py b/osgraph-service-py/app/managers/os_interest.py index 3375c3b..a938330 100644 --- a/osgraph-service-py/app/managers/os_interest.py +++ b/osgraph-service-py/app/managers/os_interest.py @@ -1,7 +1,7 @@ # app/manager/os_interest.py import json import os -from typing import Any, Dict +from typing import Any, Dict, Union from app.models.graph_view import Belong, ContributeRepo, Graph, Repo, Topic, User from app.services.graph_services.os_interest import OSInterestService @@ -11,7 +11,7 @@ class OSInterestManager: def __init__(self) -> None: pass - def get_graph(self, data: Dict[str, Any]) -> Dict | None: + def get_graph(self, data: Dict[str, Any]) -> Union[Dict, None]: service = OSInterestService() graph = Graph() result = service.execute(data=data) @@ -71,3 +71,4 @@ def get_graph(self, data: Dict[str, Any]) -> Dict | None: if summary: graph.update_summary(summary=summary) return graph.to_dict() + return None diff --git a/osgraph-service-py/app/managers/os_partner.py b/osgraph-service-py/app/managers/os_partner.py index 9e83ae2..4099ee7 100644 --- a/osgraph-service-py/app/managers/os_partner.py +++ b/osgraph-service-py/app/managers/os_partner.py @@ -1,7 +1,7 @@ # app/manager/os_partner.py import json import os -from typing import Any, Dict +from typing import Any, Dict, Union from app.models.graph_view import ( CommonIssue, @@ -18,69 +18,69 @@ class OSPartnerManager: def __init__(self) -> None: pass - def get_graph(self, data: Dict[str, Any]) -> Dict | None: + def get_graph(self, data: Dict[str, Any]) -> Union[Dict, None]: service = OSPartnerService() graph = Graph() result = service.execute(data=data) if result: - if result: - for data in result: - start_node = json.loads(data["start_node"]) - relationship = json.loads(data["relationship"]) - end_node = json.loads(data["end_node"]) - if start_node["type"] == "github_user": - src_user = User( - id=start_node["id"], name=start_node["properties"]["name"] - ) - graph.insert_entity(src_user) + for data in result: + start_node = json.loads(data["start_node"]) + relationship = json.loads(data["relationship"]) + end_node = json.loads(data["end_node"]) + if start_node["type"] == "github_user": + src_user = User( + id=start_node["id"], name=start_node["properties"]["name"] + ) + graph.insert_entity(src_user) - if end_node["type"] == "github_user": - tar_user = User( - id=end_node["id"], name=end_node["properties"]["name"] - ) - graph.insert_entity(tar_user) + if end_node["type"] == "github_user": + tar_user = User( + id=end_node["id"], name=end_node["properties"]["name"] + ) + graph.insert_entity(tar_user) - if end_node["type"] == "common_issue": - common_issue = CommonIssue( - sid=relationship["src"], - tid=relationship["dst"], - id=relationship["id"], - count=relationship["properties"]["count"], - ) - graph.insert_entity(common_issue) + if relationship["type"] == "common_issue": + common_issue = CommonIssue( + sid=relationship["src"], + tid=relationship["dst"], + id=relationship["id"], + count=relationship["properties"]["count"], + ) + graph.insert_relationship(common_issue) - if relationship["type"] == "common_pr": - common_pr = CommonPR( - sid=relationship["src"], - tid=relationship["dst"], - id=relationship["id"], - count=relationship["properties"]["count"], - ) - graph.insert_relationship(common_pr) + if relationship["type"] == "common_pr": + common_pr = CommonPR( + sid=relationship["src"], + 
tid=relationship["dst"], + id=relationship["id"], + count=relationship["properties"]["count"], + ) + graph.insert_relationship(common_pr) - if relationship["type"] == "common_star": - common_star = CommonStar( - sid=relationship["src"], - tid=relationship["dst"], - id=relationship["id"], - count=relationship["properties"]["count"], - ) - graph.insert_relationship(common_star) + if relationship["type"] == "common_star": + common_star = CommonStar( + sid=relationship["src"], + tid=relationship["dst"], + id=relationship["id"], + count=relationship["properties"]["count"], + ) + graph.insert_relationship(common_star) - if relationship["type"] == "common_repo": - common_repo = CommonRepo( - sid=relationship["src"], - tid=relationship["dst"], - id=relationship["id"], - count=relationship["properties"]["count"], - ) - graph.insert_relationship(common_repo) + if relationship["type"] == "common_repo": + common_repo = CommonRepo( + sid=relationship["src"], + tid=relationship["dst"], + id=relationship["id"], + count=relationship["properties"]["count"], + ) + graph.insert_relationship(common_repo) - if os.getenv("SUMMARY_GRAPH") == "on": - from app.services.graph_summary import GraphSummaryService + if os.getenv("SUMMARY_GRAPH") == "on": + from app.services.graph_summary import GraphSummaryService - summary_service = GraphSummaryService() - summary = summary_service.execute(data=graph.to_dict()) - if summary: - graph.update_summary(summary=summary) - return graph.to_dict() + summary_service = GraphSummaryService() + summary = summary_service.execute(data=graph.to_dict()) + if summary: + graph.update_summary(summary=summary) + return graph.to_dict() + return None diff --git a/osgraph-service-py/app/managers/project_community.py b/osgraph-service-py/app/managers/project_community.py index 27f73bf..6fcc9a0 100644 --- a/osgraph-service-py/app/managers/project_community.py +++ b/osgraph-service-py/app/managers/project_community.py @@ -1,7 +1,7 @@ # app/manager/project_community.py import json import os -from typing import Any, Dict +from typing import Any, Dict, Union from app.models.graph_view import ( Belong, @@ -19,7 +19,7 @@ class ProjectCommunityManager: def __init__(self) -> None: pass - def get_graph(self, data: Dict[str, Any]) -> Dict | None: + def get_graph(self, data: Dict[str, Any]) -> Union[Dict, None]: service = ProjectCommunityService() graph = Graph() result = service.execute(data=data) @@ -97,3 +97,4 @@ def get_graph(self, data: Dict[str, Any]) -> Dict | None: if summary: graph.update_summary(summary=summary) return graph.to_dict() + return None diff --git a/osgraph-service-py/app/managers/project_contribution.py b/osgraph-service-py/app/managers/project_contribution.py index 4fd20c5..3a4fee9 100644 --- a/osgraph-service-py/app/managers/project_contribution.py +++ b/osgraph-service-py/app/managers/project_contribution.py @@ -1,7 +1,7 @@ # app/manager/project_contribution.py import json import os -from typing import Any, Dict +from typing import Any, Dict, Union from app.models.graph_view import ( CodeReviewAction, @@ -20,7 +20,7 @@ class ProjectContributionManager: def __init__(self) -> None: pass - def get_graph(self, data: Dict[str, Any]) -> Dict | None: + def get_graph(self, data: Dict[str, Any]) -> Union[Dict, None]: service = ProjectContributionService() graph = Graph() result = service.execute(data=data) @@ -94,3 +94,4 @@ def get_graph(self, data: Dict[str, Any]) -> Dict | None: if summary: graph.update_summary(summary=summary) return graph.to_dict() + return None diff --git 
a/osgraph-service-py/app/managers/project_ecology.py b/osgraph-service-py/app/managers/project_ecology.py index 5c4356c..fd2769d 100644 --- a/osgraph-service-py/app/managers/project_ecology.py +++ b/osgraph-service-py/app/managers/project_ecology.py @@ -1,7 +1,7 @@ # app/manager/project_ecology.py import json import os -from typing import Any, Dict +from typing import Any, Dict, Union from app.models.graph_view import Belong, CommonDevelop, Graph, Orgnization, Repo from app.services.graph_services.project_ecology import ProjectEcologyService @@ -11,7 +11,7 @@ class ProjectEcologyManager: def __init__(self) -> None: pass - def get_graph(self, data: Dict[str, Any]) -> Dict | None: + def get_graph(self, data: Dict[str, Any]) -> Union[Dict, None]: service = ProjectEcologyService() graph = Graph() result = service.execute(data=data) @@ -63,3 +63,4 @@ def get_graph(self, data: Dict[str, Any]) -> Dict | None: if summary: graph.update_summary(summary=summary) return graph.to_dict() + return None diff --git a/osgraph-service-py/app/models/graph_view.py b/osgraph-service-py/app/models/graph_view.py index 2124442..91f1837 100644 --- a/osgraph-service-py/app/models/graph_view.py +++ b/osgraph-service-py/app/models/graph_view.py @@ -1,7 +1,7 @@ # app/models/graph_view.py from dataclasses import asdict, dataclass, field -from typing import List, Literal, Optional +from typing import Any, Dict, List, Literal, Optional @dataclass @@ -90,7 +90,7 @@ def filter_relationships(self, **criteria): if all(getattr(edge, key) == value for key, value in criteria.items()) ] - def to_dict(self) -> str: + def to_dict(self) -> Dict[str, Any]: graph_dict = { "vertices": [v.to_dict() for v in self.vertices], "edges": [e.to_dict() for e in self.edges], @@ -155,27 +155,27 @@ class Create(Edge): @dataclass -class CreatePR(Create): +class CreatePR(Edge): name: Literal["创建 PR"] = field(default="创建 PR", init=False) @dataclass -class CreateIssue(Create): +class CreateIssue(Edge): name: Literal["创建 Issue"] = field(default="创建 Issue", init=False) @dataclass -class CreateCommit(Create): +class CreateCommit(Edge): name: Literal["创建 Commit"] = field(default="创建 Commit", init=False) @dataclass -class CreateComment(Create): +class CreateComment(Edge): name: Literal["创建 Comment"] = field(default="创建 Comment", init=False) @dataclass -class CreateCR(Create): +class CreateCR(Edge): name: Literal["创建 CR"] = field(default="创建 CR", init=False) @@ -200,12 +200,12 @@ class PullRequestAction(Edge): @dataclass -class Push(PullRequestAction): +class Push(Edge): name: Literal["推送"] = field(default="推送", init=False) @dataclass -class CommitAction(PullRequestAction): +class CommitAction(Edge): name: Literal["提交"] = field(default="提交", init=False) diff --git a/osgraph-service-py/app/models/system_graph.py b/osgraph-service-py/app/models/system_graph.py index 99536af..b56929b 100644 --- a/osgraph-service-py/app/models/system_graph.py +++ b/osgraph-service-py/app/models/system_graph.py @@ -1,6 +1,6 @@ # app/models/system_graph.py - from dataclasses import dataclass +from typing import Optional @dataclass @@ -15,7 +15,7 @@ class GraphService: label: str = "graph_service" primary: str = "name" type: str = "VERTEX" - props: GraphServiceProps = None + props: Optional[GraphServiceProps] = None def __init__(self, name: str, comment: str, input_types: str, filter_keys: str): self.props = GraphServiceProps( From 093c146a242d0e3aa94c9fe2b9bce1465a75bffe Mon Sep 17 00:00:00 2001 From: KingSkyLi <15566300566@163.com> Date: Thu, 21 Nov 2024 18:06:50 +0800 Subject: 
[PATCH 07/61] fixed; --- osgraph-service-py/app/__init__.py | 7 ++++++- osgraph-service-py/app/dal/graph/tugraph.py | 1 - osgraph-service-py/app/models/system_graph.py | 3 +-- 3 files changed, 7 insertions(+), 4 deletions(-) diff --git a/osgraph-service-py/app/__init__.py b/osgraph-service-py/app/__init__.py index 3267803..4af8100 100644 --- a/osgraph-service-py/app/__init__.py +++ b/osgraph-service-py/app/__init__.py @@ -88,7 +88,12 @@ def initialize_system_graph(app: Flask): type=GraphService.type, properties=[ LabelProps(name=key, type="string", optional=True) - for key in vars(GraphService.props).keys() + for key in ( + GraphService.props.keys() + if isinstance(GraphService.props, dict) + else dir(GraphService.props) + ) + if not key.startswith("_") ], ) client.create_label(label) diff --git a/osgraph-service-py/app/dal/graph/tugraph.py b/osgraph-service-py/app/dal/graph/tugraph.py index 66344f5..c5dc080 100644 --- a/osgraph-service-py/app/dal/graph/tugraph.py +++ b/osgraph-service-py/app/dal/graph/tugraph.py @@ -29,7 +29,6 @@ class GraphLabel: properties: Optional[List[LabelProps]] = None def to_dict(self) -> Dict[str, Any]: - # 遍历属性列表,找到与 primary 匹配的属性并设置 optional 和 index if self.properties: for prop in self.properties: if prop.name == self.primary: diff --git a/osgraph-service-py/app/models/system_graph.py b/osgraph-service-py/app/models/system_graph.py index b56929b..8e3a439 100644 --- a/osgraph-service-py/app/models/system_graph.py +++ b/osgraph-service-py/app/models/system_graph.py @@ -1,6 +1,5 @@ # app/models/system_graph.py from dataclasses import dataclass -from typing import Optional @dataclass @@ -15,7 +14,7 @@ class GraphService: label: str = "graph_service" primary: str = "name" type: str = "VERTEX" - props: Optional[GraphServiceProps] = None + props: GraphServiceProps = GraphServiceProps() def __init__(self, name: str, comment: str, input_types: str, filter_keys: str): self.props = GraphServiceProps( From f1900080071f94e657d0eb1ba5d24f795e254fa3 Mon Sep 17 00:00:00 2001 From: KingSkyLi <15566300566@163.com> Date: Fri, 22 Nov 2024 14:22:05 +0800 Subject: [PATCH 08/61] add render graph --- osgraph-service-py/.gitignore | 1 + osgraph-service-py/app/routes/render_graph.py | 181 +++++++++ osgraph-service-py/poetry.lock | 369 +++++++++++++++++- osgraph-service-py/pyproject.toml | 2 + osgraph-service-py/run.py | 6 + 5 files changed, 558 insertions(+), 1 deletion(-) create mode 100644 osgraph-service-py/app/routes/render_graph.py diff --git a/osgraph-service-py/.gitignore b/osgraph-service-py/.gitignore index 67f0865..546da84 100644 --- a/osgraph-service-py/.gitignore +++ b/osgraph-service-py/.gitignore @@ -17,3 +17,4 @@ __pycache__/ .vscode/ .idea/ .DS_Store +.certs/ diff --git a/osgraph-service-py/app/routes/render_graph.py b/osgraph-service-py/app/routes/render_graph.py new file mode 100644 index 0000000..6116eb0 --- /dev/null +++ b/osgraph-service-py/app/routes/render_graph.py @@ -0,0 +1,181 @@ +import os +import json +import uuid +import logging +import time +import requests +from io import BytesIO +from flask import Blueprint, send_file, after_this_request, request, jsonify +from selenium import webdriver +from selenium.webdriver.chrome.options import Options +from selenium.webdriver.chrome.service import Service +import tempfile + +render_graph_bp = Blueprint("render_graph", __name__, url_prefix="/api/graph") +logger = logging.getLogger(__name__) +BASE_DIR = os.path.dirname(os.path.abspath(__file__)) +RENDER_DIR = os.path.join(BASE_DIR, "../render") + 
+os.makedirs(RENDER_DIR, exist_ok=True) + +def get_graph_data(target_url:str)->dict: + try: + response = requests.get(target_url) + response.raise_for_status() + target_data = response.json() + except requests.RequestException as e: + return jsonify({'error': 'Failed to fetch data', 'details': str(e)}), 500 + if "data" in target_data: + graph_data = target_data["data"] + else: + return {'error': 'No data found in response'}, 400 + + processed_data = process_graph_data(graph_data) + + return processed_data + +def process_graph_data(graph_data): + vertices = graph_data.get("vertices", []) + edges = graph_data.get("edges", []) + id_map = {} + nodes = [] + for vertex in vertices: + node_id = f"node{vertex['id']}" + id_map[vertex['id']] = node_id + nodes.append({ + "id": node_id, + "label": vertex["name"] if vertex["name"] else f"Unnamed {vertex['id']}" + }) + + converted_edges = [] + for edge in edges: + converted_edges.append({ + "source": id_map.get(edge["sid"], f"node{edge['sid']}"), + "target": id_map.get(edge["tid"], f"node{edge['tid']}") + }) + + result = { + "nodes": nodes, + "edges": converted_edges + } + return result + +def generate_html_content(processed_data): + nodes_json = json.dumps(processed_data["nodes"]) + edges_json = json.dumps(processed_data["edges"]) + + html_content = f""" + + +
+ + +