diff --git a/.github/workflows/python_basic_check.yaml b/.github/workflows/python_basic_check.yaml new file mode 100644 index 00000000..18e4baca --- /dev/null +++ b/.github/workflows/python_basic_check.yaml @@ -0,0 +1,9 @@ +name: Develop Branch Action +on: + pull_request_target: + types: + - opened + branches: + - main + paths: + - 'codebase_understanding/**' \ No newline at end of file diff --git a/.gitignore b/.gitignore index 16b69af2..d8a521a5 100755 --- a/.gitignore +++ b/.gitignore @@ -21,3 +21,13 @@ codebase_understanding/dependencies/analysers codebase_understanding/nodeparser/__pycache__ codebase_understanding/nodeparser/tests/__pycache__ codebase_understanding/codeagent/__pycache__ +unoplat-code-confluence/codeagent/__pycache__ +unoplat-code-confluence/codebaseparser/__pycache__ +unoplat-code-confluence/data_models/__pycache__ +unoplat-code-confluence/downloader/__pycache__ +unoplat-code-confluence/loader/__pycache__ +unoplat-code-confluence/nodeparser/__pycache__ +unoplat-code-confluence/nodeparser/tests/__pycache__ +unoplat-code-confluence/settings/__pycache__ +unoplat-code-confluence/utility/__pycache__ +unoplat-code-confluence/data_models/dspy/__pycache__ diff --git a/README.md b/README.md old mode 100755 new mode 100644 index d3e24e1d..f01d963a --- a/README.md +++ b/README.md @@ -1,3 +1,5 @@ +<<<<<<< HEAD +======= # Unoplat-CodeConfluence - Where Code Meets Clarity @@ -223,3 +225,4 @@ These are the people because of which this work has been possible. Unoplat code +>>>>>>> main diff --git a/codebase_understanding/data_models/chapi_unoplat_fieldmodel.py b/codebase_understanding/data_models/chapi_unoplat_fieldmodel.py deleted file mode 100644 index d67be1d9..00000000 --- a/codebase_understanding/data_models/chapi_unoplat_fieldmodel.py +++ /dev/null @@ -1,9 +0,0 @@ -from pydantic import BaseModel, Field -from typing import Optional - - - -class FieldModel(BaseModel): - type_type: Optional[str] = Field(default=None, alias="TypeType") - type_value: Optional[str] = Field(default=None, alias="TypeValue") - type_key: Optional[str] = Field(default=None, alias="TypeKey") diff --git a/codebase_understanding/loader/parse_json.py b/codebase_understanding/loader/parse_json.py deleted file mode 100644 index 433c3cb1..00000000 --- a/codebase_understanding/loader/parse_json.py +++ /dev/null @@ -1,22 +0,0 @@ -from typing import List -from pydantic import ValidationError -from loader.iparse_json import IParseJson -from data_models.chapi_unoplat_node import Node -from loguru import logger -from nodeparser.isummariser import ISummariser - -class JsonParser(IParseJson): - def parse_json_to_nodes(self, json_data: dict,isummariser:ISummariser) -> List[Node]: - """Concrete implementation of the parse_json_to_nodes method.""" - nodes = [] - for item in json_data: - try: - node = Node(**item) - #Only summarise if node type is class - #TODO: going forward might require other constructs too like interface, abstract class for better intellisense etc - if node.type == "CLASS": - node = isummariser.summarise_node(node) - nodes.append(node) - except ValidationError as e: - logger.error(f"Error validating node: {e}") - return nodes \ No newline at end of file diff --git a/LICENSE b/unoplat-code-confluence/LICENSE similarity index 100% rename from LICENSE rename to unoplat-code-confluence/LICENSE diff --git a/unoplat-code-confluence/README.md b/unoplat-code-confluence/README.md new file mode 100644 index 00000000..38824f3e --- /dev/null +++ b/unoplat-code-confluence/README.md @@ -0,0 +1,216 @@ +# 
Unoplat-CodeConfluence - Where Code Meets Clarity + + +## Current Problem with doing Repository level Documentation using AI Tooling + +### Process Overview: + +1. Indexing Code Files: All code files are indexed into a vector database using embeddings that capture the semantic meaning of the code. +2. Query Processing: The system uses fine-tuned language models to interpret the user's query about the codebase. +3. Retrieval-Augmented Generation: The language model performs RAG to fetch relevant code snippets or documentation based on the query. +4. Reranking: Retrieved results are reranked to present the most relevant information first based on the context and specifics of the query. + +### Challenges: + +1. Limited Context Windows: Most AI tools suffer from limited context windows of large language models, which can hinder their ability to process large blocks of code or extended documentation effectively. +2. Lack of Long-term Memory: These tools generally do not incorporate long-term memory, which affects their ability to remember past interactions or understand extensive codebases deeply. + +3. Inefficiency: This process can be computationally expensive and slow, particularly for large codebases, due to the extensive indexing and complex querying mechanisms. +4. Cost: The operational costs can be significant because of the resources required for maintaining up-to-date embeddings and processing queries with advanced AI models. +5. Compliance and Security Issues: Storing and processing entire codebases can lead to compliance issues, especially with code that contains sensitive or proprietary information. +6. First Principles Concern: The approach may not align with first principles of software engineering, which emphasize simplicity and minimizing complexity across programming languages constructs and frameworks. + +### Mermaid Diagram of the Process: +Here's a visual representation of the process using a Mermaid diagram: + +```mermaid +graph LR + A[Start] --> B[Index Code Files] + B --> C[Process Query] + C --> D[Retrieve Relevant Data] + D --> E[Rerank Results] + E --> F[Present Results] + F --> G[End] +``` +This diagram helps visualize the workflow from the start of the query to the presentation of results, illustrating the steps where inefficiencies and complexities arise. + +### Unoplat Solution to all of these problems + +#### Unoplat Solution: Deterministic Information Ingestion for Enhanced Code Understanding +The Unoplat approach offers a significant shift from the conventional AI-powered tools by opting for a deterministic method to manage and understand codebases. Here’s an overview of how Unoplat proposes to resolve the inefficiencies of current AI-powered code assistance tools: + +#### Process Overview: + +1. Language-Agnostic Parsing: Unoplat uses a language-agnostic parser, similar to generic compilers, to analyze and interpret any programming language or framework. This step involves no AI, focusing solely on deterministic parsing methods. +2. Generating Semi-Structured JSON: From the parsing step, Unoplat generates semi-structured JSON data. This JSON captures essential constructs and elements of the programming languages being analyzed, providing a clear, structured view of the codebase without reliance on AI for code understanding. +3. Enhancing Metadata: The semi-structured JSON is then used to enhance the metadata in a single attribute with help of oss instruct model. +4. 
Integration with Open Source LLMs: Leveraging open-source large language models (LLMs), Unoplat combines the enriched metadata with multi-agentic workflows. This integration aims to produce a more sophisticated and useful "Code Atlas," which developers can use to navigate and understand large and complex codebases more effectively. +5. Output: The output is a highly detailed, easily navigable representation of the codebase, allowing developers to understand and modify code with much higher accuracy and speed than traditional AI-based tools. + +#### Benefits: +1. Deterministic and Transparent: The deterministic nature of the process ensures transparency and reliability in how code is analyzed and understood. +2. Cost-Effective: Reduces the dependency on expensive AI models and the associated computational and maintenance costs. +3. Compliance and Security: By not relying on AI models trained on external data, Unoplat minimizes potential compliance and security issues. +4. Scalability: The approach is highly scalable, as it can handle any programming language or framework without needing specific model training. +##### Mermaid Diagram of the Process: +Here’s a visual representation using a Mermaid diagram to illustrate the Unoplat process: + +```mermaid +graph TD + A[Start] --> B[Language-Agnostic Parsing] + B --> C[Generate Semi-Structured JSON] + C --> D[Enhance Metadata] + D --> E[Integrate with Open Source LLMs] + E --> F[Generate Enhanced Code Atlas] + F --> G[End] +``` +This diagram outlines the Unoplat process from the initial parsing of the codebase to the generation of an enhanced Code Atlas, highlighting the deterministic and structured approach to managing and understanding codebases. + +## Unoplat Solution to the Current Problem +```mermaid +flowchart TD + Start(Unoplat GUI/Terminal Experience) + Parse[Parse Java Codebase] + CHAP[Common Hierarchical Abstract Parser] + IC[Information Converter] + Archguard[Archguard] + Output[Semi-structured JSON - Class Metadata] + Litellm[litellm using sota oss llm for reasoning- phi3-14b-instruct] + Finer_Summary[Finer Summary per Class] + CrewAI_Manager_Agent[Manager_Crewai] + Data_Engineer_Agent[Data Engineer - Job is to provide per class unoplat markdown spec] + Unoplat_Custom_Tool[Custom Tool - Fetch Finer summary one at a time until end of items using long term memory] + Software_Engineer_Agent[Software_Engineer_CrewAi - Clean up markdown] + Senior_Software_Engineer_Agent[Senior Software Engineer- Adjust/Modify overall summary based on current summary] + Senior_Markdown_Technical_Documentation_Specialist[Unoplat Markdown tech doc specialist- Analyze the evolving summary for accuracy and insights based on all available classes' metadata and include flow/interactions within the codebases between classes.] 
+ MarkdDownOutput[MarkdDownOutput] + + Start --> Parse + Parse --> CHAP & IC & Archguard + CHAP & IC & Archguard --> Output + Output --> Litellm + Litellm --> Finer_Summary + Finer_Summary --> CrewAI_Manager_Agent + CrewAI_Manager_Agent --> Data_Engineer_Agent + Data_Engineer_Agent --> Unoplat_Custom_Tool + Unoplat_Custom_Tool --> Software_Engineer_Agent + Software_Engineer_Agent --> Senior_Software_Engineer_Agent + Senior_Software_Engineer_Agent --> Senior_Markdown_Technical_Documentation_Specialist + Senior_Markdown_Technical_Documentation_Specialist --> MarkdDownOutput +``` + + + +## Example: + +### Input: +``` +Local workspace from https://github.com/DataStax-Examples/spring-data-starter.git +``` + +### Output: +``` +# Order Class + +- **Package**: `com.datastax.examples.order` +- **File Path**: `src/main/java/com/datastax/examples/order/Order.java` +- **Responsibility**: This class represents an order in the system, encapsulating all necessary details such as product quantity, name, price, and added-to-order timestamp. + +## Fields + +Each field corresponds to a column of our Cassandra database table. The annotations indicate how each Java data type is mapped to its respective datatype in Cassandra. + +- **OrderPrimaryKey**: `None` + - **Type**: This field represents the unique identifier for an order within the system, serving as the primary key. No dependencies are injected here. + +- **Integer** + - **type_key**: `productQuantity` + - **Type**: Represents the quantity of a product in this particular order. Annotated with `@Column("product_quantity")` and `@CassandraType(type = CassandraType.Name.INT)` to map it properly within our database structure. No dependencies are injected here. + +- **String** + - **type_key**: `productName` + - **Type**: Stores the name of a product in this order. Annotated with `@Column("product_name")` and `@CassandraType(type = CassandraType.Name.TEXT)` to ensure accurate representation in our database schema. No dependencies are injected here. + +- **Float** + - **type_key**: `productPrice` + - **Type**: Contains the price of a product within this order. Annotated with `@CassandraType(type = CassandraType.Name.DECIMAL)` to map it correctly in our database system. No dependencies are injected here. + +- **Instant** + - **type_key**: `addedToOrderTimestamp` + - **Type**: Stores the timestamp of when this order was added to the system. Annotated with `@CassandraType(type = CassandraType.Name.TIMESTAMP)` for accurate mapping in our database schema. No dependencies are injected here. + +# OrderController Class Summary + +## Package +com.datastax.examples.order + +## File Path +src/main/java/com.datastax.examples/order/OrderController.java + +## Fields +- **OrderRepository** (private OrderRepository orderRepository) + - **Type**: The field is an instance of the OrderRepository class, which contains methods for accessing and manipulating data from a database using JPA or similar technologies. It serves as a dependency injection to enable interaction with the repository layer inside the controller's methods. + +## Methods +- **root()**: `ModelAndView` + - **Summary**: Returns a ModelAndView object representing the root page of the order management system, which typically includes links or navigation elements for other pages/actions within the application. 
+ +- **createOrder(Request req, Response res)**: `Order` + - **Summary**: Processes an HTTP POST request to create a new Order object with data from the client's input and saves it in the database using the repository layer. It then returns the created order as the response payload. + +- **updateOrder(UUID id, Request req, Response res)**: `Order` + - **Summary**: Processes an HTTP PUT or PATCH request to update a specific Order object identified by its UUID with new data from the client's input and saves it in the database using the repository layer. It then returns the updated order as the response payload. + +- **deleteOrder(UUID id)**: `void` + - **Summary**: Processes an HTTP DELETE request to remove a specific Order object identified by its UUID from the database and handles any related cleanup or cascading deletions using the repository layer. It does not return any response payload. + +# OrderPrimaryKey Class Summary + +## Package +`com.datastax.examples.order` + +## File Path +`src/main/java/com/datastax/examples/order/OrderPrimaryKey.java` + +## Responsibility +This class represents the primary key for an Order entity, containing UUID fields to uniquely identify each order and its associated product within a Cassandra database. + +## Fields + +- **UUID**: `orderId` + - **Type**: Represents the unique identifier of the order itself. It is marked with `@PrimaryKeyColumn(name = "order_id", ordinal = 0, type = PrimaryKeyType.PARTITIONED)` to denote its role as a partition key in Cassandra's primary key structure. + +- **UUID**: `productId` + - **Type**: Represents the unique identifier of the product associated with the order. This is also marked with `@PrimaryKeyColumn(name = "product_id", ordinal = 1, type = PrimaryKeyType.CLUSTERED)` indicating that it serves as a clustering key in Cassandra's primary key scheme, which further refines the data retrieval within each partition identified by `orderId`. + +(No methods are defined for this class.) +``` +## Current Stage + +### Status: Alpha +### Blockers before user adoption: +1. Performance issue with per class summary [Dspy based pipelines in progress with finetuned data] +2. Multi agent workflow not exiting due to potential enhancements needed in our crewai implementation. +3. Moving to cli from tui. [DONE] + +## Tech Stack + +1. [Chapi](https://chapi.phodal.com/) +2. [PyTermGui](https://ptg.bczsalba.com/) +3. [Litellm](https://docs.litellm.ai/docs/) +4. [ArchGuard](https://github.com/archguard/archguard) +5. [CrewAi](https://www.crewai.com/) +6. [loguru](https://loguru.readthedocs.io/en/stable/api/logger.html) +7. [PyTest](https://pytest.org/) +8. [Pydantic](https://www.pydantic.dev) +9. DSPY + + +## Credits/heroes/supporters + +These are the people because of which this work has been possible. Unoplat code confluence would not exist without them. +1. [Phodal from Chapi and ArcGuard](https://github.com/phodal) +2. [Ishaan & Krrish from Litellm](ishaan@berri.ai / krrish@berri.ai) +3. [Joao Moura from crewai](https://github.com/joaomdmoura) +4. [Vipin Shreyas Kumar](https://github.com/vipinshreyaskumar) +5. 
[Apeksha](https://github.com/apekshamehta) \ No newline at end of file diff --git a/unoplat-code-confluence/__init__.py b/unoplat-code-confluence/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/codebase_understanding/main.py b/unoplat-code-confluence/__main__.py similarity index 85% rename from codebase_understanding/main.py rename to unoplat-code-confluence/__main__.py index ddc1758d..446cecfd 100644 --- a/codebase_understanding/main.py +++ b/unoplat-code-confluence/__main__.py @@ -6,6 +6,7 @@ import datetime from codebaseparser.ArchGuardHandler import ArchGuardHandler import re +from data_models.chapi_unoplat_codebase import UnoplatCodebase from downloader.downloader import Downloader from loader import iload_json, iparse_json from loader.json_loader import JsonLoader @@ -16,8 +17,8 @@ def main(iload_json, iparse_json,isummariser,json_configuration_data): - settings = AppSettings() - get_codebase_metadata(json_configuration_data,settings,iload_json,iparse_json,isummariser) + #settings = AppSettings() + get_codebase_metadata(json_configuration_data,iload_json,iparse_json,isummariser) def handle_toggle(value): @@ -26,7 +27,7 @@ def handle_toggle(value): logger.info(f"Selected language: {value}") -def get_codebase_metadata(json_configuration_data,settings,iload_json,iparse_json,isummariser): +def get_codebase_metadata(json_configuration_data,iload_json,iparse_json,isummariser): # Collect necessary inputs from the user to set up the codebase indexing local_workspace_path = json_configuration_data["local_workspace_path"] programming_language = json_configuration_data["programming_language"] @@ -44,7 +45,6 @@ def get_codebase_metadata(json_configuration_data,settings,iload_json,iparse_jso programming_language, output_path_field, codebase_name_field, - settings, github_token, arcguard_cli_repo, local_download_directory, @@ -83,7 +83,7 @@ def ensure_jar_downloaded(github_token,arcguard_cli_repo,local_download_director return jar_path -def start_parsing(local_workspace_path, programming_language, output_path, codebase_name, settings, github_token, arcguard_cli_repo, local_download_directory, iload_json, iparse_json, isummariser): +def start_parsing(local_workspace_path, programming_language, output_path, codebase_name, github_token, arcguard_cli_repo, local_download_directory, iload_json, iparse_json, isummariser): # Log the start of the parsing process logger.info("Starting parsing process...") @@ -108,15 +108,20 @@ def start_parsing(local_workspace_path, programming_language, output_path, codeb chapi_metadata_path = archguard_handler.run_scan() chapi_metadata = iload_json.load_json_from_file(chapi_metadata_path) + current_timestamp = datetime.datetime.now().strftime("%Y%m%d%H%M%S") output_filename = f"{codebase_name}_{current_timestamp}.md" - with open(os.path.join(output_path, output_filename), 'a+') as md_file: - for node in iparse_json.parse_json_to_nodes(chapi_metadata, isummariser): - if node.type == "CLASS": - md_file.write(f"{node.summary}\n\n") + unoplat_codebase : UnoplatCodebase = iparse_json.parse_json_to_nodes(chapi_metadata, isummariser) + + print(unoplat_codebase.model_dump()) + + # with open(os.path.join(output_path, output_filename), 'a+') as md_file: + # for node in iparse_json.parse_json_to_nodes(chapi_metadata, isummariser): + # if node.type == "CLASS": + # md_file.write(f"{node.summary}\n\n") # with open('codebase_summary.json', 'w') as file: # json.dump(codebase_metadata, file) @@ -135,6 +140,7 @@ def start_parsing(local_workspace_path, 
programming_language, output_path, codeb isummariser = NodeSummariser() #loading the config json_configuration_data = iload_json.load_json_from_file(args.config) + print(json_configuration_data) #loading and setting the logging config logging_config = iload_json.load_json_from_file("loguru.json") diff --git a/codebase_understanding/codeagent/__init__.py b/unoplat-code-confluence/codeagent/__init__.py similarity index 100% rename from codebase_understanding/codeagent/__init__.py rename to unoplat-code-confluence/codeagent/__init__.py diff --git a/codebase_understanding/codeagent/current_item.py b/unoplat-code-confluence/codeagent/current_item.py similarity index 100% rename from codebase_understanding/codeagent/current_item.py rename to unoplat-code-confluence/codeagent/current_item.py diff --git a/codebase_understanding/codeagent/unoplat_agent.py b/unoplat-code-confluence/codeagent/unoplat_agent.py similarity index 100% rename from codebase_understanding/codeagent/unoplat_agent.py rename to unoplat-code-confluence/codeagent/unoplat_agent.py diff --git a/codebase_understanding/codeagent/unoplat_custom_current_item_tool.py b/unoplat-code-confluence/codeagent/unoplat_custom_current_item_tool.py similarity index 100% rename from codebase_understanding/codeagent/unoplat_custom_current_item_tool.py rename to unoplat-code-confluence/codeagent/unoplat_custom_current_item_tool.py diff --git a/codebase_understanding/codebase_overview_spec.md b/unoplat-code-confluence/codebase_overview_spec.md similarity index 100% rename from codebase_understanding/codebase_overview_spec.md rename to unoplat-code-confluence/codebase_overview_spec.md diff --git a/codebase_understanding/codebase_summary.json b/unoplat-code-confluence/codebase_summary.json similarity index 100% rename from codebase_understanding/codebase_summary.json rename to unoplat-code-confluence/codebase_summary.json diff --git a/unoplat-code-confluence/codebase_summary.md b/unoplat-code-confluence/codebase_summary.md new file mode 100644 index 00000000..53f94f32 --- /dev/null +++ b/unoplat-code-confluence/codebase_summary.md @@ -0,0 +1,145 @@ +```markdown + +[Order] +========= + +**Package**: `com.datastax.examples.order` + +**File Path**: `src/main/java/com/datastax/examples/order/Order.java` + +**Responsibility**: The `Order` class represents a customer's order, including the product details and timestamp when it was added to the system. It is designed for use with Cassandra database integration through Spring Data. + +## Fields + +- **OrderPrimaryKey (com.datastax.examples.order.OrderPrimaryKey)**: Represents a unique identifier for each order entry in the Cassandra table, ensuring data integrity and efficient querying. It is marked with `@PrimaryKey` annotation to signify its role as the primary key of the `Order` class. + +- **Integer (productQuantity)**: Stores the quantity of products ordered by the customer. Annotated with `@Column("product_quantity")` and `@CassandraType(type = CassandraType.Name.INT)` to map it correctly within the Cassandra database schema. + +- **String (productName)**: Holds the name of the product being ordered, which is essential for identifying the items in an order. Annotated with `@Column("product_name")` and `@CassandraType(type = CassandraType.Name.TEXT)` to map it correctly within the Cassandra database schema. + +- **Float (productPrice)**: Contains the price of a single product unit, used for calculating the total order cost based on quantity. 
Annotated with `@CassandraType(type = CassandraType.Name.DECIMAL)` to map it correctly within the Cassandra database schema. + +- **Instant (addedToOrderTimestamp)**: Records the timestamp when an order was added to the system, providing a reference for processing times and auditing purposes. Annotated with `@Column("added_to_order_at")` and `@CassandraType(type = CassandraType.Name.TIMESTAMP)` to map it correctly within the Cassandra database schema. +``` + +Class Summary: The `OrderController` class is responsible for managing HTTP requests related to order management, including creating, updating, deleting orders and fetching order details. It interacts with an `OrderRepository` to access data from a database or any other storage system. + +Package: com.datastax.examples.order +File Path: src/main/java/com.datastax.examples/order/OrderController.java +Responsibility: Manages HTTP requests for order operations, including creating, updating, deleting orders and fetching order details by interacting with OrderRepository to access data from a database or other storage system. + +## Fields +- **OrderRepository**: `private OrderRepository orderRepository` + - Type: Repository class responsible for storing and retrieving data related to orders in the underlying storage system (e.g., relational databases, NoSQL databases). It provides methods such as save(), findByKeyOrderId(), etc. + +## Methods +- **root()**: `ModelAndView` - Returns a view name and model attributes for rendering the main page of order management. This method is typically used to display the list of orders or an empty state in the UI. +- **createOrder(HttpServletRequest request, HttpServletResponse response)**: `Order` - Creates a new order based on user input from the HTTP request and persists it using OrderRepository's save() method. It also handles any exceptions that may occur during the process. +- **updateOrder(Long orderId, Order updatedOrder, Model model)**: `Order` - Updates an existing order with a given ID by setting its key properties (e.g., order id and product id), persisting it using OrderRepository's save() method, and updating the corresponding view in the UI. +- **deleteOrder(Long orderId, Model model)**: `void` - Deletes an existing order with a given ID by removing it from the storage system through OrderRepository's deleteByKeyOrderIdAndKeyProductId() or deleteByKeyOrderId() methods based on whether product id is provided. +- **findOrder(Long orderId, Model model)**: `ProductNameAndPrice` - Retrieves a single order detail (including its name and price information) for the given order ID using OrderRepository's findByKeyOrderIdAndKeyProductId() method. It also handles any exceptions that may occur during retrieval. +- **findOrders(Model model)**: `List` - Retrieves a list of all orders and their corresponding details (including name and price information) by invoking OrderRepository's findByKeyOrderId() method for each order ID present in the storage system. +- **findAll(Model model)**: `List` - Retrieves a paginated list of all orders and their corresponding details (including name and price information) by invoking OrderRepository's findAllProjectedBy() method with appropriate parameters for page size, current page number, etc. + +In summary, the `OrderController` class serves as an intermediary between the user interface and the underlying storage system to manage order-related operations in a web application using Spring MVC framework conventions. 
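The per-class summaries in this file are produced by validating Chapi/ArchGuard's semi-structured JSON into Pydantic nodes and prompting an OSS model through litellm (the phi3-class model mentioned in the README). Below is a minimal sketch of that per-class step, assuming litellm's OpenAI-style `completion` API, an Ollama-hosted model identifier, and a simplified stand-in for the project's node schema — the field names and prompt wording are illustrative, not the actual implementation.

```python
from typing import Optional

from litellm import completion
from pydantic import BaseModel, Field, ValidationError


class ClassNode(BaseModel):
    """Simplified stand-in for the Chapi/Unoplat node model (illustrative fields only)."""
    node_name: Optional[str] = Field(default=None, alias="NodeName")
    type: Optional[str] = Field(default=None, alias="Type")
    package: Optional[str] = Field(default=None, alias="Package")
    content: Optional[str] = Field(default=None, alias="Content")


def summarise_class(raw_node: dict, model: str = "ollama/phi3") -> Optional[str]:
    """Validate one Chapi JSON item and ask an OSS model for a class-level summary."""
    try:
        # Chapi emits PascalCase keys, so the node is populated through the field aliases.
        node = ClassNode(**raw_node)
    except ValidationError as err:
        print(f"Skipping invalid node: {err}")
        return None

    if node.type != "CLASS":
        # Only class constructs are summarised at this stage.
        return None

    prompt = (
        f"Summarise the responsibility of class {node.node_name} "
        f"in package {node.package}:\n{node.content}"
    )
    response = completion(model=model, messages=[{"role": "user", "content": prompt}])
    return response.choices[0].message.content
```

In the actual codebase this work happens inside the JSON parser and node summariser; the model name and prompt above are assumptions for illustration only.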
+ +```markdown +[OrderPrimaryKey] + +- **Package**: `com.datastax.examples.order` +- **File Path**: `src/main/java/com/datastax/examples/order/OrderPrimaryKey.java` +- **Responsibility**: This class represents the primary key for an order entity, composed of two UUID fields that uniquely identify each order and product combination in a Cassandra database schema. + +## Fields + +- **UUID orderId** + - **Type**: `private UUID` – The partition key component identifying the unique order within its partition. It is annotated with `@PrimaryKeyColumn(name = "order_id", ordinal = 0, type = PrimaryKeyType.PARTITIONED)` to designate it as part of the primary key. + +- **UUID productId** + - **Type**: `private UUID` – The clustering column component identifying the unique product within an order. It is annotated with `@PrimaryKeyColumn(name = "product_id", ordinal = 1, type = PrimaryKeyType.CLUSTERED)` to designate it as part of the primary key. + +## Methods +``` + +Validation: The markdown output correctly follows the specification provided by the user and is structured with appropriate headings for class summary, fields, and methods sections. + +## Class Summary: +SwaggerConfig class is responsible for configuring and providing Swagger UI documentation for a REST API. It defines customizable metadata about the API, including contact details, license information, and group name. This class is specifically designed to generate Swagger 2.0 compliant API documentation for the orders section of an application powered by DataStax Cassandra. + +## Fields: +The SwaggerConfig class does not have any fields (member variables). + +## Methods: +- **api()**: `Docket` + - **Summary**: This method configures and returns a Docket instance, which represents the API documentation for orders in Swagger UI. It sets up various properties such as group name, selects all available endpoints, filters by specific paths (/orders/**), builds the final configuration, and attaches API information to it. + - **Internal Calls**: + - `apiInfo()` method is called within this method to obtain the ApiInfo instance required for setting up Swagger UI metadata. + - **External Calls**: + - `Docket.`groupName("orders")` sets a group name ("orders") to categorize the API documentation in Swagger UI. + - `newDocket.`select()` initializes a selection process on the endpoints provided by Spring MVC and WebFlux controllers, including handler mappings, message converters, argument resolvers, etc. + - `RequestHandlerSelectors.`any()` specifies that any request should be included in this API documentation. This selector returns true for all handlers of interest. + - `newDocket.`paths(PathSelectors.`ant("/orders/**"))` filters the endpoints to include only those with paths starting with "/orders/". + - `newDocket.`build()` method constructs a new Docket instance that incorporates all properties defined within this method (e.g., group name, selection of request handlers, path filters). + - `newDocket.`apiInfo(ApiInfo apiInfo)` attaches the ApiInfo object returned from the `apiInfo()` method to the final configuration. This object contains metadata like title, description, contact details, license information, and additional documentation links for Swagger UI display purposes. 
+ +- **apiInfo()**: `ApiInfo` + - **Summary**: This method generates an ApiInfo instance that holds essential metadata about the API to be displayed in the Swagger UI interface, such as title, description, contact details, license information, and additional documentation links. + - **External Calls**: + - `Collections.`emptyList()` is called within this method to create a list of supported media types for response content negotiation (e.g., JSON, XML). This empty list indicates that there are no specific media type preferences in the API documentation generated by Swagger UI. + +```markdown +--- +title: "SpringDataCassandraApplication" +description: | +The SpringDataCassandraApplication class serves as a bridge between Java applications and Apache Cassandra using the DataStax driver, configured with secure connect bundles via Spring Boot. +package_path: "/com/datastax/examples/SpringDataCassandraApplication.java" +responsibility: "Provides an entry point for running a Spring Boot application that integrates with Apache Cassandra using secure connections." +--- + +## Fields +(No fields are defined in the JSON metadata) + +## Methods + +### main() +**Type:** `void` +This method is the application's entry point. It starts a Spring Boot application and runs it with the class itself as the source of configuration. +- **External Calls**: + - `SpringApplication.run(SpringDataCassandraApplication.class, args)` to start the Spring Boot application using the current class as its primary configuration class and passing any command-line arguments received. + +### sessionBuilderCustomizer() +**Type:** `CqlSessionBuilderCustomizer` +This method customizes the CQL session builder by adding a secure connect bundle if available, which is used to establish connections with Cassandra clusters via secure transport protocols. +- **External Calls**: + - `DataStaxAstraProperties.getSecureConnectBundle()` to retrieve the secure connect bundle configuration defined in external properties files or application configurations. + - `DataStaxAstraProperties.toPath()` to convert the secure connect bundle into a file path representation that can be used with the session builder. + - `builder.withCloudSecureConnectBundle(bundle)` to apply the secure connect bundle configuration to the CqlSessionBuilder, enabling secure connections when establishing communication with Cassandra clusters. +``` + +```markdown +--- +title: "DataStaxAstraProperties" +class_name: "DataStaxAstraProperties" +package: "com.datastax.examples" +file_path: "src/main/java/com/datastax/examples/DataStaxAstraProperties.java" +module: "root" +summary: "[Brief description of what the class does]" +fields: + - name: "File" + type: "secureConnectBundle" + dependency: None +functions: [] + +## Package +`com.datastax.examples` + +## File Path +`src/main/java/com/datastax/examples/DataStaxAstraProperties.java` + +## Responsibility +The `DataStaxAstraProperties` class is responsible for storing and managing configuration properties related to the secure connect bundle in a DataStax Astra application. + +## Fields +* **File**: `secureConnectBundle` (type: File, dependency: None) + * This field holds the path to the Secure Connect Bundle file used by the application for authentication with an external Cassandra cluster. 
+``` diff --git a/unoplat-code-confluence/codebase_summary_old.md b/unoplat-code-confluence/codebase_summary_old.md new file mode 100644 index 00000000..9d718fa4 --- /dev/null +++ b/unoplat-code-confluence/codebase_summary_old.md @@ -0,0 +1,300 @@ +# [Order] + +- **Package**: `com.datastax.examples.order` +- **File Path**: `src/main/java/com/datastax/examples/order/Order.java` +- **Responsibility**: This class represents an order in the system, encapsulating all necessary details such as product quantity, name, price and added to order timestamp. + +## Fields + +Each field corresponds to a column of our Cassandra database table. The annotations indicate how each Java data type is mapped to its respective datatype in Cassandra. + +- **OrderPrimaryKey**: `None` + - **Type**: This field represents the unique identifier for an order within the system, serving as the primary key. No dependencies are injected here. + +- **Integer** + - **type_key**: `productQuantity` + - **Type**: Represents the quantity of a product in this particular order. Annotated with `@Column("product_quantity")` and `@CassandraType(type = CassandraType.Name.INT)` to map it properly within our database structure. No dependencies are injected here. + +- **String** + - **type_key**: `productName` + - **Type**: Stores the name of a product in this order. Annotated with `@Column("product_name")` and `@CassandraType(type = CassandraType.Name.TEXT)` to ensure accurate representation in our database schema. No dependencies are injected here. + +- **Float** + - **type_key**: `productPrice` + - **Type**: Contains the price of a product within this order. Annotated with `@CassandraType(type = CassandraType.Name.DECIMAL)` to map it correctly in our database system. No dependencies are injected here. + +- **Instant** + - **type_key**: `addedToOrderTimestamp` + - **Type**: Stores the timestamp of when this order was added to the system. Annotated with `@CassandraType(type = CassandraType.Name.TIMESTAMP)` for accurate mapping in our database schema. No dependencies are injected here. +``` + +# Class Summary: [OrderController] +The OrderController class handles HTTP requests related to order management within a database application. It provides functionalities for creating, updating, deleting orders, and retrieving all or specific orders based on different criteria such as unique identifiers (UUIDs). The class is responsible for interacting with the OrderRepository component that abstracts data access layer operations. + +## Package: +com.datastax.examples.order + +## File Path: +src/main/java/com.datastax.examples/order/OrderController.java + +## Fields +- **OrderRepository** (private OrderRepository orderRepository) + - Type: The field is an instance of the OrderRepository class, which contains methods for accessing and manipulating data from a database using JPA or similar technologies. It serves as a dependency injection to enable interaction with the repository layer inside the controller's methods. + +## Methods +- **root()** (ModelAndView) + - Summary: Returns a ModelAndView object representing the root page of the order management system, which typically includes links or navigation elements for other pages/actions within the application. + +- **createOrder(Request req, Response res)** (Order) + - Summary: Processes an HTTP POST request to create a new Order object with data from the client's input and saves it in the database using the repository layer. It then returns the created order as the response payload. 
+ +- **updateOrder(UUID id, Request req, Response res)** (Order) + - Summary: Processes an HTTP PUT or PATCH request to update a specific Order object identified by its UUID with new data from the client's input and saves it in the database using the repository layer. It then returns the updated order as the response payload. + +- **deleteOrder(UUID id)** (void) + - Summary: Processes an HTTP DELETE request to remove a specific Order object identified by its UUID from the database and handles any related cleanup or cascading deletions using the repository layer. It does not return any response payload. + +- **deleteOrders(UUID id)** (void) + - Summary: Processes an HTTP DELETE request to remove all Order objects that match a specific criterion, such as having a common property or attribute value identified by the UUID parameter from the database using the repository layer. It does not return any response payload. + +- **findOrder(UUID id)** (ProductNameAndPrice) + - Summary: Processes an HTTP GET request to retrieve and return the details of a specific Order object identified by its UUID, including product name and price information. The returned data is fetched from the database using the repository layer. + +- **findOrders(UUID id)** (List) + - Summary: Processes an HTTP GET request to retrieve a list of all Order objects that match a specific criterion, such as having a common property or attribute value identified by the UUID parameter from the database using the repository layer. It returns the retrieved data as a List object containing product name and price information for each order. + +- **findAll()** (List) + - Summary: Processes an HTTP GET request to retrieve all Order objects in the system, including their product name and price information from the database using the repository layer. It returns the retrieved data as a List object containing product name and price information for each order. + +```markdown + +# Class Summary: [OrderPrimaryKey] + +- **Package**: `com.datastax.examples.order` +- **File Path**: `src/main/java/com/datastax/examples/order/OrderPrimaryKey.java` +- **Responsibility**: This class represents the primary key for an Order entity, containing UUID fields to uniquely identify each order and its associated product within a Cassandra database. + +## Fields + +- **UUID**: `orderId` + - **Type**: Represents the unique identifier of the order itself. It is marked with `@PrimaryKeyColumn(name = "order_id", ordinal = 0, type = PrimaryKeyType.PARTITIONED)` to denote its role as a partition key in Cassandra's primary key structure. + +- **UUID**: `productId` + - **Type**: Represents the unique identifier of the product associated with the order. This is also marked with `@PrimaryKeyColumn(name = "product_id", ordinal = 1, type = PrimaryKeyType.CLUSTERED)` indicating that it serves as a clustering key in Cassandra's primary key scheme, which further refines the data retrieval within each partition identified by `orderId`. + +## Methods + +(No methods are defined for this class.) +``` + +Class Summary: SwaggerConfig +- Package: com.datastax.examples.swagger +- File Path: src/main/java/com.datastax.examples/swagger/SwaggerConfig.java +- Responsibility: Provides configuration for generating Swagger documentation for the 'orders' REST API, which is a sample REST API powered by DataStax Astra and serves as an example within the Spring Data Cassandra framework. 
It includes setup of Docket beans to define API information, group name, API selectors, path patterns, etc., using Spring Boot annotations and Swagger integration libraries. + +## Fields +No fields defined in this class. + +## Methods +- **api()**: `Docket` + - Summary: Configures and returns a Docket bean that sets up the 'orders' API group with specific selectors, paths, and documentation information using Swagger. This method is responsible for creating the base configuration required by SpringFox to generate Swagger UI documentation based on annotations in controllers. + - Internal Calls: `apiInfo()` - Used to provide detailed API info such as title, description, contact details, license, etc., which are then passed to Docket's apiInfo() method. + - External Calls: + - `Docket.groupName("orders")` - Specifies the group name of this API documentation in Swagger UI. + - `newDocket.select()` - Selectors used to define which endpoints should be included in the generated documentation. In this case, all request handlers are selected using RequestHandlerSelectors.any(). + - `newDocket.apis(RequestHandlerSelectors.any())` - Specifies that all API controllers and their methods will be documented. + - `newDocket.paths(PathSelectors.ant("/orders/**"))` - Defines the URL pattern for paths to include in the Swagger documentation. It includes any path starting with "/orders/". + - `newDocket.build()` - Constructs a Docket bean with all the configurations set previously, which is then ready for use by SpringFox and other Swagger-related libraries. + - `newDocket.apiInfo(apiInfo())` - Incorporates the API information details generated from calling the apiInfo() method into the final Docket configuration. + +- **apiInfo()**: `ApiInfo` + - Summary: Generates and returns an ApiInfo object containing all the necessary metadata for Swagger documentation, including title, version, description, contact info, license, etc., for this API. This information is utilized by Docket when generating the final swagger documentation output. + - External Calls: `Collections.emptyList()` - Returns an empty list that can be used in cases where additional metadata components are required to be added dynamically or as part of a more complex configuration strategy (not applicable in this context but included for completeness). + +```markdown +## Class Summary: [SpringDataCassandraApplication] +- **Package**: `com.datastax.examples` +- **File Path**: `src/main/java/com/datastax/examples/SpringDataCassandraApplication.java` +- **Responsibility**: This class is the entry point for a Spring Boot application that integrates with Apache Cassandra using DataStax's Java driver. It defines the main method to run the application and configures secure connectivity via CloudSecureConnectBundle by extending CqlSessionBuilderCustomizer. + +## Fields +* No fields defined in this class. + +## Methods +- **main()**: `void` + - **Summary**: Starts the Spring Boot application with configurations specific to Apache Cassandra using DataStax's Java driver. It sets up secure connectivity and enables auto-configuration for cassandra context. + - **External Calls**: + - `SpringApplication.run(SpringDataCassandraApplication.class, args)`: This call initializes the Spring Boot application with the current class as its main source of configuration. The arguments passed to this method are used by Spring Boot's command line argument parser and can control aspects such as profile selection and logging levels. 
+ +- **sessionBuilderCustomizer()**: `CqlSessionBuilderCustomizer` + - **Summary**: Customizes the CqlSessionBuilder provided by DataStax's Java driver to include a CloudSecureConnectBundle for secure connections to Apache Cassandra clusters, typically in cloud environments. It requires an instance of DataStaxAstraProperties that holds configuration details. + - **External Calls**: + - `DataStaxAstraProperties.getSecureConnectBundle()`: Retrieves the CloudSecureConnectBundle from application properties or default values provided by Astra's library for secure Cassandra connections. + - `DataStaxAstraProperties.toPath()`: Converts a given configuration to a Path object, which is used internally in the Java driver. + - `builder.withCloudSecureConnectBundle(bundle)`: Configures the CqlSessionBuilder with the secure connectivity details by injecting the bundle path obtained from DataStaxAstraProperties into the builder. +``` + +```markdown + +# Class Summary: [DataStaxAstraProperties] + +- **Package**: `com.datastax.examples` +- **File Path**: `src/main/java/com/datastax/examples/DataStaxAstraProperties.java` +- **Responsibility**: This class is designed to encapsulate properties related to the configuration of DataStax Astra, providing a centralized point for managing and accessing these settings within an application. It utilizes Spring Boot's `@ConfigurationProperties` annotation to bind external configurations into this bean. + +## Fields +- **File**: `secureConnectBundle` (private File) + - **Type**: This field represents a file path, most likely where the secure connect bundle is located or will be downloaded from. It could contain necessary configuration files for DataStax Astra to establish connections with Cassandra clusters securely using SSL/TLS. Dependency injection may not apply directly here unless there's an associated service handling the file operations. + +## Methods +``` + +```markdown +# Class Summary: [Order] + +- **Package**: `com.datastax.examples.order` +- **File Path**: `src/main/java/com/datastax/examples/order/Order.java` +- **Responsibility**: The Order class represents an order in a shopping system, containing details about the products and their quantities within an order. It also tracks when each product was added to the order. + +## Fields + +- **OrderPrimaryKey** (type: `com.datastax.examples.order.model.OrderPrimaryKey`) + - **Type**: This field serves as a unique identifier for each order instance, ensuring that every order can be distinctly referenced within the system. It is likely an extension of Cassandra's ID class used in conjunction with data modeling frameworks like Spring Data Cassandra. No explicit dependency injection details are provided in the JSON metadata. + +- **Integer** (type: `int`, field_name: `productQuantity`) + - **Type**: Represents the quantity of a specific product within an order. This is critical for inventory management and calculating totals during checkout processes. The type annotation suggests integration with Cassandra's data types to ensure proper serialization/deserialization when storing or retrieving from a Cassandra database. + +- **String** (type: `java.lang.String`, field_name: `productName`) + - **Type**: Stores the name of the product within an order, which is essential for presenting item details to users and processing orders accurately. The type annotation hints at a Cassandra integration for mapping Java object fields to database columns. 
+ +- **Float** (type: `float`, field_name: `productPrice`) + - **Type**: Indicates the price of an individual product within an order, used for calculating the total cost and handling financial transactions. The type annotation implies Cassandra's data types are being utilized to maintain consistency with database schema expectations. + +- **Instant** (type: `java.time.Instant`, field_name: `addedToOrderTimestamp`) + - **Type**: Records the precise time when a product was added to an order, which is beneficial for tracking order timelines and managing stock levels based on historical data. The type annotation suggests integration with Cassandra's timestamp column support for temporal queries and ordering of records. +``` + +- **Package**: `com.datastax.examples.order` +- **File Path**: `src/main/java/com/datastax/examples/order/OrderController.java` +- **Responsibility**: This class, `OrderController`, is responsible for handling various HTTP requests related to order management in a web application that interacts with an `OrderRepository`. It uses the Spring framework annotations to map different request methods (GET and POST) to corresponding controller methods for creating, updating, deleting orders, and retrieving orders or all orders. + +## Fields +- **orderRepository**: This is an instance of `OrderRepository` injected into the class through constructor injection (as indicated by `@Autowired`). The `OrderRepository` interface defines CRUD operations on order data in a database. It's used within the controller methods to perform various actions like saving, updating, and deleting orders. + +## Methods +- **root()**: Returns a `ModelAndView` object for displaying the root view (e.g., homepage or dashboard). This method is typically called when an HTTP GET request hits the base URL of the controller. + - External Calls: No external calls in this method as it's only responsible for returning the view. +- **createOrder(order)**: Handles POST requests to create a new order and returns the created `Order` object. + - Summary: Receives an `Order` instance, persists it using the repository through the `.save()` method, and then returns this newly saved Order. + - External Calls: + - `orderRepository.save(order)`: Saves the provided order to the database. +- **updateOrder(UUID oid, UUID pid, String firstName, String lastName)**: Handles PUT/PATCH requests for updating a specific order by its ID and product ID. Returns an updated `Order` object. + - Summary: Retrieves an existing Order using the provided IDs (oid and pid), updates the customer's name fields (firstName and lastName), and saves these changes back to the database through the `.save()` method, returning the updated order. + - External Calls: + - `orderRepository.findByIdAndKeyProductId(oid, pid)`: Retrieves an Order by its ID and product ID from the repository. + - `orderRepository.save(order)`: Saves any changes made to the retrieved Order back to the database. +- **deleteOrder(UUID oid)**: Handles DELETE requests for removing a specific order using its ID. Returns void as it doesn't return anything upon successful deletion. + - Summary: Retrieves an existing `Order` by its ID from the repository, removes it (deletes it), and saves these changes back to the database through the `.deleteByKeyOrderId()` method of the repository without returning any value. + - External Calls: + - `orderRepository.deleteById(oid)`: Deletes an Order by its ID from the repository. 
+- **findOrder(UUID oid, UUID pid)**: Handles GET requests to retrieve details of a specific order using its IDs. Returns a single `ProductNameAndPrice` object containing product name and price information for this order. + - Summary: Retrieves an existing Order by its ID and product ID from the repository and then extracts the required product name and price information, returning it as a ProductNameAndPrice instance. + - External Calls: + - `orderRepository.findByIdKeyOrderIdAndKeyProductId(oid, pid)`: Retrieves an Order by its IDs (oid and pid) from the repository. +- **findOrders(UUID oid)**: Handles GET requests to retrieve all orders associated with a specific order ID. Returns a `List` containing product name and price information for these orders. + - Summary: Retrieves all Orders by their IDs from the repository that match the provided Order ID, then extracts product names and prices for each of them into a List instance containing ProductNameAndPrice objects, which is returned to the client. +- **findAll()**: Handles GET requests without any specific order identifiers to retrieve all orders in the system along with their associated products' names and prices. Returns a `List`. + - Summary: Retrieves all Orders from the repository, then extracts product names and prices for each of them into a List instance containing ProductNameAndPrice objects, which is returned to the client. + - External Calls: + - `orderRepository.findAllProjectedBy()`: Retrieves all orders along with their associated products' names and prices from the repository. + +```markdown + +## Class Summary: [OrderPrimaryKey] + +- **Package**: `com.datastax.examples.order` +- **File Path**: `src/main/java/com/datastax/examples/order/OrderPrimaryKey.java` +- **Responsibility**: Represents the primary key for an Order entity, consisting of partition and clustered columns based on order ID and product ID respectively. + +## Fields + +- **UUID (orderId)** + - Type: UUID + - Description: A unique identifier representing the order's partition column in Cassandra table schema. Injected with `@PrimaryKeyColumn(name = "order_id", ordinal = 0, type = PrimaryKeyType.PARTITIONED)`. + +- **UUID (productId)** + - Type: UUID + - Description: A unique identifier representing the order's clustered column in Cassandra table schema. Injected with `@PrimaryKeyColumn(name = "product_id", ordinal = 1, type = PrimaryKeyType.CLUSTERED)`. + +## Methods + +No methods defined for this class. + +``` + +Class Summary: [SwaggerConfig] + +- Package: com.datastax.examples.swagger +- File Path: src/main/java/com/datastax/examples/swagger/SwaggerConfig.java +- Responsibility: Configures and provides a Docket instance for the Swagger UI to display API documentation specifically for orders in a Spring Data Cassandra application. This class serves as a configuration point where the Swagger UI can be set up with information about available endpoints, contact details, license information, and other metadata required by users interacting with the API documentation through Swagger. + +## Fields +(No fields declared within `SwaggerConfig` class) + +## Methods +- **api()**: Docket + - Summary: Configures a new Docket instance for swagger UI, detailing order-related endpoints in the application and setting up necessary information such as API info. + - Internal Calls: + - apiInfo(): Retrieves an ApiInfo object containing metadata about the Swagger documentation like title, version, contact information, license details, etc. 
+ - External Calls: + - newDocket.groupName("orders"): Sets a unique identifier for this group of endpoints (e.g., "orders"). + - newDocket.select(): Enables selection based on specific criteria for the endpoints to be included in documentation. + - RequestHandlerSelectors.any(): Indicates that all request handlers should be included. + - newDocket.paths(PathSelectors.ant("/orders/**")): Specifies which paths (endpoints) should be documented, matching any path within the "orders" namespace. + - newDocket.build(): Compiles and returns a fully configured Docket instance ready for use with Swagger UI. + - newDocket.apiInfo(ApiInfo): Attaches API metadata to the Docket object using an ApiInfo instance returned by apiInfo() method. +- **apiInfo()**: ApiInfo + - Summary: Constructs and returns an ApiInfo object containing essential documentation information for Swagger UI, such as title, version, contact details, license text, etc. + - External Calls: + - Collections.emptyList(): Returns an empty list which is a placeholder indicating no additional parameters are required by this call in the provided code snippet. In actual implementation, there might be more information to populate ApiInfo object such as description, terms of service URL, license name and URL, etc. + +```markdown +# Class Summary: SpringDataCassandraApplication + +- **Package**: `com.datastax.examples` +- **File Path**: `src/main/java/com/datastax/examples/SpringDataCassandraApplication.java` +- **Responsibility**: This class is a Spring Boot application that sets up a Cassandra session with secure connect bundle for the com.datastax.examples module. It includes a main method to run the application and a bean method to customize the CqlSessionBuilder using Cloud Secure Connect configuration properties from DataStax AstraProperties. + +## Fields + +(No fields are defined in this class) + +## Methods + +- **main()**: `void` + - **Summary**: Initializes and starts a Spring Boot application that includes setting up Cassandra sessions with secure connect configuration. + - **External Calls**: + - Invokes `SpringApplication.run(SpringDataCassandraApplication.class, args)` to launch the Spring Boot application context and start it using this class's main method as an entry point. + +- **sessionBuilderCustomizer()**: `CqlSessionBuilderCustomizer` + - **Summary**: Customizes the CqlSessionBuilder by adding a Cloud Secure Connect bundle for secure Cassandra connections, based on DataStax AstraProperties configuration. + - **External Calls**: + - Retrieves the secure connect bundle from `DataStaxAstraProperties` using `astraProperties.getSecureConnectBundle()`. + - Converts the retrieved secure connect bundle into a Path object with `astraProperties.getSecureConnectBundle().toPath()`. + - Adds the Cloud Secure Connect configuration to CqlSessionBuilder with `builder -> builder.withCloudSecureConnectBundle(bundle)`. +``` + +```markdown +# DataStaxAstraProperties + +Class Summary: [DataStaxAstraProperties] + +- **Package**: `com.datastax.examples` +- **File Path**: `src/main/java/com/datastax/examples/DataStaxAstraProperties.java` +- **Responsibility**: This class is responsible for handling properties related to DataStax Astra, providing configuration settings and managing the secure connect bundle file path in a Spring Boot application. 
+ +## Fields +- **File**: `secureConnectBundle` (private File) + - **Type**: The private field `secureConnectBundle` of type `java.io.File` represents the location of the secure connect configuration directory used by DataStax Astra for connecting to Cassandra clusters securely. It can be injected as a dependency through Spring Boot's `@ConfigurationProperties`. +``` + diff --git a/codebase_understanding/codebaseparser/ArchGuardHandler.py b/unoplat-code-confluence/codebaseparser/ArchGuardHandler.py similarity index 100% rename from codebase_understanding/codebaseparser/ArchGuardHandler.py rename to unoplat-code-confluence/codebaseparser/ArchGuardHandler.py diff --git a/codebase_understanding/data_models/__init__.py b/unoplat-code-confluence/data_models/__init__.py similarity index 60% rename from codebase_understanding/data_models/__init__.py rename to unoplat-code-confluence/data_models/__init__.py index 83c3c214..9bac6cb6 100644 --- a/codebase_understanding/data_models/__init__.py +++ b/unoplat-code-confluence/data_models/__init__.py @@ -1,5 +1,6 @@ from .chapi_unoplat_node import Node -from .chapi_unoplat_fieldmodel import FieldModel +from .chapi_unoplat_class_fieldmodel import ClassFieldModel +from .unoplat_function_field_model import UnoplatFunctionFieldModel from .chapi_unoplat_import import Import from .chapi_unoplat_function import Function from .chapi_unoplat_position import Position @@ -10,5 +11,5 @@ # Optionally, you can define an __all__ list to explicitly specify which names are public -__all__ = ["Node", "FieldModel", "Import", "Function", "Position","Annotation","FunctionCall","Parameter"] +__all__ = ["Node", "ClassFieldModel","UnoplatFunctionFieldModel", "Import", "Function", "Position","Annotation","FunctionCall","Parameter"] diff --git a/codebase_understanding/data_models/chapi_unoplat_annotation.py b/unoplat-code-confluence/data_models/chapi_unoplat_annotation.py similarity index 51% rename from codebase_understanding/data_models/chapi_unoplat_annotation.py rename to unoplat-code-confluence/data_models/chapi_unoplat_annotation.py index 8a9e9c84..57fc1682 100644 --- a/codebase_understanding/data_models/chapi_unoplat_annotation.py +++ b/unoplat-code-confluence/data_models/chapi_unoplat_annotation.py @@ -1,8 +1,11 @@ +from data_models.chapi_unoplat_annotation_key_val import ChapiUnoplatAnnotationKeyVal from data_models.chapi_unoplat_position import Position -from pydantic import BaseModel, Field +from pydantic import BaseModel, Field from typing import Optional + class Annotation(BaseModel): name: Optional[str] = Field(default=None, alias="Name") + key_values: Optional[list[ChapiUnoplatAnnotationKeyVal]] = Field(default_factory=list, alias="KeyValues") position: Optional[Position] = Field(default=None, alias="Position") diff --git a/unoplat-code-confluence/data_models/chapi_unoplat_annotation_key_val.py b/unoplat-code-confluence/data_models/chapi_unoplat_annotation_key_val.py new file mode 100644 index 00000000..2cba7abb --- /dev/null +++ b/unoplat-code-confluence/data_models/chapi_unoplat_annotation_key_val.py @@ -0,0 +1,11 @@ + + + +from typing import Optional + +from pydantic import BaseModel, Field + + +class ChapiUnoplatAnnotationKeyVal(BaseModel): + key: Optional[str] = Field(default=None, alias="Key",description="Key of the annotation") + value: Optional[str] = Field(default=None, alias="Value",description="Value of the annotation") \ No newline at end of file diff --git a/unoplat-code-confluence/data_models/chapi_unoplat_class_fieldmodel.py 
b/unoplat-code-confluence/data_models/chapi_unoplat_class_fieldmodel.py
new file mode 100644
index 00000000..1af1bb4b
--- /dev/null
+++ b/unoplat-code-confluence/data_models/chapi_unoplat_class_fieldmodel.py
@@ -0,0 +1,11 @@
+from pydantic import BaseModel, Field
+from typing import List, Optional
+
+from data_models.chapi_unoplat_annotation import Annotation
+
+
+
+class ClassFieldModel(BaseModel):
+    type_type: Optional[str] = Field(default=None, alias="TypeType", description="Class Field Type")
+    type_key: Optional[str] = Field(default=None, alias="TypeKey", description="Class Field Name")
+    annotations: Optional[List[Annotation]] = Field(default=None, alias="Annotations", description="Class Field Annotations")
diff --git a/unoplat-code-confluence/data_models/chapi_unoplat_class_summary.py b/unoplat-code-confluence/data_models/chapi_unoplat_class_summary.py
new file mode 100644
index 00000000..3a73f49e
--- /dev/null
+++ b/unoplat-code-confluence/data_models/chapi_unoplat_class_summary.py
@@ -0,0 +1,10 @@
+
+
+from typing import Optional
+from pydantic import BaseModel, Field
+
+class ClassSummary(BaseModel):
+    objective: Optional[str] = Field(default=None, alias="Objective", description="This should include the high level objective of what the class does, based on the class content and metadata. It should concisely capture the class implementation and the internal and external calls made inside its functions. Should not be more than 3 lines.")
+    summary: Optional[str] = Field(default=None, alias="Summary", description="This should capture what the class is doing, based on how its fields are used across its functions and the events happening inside the class. It should make sure that all internal and external interactions happening in the class are captured well.
Should be very concise but also very precise and accurate.") + + \ No newline at end of file diff --git a/unoplat-code-confluence/data_models/chapi_unoplat_codebase.py b/unoplat-code-confluence/data_models/chapi_unoplat_codebase.py new file mode 100644 index 00000000..faded78b --- /dev/null +++ b/unoplat-code-confluence/data_models/chapi_unoplat_codebase.py @@ -0,0 +1,10 @@ +from typing import List, Optional + +from pydantic import BaseModel, Field + +from data_models.chapi_unoplat_package import UnoplatPackage + + +class UnoplatCodebase(BaseModel): + packages: Optional[UnoplatPackage] = Field(default=None, alias="UnoplatPackages") + \ No newline at end of file diff --git a/codebase_understanding/data_models/chapi_unoplat_function.py b/unoplat-code-confluence/data_models/chapi_unoplat_function.py similarity index 62% rename from codebase_understanding/data_models/chapi_unoplat_function.py rename to unoplat-code-confluence/data_models/chapi_unoplat_function.py index 29331082..25008a5e 100644 --- a/codebase_understanding/data_models/chapi_unoplat_function.py +++ b/unoplat-code-confluence/data_models/chapi_unoplat_function.py @@ -1,9 +1,10 @@ from pydantic import BaseModel, Field from typing import Optional, List -from .chapi_unoplat_functioncall import FunctionCall + +from data_models.chapi_unoplat_functioncall import FunctionCall +from data_models.unoplat_function_field_model import UnoplatFunctionFieldModel from .chapi_unoplat_annotation import Annotation from .chapi_unoplat_position import Position -from .chapi_unoplat_fieldmodel import FieldModel class Function(BaseModel): name: Optional[str] = Field(default=None, alias="Name") @@ -11,9 +12,6 @@ class Function(BaseModel): function_calls: List[FunctionCall] = Field(default_factory=list, alias="FunctionCalls") annotations: List[Annotation] = Field(default_factory=list, alias="Annotations") position: Optional[Position] = Field(default=None, alias="Position") - local_variables: List[FieldModel] = Field(default_factory=list, alias="LocalVariables") - position: Optional[Position] = Field(default=None, alias="Position") - local_variables: List[FieldModel] = Field(default_factory=list, alias="LocalVariables") + local_variables: List[UnoplatFunctionFieldModel] = Field(default_factory=list, alias="LocalVariables") body_hash: Optional[int] = Field(default=None, alias="BodyHash") content: Optional[str] = Field(default=None, alias="Content") - summary: Optional[str] = Field(default=None, alias="Summary") diff --git a/codebase_understanding/data_models/chapi_unoplat_functioncall.py b/unoplat-code-confluence/data_models/chapi_unoplat_functioncall.py similarity index 100% rename from codebase_understanding/data_models/chapi_unoplat_functioncall.py rename to unoplat-code-confluence/data_models/chapi_unoplat_functioncall.py diff --git a/codebase_understanding/data_models/chapi_unoplat_import.py b/unoplat-code-confluence/data_models/chapi_unoplat_import.py similarity index 100% rename from codebase_understanding/data_models/chapi_unoplat_import.py rename to unoplat-code-confluence/data_models/chapi_unoplat_import.py diff --git a/codebase_understanding/data_models/chapi_unoplat_node.py b/unoplat-code-confluence/data_models/chapi_unoplat_node.py similarity index 73% rename from codebase_understanding/data_models/chapi_unoplat_node.py rename to unoplat-code-confluence/data_models/chapi_unoplat_node.py index 49655aa1..6e9f7660 100644 --- a/codebase_understanding/data_models/chapi_unoplat_node.py +++ b/unoplat-code-confluence/data_models/chapi_unoplat_node.py 
@@ -1,11 +1,14 @@ from pydantic import BaseModel, Field from typing import Optional, List -from data_models.chapi_unoplat_fieldmodel import FieldModel +from data_models.chapi_unoplat_annotation import Annotation +from data_models.chapi_unoplat_class_summary import ClassSummary +from data_models.chapi_unoplat_class_fieldmodel import ClassFieldModel from data_models.chapi_unoplat_import import Import from data_models.chapi_unoplat_function import Function from data_models.chapi_unoplat_position import Position + class Node(BaseModel): node_name: Optional[str] = Field(default=None, alias="NodeName") type: Optional[str] = Field(default=None, alias="Type") @@ -13,10 +16,10 @@ class Node(BaseModel): module: Optional[str] = Field(default=None, alias="Module") package: Optional[str] = Field(default=None, alias="Package") multiple_extend: Optional[bool] = Field(default=None, alias="MultipleExtend") - fields: List[FieldModel] = Field(default_factory=list, alias="Fields") + fields: List[ClassFieldModel] = Field(default_factory=list, alias="Fields") extend: Optional[str] = Field(default=None, alias="Extend") imports: List[Import] = Field(default_factory=list, alias="Imports") functions: List[Function] = Field(default_factory=list, alias="Functions") position: Optional[Position] = Field(default=None, alias="Position") - summary: Optional[str] = Field(default=None, alias="Summary") content: Optional[str] = Field(default=None, alias="Content") + annotations: List[Annotation] = Field(default_factory=list, alias="Annotations") diff --git a/unoplat-code-confluence/data_models/chapi_unoplat_package.py b/unoplat-code-confluence/data_models/chapi_unoplat_package.py new file mode 100644 index 00000000..decd7f7c --- /dev/null +++ b/unoplat-code-confluence/data_models/chapi_unoplat_package.py @@ -0,0 +1,8 @@ +from typing import Dict, List, Optional +from pydantic import BaseModel, Field + +from data_models.dspy.dspy_unoplat_fs_node_subset import DspyUnoplatNodeSubset + + +class UnoplatPackage(BaseModel): + package_dict: Optional[Dict[str,List[DspyUnoplatNodeSubset]]] = Field(default_factory=dict,alias="package_dict") diff --git a/codebase_understanding/data_models/chapi_unoplat_parameter.py b/unoplat-code-confluence/data_models/chapi_unoplat_parameter.py similarity index 100% rename from codebase_understanding/data_models/chapi_unoplat_parameter.py rename to unoplat-code-confluence/data_models/chapi_unoplat_parameter.py diff --git a/codebase_understanding/data_models/chapi_unoplat_position.py b/unoplat-code-confluence/data_models/chapi_unoplat_position.py similarity index 100% rename from codebase_understanding/data_models/chapi_unoplat_position.py rename to unoplat-code-confluence/data_models/chapi_unoplat_position.py diff --git a/unoplat-code-confluence/data_models/dspy/__init__.py b/unoplat-code-confluence/data_models/dspy/__init__.py new file mode 100644 index 00000000..846a015b --- /dev/null +++ b/unoplat-code-confluence/data_models/dspy/__init__.py @@ -0,0 +1,13 @@ +from .dspy_o_function_summary import DspyFunctionSummary +from .dspy_unoplat_fs_annotation_subset import DspyUnoplatAnnotationSubset +from .dspy_unoplat_fs_function_call_subset import DspyUnoplatFunctionCallSubset +from .dspy_unoplat_fs_function_subset import DspyUnoplatFunctionSubset +from .dspy_unoplat_fs_node_subset import DspyUnoplatNodeSubset + +__all__ = [ + "DspyFunctionSummary", + "DspyUnoplatAnnotationSubset", + "DspyUnoplatFunctionCallSubset", + "DspyUnoplatFunctionSubset", + "DspyUnoplatNodeSubset", +] diff --git 
a/unoplat-code-confluence/data_models/dspy/dspy_o_function_summary.py b/unoplat-code-confluence/data_models/dspy/dspy_o_function_summary.py new file mode 100644 index 00000000..6d5184c7 --- /dev/null +++ b/unoplat-code-confluence/data_models/dspy/dspy_o_function_summary.py @@ -0,0 +1,10 @@ + + + +from typing import Optional +from pydantic import BaseModel, Field + + +class DspyFunctionSummary(BaseModel): + objective: Optional[str] = Field(default=None, alias="Objective",description="This should include high level objective of what function does based on function content and function metadata. Should not be more than 3 lines.") + implementation_summary: Optional[str] = Field(default=None, alias="ImplementationSummary",description="This should include implementation details of the function. make sure if this function makes internal calls to other functions of same class and to external calls to other classes/libs is also covered. Use all metadata shared for the function to answer .") diff --git a/unoplat-code-confluence/data_models/dspy/dspy_unoplat_fs_annotation_subset.py b/unoplat-code-confluence/data_models/dspy/dspy_unoplat_fs_annotation_subset.py new file mode 100644 index 00000000..2d4a8f95 --- /dev/null +++ b/unoplat-code-confluence/data_models/dspy/dspy_unoplat_fs_annotation_subset.py @@ -0,0 +1,13 @@ + + + + +from typing import Optional +from pydantic import BaseModel, Field + +from data_models.chapi_unoplat_annotation_key_val import ChapiUnoplatAnnotationKeyVal + + +class DspyUnoplatAnnotationSubset(BaseModel): + name: Optional[str] = Field(default=None, alias="Name") + key_values: Optional[list[ChapiUnoplatAnnotationKeyVal]] = Field(default_factory=list, alias="KeyValues") \ No newline at end of file diff --git a/unoplat-code-confluence/data_models/dspy/dspy_unoplat_fs_function_call_subset.py b/unoplat-code-confluence/data_models/dspy/dspy_unoplat_fs_function_call_subset.py new file mode 100644 index 00000000..872c6a0c --- /dev/null +++ b/unoplat-code-confluence/data_models/dspy/dspy_unoplat_fs_function_call_subset.py @@ -0,0 +1,11 @@ +from typing import List, Optional + +from pydantic import Field,BaseModel +from data_models.chapi_unoplat_functioncall import FunctionCall +from data_models.chapi_unoplat_parameter import Parameter + + +class DspyUnoplatFunctionCallSubset(BaseModel): + node_name: Optional[str] = Field(default=None, alias="NodeName") + function_name: Optional[str] = Field(default=None, alias="FunctionName") + parameters: Optional[List[Parameter]] = Field(default_factory=list, alias="Parameters") \ No newline at end of file diff --git a/unoplat-code-confluence/data_models/dspy/dspy_unoplat_fs_function_subset.py b/unoplat-code-confluence/data_models/dspy/dspy_unoplat_fs_function_subset.py new file mode 100644 index 00000000..8728b793 --- /dev/null +++ b/unoplat-code-confluence/data_models/dspy/dspy_unoplat_fs_function_subset.py @@ -0,0 +1,17 @@ +from typing import List, Optional + +from pydantic import BaseModel, Field +from data_models.dspy.dspy_unoplat_fs_annotation_subset import DspyUnoplatAnnotationSubset +from data_models.dspy.dspy_unoplat_fs_function_call_subset import DspyUnoplatFunctionCallSubset +from data_models.unoplat_function_field_model import UnoplatFunctionFieldModel + + +class DspyUnoplatFunctionSubset(BaseModel): + name: Optional[str] = Field(default=None, alias="Name") + return_type: Optional[str] = Field(default=None, alias="ReturnType") + function_calls: Optional[List[DspyUnoplatFunctionCallSubset]] = Field(default_factory=list, 
alias="FunctionCalls") + annotations: Optional[List[DspyUnoplatAnnotationSubset]] = Field(default_factory=list, alias="Annotations") + local_variables: Optional[List[UnoplatFunctionFieldModel]] = Field(default_factory=list, alias="LocalVariables") + content: Optional[str] = Field(default=None, alias="Content") + + \ No newline at end of file diff --git a/unoplat-code-confluence/data_models/dspy/dspy_unoplat_fs_node_subset.py b/unoplat-code-confluence/data_models/dspy/dspy_unoplat_fs_node_subset.py new file mode 100644 index 00000000..5080c00b --- /dev/null +++ b/unoplat-code-confluence/data_models/dspy/dspy_unoplat_fs_node_subset.py @@ -0,0 +1,17 @@ +from typing import List, Optional + +from pydantic import BaseModel, Field +from data_models.chapi_unoplat_class_fieldmodel import ClassFieldModel +from data_models.chapi_unoplat_import import Import +from data_models.dspy.dspy_unoplat_fs_annotation_subset import DspyUnoplatAnnotationSubset +from data_models.dspy.dspy_unoplat_fs_function_subset import DspyUnoplatFunctionSubset + + +class DspyUnoplatNodeSubset(BaseModel): + node_name: Optional[str] = Field(default=None, alias="NodeName", description="This is name of the class.") + multiple_extend: Optional[bool] = Field(default=None, alias="MultipleExtend", description="this includes if class is inheriting multiple classes") + fields: Optional[List[ClassFieldModel]] = Field(default_factory=List, alias="Fields", description="This includes class fields") + extend: Optional[str] = Field(default=None, alias="Extend", description="This includes class inheritance") + imports: Optional[List[Import]] = Field(default_factory=List, alias="Imports", description="This includes class imports which can be used to infer types of fields") + annotations: Optional[List[DspyUnoplatAnnotationSubset]] = Field(default_factory=list, alias="Annotations") + functions: List[DspyUnoplatFunctionSubset] = Field(default_factory=list, alias="Functions") diff --git a/unoplat-code-confluence/data_models/unoplat_function_field_model.py b/unoplat-code-confluence/data_models/unoplat_function_field_model.py new file mode 100644 index 00000000..53c1ccf6 --- /dev/null +++ b/unoplat-code-confluence/data_models/unoplat_function_field_model.py @@ -0,0 +1,7 @@ +from typing import Optional +from pydantic import BaseModel, Field + + +class UnoplatFunctionFieldModel(BaseModel): + type_value: Optional[str] = Field(default=None, alias="TypeValue",description="function field name") + type_type: Optional[str] = Field(default=None, alias="TypeType",description="function field type. Can be incorrect sometime if variable is returned without declaration. 
So refer content attribute too to understand better.") diff --git a/codebase_understanding/downloader/__init__.py b/unoplat-code-confluence/downloader/__init__.py similarity index 100% rename from codebase_understanding/downloader/__init__.py rename to unoplat-code-confluence/downloader/__init__.py diff --git a/codebase_understanding/downloader/downloader.py b/unoplat-code-confluence/downloader/downloader.py similarity index 100% rename from codebase_understanding/downloader/downloader.py rename to unoplat-code-confluence/downloader/downloader.py diff --git a/unoplat-code-confluence/dspy_code_experiment.py b/unoplat-code-confluence/dspy_code_experiment.py new file mode 100644 index 00000000..7206db04 --- /dev/null +++ b/unoplat-code-confluence/dspy_code_experiment.py @@ -0,0 +1,147 @@ +import dspy +from data_models.dspy.dspy_o_function_summary import DspyFunctionSummary +from data_models.dspy.dspy_unoplat_fs_function_subset import DspyUnoplatFunctionSubset +from data_models.dspy.dspy_unoplat_fs_node_subset import DspyUnoplatNodeSubset +import json +from data_models.chapi_unoplat_node import Node +from data_models.dspy.dspy_unoplat_fs_annotation_subset import DspyUnoplatAnnotationSubset +from data_models.dspy.dspy_unoplat_fs_function_call_subset import DspyUnoplatFunctionCallSubset +from data_models.dspy.dspy_unoplat_fs_function_subset import DspyUnoplatFunctionSubset +from data_models.dspy.dspy_unoplat_fs_node_subset import DspyUnoplatNodeSubset +from dspy.primitives.assertions import assert_transform_module, backtrack_handler + + +ollama_codestral = dspy.OllamaLocal(model='codestral:22b-v0.1-q8_0') +dspy.configure(lm=ollama_codestral) + +ollama_llama_70b = dspy.OllamaLocal(model='llama3:70b-instruct') +dspy.configure(lm=ollama_llama_70b) + +# gpt3.5-turbo = dspy.OpenAI(model='gpt-4-1106-preview', max_tokens=1000, model_type='chat') + +# Define the signature for automatic assessments. +# class Assess(dspy.Signature): +# """Assess the quality of a tweet along the specified dimension.""" +# assessed_text = dspy.InputField() +# assessment_question = dspy.InputField() +# assessment_answer = dspy.OutputField(desc="Yes or No") + +# def metric(input, pred, trace=None): +# question, answer, tweet = gold.question, gold.answer, pred.output + +# engaging = "Does the assessed text make for a self-contained, engaging tweet?" +# correct = f"The text should answer `{question}` with `{answer}`. Does the assessed text contain this answer?" 
+ +# with dspy.context(lm=gpt4T): +# correct = dspy.Predict(Assess)(assessed_text=tweet, assessment_question=correct) +# engaging = dspy.Predict(Assess)(assessed_text=tweet, assessment_question=engaging) + +# correct, engaging = [m.assessment_answer.lower() == 'yes' for m in [correct, engaging]] +# score = (correct + engaging) if correct and (len(tweet) <= 280) else 0 + +# if trace is not None: return score >= 2 +# return score / 2.0 + +# import dspy +# from dspy.evaluate import Evaluate +# from dspy.evaluate.metrics import answer_exact_match +# from dspy.teleprompt.signature_opt_typed import optimize_signature + +# turbo = dspy.OpenAI(model='gpt-3.5-turbo', max_tokens=4000) +# gpt4 = dspy.OpenAI(model='gpt-4', max_tokens=4000) +# dspy.settings.configure(lm=turbo) + +# evaluator = Evaluate(devset=devset, metric=answer_exact_match, num_threads=10, display_progress=True) + +# result = optimize_signature( +# student=dspy.TypedPredictor(QASignature), +# evaluator=evaluator, +# initial_prompts=6, +# n_iterations=100, +# max_examples=30, +# verbose=True, +# prompt_model=gpt4, +# ) + + +class CodeConfluenceFunctionSignature(dspy.Signature): + """This signature contains class metadata and function metadata with function content and returns descriptive function summary. Strictly respond only with dspy_function_summary in json format""" + dspy_class_subset: DspyUnoplatNodeSubset = dspy.InputField(desc="This will contain class metadata in json") + dspy_function_subset: DspyUnoplatFunctionSubset = dspy.InputField(desc="This will contain function metadata with function content in json") + dspy_function_summary: DspyFunctionSummary = dspy.OutputField(desc="This will contain function summary in json") + +# class CodeConfluenceFunctionSummaryOptimiserSignature(dspy.Signature): +# """This signature contains function objective and implementation summary and returns factual and very concise function implementation summary and objective. 
Strictly respond only with enhanced_objective_summary_output in json format""" +# objective_summary_input: DspyFunctionSummary = dspy.InputField(alias="objective_summary_input",desc="This will contain function summary in json") +# enhanced_objective_summary_output: DspyFunctionSummary = dspy.OutputField(alias="enhanced_objective_summary_output",desc="This will contain optimised function summary in json") + + +class CodeConfluenceFunctionModule(dspy.Module): + def __init__(self): + super().__init__() + self.generate_function_summary = dspy.TypedChainOfThought(CodeConfluenceFunctionSignature) + # self.optimise_function_summary = dspy.TypedPredictor(CodeConfluenceFunctionSummaryOptimiserSignature) + + def forward(self, function_metadata, class_metadata): + function_summary = self.generate_function_summary( dspy_class_subset = class_metadata, dspy_function_subset= function_metadata) + # dspy.Suggest( + # "observe the error if any and correct the json structure", + # ) + print(function_summary) + # optimised_function_summary = self.optimise_function_summary(objective_summary_input=function_summary.dspy_function_summary) + # dspy.Suggest( + # "observe the error if any and correct the json structure", + # ) + # print(optimised_function_summary.enhanced_objective_summary_output) + return function_summary + + + +if __name__ == "__main__": + #dspy_pipeline = assert_transform_module(CodeConfluenceFunctionModule(), backtrack_handler) + #dspy_pipeline = CodeConfluenceFunctionModule() + try: + with open('springstarterjava1_codes.json', 'r') as file: + springstarterjava1_codes = json.load(file) + except FileNotFoundError: + print("Error: File 'springstarterjava1_codes.json' not found.") + springstarterjava1_codes = [] + except json.JSONDecodeError: + print("Error: File 'springstarterjava1_codes.json' contains invalid JSON.") + springstarterjava1_codes = [] + + node_subsets = [] + + function_subsets = [] + count = 0 + for item in springstarterjava1_codes: + try: + node = Node(**item) + print("node name",node.node_name) + node_subset = DspyUnoplatNodeSubset( + NodeName=node.node_name, + Imports=node.imports, + Extend=node.extend, + MultipleExtend=node.multiple_extend, + Fields=node.fields, + Annotations=[DspyUnoplatAnnotationSubset(Name=annotation.name,KeyValues=annotation.key_values) for annotation in node.annotations] + ) + count = count + 1 + + for func in node.functions: + print(func) + function_subset = DspyUnoplatFunctionSubset( + Name=func.name, + ReturnType=func.return_type, + Annotations=[DspyUnoplatAnnotationSubset(Name=annotation.name,KeyValues=annotation.key_values) for annotation in node.annotations], + LocalVariables=func.local_variables, + Content=func.content, + FunctionCalls=[DspyUnoplatFunctionCallSubset(NodeName=call.node_name, FunctionName=call.function_name, Parameters=call.parameters) for call in func.function_calls] + ) + pred = dspy_pipeline(function_metadata=function_subset,class_metadata=node_subset) + + except AttributeError as e: + print(f"Error processing node data: {e}") + + + \ No newline at end of file diff --git a/unoplat-code-confluence/dspy_unoplat_code_confluence.ipynb b/unoplat-code-confluence/dspy_unoplat_code_confluence.ipynb new file mode 100644 index 00000000..85cdf7f8 --- /dev/null +++ b/unoplat-code-confluence/dspy_unoplat_code_confluence.ipynb @@ -0,0 +1,382 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + 
"/Users/jghiya/Library/Caches/pypoetry/virtualenvs/unoplat-code-confluence-wBVFyKeX-py3.12/lib/python3.12/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n", + " from .autonotebook import tqdm as notebook_tqdm\n" + ] + } + ], + "source": [ + "import dspy\n", + "from data_models.dspy.dspy_o_function_summary import DspyFunctionSummary\n", + "from data_models.dspy.dspy_unoplat_fs_function_subset import DspyUnoplatFunctionSubset\n", + "from data_models.dspy.dspy_unoplat_fs_node_subset import DspyUnoplatNodeSubset" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "ollama_codestral = dspy.OllamaLocal(model='codestral:22b-v0.1-q8_0')\n", + "dspy.configure(lm=ollama_codestral)" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [], + "source": [ + "class CodeConfluenceFunctionSignature(dspy.Signature):\n", + " \"\"\"This signature contains class metadata and function metadata with function content and returns descriptive function summary. Only respond in this json format\"\"\"\n", + " dspy_class_subset: DspyUnoplatNodeSubset = dspy.InputField(desc=\"This will contain class metadata\")\n", + " dspy_function_subset: DspyUnoplatFunctionSubset = dspy.InputField(desc=\"This will contain function metadata with function content\")\n", + " dspy_function_summary: DspyFunctionSummary = dspy.OutputField(desc=\"This will contain function summary\")\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [], + "source": [ + "class CodeConfluenceFunctionSummaryOptimiserSignature(dspy.Signature):\n", + " \"\"\"This signature contains function summary and returns optimised and accurate function summary. 
Only respond in this json format\"\"\"\n", + " dspy_function_input_summary: DspyFunctionSummary = dspy.InputField(desc=\"This will contain function summary\")\n", + " dspy_function_output_summary: DspyFunctionSummary = dspy.OutputField(desc=\"This will contain optimised function summary\")\n" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [], + "source": [ + "class CodeConfluenceFunctionModule(dspy.Module):\n", + " def __init__(self):\n", + " super().__init__()\n", + " self.generate_function_summary = dspy.Predict(CodeConfluenceFunctionSignature)\n", + " self.optimise_function_summary = dspy.Predict(CodeConfluenceFunctionSummaryOptimiserSignature)\n", + "\n", + " def forward(self, function_metadata, class_metadata):\n", + " function_summary = self.generate_function_summary( dspy_class_subset = class_metadata, dspy_function_subset= function_metadata)\n", + " optimised_function_summary = self.optimise_function_summary(function_summary)\n", + " print(optimised_function_summary.dspy_function_output_summary)\n", + " return optimised_function_summary" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [], + "source": [ + "dspy_pipeline = CodeConfluenceFunctionModule()" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "node name Order\n", + "node_name='Order' multiple_extend=None fields=[ClassFieldModel(type_type='OrderPrimaryKey', type_key='key', annotations=[Annotation(name='PrimaryKey', key_values=[], position=Position(start_line=13, start_line_position=4, stop_line=13, stop_line_position=5))]), ClassFieldModel(type_type='Integer', type_key='productQuantity', annotations=[Annotation(name='Column', key_values=[ChapiUnoplatAnnotationKeyVal(key='\"product_quantity\"', value='\"product_quantity\"')], position=Position(start_line=16, start_line_position=4, stop_line=16, stop_line_position=30)), Annotation(name='CassandraType', key_values=[ChapiUnoplatAnnotationKeyVal(key='type', value='CassandraType.Name.INT')], position=Position(start_line=17, start_line_position=4, stop_line=17, stop_line_position=48))]), ClassFieldModel(type_type='String', type_key='productName', annotations=[Annotation(name='Column', key_values=[ChapiUnoplatAnnotationKeyVal(key='\"product_name\"', value='\"product_name\"')], position=Position(start_line=20, start_line_position=4, stop_line=20, stop_line_position=26)), Annotation(name='CassandraType', key_values=[ChapiUnoplatAnnotationKeyVal(key='type', value='CassandraType.Name.TEXT')], position=Position(start_line=21, start_line_position=4, stop_line=21, stop_line_position=49))]), ClassFieldModel(type_type='Float', type_key='productPrice', annotations=[Annotation(name='CassandraType', key_values=[ChapiUnoplatAnnotationKeyVal(key='type', value='CassandraType.Name.DECIMAL')], position=Position(start_line=24, start_line_position=4, stop_line=24, stop_line_position=52)), Annotation(name='Column', key_values=[ChapiUnoplatAnnotationKeyVal(key='\"product_price\"', value='\"product_price\"')], position=Position(start_line=25, start_line_position=4, stop_line=25, stop_line_position=27))]), ClassFieldModel(type_type='Instant', type_key='addedToOrderTimestamp', annotations=[Annotation(name='CassandraType', key_values=[ChapiUnoplatAnnotationKeyVal(key='type', value='CassandraType.Name.TIMESTAMP')], position=Position(start_line=28, start_line_position=4, stop_line=28, stop_line_position=54)), 
Annotation(name='Column', key_values=[ChapiUnoplatAnnotationKeyVal(key='\"added_to_order_at\"', value='\"added_to_order_at\"')], position=Position(start_line=29, start_line_position=4, stop_line=29, stop_line_position=31))])] extend=None imports=[Import(source='lombok.Data', usage_name=[]), Import(source='org.springframework.data.cassandra.core.mapping', usage_name=[]), Import(source='java.io.Serializable', usage_name=[]), Import(source='java.time.Instant', usage_name=[])] annotations=[DspyUnoplatAnnotationSubset(name='Table', key_values=[ChapiUnoplatAnnotationKeyVal(key='value', value='\"starter_orders\"')]), DspyUnoplatAnnotationSubset(name='Data', key_values=[])]\n", + "node name OrderRepository\n", + "node_name='OrderRepository' multiple_extend=None fields=[] extend='CassandraRepository' imports=[Import(source='org.springframework.data.cassandra.repository.CassandraRepository', usage_name=[]), Import(source='org.springframework.data.rest.core.annotation.RestResource', usage_name=[]), Import(source='org.springframework.stereotype.Repository', usage_name=[]), Import(source='java.util.List', usage_name=[]), Import(source='java.util.UUID', usage_name=[])] annotations=[DspyUnoplatAnnotationSubset(name='Repository', key_values=[]), DspyUnoplatAnnotationSubset(name='RestResource', key_values=[ChapiUnoplatAnnotationKeyVal(key='exported', value='false')])]\n", + "name='deleteByKeyOrderId' return_type='void' function_calls=[] annotations=[] position=Position(start_line=16, start_line_position=2, stop_line=16, stop_line_position=39) local_variables=[] body_hash=None content='void deleteByKeyOrderId(UUID orderId);'\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/Users/jghiya/Library/Caches/pypoetry/virtualenvs/unoplat-code-confluence-wBVFyKeX-py3.12/lib/python3.12/site-packages/dsp/templates/template_v2.py:35: SyntaxWarning: invalid escape sequence '\\s'\n", + " match = re.search(\"(.*)(\\s){(.*)}\\s(.*\\${.*})\", template)\n", + "/Users/jghiya/Library/Caches/pypoetry/virtualenvs/unoplat-code-confluence-wBVFyKeX-py3.12/lib/python3.12/site-packages/dsp/templates/template_v2.py:42: SyntaxWarning: invalid escape sequence '\\s'\n", + " match = re.search(\"(.*)(\\s){(.*)}\", template)\n" + ] + }, + { + "ename": "AssertionError", + "evalue": "Need format_handler for dspy_class_subset of type ", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mAssertionError\u001b[0m Traceback (most recent call last)", + "Cell \u001b[0;32mIn[7], line 46\u001b[0m\n\u001b[1;32m 37\u001b[0m \u001b[38;5;28mprint\u001b[39m(func)\n\u001b[1;32m 38\u001b[0m function_subset \u001b[38;5;241m=\u001b[39m DspyUnoplatFunctionSubset(\n\u001b[1;32m 39\u001b[0m Name\u001b[38;5;241m=\u001b[39mfunc\u001b[38;5;241m.\u001b[39mname,\n\u001b[1;32m 40\u001b[0m ReturnType\u001b[38;5;241m=\u001b[39mfunc\u001b[38;5;241m.\u001b[39mreturn_type,\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 44\u001b[0m FunctionCalls\u001b[38;5;241m=\u001b[39m[DspyUnoplatFunctionCallSubset(NodeName\u001b[38;5;241m=\u001b[39mcall\u001b[38;5;241m.\u001b[39mnode_name, FunctionName\u001b[38;5;241m=\u001b[39mcall\u001b[38;5;241m.\u001b[39mfunction_name, Parameters\u001b[38;5;241m=\u001b[39mcall\u001b[38;5;241m.\u001b[39mparameters) \u001b[38;5;28;01mfor\u001b[39;00m call \u001b[38;5;129;01min\u001b[39;00m func\u001b[38;5;241m.\u001b[39mfunction_calls]\n\u001b[1;32m 45\u001b[0m )\n\u001b[0;32m---> 46\u001b[0m pred 
\u001b[38;5;241m=\u001b[39m \u001b[43mdspy_pipeline\u001b[49m\u001b[43m(\u001b[49m\u001b[43mclass_metadata\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mnode_subset\u001b[49m\u001b[43m,\u001b[49m\u001b[43mfunction_metadata\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mfunction_subset\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 47\u001b[0m \u001b[38;5;28mprint\u001b[39m(pred\u001b[38;5;241m.\u001b[39mdspy_function_output_summary)\n\u001b[1;32m 48\u001b[0m exit()\n", + "File \u001b[0;32m~/Library/Caches/pypoetry/virtualenvs/unoplat-code-confluence-wBVFyKeX-py3.12/lib/python3.12/site-packages/dspy/primitives/program.py:26\u001b[0m, in \u001b[0;36mModule.__call__\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 25\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21m__call__\u001b[39m(\u001b[38;5;28mself\u001b[39m, \u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs):\n\u001b[0;32m---> 26\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mforward\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", + "Cell \u001b[0;32mIn[5], line 8\u001b[0m, in \u001b[0;36mCodeConfluenceFunctionModule.forward\u001b[0;34m(self, function_metadata, class_metadata)\u001b[0m\n\u001b[1;32m 7\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mforward\u001b[39m(\u001b[38;5;28mself\u001b[39m, function_metadata, class_metadata):\n\u001b[0;32m----> 8\u001b[0m function_summary \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mgenerate_function_summary\u001b[49m\u001b[43m(\u001b[49m\u001b[43m \u001b[49m\u001b[43mdspy_class_subset\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43m \u001b[49m\u001b[43mclass_metadata\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mdspy_function_subset\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43m \u001b[49m\u001b[43mfunction_metadata\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 9\u001b[0m optimised_function_summary \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39moptimise_function_summary(function_summary)\n\u001b[1;32m 10\u001b[0m \u001b[38;5;28mprint\u001b[39m(optimised_function_summary\u001b[38;5;241m.\u001b[39mdspy_function_output_summary)\n", + "File \u001b[0;32m~/Library/Caches/pypoetry/virtualenvs/unoplat-code-confluence-wBVFyKeX-py3.12/lib/python3.12/site-packages/dspy/predict/predict.py:61\u001b[0m, in \u001b[0;36mPredict.__call__\u001b[0;34m(self, **kwargs)\u001b[0m\n\u001b[1;32m 60\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21m__call__\u001b[39m(\u001b[38;5;28mself\u001b[39m, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs):\n\u001b[0;32m---> 61\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mforward\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m~/Library/Caches/pypoetry/virtualenvs/unoplat-code-confluence-wBVFyKeX-py3.12/lib/python3.12/site-packages/dspy/predict/predict.py:103\u001b[0m, in \u001b[0;36mPredict.forward\u001b[0;34m(self, **kwargs)\u001b[0m\n\u001b[1;32m 100\u001b[0m template 
\u001b[38;5;241m=\u001b[39m signature_to_template(signature)\n\u001b[1;32m 102\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mlm \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[0;32m--> 103\u001b[0m x, C \u001b[38;5;241m=\u001b[39m \u001b[43mdsp\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mgenerate\u001b[49m\u001b[43m(\u001b[49m\u001b[43mtemplate\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mconfig\u001b[49m\u001b[43m)\u001b[49m\u001b[43m(\u001b[49m\u001b[43mx\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mstage\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mstage\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 104\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m 105\u001b[0m \u001b[38;5;66;03m# Note: query_only=True means the instructions and examples are not included.\u001b[39;00m\n\u001b[1;32m 106\u001b[0m \u001b[38;5;66;03m# I'm not really sure why we'd want to do that, but it's there.\u001b[39;00m\n\u001b[1;32m 107\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m dsp\u001b[38;5;241m.\u001b[39msettings\u001b[38;5;241m.\u001b[39mcontext(lm\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mlm, query_only\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mTrue\u001b[39;00m):\n", + "File \u001b[0;32m~/Library/Caches/pypoetry/virtualenvs/unoplat-code-confluence-wBVFyKeX-py3.12/lib/python3.12/site-packages/dsp/primitives/predict.py:76\u001b[0m, in \u001b[0;36m_generate..do_generate\u001b[0;34m(example, stage, max_depth, original_example)\u001b[0m\n\u001b[1;32m 73\u001b[0m example \u001b[38;5;241m=\u001b[39m example\u001b[38;5;241m.\u001b[39mdemos_at(\u001b[38;5;28;01mlambda\u001b[39;00m d: d[stage])\n\u001b[1;32m 75\u001b[0m \u001b[38;5;66;03m# Generate and extract the fields.\u001b[39;00m\n\u001b[0;32m---> 76\u001b[0m prompt \u001b[38;5;241m=\u001b[39m \u001b[43mtemplate\u001b[49m\u001b[43m(\u001b[49m\u001b[43mexample\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 77\u001b[0m completions: \u001b[38;5;28mlist\u001b[39m[\u001b[38;5;28mdict\u001b[39m[\u001b[38;5;28mstr\u001b[39m, Any]] \u001b[38;5;241m=\u001b[39m generator(prompt, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs)\n\u001b[1;32m 78\u001b[0m completions: \u001b[38;5;28mlist\u001b[39m[Example] \u001b[38;5;241m=\u001b[39m [template\u001b[38;5;241m.\u001b[39mextract(example, p) \u001b[38;5;28;01mfor\u001b[39;00m p \u001b[38;5;129;01min\u001b[39;00m completions]\n", + "File \u001b[0;32m~/Library/Caches/pypoetry/virtualenvs/unoplat-code-confluence-wBVFyKeX-py3.12/lib/python3.12/site-packages/dsp/templates/template_v2.py:244\u001b[0m, in \u001b[0;36mTemplateV2.__call__\u001b[0;34m(self, example, show_guidelines)\u001b[0m\n\u001b[1;32m 241\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m long_query:\n\u001b[1;32m 242\u001b[0m example[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124maugmented\u001b[39m\u001b[38;5;124m\"\u001b[39m] \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mTrue\u001b[39;00m\n\u001b[0;32m--> 244\u001b[0m query \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mquery\u001b[49m\u001b[43m(\u001b[49m\u001b[43mexample\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 246\u001b[0m \u001b[38;5;66;03m# if it has more lines than fields\u001b[39;00m\n\u001b[1;32m 247\u001b[0m 
\u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mlen\u001b[39m(query\u001b[38;5;241m.\u001b[39msplit(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;130;01m\\n\u001b[39;00m\u001b[38;5;124m\"\u001b[39m)) \u001b[38;5;241m>\u001b[39m \u001b[38;5;28mlen\u001b[39m(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mfields):\n", + "File \u001b[0;32m~/Library/Caches/pypoetry/virtualenvs/unoplat-code-confluence-wBVFyKeX-py3.12/lib/python3.12/site-packages/dsp/templates/template_v2.py:105\u001b[0m, in \u001b[0;36mTemplateV2.query\u001b[0;34m(self, example, is_demo)\u001b[0m\n\u001b[1;32m 102\u001b[0m \u001b[38;5;28;01massert\u001b[39;00m \u001b[38;5;28mtype\u001b[39m(x) \u001b[38;5;241m==\u001b[39m \u001b[38;5;28mstr\u001b[39m, \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mNeed format_handler for \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mfield\u001b[38;5;241m.\u001b[39minput_variable\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m of type \u001b[39m\u001b[38;5;132;01m{\u001b[39;00m\u001b[38;5;28mtype\u001b[39m(x)\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 103\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m \u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;241m.\u001b[39mjoin(x\u001b[38;5;241m.\u001b[39msplit())\n\u001b[0;32m--> 105\u001b[0m formatted_value \u001b[38;5;241m=\u001b[39m \u001b[43mformat_handler\u001b[49m\u001b[43m(\u001b[49m\u001b[43mexample\u001b[49m\u001b[43m[\u001b[49m\u001b[43mfield\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43minput_variable\u001b[49m\u001b[43m]\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 106\u001b[0m separator \u001b[38;5;241m=\u001b[39m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;130;01m\\n\u001b[39;00m\u001b[38;5;124m\"\u001b[39m \u001b[38;5;28;01mif\u001b[39;00m field\u001b[38;5;241m.\u001b[39mseparator \u001b[38;5;241m==\u001b[39m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m \u001b[39m\u001b[38;5;124m\"\u001b[39m \u001b[38;5;129;01mand\u001b[39;00m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;130;01m\\n\u001b[39;00m\u001b[38;5;124m\"\u001b[39m \u001b[38;5;129;01min\u001b[39;00m formatted_value \u001b[38;5;28;01melse\u001b[39;00m field\u001b[38;5;241m.\u001b[39mseparator\n\u001b[1;32m 108\u001b[0m result\u001b[38;5;241m.\u001b[39mappend(\n\u001b[1;32m 109\u001b[0m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;132;01m{\u001b[39;00mfield\u001b[38;5;241m.\u001b[39mname\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;132;01m{\u001b[39;00mseparator\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;132;01m{\u001b[39;00mformatted_value\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m\"\u001b[39m,\n\u001b[1;32m 110\u001b[0m )\n", + "File \u001b[0;32m~/Library/Caches/pypoetry/virtualenvs/unoplat-code-confluence-wBVFyKeX-py3.12/lib/python3.12/site-packages/dsp/templates/template_v2.py:102\u001b[0m, in \u001b[0;36mTemplateV2.query..format_handler\u001b[0;34m(x)\u001b[0m\n\u001b[1;32m 101\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mformat_handler\u001b[39m(x):\n\u001b[0;32m--> 102\u001b[0m \u001b[38;5;28;01massert\u001b[39;00m \u001b[38;5;28mtype\u001b[39m(x) \u001b[38;5;241m==\u001b[39m \u001b[38;5;28mstr\u001b[39m, \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mNeed format_handler for \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mfield\u001b[38;5;241m.\u001b[39minput_variable\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m of type 
\u001b[39m\u001b[38;5;132;01m{\u001b[39;00m\u001b[38;5;28mtype\u001b[39m(x)\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 103\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m \u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;241m.\u001b[39mjoin(x\u001b[38;5;241m.\u001b[39msplit())\n", + "\u001b[0;31mAssertionError\u001b[0m: Need format_handler for dspy_class_subset of type " + ] + } + ], + "source": [ + "import json\n", + "from data_models.chapi_unoplat_node import Node\n", + "from data_models.dspy.dspy_unoplat_fs_annotation_subset import DspyUnoplatAnnotationSubset\n", + "from data_models.dspy.dspy_unoplat_fs_function_call_subset import DspyUnoplatFunctionCallSubset\n", + "from data_models.dspy.dspy_unoplat_fs_function_subset import DspyUnoplatFunctionSubset\n", + "from data_models.dspy.dspy_unoplat_fs_node_subset import DspyUnoplatNodeSubset\n", + "\n", + "try:\n", + " with open('springstarterjava1_codes.json', 'r') as file:\n", + " springstarterjava1_codes = json.load(file)\n", + "except FileNotFoundError:\n", + " print(\"Error: File 'springstarterjava1_codes.json' not found.\")\n", + " springstarterjava1_codes = []\n", + "except json.JSONDecodeError:\n", + " print(\"Error: File 'springstarterjava1_codes.json' contains invalid JSON.\")\n", + " springstarterjava1_codes = []\n", + "\n", + "node_subsets = []\n", + "\n", + "function_subsets = []\n", + "count = 0\n", + "for item in springstarterjava1_codes:\n", + " try:\n", + " node = Node(**item)\n", + " print(\"node name\",node.node_name)\n", + " node_subset = DspyUnoplatNodeSubset(\n", + " NodeName=node.node_name,\n", + " Imports=node.imports,\n", + " Extend=node.extend,\n", + " MultipleExtend=node.multiple_extend,\n", + " Fields=node.fields,\n", + " Annotations=[DspyUnoplatAnnotationSubset(Name=annotation.name,KeyValues=annotation.key_values) for annotation in node.annotations]\n", + " )\n", + " count = count + 1\n", + " print(node_subset)\n", + " for func in node.functions:\n", + " print(func)\n", + " function_subset = DspyUnoplatFunctionSubset(\n", + " Name=func.name,\n", + " ReturnType=func.return_type,\n", + " Annotations=[DspyUnoplatAnnotationSubset(Name=annotation.name,KeyValues=annotation.key_values) for annotation in node.annotations],\n", + " LocalVariables=func.local_variables,\n", + " Content=func.content,\n", + " FunctionCalls=[DspyUnoplatFunctionCallSubset(NodeName=call.node_name, FunctionName=call.function_name, Parameters=call.parameters) for call in func.function_calls]\n", + " )\n", + " pred = dspy_pipeline(class_metadata=node_subset,function_metadata=function_subset)\n", + " print(pred.dspy_function_output_summary)\n", + " exit()\n", + " \n", + " \n", + " except AttributeError as e:\n", + " print(f\"Error processing node data: {e}\")\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "node_subsets[0]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import dspy\n", + "import os\n", + "from data_models.chapi_unoplat_function import Function\n", + "from data_models.chapi_unoplat_function_summary 
import FunctionSummary\n", + "from loader import JsonLoader\n", + "from loader import JsonParser\n", + "\n", + "ollama_mistral = dspy.OllamaLocal(model='mistral:7b-instruct-fp16')\n", + "dspy.configure(lm=ollama_mistral)\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "class UnoplatFunctionSummary(dspy.Signature):\n", + " \"\"\"Generate accurate Summary only based on user metadata about function\"\"\"\n", + " \n", + " input: Function = dspy.InputField(desc=\"will contain all relevant function metadata\")\n", + " output: FunctionSummary = dspy.OutputField(desc=\"summarisation of function\")\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from typing import List\n", + "from data_models.chapi_unoplat_class_summary import ClassSummary\n", + "from data_models.chapi_unoplat_node import Node\n", + "\n", + "\n", + "class UnoplatClassSummary(dspy.Signature):\n", + " \"\"\"Generate accurate summary only based on List of Function summaries and class metadata\"\"\"\n", + " input: List[FunctionSummary] = dspy.InputField(desc=\"This contains list of function summaries that needs to be understood in a step by step fashion.\")\n", + " context: Node = dspy.InputField(desc=\"This will contain class metadata.\" )\n", + " output: ClassSummary = dspy.OutputField(desc=\"This should contain class summary based on user shared metadata.\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from typing import List\n", + "\n", + "class UnoplatModuleFunctionSummary(dspy.Module):\n", + " \"\"\"Generate summary for all functions of a class\"\"\"\n", + " def __init__(self):\n", + " super().__init__()\n", + " self.generate_function_summary = dspy.TypedChainOfThought(UnoplatFunctionSummary)\n", + " \n", + "\n", + " def forward(self, list_function = None) -> str:\n", + " function_summaries_context = []\n", + " \n", + " for item in list_function:\n", + " function_summaries_context.append(self.generate_function_summary(item))\n", + "\n", + " \n", + " return function_summaries_context \n", + "\n", + "\n", + " \n", + " " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from dspy.teleprompt import BootstrapFewShot\n", + "# Set up the teleprompter\n", + "teleprompter = BootstrapFewShot()\n", + "\n", + "# Compile the program without a training dataset\n", + "compiled_summary = teleprompter.compile(UnoplatModuleClassSummary(),trainset=None)\n", + "\n", + "#cot_predictor = dspy.TypedChainOfThought(UnoplatFunctionSummary)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "iload_json = JsonLoader()\n", + "\n", + "iparse_json = JsonParser()\n", + "\n", + "\n", + "meadata_classes = iload_json.load_json_from_file(\"springstarterjava1_codes.json\")\n", + "\n", + "package_dict = iparse_json.parse_json_to_nodes(meadata_classes,None)\n", + "\n", + "print(package_dict.keys)\n", + "\n", + "function: Function = package_dict['com.datastax.examples.order'][2].functions[0]\n", + "print(function)\n", + "\n", + "prediction = cot_predictor(input=function,options={\"format\": \"json\"})\n", + "\n", + "ollama_mistral.inspect_history(n=1)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": 
{}, + "outputs": [], + "source": [ + "print(prediction)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": ".venv", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.3" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/codebase_understanding/example_config.json b/unoplat-code-confluence/example_config.json similarity index 87% rename from codebase_understanding/example_config.json rename to unoplat-code-confluence/example_config.json index a751256b..301f1f15 100644 --- a/codebase_understanding/example_config.json +++ b/unoplat-code-confluence/example_config.json @@ -6,6 +6,6 @@ "repo": { "download_url": "archguard/archguard", "download_directory": "/Users/jayghiya/Documents/unoplat", - "github_token": "put your token heregi" + "github_token": "insert your github_token" } } \ No newline at end of file diff --git a/unoplat-code-confluence/ingestion-finetune/input-request-body.json b/unoplat-code-confluence/ingestion-finetune/input-request-body.json new file mode 100755 index 00000000..e39ac01c --- /dev/null +++ b/unoplat-code-confluence/ingestion-finetune/input-request-body.json @@ -0,0 +1,4631 @@ +[ + { + "NodeName": "ReactiveServiceForMathOperation", + "Module": "root", + "Type": "CLASS", + "Package": "org.acme.impl", + "FilePath": "main/java/org/acme/impl/ReactiveServiceForMathOperation.java", + "Functions": [ + { + "Name": "processMessage", + "ReturnType": "CompletionStage", + "Parameters": [ + { + "TypeValue": "samplePojo", + "TypeType": "SamplePojo" + } + ], + "FunctionCalls": [ + { + "Package": "org.acme.impl", + "NodeName": "Math", + "FunctionName": "sin", + "Parameters": [ + { + "TypeValue": "StrictMath.cos(res)", + "TypeType": "" + } + ], + "Position": { + "StartLine": 19, + "StartLinePosition": 28, + "StopLine": 19, + "StopLinePosition": 51 + } + }, + { + "Package": "org.acme.impl", + "NodeName": "StrictMath", + "FunctionName": "cos", + "Parameters": [ + { + "TypeValue": "res", + "TypeType": "" + } + ], + "Position": { + "StartLine": 19, + "StartLinePosition": 43, + "StopLine": 19, + "StopLinePosition": 50 + } + }, + { + "Package": "java.util.concurrent", + "Type": "CHAIN", + "NodeName": "CompletableFuture", + "FunctionName": "completedStage", + "Parameters": [ + { + "TypeValue": "null", + "TypeType": "" + } + ], + "Position": { + "StartLine": 22, + "StartLinePosition": 37, + "StopLine": 22, + "StopLinePosition": 56 + } + } + ], + "Position": { + "StartLine": 14, + "StartLinePosition": 11, + "StopLine": 24, + "StopLinePosition": 4 + }, + "LocalVariables": [ + { + "TypeValue": "samplePojo", + "TypeType": "SamplePojo" + }, + { + "TypeValue": "res", + "TypeType": "double" + }, + { + "TypeValue": "i", + "TypeType": "int" + } + ], + "BodyHash": 258195030, + "Content": "CompletionStage processMessage(SamplePojo samplePojo) { double res = 0; for (int i = 0; i < 1000; i++) { res += Math.sin(StrictMath.cos(res)) * 2; } samplePojo.waveFormData= samplePojo.waveFormData + res; return CompletableFuture.completedStage(null); " + } + ], + "Annotations": [ + { + "Name": "ApplicationScoped", + "Position": { + "StartLine": 10, + "StopLine": 10, + "StopLinePosition": 1 + } + } + ], + "Imports": [ + { + "Source": 
"java.util.concurrent.CompletableFuture" + }, + { + "Source": "java.util.concurrent.CompletionStage" + }, + { + "Source": "javax.enterprise.context.ApplicationScoped" + }, + { + "Source": "org.acme.model.SamplePojo" + } + ], + "Position": { + "StartLine": 11, + "StartLinePosition": 7, + "StopLine": 26 + }, + "Content": "class ReactiveServiceForMathOperation { public CompletionStage processMessage(SamplePojo samplePojo) { double res = 0; for (int i = 0; i < 1000; i++) { res += Math.sin(StrictMath.cos(res)) * 2; } samplePojo.waveFormData= samplePojo.waveFormData + res; return CompletableFuture.completedStage(null); }" + }, + { + "NodeName": "ReactiveKafkaProducerService", + "Module": "root", + "Type": "CLASS", + "Package": "org.acme.impl", + "FilePath": "main/java/org/acme/impl/ReactiveKafkaProducerService.java", + "Fields": [ + { + "TypeType": "Emitter", + "TypeKey": "emitter", + "Annotations": [ + { + "Name": "Inject", + "Position": { + "StartLine": 25, + "StartLinePosition": 4, + "StopLine": 25, + "StopLinePosition": 5 + } + }, + { + "Name": "Channel", + "KeyValues": [ + { + "Key": "\"kafka_topic\"", + "Value": "\"kafka_topic\"" + } + ], + "Position": { + "StartLine": 26, + "StartLinePosition": 4, + "StopLine": 26, + "StopLinePosition": 26 + } + } + ] + }, + { + "TypeType": "Logger", + "TypeValue": "Logger.getLogger(ReactiveKafkaProducerService.class)", + "TypeKey": "LOG" + }, + { + "TypeType": "SpartansKey", + "TypeKey": "spartansKey" + }, + { + "TypeType": "SpartansValue", + "TypeKey": "spartansValue" + } + ], + "Implements": [ + "org.acme.infc.ReactiveKafkaProducerServiceInterface" + ], + "Functions": [ + { + "Name": "postMessageToKafka", + "ReturnType": "CompletionStage", + "Parameters": [ + { + "TypeValue": "message", + "TypeType": "SamplePojo" + } + ], + "FunctionCalls": [ + { + "Package": "org.acme.impl", + "NodeName": "Math", + "FunctionName": "sin", + "Parameters": [ + { + "TypeValue": "StrictMath.cos(res)", + "TypeType": "" + } + ], + "Position": { + "StartLine": 41, + "StartLinePosition": 28, + "StopLine": 41, + "StopLinePosition": 51 + } + }, + { + "Package": "org.acme.impl", + "NodeName": "StrictMath", + "FunctionName": "cos", + "Parameters": [ + { + "TypeValue": "res", + "TypeType": "" + } + ], + "Position": { + "StartLine": 41, + "StartLinePosition": 43, + "StopLine": 41, + "StopLinePosition": 50 + } + }, + { + "Package": "com.spartans.schema", + "Type": "CHAIN", + "NodeName": "SpartansKey", + "FunctionName": "newBuilder", + "Position": { + "StartLine": 45, + "StartLinePosition": 34, + "StopLine": 45, + "StopLinePosition": 45 + } + }, + { + "Package": "com.spartans.schema", + "Type": "CHAIN", + "NodeName": "SpartansKey", + "FunctionName": "setPatientMrnIdentifier", + "Parameters": [ + { + "TypeValue": "\"pid123\"", + "TypeType": "" + } + ], + "Position": { + "StartLine": 45, + "StartLinePosition": 47, + "StopLine": 45, + "StopLinePosition": 79 + } + }, + { + "Package": "com.spartans.schema", + "Type": "CHAIN", + "NodeName": "SpartansKey", + "FunctionName": "build", + "Position": { + "StartLine": 46, + "StartLinePosition": 9, + "StopLine": 46, + "StopLinePosition": 15 + } + }, + { + "Package": "com.spartans.schema", + "Type": "CHAIN", + "NodeName": "SpartansValue", + "FunctionName": "newBuilder", + "Position": { + "StartLine": 48, + "StartLinePosition": 38, + "StopLine": 48, + "StopLinePosition": 49 + } + }, + { + "Package": "com.spartans.schema", + "Type": "CHAIN", + "NodeName": "SpartansValue", + "FunctionName": "setMedData", + "Parameters": [ + { + "TypeValue": 
"ByteBuffer.wrap(\"sample_waveform_data.\".getBytes())", + "TypeType": "" + } + ], + "Position": { + "StartLine": 49, + "StartLinePosition": 25, + "StopLine": 49, + "StopLinePosition": 87 + } + }, + { + "Package": "java.nio", + "Type": "CHAIN", + "NodeName": "ByteBuffer", + "FunctionName": "wrap", + "Parameters": [ + { + "TypeValue": "\"sample_waveform_data.\".getBytes()", + "TypeType": "" + } + ], + "Position": { + "StartLine": 49, + "StartLinePosition": 47, + "StopLine": 49, + "StopLinePosition": 86 + } + }, + { + "Package": "org.acme.impl", + "NodeName": "\"sample_waveform_data.\"", + "FunctionName": "getBytes", + "Position": { + "StartLine": 49, + "StartLinePosition": 76, + "StopLine": 49, + "StopLinePosition": 85 + } + }, + { + "Package": "com.spartans.schema", + "Type": "CHAIN", + "NodeName": "SpartansValue", + "FunctionName": "build", + "Position": { + "StartLine": 50, + "StartLinePosition": 25, + "StopLine": 50, + "StopLinePosition": 31 + } + }, + { + "Package": "org.eclipse.microprofile.reactive.messaging", + "Type": "CHAIN", + "NodeName": "Emitter", + "FunctionName": "send", + "Parameters": [ + { + "TypeValue": "prepareDataForMessageBroker(spartansKey,spartansValue).withAck(()->{returnCompletableFuture.completedStage(null);}).withNack((t)->{returnCompletableFuture.failedStage(t);})", + "TypeType": "" + } + ], + "Position": { + "StartLine": 53, + "StartLinePosition": 17, + "StopLine": 61, + "StopLinePosition": 11 + } + }, + { + "Package": "org.acme.impl", + "NodeName": "ReactiveKafkaProducerService", + "FunctionName": "prepareDataForMessageBroker", + "Parameters": [ + { + "TypeValue": "spartansKey", + "TypeType": "" + }, + { + "TypeValue": "spartansValue", + "TypeType": "" + } + ], + "Position": { + "StartLine": 53, + "StartLinePosition": 22, + "StopLine": 53, + "StopLinePosition": 75 + } + }, + { + "Package": "org.acme.impl", + "NodeName": "prepareDataForMessageBroker", + "FunctionName": "withAck", + "Parameters": [ + { + "TypeValue": "()->{returnCompletableFuture.completedStage(null);}", + "TypeType": "" + } + ], + "Position": { + "StartLine": 54, + "StartLinePosition": 10, + "StopLine": 57, + "StopLinePosition": 10 + } + }, + { + "Package": "java.util.concurrent", + "Type": "CHAIN", + "NodeName": "CompletableFuture", + "FunctionName": "completedStage", + "Parameters": [ + { + "TypeValue": "null", + "TypeType": "" + } + ], + "Position": { + "StartLine": 56, + "StartLinePosition": 37, + "StopLine": 56, + "StopLinePosition": 56 + } + }, + { + "Package": "org.acme.impl", + "NodeName": "prepareDataForMessageBroker", + "FunctionName": "withNack", + "Parameters": [ + { + "TypeValue": "(t)->{returnCompletableFuture.failedStage(t);}", + "TypeType": "" + } + ], + "Position": { + "StartLine": 58, + "StartLinePosition": 10, + "StopLine": 61, + "StopLinePosition": 10 + } + }, + { + "Package": "java.util.concurrent", + "Type": "CHAIN", + "NodeName": "CompletableFuture", + "FunctionName": "failedStage", + "Parameters": [ + { + "TypeValue": "t", + "TypeType": "" + } + ], + "Position": { + "StartLine": 60, + "StartLinePosition": 37, + "StopLine": 60, + "StopLinePosition": 50 + } + }, + { + "Package": "java.util.concurrent", + "Type": "CHAIN", + "NodeName": "CompletableFuture", + "FunctionName": "completedStage", + "Parameters": [ + { + "TypeValue": "null", + "TypeType": "" + } + ], + "Position": { + "StartLine": 62, + "StartLinePosition": 35, + "StopLine": 62, + "StopLinePosition": 54 + } + } + ], + "Annotations": [ + { + "Name": "Override", + "Position": { + "StartLine": 35, + 
"StartLinePosition": 4, + "StopLine": 35, + "StopLinePosition": 5 + } + } + ], + "Position": { + "StartLine": 36, + "StartLinePosition": 11, + "StopLine": 64, + "StopLinePosition": 4 + }, + "LocalVariables": [ + { + "TypeValue": "message", + "TypeType": "SamplePojo" + }, + { + "TypeValue": "res", + "TypeType": "double" + }, + { + "TypeValue": "i", + "TypeType": "int" + } + ], + "BodyHash": -760449945, + "Content": "CompletionStage postMessageToKafka(SamplePojo message) { //doing random math operation to induce delay double res = 0; for (int i = 0; i < 1000; i++) { res += Math.sin(StrictMath.cos(res)) * 2; } spartansKey = SpartansKey.newBuilder().setPatientMrnIdentifier(\"pid123\") .build(); spartansValue = SpartansValue.newBuilder() .setMedData(ByteBuffer.wrap(\"sample_waveform_data.\".getBytes())) .build(); emitter.send(prepareDataForMessageBroker(spartansKey,spartansValue) .withAck(() -> { return CompletableFuture.completedStage(null); }) .withNack((t) -> { return CompletableFuture.failedStage(t); })); return CompletableFuture.completedStage(null); " + }, + { + "Name": "prepareDataForMessageBroker", + "ReturnType": "Message", + "Parameters": [ + { + "TypeValue": "spartansKey", + "TypeType": "SpartansKey" + }, + { + "TypeValue": "spartansValue", + "TypeType": "SpartansValue" + } + ], + "FunctionCalls": [ + { + "Package": "org.eclipse.microprofile.reactive.messaging", + "Type": "CHAIN", + "NodeName": "Message", + "FunctionName": "of", + "Parameters": [ + { + "TypeValue": "spartansValue", + "TypeType": "" + } + ], + "Position": { + "StartLine": 70, + "StartLinePosition": 49, + "StopLine": 70, + "StopLinePosition": 65 + } + }, + { + "Package": "org.eclipse.microprofile.reactive.messaging", + "Type": "CHAIN", + "NodeName": "Message", + "FunctionName": "addMetadata", + "Parameters": [ + { + "TypeValue": "OutgoingKafkaRecordMetadata.builder().withKey(spartansKey).build()", + "TypeType": "" + } + ], + "Position": { + "StartLine": 70, + "StartLinePosition": 67, + "StopLine": 75, + "StopLinePosition": 8 + } + }, + { + "Package": "io.smallrye.reactive.messaging.kafka.api", + "Type": "CHAIN", + "NodeName": "OutgoingKafkaRecordMetadata", + "FunctionName": "withKey", + "Parameters": [ + { + "TypeValue": "spartansKey", + "TypeType": "" + } + ], + "Position": { + "StartLine": 72, + "StartLinePosition": 13, + "StopLine": 72, + "StopLinePosition": 32 + } + }, + { + "Package": "io.smallrye.reactive.messaging.kafka.api", + "Type": "CHAIN", + "NodeName": "OutgoingKafkaRecordMetadata", + "FunctionName": "build", + "Position": { + "StartLine": 73, + "StartLinePosition": 13, + "StopLine": 73, + "StopLinePosition": 19 + } + } + ], + "Position": { + "StartLine": 68, + "StartLinePosition": 14, + "StopLine": 79, + "StopLinePosition": 4 + }, + "LocalVariables": [ + { + "TypeValue": "message", + "TypeType": "Message" + }, + { + "TypeValue": "res", + "TypeType": "double" + }, + { + "TypeValue": "i", + "TypeType": "int" + }, + { + "TypeValue": "spartansKey", + "TypeType": "SpartansKey" + }, + { + "TypeValue": "spartansValue", + "TypeType": "SpartansValue" + } + ], + "BodyHash": -1943320213, + "Content": "Message prepareDataForMessageBroker(SpartansKey spartansKey ,SpartansValue spartansValue) { Message message = Message.of(spartansValue).addMetadata( OutgoingKafkaRecordMetadata.builder() .withKey(spartansKey) .build() ); return message; " + } + ], + "Annotations": [ + { + "Name": "ApplicationScoped", + "Position": { + "StartLine": 22, + "StopLine": 22, + "StopLinePosition": 1 + } + } + ], + "FunctionCalls": [ + { + 
"Package": "org.eclipse.microprofile.reactive.messaging", + "Type": "FIELD", + "NodeName": "Emitter", + "Position": { + "StartLine": 27, + "StartLinePosition": 4, + "StopLine": 27, + "StopLinePosition": 34 + } + }, + { + "Package": "org.jboss.logging", + "Type": "FIELD", + "NodeName": "Logger", + "Position": { + "StartLine": 30, + "StartLinePosition": 25, + "StopLine": 30, + "StopLinePosition": 90 + } + }, + { + "Package": "com.spartans.schema", + "Type": "FIELD", + "NodeName": "SpartansKey", + "Position": { + "StartLine": 32, + "StartLinePosition": 4, + "StopLine": 32, + "StopLinePosition": 27 + } + }, + { + "Package": "com.spartans.schema", + "Type": "FIELD", + "NodeName": "SpartansValue", + "Position": { + "StartLine": 33, + "StartLinePosition": 4, + "StopLine": 33, + "StopLinePosition": 31 + } + } + ], + "Imports": [ + { + "Source": "java.nio.ByteBuffer" + }, + { + "Source": "java.util.concurrent.CompletableFuture" + }, + { + "Source": "java.util.concurrent.CompletionStage" + }, + { + "Source": "javax.enterprise.context.ApplicationScoped" + }, + { + "Source": "javax.inject.Inject" + }, + { + "Source": "org.acme.infc.ReactiveKafkaProducerServiceInterface" + }, + { + "Source": "org.acme.model.SamplePojo" + }, + { + "Source": "org.eclipse.microprofile.reactive.messaging.Channel" + }, + { + "Source": "org.eclipse.microprofile.reactive.messaging.Emitter" + }, + { + "Source": "org.eclipse.microprofile.reactive.messaging.Message" + }, + { + "Source": "org.jboss.logging.Logger" + }, + { + "Source": "com.spartans.schema.SpartansKey" + }, + { + "Source": "com.spartans.schema.SpartansValue" + }, + { + "Source": "io.smallrye.reactive.messaging.kafka.api.OutgoingKafkaRecordMetadata" + }, + { + "Source": "io.vertx.mutiny.core.buffer.Buffer" + } + ], + "Position": { + "StartLine": 23, + "StartLinePosition": 7, + "StopLine": 81 + }, + "Content": "class ReactiveKafkaProducerService implements ReactiveKafkaProducerServiceInterface { @Inject @Channel(\"kafka_topic\") Emitter emitter; private static final Logger LOG = Logger.getLogger(ReactiveKafkaProducerService.class); SpartansKey spartansKey; SpartansValue spartansValue; @Override public CompletionStage postMessageToKafka(SamplePojo message) { //doing random math operation to induce delay double res = 0; for (int i = 0; i < 1000; i++) { res += Math.sin(StrictMath.cos(res)) * 2; } spartansKey = SpartansKey.newBuilder().setPatientMrnIdentifier(\"pid123\") .build(); spartansValue = SpartansValue.newBuilder() .setMedData(ByteBuffer.wrap(\"sample_waveform_data.\".getBytes())) .build(); emitter.send(prepareDataForMessageBroker(spartansKey,spartansValue) .withAck(() -> { return CompletableFuture.completedStage(null); }) .withNack((t) -> { return CompletableFuture.failedStage(t); })); return CompletableFuture.completedStage(null); } protected Message prepareDataForMessageBroker(SpartansKey spartansKey ,SpartansValue spartansValue) { Message message = Message.of(spartansValue).addMetadata( OutgoingKafkaRecordMetadata.builder() .withKey(spartansKey) .build() ); return message; }" + }, + { + "NodeName": "ReactiveKafkaProducerServiceInterface", + "Module": "root", + "Type": "INTERFACE", + "Package": "org.acme.infc", + "FilePath": "main/java/org/acme/infc/ReactiveKafkaProducerServiceInterface.java", + "Functions": [ + { + "Name": "postMessageToKafka", + "ReturnType": "CompletionStage", + "Position": { + "StartLine": 9, + "StartLinePosition": 11, + "StopLine": 9, + "StopLinePosition": 71 + }, + "Content": "CompletionStage postMessageToKafka(SamplePojo message);" + } 
+ ], + "Imports": [ + { + "Source": "java.util.concurrent.CompletionStage" + }, + { + "Source": "org.acme.model.SamplePojo" + } + ], + "Content": "package org.acme.infc;" + }, + { + "NodeName": "GreetingResource", + "Module": "root", + "Type": "CLASS", + "Package": "org.acme", + "FilePath": "main/java/org/acme/GreetingResource.java", + "Fields": [ + { + "TypeType": "String", + "TypeKey": "keyStorePassword", + "Annotations": [ + { + "Name": "ConfigProperty", + "KeyValues": [ + { + "Key": "name", + "Value": "\"REDPANDA_SSL_KEYSTORE_PASSWORD\"" + } + ], + "Position": { + "StartLine": 25, + "StartLinePosition": 4, + "StopLine": 25, + "StopLinePosition": 59 + } + } + ] + }, + { + "TypeType": "ReactiveKafkaProducerServiceInterface", + "TypeKey": "reactiveKafkaProducerServiceInterface", + "Annotations": [ + { + "Name": "Inject", + "Position": { + "StartLine": 28, + "StartLinePosition": 4, + "StopLine": 28, + "StopLinePosition": 5 + } + } + ] + } + ], + "Functions": [ + { + "Name": "init", + "ReturnType": "void", + "FunctionCalls": [ + { + "Package": "org.acme", + "NodeName": "System.out", + "FunctionName": "println", + "Parameters": [ + { + "TypeValue": "\"Print password:\"+keyStorePassword", + "TypeType": "" + } + ], + "Position": { + "StartLine": 34, + "StartLinePosition": 19, + "StopLine": 34, + "StopLinePosition": 61 + } + } + ], + "Annotations": [ + { + "Name": "PostConstruct", + "Position": { + "StartLine": 31, + "StartLinePosition": 4, + "StopLine": 31, + "StopLinePosition": 5 + } + } + ], + "Position": { + "StartLine": 32, + "StartLinePosition": 11, + "StopLine": 35, + "StopLinePosition": 4 + }, + "BodyHash": 1542813694, + "Content": "void init() { System.out.println(\"Print password:\"+keyStorePassword); " + }, + { + "Name": "handlePostRequest", + "ReturnType": "Uni", + "Parameters": [ + { + "TypeValue": "message", + "TypeType": "SamplePojo" + } + ], + "FunctionCalls": [ + { + "Package": "io.smallrye.mutiny", + "Type": "CHAIN", + "NodeName": "Uni", + "FunctionName": "createFrom", + "Position": { + "StartLine": 44, + "StartLinePosition": 18, + "StopLine": 44, + "StopLinePosition": 29 + } + }, + { + "Package": "io.smallrye.mutiny", + "Type": "CHAIN", + "NodeName": "Uni", + "FunctionName": "completionStage", + "Parameters": [ + { + "TypeValue": "reactiveKafkaProducerServiceInterface.postMessageToKafka(message)", + "TypeType": "" + } + ], + "Position": { + "StartLine": 44, + "StartLinePosition": 31, + "StopLine": 44, + "StopLinePosition": 112 + } + }, + { + "Package": "org.acme.infc", + "Type": "CHAIN", + "NodeName": "ReactiveKafkaProducerServiceInterface", + "FunctionName": "postMessageToKafka", + "Parameters": [ + { + "TypeValue": "message", + "TypeType": "" + } + ], + "Position": { + "StartLine": 44, + "StartLinePosition": 85, + "StopLine": 44, + "StopLinePosition": 111 + } + }, + { + "Package": "io.smallrye.mutiny", + "Type": "CHAIN", + "NodeName": "Uni", + "FunctionName": "map", + "Parameters": [ + { + "TypeValue": "element->Response.accepted().build()", + "TypeType": "" + } + ], + "Position": { + "StartLine": 45, + "StartLinePosition": 16, + "StopLine": 45, + "StopLinePosition": 58 + } + }, + { + "Package": "javax.ws.rs.core", + "Type": "CHAIN", + "NodeName": "Response", + "FunctionName": "accepted", + "Position": { + "StartLine": 45, + "StartLinePosition": 40, + "StopLine": 45, + "StopLinePosition": 49 + } + }, + { + "Package": "javax.ws.rs.core", + "Type": "CHAIN", + "NodeName": "Response", + "FunctionName": "build", + "Position": { + "StartLine": 45, + "StartLinePosition": 51, + 
"StopLine": 45, + "StopLinePosition": 57 + } + }, + { + "Package": "io.smallrye.mutiny", + "Type": "CHAIN", + "NodeName": "Uni", + "FunctionName": "onFailure", + "Position": { + "StartLine": 46, + "StartLinePosition": 17, + "StopLine": 46, + "StopLinePosition": 27 + } + }, + { + "Package": "io.smallrye.mutiny", + "Type": "CHAIN", + "NodeName": "Uni", + "FunctionName": "recoverWithItem", + "Parameters": [ + { + "TypeValue": "Response.serverError().build()", + "TypeType": "" + } + ], + "Position": { + "StartLine": 46, + "StartLinePosition": 29, + "StopLine": 46, + "StopLinePosition": 75 + } + }, + { + "Package": "javax.ws.rs.core", + "Type": "CHAIN", + "NodeName": "Response", + "FunctionName": "serverError", + "Position": { + "StartLine": 46, + "StartLinePosition": 54, + "StopLine": 46, + "StopLinePosition": 66 + } + }, + { + "Package": "javax.ws.rs.core", + "Type": "CHAIN", + "NodeName": "Response", + "FunctionName": "build", + "Position": { + "StartLine": 46, + "StartLinePosition": 68, + "StopLine": 46, + "StopLinePosition": 74 + } + } + ], + "Annotations": [ + { + "Name": "Timeout", + "KeyValues": [ + { + "Key": "value", + "Value": "4000" + } + ], + "Position": { + "StartLine": 38, + "StartLinePosition": 4, + "StopLine": 38, + "StopLinePosition": 25 + } + }, + { + "Name": "NonBlocking", + "Position": { + "StartLine": 39, + "StartLinePosition": 4, + "StopLine": 39, + "StopLinePosition": 5 + } + }, + { + "Name": "Consumes", + "KeyValues": [ + { + "Key": "MediaType.APPLICATION_JSON", + "Value": "MediaType.APPLICATION_JSON" + } + ], + "Position": { + "StartLine": 40, + "StartLinePosition": 4, + "StopLine": 40, + "StopLinePosition": 40 + } + }, + { + "Name": "POST", + "Position": { + "StartLine": 41, + "StartLinePosition": 4, + "StopLine": 41, + "StopLinePosition": 5 + } + } + ], + "Position": { + "StartLine": 42, + "StartLinePosition": 11, + "StopLine": 48, + "StopLinePosition": 4 + }, + "LocalVariables": [ + { + "TypeValue": "message", + "TypeType": "SamplePojo" + } + ], + "BodyHash": -1046272850, + "Content": "Uni handlePostRequest(SamplePojo message) { return Uni.createFrom().completionStage(reactiveKafkaProducerServiceInterface.postMessageToKafka(message)) .map(element -> Response.accepted().build()) .onFailure().recoverWithItem(Response.serverError().build()); " + } + ], + "Annotations": [ + { + "Name": "Path", + "KeyValues": [ + { + "Key": "\"/hello\"", + "Value": "\"/hello\"" + } + ], + "Position": { + "StartLine": 22, + "StopLine": 22, + "StopLinePosition": 14 + } + } + ], + "FunctionCalls": [ + { + "Package": "org.acme.infc", + "Type": "FIELD", + "NodeName": "ReactiveKafkaProducerServiceInterface", + "Position": { + "StartLine": 29, + "StartLinePosition": 4, + "StopLine": 29, + "StopLinePosition": 79 + } + } + ], + "Imports": [ + { + "Source": "javax.annotation.PostConstruct" + }, + { + "Source": "javax.inject.Inject" + }, + { + "Source": "javax.ws.rs.Consumes" + }, + { + "Source": "javax.ws.rs.GET" + }, + { + "Source": "javax.ws.rs.POST" + }, + { + "Source": "javax.ws.rs.Path" + }, + { + "Source": "javax.ws.rs.Produces" + }, + { + "Source": "javax.ws.rs.core.MediaType" + }, + { + "Source": "org.acme.infc.ReactiveKafkaProducerServiceInterface" + }, + { + "Source": "org.acme.model.SamplePojo" + }, + { + "Source": "org.eclipse.microprofile.config.inject.ConfigProperty" + }, + { + "Source": "org.eclipse.microprofile.faulttolerance.Timeout" + }, + { + "Source": "javax.ws.rs.core.Response" + }, + { + "Source": "io.smallrye.common.annotation.NonBlocking" + }, + { + "Source": 
"io.smallrye.mutiny.Uni" + } + ], + "Position": { + "StartLine": 23, + "StartLinePosition": 7, + "StopLine": 49 + }, + "Content": "class GreetingResource { @ConfigProperty(name = \"REDPANDA_SSL_KEYSTORE_PASSWORD\") private String keyStorePassword; @Inject ReactiveKafkaProducerServiceInterface reactiveKafkaProducerServiceInterface; @PostConstruct public void init() { System.out.println(\"Print password:\"+keyStorePassword); } @Timeout(value = 4000) @NonBlocking @Consumes(MediaType.APPLICATION_JSON) @POST public Uni handlePostRequest(SamplePojo message) { return Uni.createFrom().completionStage(reactiveKafkaProducerServiceInterface.postMessageToKafka(message)) .map(element -> Response.accepted().build()) .onFailure().recoverWithItem(Response.serverError().build()); }" + }, + { + "NodeName": "SamplePojo", + "Module": "root", + "Type": "CLASS", + "Package": "org.acme.model", + "FilePath": "main/java/org/acme/model/SamplePojo.java", + "Fields": [ + { + "TypeType": "String", + "TypeKey": "patientId" + }, + { + "TypeType": "String", + "TypeKey": "waveFormData" + } + ], + "Position": { + "StartLine": 3, + "StartLinePosition": 7, + "StopLine": 10 + }, + "Content": "class SamplePojo { public String patientId; public String waveFormData;" + }, + { + "NodeName": "SpartansValue", + "Module": "root", + "Type": "CLASS", + "Package": "com.spartans.schema", + "FilePath": "main/java/com/spartans/schema/SpartansValue.java", + "Fields": [ + { + "TypeType": "org", + "TypeValue": "neworg.apache.avro.Schema.Parser().parse(\"{\\\"type\\\":\\\"record\\\",\\\"name\\\":\\\"SpartansValue\\\",\\\"namespace\\\":\\\"com.spartans.schema\\\",\\\"doc\\\":\\\"This record contins metadata about the payload\\\",\\\"fields\\\":[{\\\"name\\\":\\\"medData\\\",\\\"type\\\":[\\\"null\\\",\\\"bytes\\\"],\\\"doc\\\":\\\"This field indicates the payload in its byte form\\\",\\\"default\\\":null}]}\")", + "TypeKey": "SCHEMA$" + }, + { + "TypeType": "SpecificData", + "TypeValue": "newSpecificData()", + "TypeKey": "MODEL$" + }, + { + "TypeType": "BinaryMessageEncoder", + "TypeValue": "newBinaryMessageEncoder(MODEL$,SCHEMA$)", + "TypeKey": "ENCODER" + }, + { + "TypeType": "BinaryMessageDecoder", + "TypeValue": "newBinaryMessageDecoder(MODEL$,SCHEMA$)", + "TypeKey": "DECODER" + }, + { + "TypeType": "java", + "TypeKey": "medData" + }, + { + "TypeType": "java", + "TypeKey": "medData", + "Annotations": [ + { + "Name": "org.apache.avro.specific.AvroGenerated", + "Position": { + "StartLine": 167, + "StartLinePosition": 2, + "StopLine": 167, + "StopLinePosition": 28 + } + } + ] + }, + { + "TypeType": "org", + "TypeValue": "(org.apache.avro.io.DatumWriter)MODEL$.createDatumWriter(SCHEMA$)", + "TypeKey": "WRITER$", + "Annotations": [ + { + "Name": "SuppressWarnings", + "KeyValues": [ + { + "Key": "\"unchecked\"", + "Value": "\"unchecked\"" + } + ], + "Position": { + "StartLine": 262, + "StartLinePosition": 2, + "StopLine": 262, + "StopLinePosition": 31 + } + } + ] + }, + { + "TypeType": "org", + "TypeValue": "(org.apache.avro.io.DatumReader)MODEL$.createDatumReader(SCHEMA$)", + "TypeKey": "READER$", + "Annotations": [ + { + "Name": "SuppressWarnings", + "KeyValues": [ + { + "Key": "\"unchecked\"", + "Value": "\"unchecked\"" + } + ], + "Position": { + "StartLine": 271, + "StartLinePosition": 2, + "StopLine": 271, + "StopLinePosition": 31 + } + } + ] + } + ], + "Implements": [ + "org.apache.avro.specific.SpecificRecord" + ], + "Extend": "org.apache.avro.specific.SpecificRecordBase", + "Functions": [ + { + "Name": "writeExternal", + "ReturnType": 
"void", + "Parameters": [ + { + "TypeValue": "out", + "TypeType": "java.io.ObjectOutput" + } + ], + "FunctionCalls": [ + { + "Package": "com.spartans.schema", + "NodeName": "org", + "FunctionName": "write", + "Parameters": [ + { + "TypeValue": "this", + "TypeType": "" + }, + { + "TypeValue": "SpecificData.getEncoder(out)", + "TypeType": "" + } + ], + "Position": { + "StartLine": 268, + "StartLinePosition": 12, + "StopLine": 268, + "StopLinePosition": 52 + } + }, + { + "Package": "org.apache.avro.specific", + "Type": "CHAIN", + "NodeName": "SpecificData", + "FunctionName": "getEncoder", + "Parameters": [ + { + "TypeValue": "out", + "TypeType": "" + } + ], + "Position": { + "StartLine": 268, + "StartLinePosition": 37, + "StopLine": 268, + "StopLinePosition": 51 + } + }, + { + "Package": "org.apache.avro.specific", + "Type": "CHAIN", + "NodeName": "SpecificData", + "FunctionName": "createDatumReader", + "Parameters": [ + { + "TypeValue": "SCHEMA$", + "TypeType": "" + } + ], + "Position": { + "StartLine": 273, + "StartLinePosition": 68, + "StopLine": 273, + "StopLinePosition": 93 + } + } + ], + "Annotations": [ + { + "Name": "Override", + "Position": { + "StartLine": 266, + "StartLinePosition": 2, + "StopLine": 266, + "StopLinePosition": 3 + } + } + ], + "Position": { + "StartLine": 266, + "StartLinePosition": 19, + "StopLine": 269, + "StopLinePosition": 2 + }, + "LocalVariables": [ + { + "TypeValue": "neworg.apache.avro.Schema.Parser()", + "TypeType": "org" + }, + { + "TypeValue": "newSpecificData()", + "TypeType": "SpecificData" + }, + { + "TypeValue": "newBinaryMessageEncoder(MODEL$,SCHEMA$)", + "TypeType": "BinaryMessageEncoder" + }, + { + "TypeValue": "newBinaryMessageDecoder(MODEL$,SCHEMA$)", + "TypeType": "BinaryMessageDecoder" + }, + { + "TypeValue": "resolver", + "TypeType": "SchemaStore" + }, + { + "TypeValue": "return", + "TypeType": "com" + }, + { + "TypeValue": "b", + "TypeType": "java.nio.ByteBuffer" + }, + { + "TypeValue": "medData", + "TypeType": "java.nio.ByteBuffer" + }, + { + "TypeValue": "field$", + "TypeType": "int" + }, + { + "TypeValue": "throw", + "TypeType": "org" + }, + { + "TypeValue": "value$", + "TypeType": "java.lang.Object" + }, + { + "TypeValue": "value", + "TypeType": "java.nio.ByteBuffer" + }, + { + "TypeValue": "other", + "TypeType": "com.spartans.schema.SpartansValue" + }, + { + "TypeValue": "record", + "TypeType": "SpartansValue" + }, + { + "TypeValue": "newSpartansValue()", + "TypeType": "SpartansValue" + }, + { + "TypeValue": "out", + "TypeType": "java.io.ObjectOutput" + } + ], + "BodyHash": 1821324851, + "Content": "void writeExternal(java.io.ObjectOutput out) throws java.io.IOException { WRITER$.write(this, SpecificData.getEncoder(out)); " + }, + { + "Name": "readExternal", + "ReturnType": "void", + "Parameters": [ + { + "TypeValue": "in", + "TypeType": "java.io.ObjectInput" + } + ], + "FunctionCalls": [ + { + "Package": "com.spartans.schema", + "NodeName": "org", + "FunctionName": "read", + "Parameters": [ + { + "TypeValue": "this", + "TypeType": "" + }, + { + "TypeValue": "SpecificData.getDecoder(in)", + "TypeType": "" + } + ], + "Position": { + "StartLine": 277, + "StartLinePosition": 12, + "StopLine": 277, + "StopLinePosition": 50 + } + }, + { + "Package": "org.apache.avro.specific", + "Type": "CHAIN", + "NodeName": "SpecificData", + "FunctionName": "getDecoder", + "Parameters": [ + { + "TypeValue": "in", + "TypeType": "" + } + ], + "Position": { + "StartLine": 277, + "StartLinePosition": 36, + "StopLine": 277, + "StopLinePosition": 49 + } + } + ], 
+ "Annotations": [ + { + "Name": "Override", + "Position": { + "StartLine": 275, + "StartLinePosition": 2, + "StopLine": 275, + "StopLinePosition": 3 + } + } + ], + "Position": { + "StartLine": 275, + "StartLinePosition": 19, + "StopLine": 278, + "StopLinePosition": 2 + }, + "LocalVariables": [ + { + "TypeValue": "neworg.apache.avro.Schema.Parser()", + "TypeType": "org" + }, + { + "TypeValue": "newSpecificData()", + "TypeType": "SpecificData" + }, + { + "TypeValue": "newBinaryMessageEncoder(MODEL$,SCHEMA$)", + "TypeType": "BinaryMessageEncoder" + }, + { + "TypeValue": "newBinaryMessageDecoder(MODEL$,SCHEMA$)", + "TypeType": "BinaryMessageDecoder" + }, + { + "TypeValue": "resolver", + "TypeType": "SchemaStore" + }, + { + "TypeValue": "return", + "TypeType": "com" + }, + { + "TypeValue": "b", + "TypeType": "java.nio.ByteBuffer" + }, + { + "TypeValue": "medData", + "TypeType": "java.nio.ByteBuffer" + }, + { + "TypeValue": "field$", + "TypeType": "int" + }, + { + "TypeValue": "throw", + "TypeType": "org" + }, + { + "TypeValue": "value$", + "TypeType": "java.lang.Object" + }, + { + "TypeValue": "value", + "TypeType": "java.nio.ByteBuffer" + }, + { + "TypeValue": "other", + "TypeType": "com.spartans.schema.SpartansValue" + }, + { + "TypeValue": "record", + "TypeType": "SpartansValue" + }, + { + "TypeValue": "newSpartansValue()", + "TypeType": "SpartansValue" + }, + { + "TypeValue": "out", + "TypeType": "java.io.ObjectOutput" + }, + { + "TypeValue": "in", + "TypeType": "java.io.ObjectInput" + } + ], + "BodyHash": -1459730253, + "Content": "void readExternal(java.io.ObjectInput in) throws java.io.IOException { READER$.read(this, SpecificData.getDecoder(in)); " + }, + { + "Name": "hasCustomCoders", + "ReturnType": "boolean", + "Annotations": [ + { + "Name": "Override", + "Position": { + "StartLine": 280, + "StartLinePosition": 2, + "StopLine": 280, + "StopLinePosition": 3 + } + } + ], + "Position": { + "StartLine": 280, + "StartLinePosition": 22, + "StopLine": 280, + "StopLinePosition": 63 + }, + "LocalVariables": [ + { + "TypeValue": "neworg.apache.avro.Schema.Parser()", + "TypeType": "org" + }, + { + "TypeValue": "newSpecificData()", + "TypeType": "SpecificData" + }, + { + "TypeValue": "newBinaryMessageEncoder(MODEL$,SCHEMA$)", + "TypeType": "BinaryMessageEncoder" + }, + { + "TypeValue": "newBinaryMessageDecoder(MODEL$,SCHEMA$)", + "TypeType": "BinaryMessageDecoder" + }, + { + "TypeValue": "resolver", + "TypeType": "SchemaStore" + }, + { + "TypeValue": "return", + "TypeType": "com" + }, + { + "TypeValue": "b", + "TypeType": "java.nio.ByteBuffer" + }, + { + "TypeValue": "medData", + "TypeType": "java.nio.ByteBuffer" + }, + { + "TypeValue": "field$", + "TypeType": "int" + }, + { + "TypeValue": "throw", + "TypeType": "org" + }, + { + "TypeValue": "value$", + "TypeType": "java.lang.Object" + }, + { + "TypeValue": "value", + "TypeType": "java.nio.ByteBuffer" + }, + { + "TypeValue": "other", + "TypeType": "com.spartans.schema.SpartansValue" + }, + { + "TypeValue": "record", + "TypeType": "SpartansValue" + }, + { + "TypeValue": "newSpartansValue()", + "TypeType": "SpartansValue" + }, + { + "TypeValue": "out", + "TypeType": "java.io.ObjectOutput" + }, + { + "TypeValue": "in", + "TypeType": "java.io.ObjectInput" + } + ], + "BodyHash": 41618139, + "Content": "boolean hasCustomCoders() { return true; }" + }, + { + "Name": "customEncode", + "ReturnType": "void", + "Parameters": [ + { + "TypeValue": "out", + "TypeType": "org.apache.avro.io.Encoder" + } + ], + "FunctionCalls": [ + { + "Package": 
"com.spartans.schema", + "NodeName": "org.apache.avro.io.Encoder", + "FunctionName": "writeIndex", + "Parameters": [ + { + "TypeValue": "0", + "TypeType": "" + } + ], + "Position": { + "StartLine": 286, + "StartLinePosition": 10, + "StopLine": 286, + "StopLinePosition": 22 + } + }, + { + "Package": "com.spartans.schema", + "NodeName": "org.apache.avro.io.Encoder", + "FunctionName": "writeNull", + "Position": { + "StartLine": 287, + "StartLinePosition": 10, + "StopLine": 287, + "StopLinePosition": 20 + } + }, + { + "Package": "com.spartans.schema", + "NodeName": "org.apache.avro.io.Encoder", + "FunctionName": "writeIndex", + "Parameters": [ + { + "TypeValue": "1", + "TypeType": "" + } + ], + "Position": { + "StartLine": 289, + "StartLinePosition": 10, + "StopLine": 289, + "StopLinePosition": 22 + } + }, + { + "Package": "com.spartans.schema", + "NodeName": "org.apache.avro.io.Encoder", + "FunctionName": "writeBytes", + "Parameters": [ + { + "TypeValue": "this.medData", + "TypeType": "" + } + ], + "Position": { + "StartLine": 290, + "StartLinePosition": 10, + "StopLine": 290, + "StopLinePosition": 33 + } + } + ], + "Annotations": [ + { + "Name": "Override", + "Position": { + "StartLine": 282, + "StartLinePosition": 2, + "StopLine": 282, + "StopLinePosition": 3 + } + } + ], + "Position": { + "StartLine": 282, + "StartLinePosition": 19, + "StopLine": 293, + "StopLinePosition": 2 + }, + "LocalVariables": [ + { + "TypeValue": "neworg.apache.avro.Schema.Parser()", + "TypeType": "org" + }, + { + "TypeValue": "newSpecificData()", + "TypeType": "SpecificData" + }, + { + "TypeValue": "newBinaryMessageEncoder(MODEL$,SCHEMA$)", + "TypeType": "BinaryMessageEncoder" + }, + { + "TypeValue": "newBinaryMessageDecoder(MODEL$,SCHEMA$)", + "TypeType": "BinaryMessageDecoder" + }, + { + "TypeValue": "resolver", + "TypeType": "SchemaStore" + }, + { + "TypeValue": "return", + "TypeType": "com" + }, + { + "TypeValue": "b", + "TypeType": "java.nio.ByteBuffer" + }, + { + "TypeValue": "medData", + "TypeType": "java.nio.ByteBuffer" + }, + { + "TypeValue": "field$", + "TypeType": "int" + }, + { + "TypeValue": "throw", + "TypeType": "org" + }, + { + "TypeValue": "value$", + "TypeType": "java.lang.Object" + }, + { + "TypeValue": "value", + "TypeType": "java.nio.ByteBuffer" + }, + { + "TypeValue": "other", + "TypeType": "com.spartans.schema.SpartansValue" + }, + { + "TypeValue": "record", + "TypeType": "SpartansValue" + }, + { + "TypeValue": "newSpartansValue()", + "TypeType": "SpartansValue" + }, + { + "TypeValue": "out", + "TypeType": "org.apache.avro.io.Encoder" + }, + { + "TypeValue": "in", + "TypeType": "java.io.ObjectInput" + } + ], + "BodyHash": -166140687, + "Content": "void customEncode(org.apache.avro.io.Encoder out) throws java.io.IOException { if (this.medData == null) { out.writeIndex(0); out.writeNull(); } else { out.writeIndex(1); out.writeBytes(this.medData); } " + }, + { + "Name": "customDecode", + "ReturnType": "void", + "Parameters": [ + { + "TypeValue": "in", + "TypeType": "org.apache.avro.io.ResolvingDecoder" + } + ], + "FunctionCalls": [ + { + "Package": "com.spartans.schema", + "NodeName": "org.apache.avro.io.ResolvingDecoder", + "FunctionName": "readFieldOrderIfDiff", + "Position": { + "StartLine": 298, + "StartLinePosition": 51, + "StopLine": 298, + "StopLinePosition": 72 + } + }, + { + "Package": "com.spartans.schema", + "NodeName": "org.apache.avro.io.ResolvingDecoder", + "FunctionName": "readIndex", + "Position": { + "StartLine": 300, + "StartLinePosition": 13, + "StopLine": 300, + 
"StopLinePosition": 23 + } + }, + { + "Package": "com.spartans.schema", + "NodeName": "org.apache.avro.io.ResolvingDecoder", + "FunctionName": "readNull", + "Position": { + "StartLine": 301, + "StartLinePosition": 11, + "StopLine": 301, + "StopLinePosition": 20 + } + }, + { + "Package": "com.spartans.schema", + "NodeName": "org.apache.avro.io.ResolvingDecoder", + "FunctionName": "readBytes", + "Parameters": [ + { + "TypeValue": "this.medData", + "TypeType": "" + } + ], + "Position": { + "StartLine": 304, + "StartLinePosition": 26, + "StopLine": 304, + "StopLinePosition": 48 + } + }, + { + "Package": "com.spartans.schema", + "NodeName": "fieldOrder[i]", + "FunctionName": "pos", + "Position": { + "StartLine": 309, + "StartLinePosition": 30, + "StopLine": 309, + "StopLinePosition": 34 + } + }, + { + "Package": "com.spartans.schema", + "NodeName": "org.apache.avro.io.ResolvingDecoder", + "FunctionName": "readIndex", + "Position": { + "StartLine": 311, + "StartLinePosition": 17, + "StopLine": 311, + "StopLinePosition": 27 + } + }, + { + "Package": "com.spartans.schema", + "NodeName": "org.apache.avro.io.ResolvingDecoder", + "FunctionName": "readNull", + "Position": { + "StartLine": 312, + "StartLinePosition": 15, + "StopLine": 312, + "StopLinePosition": 24 + } + }, + { + "Package": "com.spartans.schema", + "NodeName": "org.apache.avro.io.ResolvingDecoder", + "FunctionName": "readBytes", + "Parameters": [ + { + "TypeValue": "this.medData", + "TypeType": "" + } + ], + "Position": { + "StartLine": 315, + "StartLinePosition": 30, + "StopLine": 315, + "StopLinePosition": 52 + } + } + ], + "Annotations": [ + { + "Name": "Override", + "Position": { + "StartLine": 295, + "StartLinePosition": 2, + "StopLine": 295, + "StopLinePosition": 3 + } + } + ], + "Position": { + "StartLine": 295, + "StartLinePosition": 19, + "StopLine": 324, + "StopLinePosition": 2 + }, + "LocalVariables": [ + { + "TypeValue": "neworg.apache.avro.Schema.Parser()", + "TypeType": "org" + }, + { + "TypeValue": "newSpecificData()", + "TypeType": "SpecificData" + }, + { + "TypeValue": "newBinaryMessageEncoder(MODEL$,SCHEMA$)", + "TypeType": "BinaryMessageEncoder" + }, + { + "TypeValue": "newBinaryMessageDecoder(MODEL$,SCHEMA$)", + "TypeType": "BinaryMessageDecoder" + }, + { + "TypeValue": "resolver", + "TypeType": "SchemaStore" + }, + { + "TypeValue": "return", + "TypeType": "com" + }, + { + "TypeValue": "b", + "TypeType": "java.nio.ByteBuffer" + }, + { + "TypeValue": "medData", + "TypeType": "java.nio.ByteBuffer" + }, + { + "TypeValue": "field$", + "TypeType": "int" + }, + { + "TypeValue": "throw", + "TypeType": "java" + }, + { + "TypeValue": "value$", + "TypeType": "java.lang.Object" + }, + { + "TypeValue": "value", + "TypeType": "java.nio.ByteBuffer" + }, + { + "TypeValue": "other", + "TypeType": "com.spartans.schema.SpartansValue" + }, + { + "TypeValue": "record", + "TypeType": "SpartansValue" + }, + { + "TypeValue": "newSpartansValue()", + "TypeType": "SpartansValue" + }, + { + "TypeValue": "out", + "TypeType": "org.apache.avro.io.Encoder" + }, + { + "TypeValue": "in", + "TypeType": "org.apache.avro.io.ResolvingDecoder" + }, + { + "TypeValue": "fieldOrder", + "TypeType": "org.apache.avro.Schema.Field[]" + }, + { + "TypeValue": "i", + "TypeType": "int" + } + ], + "BodyHash": -395004288, + "Content": "void customDecode(org.apache.avro.io.ResolvingDecoder in) throws java.io.IOException { org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff(); if (fieldOrder == null) { if (in.readIndex() != 1) { in.readNull(); 
this.medData = null; } else { this.medData = in.readBytes(this.medData); } } else { for (int i = 0; i < 1; i++) { switch (fieldOrder[i].pos()) { case 0: if (in.readIndex() != 1) { in.readNull(); this.medData = null; } else { this.medData = in.readBytes(this.medData); } break; default: throw new java.io.IOException(\"Corrupt ResolvingDecoder.\"); } } } " + } + ], + "InnerStructures": [ + { + "NodeName": "Builder", + "Type": "INNER_STRUCTURES", + "Package": "com.spartans.schema", + "FilePath": "SpartansValue.java", + "Implements": [ + "org.apache.avro.data.RecordBuilder" + ], + "Extend": "org.apache.avro.specific.SpecificRecordBuilderBase", + "Functions": [ + { + "Name": "Builder", + "FunctionCalls": [ + { + "Package": "com.spartans.schema", + "Type": "SUPER", + "NodeName": "org.apache.avro.specific.SpecificRecordBuilderBase", + "FunctionName": "super", + "Parameters": [ + { + "TypeValue": "SCHEMA$", + "TypeType": "" + } + ], + "Position": { + "StartLine": 176, + "StartLinePosition": 6, + "StopLine": 176, + "StopLinePosition": 19 + } + } + ], + "Position": { + "StartLine": 175, + "StartLinePosition": 12, + "StopLine": 177, + "StopLinePosition": 4 + }, + "IsConstructor": true + }, + { + "Name": "Builder", + "Parameters": [ + { + "TypeValue": "other", + "TypeType": "com.spartans.schema.SpartansValue.Builder" + } + ], + "FunctionCalls": [ + { + "Package": "com.spartans.schema", + "Type": "SUPER", + "NodeName": "org.apache.avro.specific.SpecificRecordBuilderBase", + "FunctionName": "super", + "Parameters": [ + { + "TypeValue": "other", + "TypeType": "" + } + ], + "Position": { + "StartLine": 184, + "StartLinePosition": 6, + "StopLine": 184, + "StopLinePosition": 17 + } + }, + { + "Package": "com.spartans.schema", + "NodeName": "Builder", + "FunctionName": "isValidValue", + "Parameters": [ + { + "TypeValue": "fields()[0]", + "TypeType": "" + }, + { + "TypeValue": "other.medData", + "TypeType": "" + } + ], + "Position": { + "StartLine": 185, + "StartLinePosition": 10, + "StopLine": 185, + "StopLinePosition": 49 + } + }, + { + "Package": "com.spartans.schema", + "NodeName": "Builder", + "FunctionName": "fields", + "Position": { + "StartLine": 185, + "StartLinePosition": 23, + "StopLine": 185, + "StopLinePosition": 30 + } + }, + { + "Package": "com.spartans.schema", + "NodeName": "Builder", + "FunctionName": "data", + "Position": { + "StartLine": 186, + "StartLinePosition": 23, + "StopLine": 186, + "StopLinePosition": 28 + } + }, + { + "Package": "com.spartans.schema", + "NodeName": "data", + "FunctionName": "deepCopy", + "Parameters": [ + { + "TypeValue": "fields()[0].schema()", + "TypeType": "" + }, + { + "TypeValue": "other.medData", + "TypeType": "" + } + ], + "Position": { + "StartLine": 186, + "StartLinePosition": 30, + "StopLine": 186, + "StopLinePosition": 74 + } + }, + { + "Package": "com.spartans.schema", + "NodeName": "Builder", + "FunctionName": "fields", + "Position": { + "StartLine": 186, + "StartLinePosition": 39, + "StopLine": 186, + "StopLinePosition": 46 + } + }, + { + "Package": "com.spartans.schema", + "NodeName": "fields()[0]", + "FunctionName": "schema", + "Position": { + "StartLine": 186, + "StartLinePosition": 51, + "StopLine": 186, + "StopLinePosition": 58 + } + }, + { + "Package": "com.spartans.schema", + "NodeName": "Builder", + "FunctionName": "fieldSetFlags", + "Position": { + "StartLine": 187, + "StartLinePosition": 8, + "StopLine": 187, + "StopLinePosition": 22 + } + }, + { + "Package": "com.spartans.schema", + "NodeName": "com.spartans.schema.SpartansValue.Builder", + 
"FunctionName": "fieldSetFlags", + "Position": { + "StartLine": 187, + "StartLinePosition": 35, + "StopLine": 187, + "StopLinePosition": 49 + } + } + ], + "Position": { + "StartLine": 183, + "StartLinePosition": 12, + "StopLine": 189, + "StopLinePosition": 4 + }, + "IsConstructor": true + }, + { + "Name": "Builder", + "Parameters": [ + { + "TypeValue": "other", + "TypeType": "com.spartans.schema.SpartansValue" + } + ], + "FunctionCalls": [ + { + "Package": "com.spartans.schema", + "Type": "SUPER", + "NodeName": "org.apache.avro.specific.SpecificRecordBuilderBase", + "FunctionName": "super", + "Parameters": [ + { + "TypeValue": "SCHEMA$", + "TypeType": "" + } + ], + "Position": { + "StartLine": 196, + "StartLinePosition": 6, + "StopLine": 196, + "StopLinePosition": 19 + } + }, + { + "Package": "com.spartans.schema", + "NodeName": "Builder", + "FunctionName": "isValidValue", + "Parameters": [ + { + "TypeValue": "fields()[0]", + "TypeType": "" + }, + { + "TypeValue": "other.medData", + "TypeType": "" + } + ], + "Position": { + "StartLine": 197, + "StartLinePosition": 10, + "StopLine": 197, + "StopLinePosition": 49 + } + }, + { + "Package": "com.spartans.schema", + "NodeName": "Builder", + "FunctionName": "fields", + "Position": { + "StartLine": 197, + "StartLinePosition": 23, + "StopLine": 197, + "StopLinePosition": 30 + } + }, + { + "Package": "com.spartans.schema", + "NodeName": "Builder", + "FunctionName": "data", + "Position": { + "StartLine": 198, + "StartLinePosition": 23, + "StopLine": 198, + "StopLinePosition": 28 + } + }, + { + "Package": "com.spartans.schema", + "NodeName": "data", + "FunctionName": "deepCopy", + "Parameters": [ + { + "TypeValue": "fields()[0].schema()", + "TypeType": "" + }, + { + "TypeValue": "other.medData", + "TypeType": "" + } + ], + "Position": { + "StartLine": 198, + "StartLinePosition": 30, + "StopLine": 198, + "StopLinePosition": 74 + } + }, + { + "Package": "com.spartans.schema", + "NodeName": "Builder", + "FunctionName": "fields", + "Position": { + "StartLine": 198, + "StartLinePosition": 39, + "StopLine": 198, + "StopLinePosition": 46 + } + }, + { + "Package": "com.spartans.schema", + "NodeName": "fields()[0]", + "FunctionName": "schema", + "Position": { + "StartLine": 198, + "StartLinePosition": 51, + "StopLine": 198, + "StopLinePosition": 58 + } + }, + { + "Package": "com.spartans.schema", + "NodeName": "Builder", + "FunctionName": "fieldSetFlags", + "Position": { + "StartLine": 199, + "StartLinePosition": 8, + "StopLine": 199, + "StopLinePosition": 22 + } + } + ], + "Position": { + "StartLine": 195, + "StartLinePosition": 12, + "StopLine": 201, + "StopLinePosition": 4 + }, + "IsConstructor": true + }, + { + "Name": "getMedData", + "ReturnType": "java.nio.ByteBuffer", + "Position": { + "StartLine": 208, + "StartLinePosition": 11, + "StopLine": 210, + "StopLinePosition": 4 + }, + "LocalVariables": [ + { + "TypeValue": "neworg.apache.avro.Schema.Parser()", + "TypeType": "org" + }, + { + "TypeValue": "newSpecificData()", + "TypeType": "SpecificData" + }, + { + "TypeValue": "newBinaryMessageEncoder(MODEL$,SCHEMA$)", + "TypeType": "BinaryMessageEncoder" + }, + { + "TypeValue": "newBinaryMessageDecoder(MODEL$,SCHEMA$)", + "TypeType": "BinaryMessageDecoder" + }, + { + "TypeValue": "resolver", + "TypeType": "SchemaStore" + }, + { + "TypeValue": "return", + "TypeType": "com" + }, + { + "TypeValue": "b", + "TypeType": "java.nio.ByteBuffer" + }, + { + "TypeValue": "medData", + "TypeType": "java.nio.ByteBuffer" + }, + { + "TypeValue": "field$", + "TypeType": "int" 
+ }, + { + "TypeValue": "throw", + "TypeType": "IndexOutOfBoundsException" + }, + { + "TypeValue": "value$", + "TypeType": "java.lang.Object" + }, + { + "TypeValue": "value", + "TypeType": "java.nio.ByteBuffer" + }, + { + "TypeValue": "other", + "TypeType": "com.spartans.schema.SpartansValue" + } + ], + "BodyHash": -1631522643 + }, + { + "Name": "setMedData", + "ReturnType": "com.spartans.schema.SpartansValue.Builder", + "Parameters": [ + { + "TypeValue": "value", + "TypeType": "java.nio.ByteBuffer" + } + ], + "FunctionCalls": [ + { + "Package": "com.spartans.schema", + "NodeName": "Builder", + "FunctionName": "validate", + "Parameters": [ + { + "TypeValue": "fields()[0]", + "TypeType": "" + }, + { + "TypeValue": "value", + "TypeType": "" + } + ], + "Position": { + "StartLine": 220, + "StartLinePosition": 6, + "StopLine": 220, + "StopLinePosition": 33 + } + }, + { + "Package": "com.spartans.schema", + "NodeName": "Builder", + "FunctionName": "fields", + "Position": { + "StartLine": 220, + "StartLinePosition": 15, + "StopLine": 220, + "StopLinePosition": 22 + } + }, + { + "Package": "com.spartans.schema", + "NodeName": "Builder", + "FunctionName": "fieldSetFlags", + "Position": { + "StartLine": 222, + "StartLinePosition": 6, + "StopLine": 222, + "StopLinePosition": 20 + } + } + ], + "Position": { + "StartLine": 219, + "StartLinePosition": 11, + "StopLine": 224, + "StopLinePosition": 4 + }, + "LocalVariables": [ + { + "TypeValue": "neworg.apache.avro.Schema.Parser()", + "TypeType": "org" + }, + { + "TypeValue": "newSpecificData()", + "TypeType": "SpecificData" + }, + { + "TypeValue": "newBinaryMessageEncoder(MODEL$,SCHEMA$)", + "TypeType": "BinaryMessageEncoder" + }, + { + "TypeValue": "newBinaryMessageDecoder(MODEL$,SCHEMA$)", + "TypeType": "BinaryMessageDecoder" + }, + { + "TypeValue": "resolver", + "TypeType": "SchemaStore" + }, + { + "TypeValue": "return", + "TypeType": "com" + }, + { + "TypeValue": "b", + "TypeType": "java.nio.ByteBuffer" + }, + { + "TypeValue": "medData", + "TypeType": "java.nio.ByteBuffer" + }, + { + "TypeValue": "field$", + "TypeType": "int" + }, + { + "TypeValue": "throw", + "TypeType": "IndexOutOfBoundsException" + }, + { + "TypeValue": "value$", + "TypeType": "java.lang.Object" + }, + { + "TypeValue": "value", + "TypeType": "java.nio.ByteBuffer" + }, + { + "TypeValue": "other", + "TypeType": "com.spartans.schema.SpartansValue" + } + ], + "BodyHash": 1402443483 + }, + { + "Name": "hasMedData", + "ReturnType": "boolean", + "FunctionCalls": [ + { + "Package": "com.spartans.schema", + "NodeName": "Builder", + "FunctionName": "fieldSetFlags", + "Position": { + "StartLine": 232, + "StartLinePosition": 13, + "StopLine": 232, + "StopLinePosition": 27 + } + } + ], + "Position": { + "StartLine": 231, + "StartLinePosition": 11, + "StopLine": 233, + "StopLinePosition": 4 + }, + "LocalVariables": [ + { + "TypeValue": "neworg.apache.avro.Schema.Parser()", + "TypeType": "org" + }, + { + "TypeValue": "newSpecificData()", + "TypeType": "SpecificData" + }, + { + "TypeValue": "newBinaryMessageEncoder(MODEL$,SCHEMA$)", + "TypeType": "BinaryMessageEncoder" + }, + { + "TypeValue": "newBinaryMessageDecoder(MODEL$,SCHEMA$)", + "TypeType": "BinaryMessageDecoder" + }, + { + "TypeValue": "resolver", + "TypeType": "SchemaStore" + }, + { + "TypeValue": "return", + "TypeType": "com" + }, + { + "TypeValue": "b", + "TypeType": "java.nio.ByteBuffer" + }, + { + "TypeValue": "medData", + "TypeType": "java.nio.ByteBuffer" + }, + { + "TypeValue": "field$", + "TypeType": "int" + }, + { + "TypeValue": 
"throw", + "TypeType": "IndexOutOfBoundsException" + }, + { + "TypeValue": "value$", + "TypeType": "java.lang.Object" + }, + { + "TypeValue": "value", + "TypeType": "java.nio.ByteBuffer" + }, + { + "TypeValue": "other", + "TypeType": "com.spartans.schema.SpartansValue" + } + ], + "BodyHash": -207772747 + }, + { + "Name": "clearMedData", + "ReturnType": "com.spartans.schema.SpartansValue.Builder", + "FunctionCalls": [ + { + "Package": "com.spartans.schema", + "NodeName": "Builder", + "FunctionName": "fieldSetFlags", + "Position": { + "StartLine": 243, + "StartLinePosition": 6, + "StopLine": 243, + "StopLinePosition": 20 + } + } + ], + "Position": { + "StartLine": 241, + "StartLinePosition": 11, + "StopLine": 245, + "StopLinePosition": 4 + }, + "LocalVariables": [ + { + "TypeValue": "neworg.apache.avro.Schema.Parser()", + "TypeType": "org" + }, + { + "TypeValue": "newSpecificData()", + "TypeType": "SpecificData" + }, + { + "TypeValue": "newBinaryMessageEncoder(MODEL$,SCHEMA$)", + "TypeType": "BinaryMessageEncoder" + }, + { + "TypeValue": "newBinaryMessageDecoder(MODEL$,SCHEMA$)", + "TypeType": "BinaryMessageDecoder" + }, + { + "TypeValue": "resolver", + "TypeType": "SchemaStore" + }, + { + "TypeValue": "return", + "TypeType": "com" + }, + { + "TypeValue": "b", + "TypeType": "java.nio.ByteBuffer" + }, + { + "TypeValue": "medData", + "TypeType": "java.nio.ByteBuffer" + }, + { + "TypeValue": "field$", + "TypeType": "int" + }, + { + "TypeValue": "throw", + "TypeType": "IndexOutOfBoundsException" + }, + { + "TypeValue": "value$", + "TypeType": "java.lang.Object" + }, + { + "TypeValue": "value", + "TypeType": "java.nio.ByteBuffer" + }, + { + "TypeValue": "other", + "TypeType": "com.spartans.schema.SpartansValue" + } + ], + "BodyHash": 732772491 + }, + { + "Name": "build", + "ReturnType": "SpartansValue", + "FunctionCalls": [ + { + "Package": "com.spartans.schema", + "NodeName": "Builder", + "FunctionName": "fieldSetFlags", + "Position": { + "StartLine": 252, + "StartLinePosition": 25, + "StopLine": 252, + "StopLinePosition": 39 + } + }, + { + "Package": "com.spartans.schema", + "NodeName": "Builder", + "FunctionName": "defaultValue", + "Parameters": [ + { + "TypeValue": "fields()[0]", + "TypeType": "" + } + ], + "Position": { + "StartLine": 252, + "StartLinePosition": 83, + "StopLine": 252, + "StopLinePosition": 107 + } + }, + { + "Package": "com.spartans.schema", + "NodeName": "Builder", + "FunctionName": "fields", + "Position": { + "StartLine": 252, + "StartLinePosition": 96, + "StopLine": 252, + "StopLinePosition": 103 + } + } + ], + "Annotations": [ + { + "Name": "Override", + "Position": { + "StartLine": 247, + "StartLinePosition": 4, + "StopLine": 247, + "StopLinePosition": 5 + } + }, + { + "Name": "SuppressWarnings", + "KeyValues": [ + { + "Key": "\"unchecked\"", + "Value": "\"unchecked\"" + } + ], + "Position": { + "StartLine": 248, + "StartLinePosition": 4, + "StopLine": 248, + "StopLinePosition": 33 + } + } + ], + "Position": { + "StartLine": 249, + "StartLinePosition": 11, + "StopLine": 259, + "StopLinePosition": 4 + }, + "LocalVariables": [ + { + "TypeValue": "neworg.apache.avro.Schema.Parser()", + "TypeType": "org" + }, + { + "TypeValue": "newSpecificData()", + "TypeType": "SpecificData" + }, + { + "TypeValue": "newBinaryMessageEncoder(MODEL$,SCHEMA$)", + "TypeType": "BinaryMessageEncoder" + }, + { + "TypeValue": "newBinaryMessageDecoder(MODEL$,SCHEMA$)", + "TypeType": "BinaryMessageDecoder" + }, + { + "TypeValue": "resolver", + "TypeType": "SchemaStore" + }, + { + "TypeValue": 
"return", + "TypeType": "com" + }, + { + "TypeValue": "b", + "TypeType": "java.nio.ByteBuffer" + }, + { + "TypeValue": "medData", + "TypeType": "java.nio.ByteBuffer" + }, + { + "TypeValue": "field$", + "TypeType": "int" + }, + { + "TypeValue": "throw", + "TypeType": "org" + }, + { + "TypeValue": "value$", + "TypeType": "java.lang.Object" + }, + { + "TypeValue": "value", + "TypeType": "java.nio.ByteBuffer" + }, + { + "TypeValue": "other", + "TypeType": "com.spartans.schema.SpartansValue" + }, + { + "TypeValue": "record", + "TypeType": "SpartansValue" + }, + { + "TypeValue": "newSpartansValue()", + "TypeType": "SpartansValue" + } + ], + "BodyHash": 1283851683 + } + ], + "Position": { + "StartLine": 168, + "StartLinePosition": 16, + "StopLine": 260, + "StopLinePosition": 2 + } + } + ], + "Annotations": [ + { + "Name": "org.apache.avro.specific.AvroGenerated", + "Position": { + "StartLine": 16, + "StopLine": 16, + "StopLinePosition": 26 + } + } + ], + "FunctionCalls": [ + { + "Package": "org.apache.avro.specific", + "Type": "FIELD", + "NodeName": "SpecificData", + "Position": { + "StartLine": 22, + "StartLinePosition": 17, + "StopLine": 22, + "StopLinePosition": 57 + } + }, + { + "Package": "org.apache.avro.message", + "Type": "FIELD", + "NodeName": "BinaryMessageEncoder", + "Position": { + "StartLine": 24, + "StartLinePosition": 23, + "StopLine": 25, + "StopLinePosition": 62 + } + }, + { + "Package": "org.apache.avro.message", + "Type": "FIELD", + "NodeName": "BinaryMessageDecoder", + "Position": { + "StartLine": 27, + "StartLinePosition": 23, + "StopLine": 28, + "StopLinePosition": 62 + } + } + ], + "Imports": [ + { + "Source": "org.apache.avro.generic.GenericArray" + }, + { + "Source": "org.apache.avro.specific.SpecificData" + }, + { + "Source": "org.apache.avro.util.Utf8" + }, + { + "Source": "org.apache.avro.message.BinaryMessageEncoder" + }, + { + "Source": "org.apache.avro.message.BinaryMessageDecoder" + }, + { + "Source": "org.apache.avro.message.SchemaStore" + } + ], + "Position": { + "StartLine": 17, + "StartLinePosition": 7, + "StopLine": 325 + }, + "Content": "class SpartansValue extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord { private static final long serialVersionUID = -3842884171322737207L; public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse(\"{\\\"type\\\":\\\"record\\\",\\\"name\\\":\\\"SpartansValue\\\",\\\"namespace\\\":\\\"com.spartans.schema\\\",\\\"doc\\\":\\\"This record contins metadata about the payload\\\",\\\"fields\\\":[{\\\"name\\\":\\\"medData\\\",\\\"type\\\":[\\\"null\\\",\\\"bytes\\\"],\\\"doc\\\":\\\"This field indicates the payload in its byte form\\\",\\\"default\\\":null}]}\"); public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; } private static SpecificData MODEL$ = new SpecificData(); private static final BinaryMessageEncoder ENCODER = new BinaryMessageEncoder(MODEL$, SCHEMA$); private static final BinaryMessageDecoder DECODER = new BinaryMessageDecoder(MODEL$, SCHEMA$); /** * Return the BinaryMessageEncoder instance used by this class. * @return the message encoder used by this class */ public static BinaryMessageEncoder getEncoder() { return ENCODER; } /** * Return the BinaryMessageDecoder instance used by this class. 
* @return the message decoder used by this class */ public static BinaryMessageDecoder getDecoder() { return DECODER; } /** * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link SchemaStore}. * @param resolver a {@link SchemaStore} used to find schemas by fingerprint * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore */ public static BinaryMessageDecoder createDecoder(SchemaStore resolver) { return new BinaryMessageDecoder(MODEL$, SCHEMA$, resolver); } /** * Serializes this SpartansValue to a ByteBuffer. * @return a buffer holding the serialized data for this instance * @throws java.io.IOException if this instance could not be serialized */ public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException { return ENCODER.encode(this); } /** * Deserializes a SpartansValue from a ByteBuffer. * @param b a byte buffer holding serialized data for an instance of this class * @return a SpartansValue instance decoded from the given buffer * @throws java.io.IOException if the given bytes could not be deserialized into an instance of this class */ public static SpartansValue fromByteBuffer( java.nio.ByteBuffer b) throws java.io.IOException { return DECODER.decode(b); } /** This field indicates the payload in its byte form */ private java.nio.ByteBuffer medData; /** * Default constructor. Note that this does not initialize fields * to their default values from the schema. If that is desired then * one should use newBuilder(). */ public SpartansValue() {} /** * All-args constructor. * @param medData This field indicates the payload in its byte form */ public SpartansValue(java.nio.ByteBuffer medData) { this.medData = medData; } public org.apache.avro.specific.SpecificData getSpecificData() { return MODEL$; } public org.apache.avro.Schema getSchema() { return SCHEMA$; } // Used by DatumWriter. Applications should not call. public java.lang.Object get(int field$) { switch (field$) { case 0: return medData; default: throw new IndexOutOfBoundsException(\"Invalid index: \" + field$); } } // Used by DatumReader. Applications should not call. @SuppressWarnings(value=\"unchecked\") public void put(int field$, java.lang.Object value$) { switch (field$) { case 0: medData = (java.nio.ByteBuffer)value$; break; default: throw new IndexOutOfBoundsException(\"Invalid index: \" + field$); } } /** * Gets the value of the 'medData' field. * @return This field indicates the payload in its byte form */ public java.nio.ByteBuffer getMedData() { return medData; } /** * Sets the value of the 'medData' field. * This field indicates the payload in its byte form * @param value the value to set. */ public void setMedData(java.nio.ByteBuffer value) { this.medData = value; } /** * Creates a new SpartansValue RecordBuilder. * @return A new SpartansValue RecordBuilder */ public static com.spartans.schema.SpartansValue.Builder newBuilder() { return new com.spartans.schema.SpartansValue.Builder(); } /** * Creates a new SpartansValue RecordBuilder by copying an existing Builder. * @param other The existing builder to copy. * @return A new SpartansValue RecordBuilder */ public static com.spartans.schema.SpartansValue.Builder newBuilder(com.spartans.schema.SpartansValue.Builder other) { if (other == null) { return new com.spartans.schema.SpartansValue.Builder(); } else { return new com.spartans.schema.SpartansValue.Builder(other); } } /** * Creates a new SpartansValue RecordBuilder by copying an existing SpartansValue instance. 
* @param other The existing instance to copy. * @return A new SpartansValue RecordBuilder */ public static com.spartans.schema.SpartansValue.Builder newBuilder(com.spartans.schema.SpartansValue other) { if (other == null) { return new com.spartans.schema.SpartansValue.Builder(); } else { return new com.spartans.schema.SpartansValue.Builder(other); } } /** * RecordBuilder for SpartansValue instances. */ @org.apache.avro.specific.AvroGenerated public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase implements org.apache.avro.data.RecordBuilder { /** This field indicates the payload in its byte form */ private java.nio.ByteBuffer medData; /** Creates a new Builder */ private Builder() { super(SCHEMA$); } /** * Creates a Builder by copying an existing Builder. * @param other The existing Builder to copy. */ private Builder(com.spartans.schema.SpartansValue.Builder other) { super(other); if (isValidValue(fields()[0], other.medData)) { this.medData = data().deepCopy(fields()[0].schema(), other.medData); fieldSetFlags()[0] = other.fieldSetFlags()[0]; } } /** * Creates a Builder by copying an existing SpartansValue instance * @param other The existing instance to copy. */ private Builder(com.spartans.schema.SpartansValue other) { super(SCHEMA$); if (isValidValue(fields()[0], other.medData)) { this.medData = data().deepCopy(fields()[0].schema(), other.medData); fieldSetFlags()[0] = true; } } /** * Gets the value of the 'medData' field. * This field indicates the payload in its byte form * @return The value. */ public java.nio.ByteBuffer getMedData() { return medData; } /** * Sets the value of the 'medData' field. * This field indicates the payload in its byte form * @param value The value of 'medData'. * @return This builder. */ public com.spartans.schema.SpartansValue.Builder setMedData(java.nio.ByteBuffer value) { validate(fields()[0], value); this.medData = value; fieldSetFlags()[0] = true; return this; } /** * Checks whether the 'medData' field has been set. * This field indicates the payload in its byte form * @return True if the 'medData' field has been set, false otherwise. */ public boolean hasMedData() { return fieldSetFlags()[0]; } /** * Clears the value of the 'medData' field. * This field indicates the payload in its byte form * @return This builder. */ public com.spartans.schema.SpartansValue.Builder clearMedData() { medData = null; fieldSetFlags()[0] = false; return this; } @Override @SuppressWarnings(\"unchecked\") public SpartansValue build() { try { SpartansValue record = new SpartansValue(); record.medData = fieldSetFlags()[0] ? 
this.medData : (java.nio.ByteBuffer) defaultValue(fields()[0]); return record; } catch (org.apache.avro.AvroMissingFieldException e) { throw e; } catch (java.lang.Exception e) { throw new org.apache.avro.AvroRuntimeException(e); } } } @SuppressWarnings(\"unchecked\") private static final org.apache.avro.io.DatumWriter WRITER$ = (org.apache.avro.io.DatumWriter)MODEL$.createDatumWriter(SCHEMA$); @Override public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException { WRITER$.write(this, SpecificData.getEncoder(out)); } @SuppressWarnings(\"unchecked\") private static final org.apache.avro.io.DatumReader READER$ = (org.apache.avro.io.DatumReader)MODEL$.createDatumReader(SCHEMA$); @Override public void readExternal(java.io.ObjectInput in) throws java.io.IOException { READER$.read(this, SpecificData.getDecoder(in)); } @Override protected boolean hasCustomCoders() { return true; } @Override public void customEncode(org.apache.avro.io.Encoder out) throws java.io.IOException { if (this.medData == null) { out.writeIndex(0); out.writeNull(); } else { out.writeIndex(1); out.writeBytes(this.medData); } } @Override public void customDecode(org.apache.avro.io.ResolvingDecoder in) throws java.io.IOException { org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff(); if (fieldOrder == null) { if (in.readIndex() != 1) { in.readNull(); this.medData = null; } else { this.medData = in.readBytes(this.medData); } } else { for (int i = 0; i < 1; i++) { switch (fieldOrder[i].pos()) { case 0: if (in.readIndex() != 1) { in.readNull(); this.medData = null; } else { this.medData = in.readBytes(this.medData); } break; default: throw new java.io.IOException(\"Corrupt ResolvingDecoder.\"); } } } }" + }, + { + "NodeName": "SpartansKey", + "Module": "root", + "Type": "CLASS", + "Package": "com.spartans.schema", + "FilePath": "main/java/com/spartans/schema/SpartansKey.java", + "Fields": [ + { + "TypeType": "org", + "TypeValue": "neworg.apache.avro.Schema.Parser().parse(\"{\\\"type\\\":\\\"record\\\",\\\"name\\\":\\\"SpartansKey\\\",\\\"namespace\\\":\\\"com.spartans.schema\\\",\\\"doc\\\":\\\"This record contins metadata about the payload\\\",\\\"fields\\\":[{\\\"name\\\":\\\"patientMrnIdentifier\\\",\\\"type\\\":[\\\"null\\\",\\\"string\\\"],\\\"doc\\\":\\\"This field indicates mrn of a paitent if available\\\",\\\"default\\\":null}]}\")", + "TypeKey": "SCHEMA$" + }, + { + "TypeType": "SpecificData", + "TypeValue": "newSpecificData()", + "TypeKey": "MODEL$" + }, + { + "TypeType": "BinaryMessageEncoder", + "TypeValue": "newBinaryMessageEncoder(MODEL$,SCHEMA$)", + "TypeKey": "ENCODER" + }, + { + "TypeType": "BinaryMessageDecoder", + "TypeValue": "newBinaryMessageDecoder(MODEL$,SCHEMA$)", + "TypeKey": "DECODER" + }, + { + "TypeType": "java", + "TypeKey": "patientMrnIdentifier" + }, + { + "TypeType": "java", + "TypeKey": "patientMrnIdentifier", + "Annotations": [ + { + "Name": "org.apache.avro.specific.AvroGenerated", + "Position": { + "StartLine": 167, + "StartLinePosition": 2, + "StopLine": 167, + "StopLinePosition": 28 + } + } + ] + }, + { + "TypeType": "org", + "TypeValue": "(org.apache.avro.io.DatumWriter)MODEL$.createDatumWriter(SCHEMA$)", + "TypeKey": "WRITER$", + "Annotations": [ + { + "Name": "SuppressWarnings", + "KeyValues": [ + { + "Key": "\"unchecked\"", + "Value": "\"unchecked\"" + } + ], + "Position": { + "StartLine": 262, + "StartLinePosition": 2, + "StopLine": 262, + "StopLinePosition": 31 + } + } + ] + }, + { + "TypeType": "org", + "TypeValue": 
"(org.apache.avro.io.DatumReader)MODEL$.createDatumReader(SCHEMA$)", + "TypeKey": "READER$", + "Annotations": [ + { + "Name": "SuppressWarnings", + "KeyValues": [ + { + "Key": "\"unchecked\"", + "Value": "\"unchecked\"" + } + ], + "Position": { + "StartLine": 271, + "StartLinePosition": 2, + "StopLine": 271, + "StopLinePosition": 31 + } + } + ] + } + ], + "Implements": [ + "org.apache.avro.specific.SpecificRecord" + ], + "Extend": "org.apache.avro.specific.SpecificRecordBase", + "Functions": [ + { + "Name": "writeExternal", + "ReturnType": "void", + "Parameters": [ + { + "TypeValue": "out", + "TypeType": "java.io.ObjectOutput" + } + ], + "FunctionCalls": [ + { + "Package": "com.spartans.schema", + "NodeName": "org", + "FunctionName": "write", + "Parameters": [ + { + "TypeValue": "this", + "TypeType": "" + }, + { + "TypeValue": "SpecificData.getEncoder(out)", + "TypeType": "" + } + ], + "Position": { + "StartLine": 268, + "StartLinePosition": 12, + "StopLine": 268, + "StopLinePosition": 52 + } + }, + { + "Package": "org.apache.avro.specific", + "Type": "CHAIN", + "NodeName": "SpecificData", + "FunctionName": "getEncoder", + "Parameters": [ + { + "TypeValue": "out", + "TypeType": "" + } + ], + "Position": { + "StartLine": 268, + "StartLinePosition": 37, + "StopLine": 268, + "StopLinePosition": 51 + } + }, + { + "Package": "org.apache.avro.specific", + "Type": "CHAIN", + "NodeName": "SpecificData", + "FunctionName": "createDatumReader", + "Parameters": [ + { + "TypeValue": "SCHEMA$", + "TypeType": "" + } + ], + "Position": { + "StartLine": 273, + "StartLinePosition": 66, + "StopLine": 273, + "StopLinePosition": 91 + } + } + ], + "Annotations": [ + { + "Name": "Override", + "Position": { + "StartLine": 266, + "StartLinePosition": 2, + "StopLine": 266, + "StopLinePosition": 3 + } + } + ], + "Position": { + "StartLine": 266, + "StartLinePosition": 19, + "StopLine": 269, + "StopLinePosition": 2 + }, + "LocalVariables": [ + { + "TypeValue": "neworg.apache.avro.Schema.Parser()", + "TypeType": "org" + }, + { + "TypeValue": "newSpecificData()", + "TypeType": "SpecificData" + }, + { + "TypeValue": "newBinaryMessageEncoder(MODEL$,SCHEMA$)", + "TypeType": "BinaryMessageEncoder" + }, + { + "TypeValue": "newBinaryMessageDecoder(MODEL$,SCHEMA$)", + "TypeType": "BinaryMessageDecoder" + }, + { + "TypeValue": "resolver", + "TypeType": "SchemaStore" + }, + { + "TypeValue": "return", + "TypeType": "com" + }, + { + "TypeValue": "b", + "TypeType": "java.nio.ByteBuffer" + }, + { + "TypeValue": "patientMrnIdentifier", + "TypeType": "java.lang.CharSequence" + }, + { + "TypeValue": "field$", + "TypeType": "int" + }, + { + "TypeValue": "throw", + "TypeType": "org" + }, + { + "TypeValue": "value$", + "TypeType": "java.lang.Object" + }, + { + "TypeValue": "value", + "TypeType": "java.lang.CharSequence" + }, + { + "TypeValue": "other", + "TypeType": "com.spartans.schema.SpartansKey" + }, + { + "TypeValue": "record", + "TypeType": "SpartansKey" + }, + { + "TypeValue": "newSpartansKey()", + "TypeType": "SpartansKey" + }, + { + "TypeValue": "out", + "TypeType": "java.io.ObjectOutput" + } + ], + "BodyHash": 1821324851, + "Content": "void writeExternal(java.io.ObjectOutput out) throws java.io.IOException { WRITER$.write(this, SpecificData.getEncoder(out)); " + }, + { + "Name": "readExternal", + "ReturnType": "void", + "Parameters": [ + { + "TypeValue": "in", + "TypeType": "java.io.ObjectInput" + } + ], + "FunctionCalls": [ + { + "Package": "com.spartans.schema", + "NodeName": "org", + "FunctionName": "read", + "Parameters": [ 
+ { + "TypeValue": "this", + "TypeType": "" + }, + { + "TypeValue": "SpecificData.getDecoder(in)", + "TypeType": "" + } + ], + "Position": { + "StartLine": 277, + "StartLinePosition": 12, + "StopLine": 277, + "StopLinePosition": 50 + } + }, + { + "Package": "org.apache.avro.specific", + "Type": "CHAIN", + "NodeName": "SpecificData", + "FunctionName": "getDecoder", + "Parameters": [ + { + "TypeValue": "in", + "TypeType": "" + } + ], + "Position": { + "StartLine": 277, + "StartLinePosition": 36, + "StopLine": 277, + "StopLinePosition": 49 + } + } + ], + "Annotations": [ + { + "Name": "Override", + "Position": { + "StartLine": 275, + "StartLinePosition": 2, + "StopLine": 275, + "StopLinePosition": 3 + } + } + ], + "Position": { + "StartLine": 275, + "StartLinePosition": 19, + "StopLine": 278, + "StopLinePosition": 2 + }, + "LocalVariables": [ + { + "TypeValue": "neworg.apache.avro.Schema.Parser()", + "TypeType": "org" + }, + { + "TypeValue": "newSpecificData()", + "TypeType": "SpecificData" + }, + { + "TypeValue": "newBinaryMessageEncoder(MODEL$,SCHEMA$)", + "TypeType": "BinaryMessageEncoder" + }, + { + "TypeValue": "newBinaryMessageDecoder(MODEL$,SCHEMA$)", + "TypeType": "BinaryMessageDecoder" + }, + { + "TypeValue": "resolver", + "TypeType": "SchemaStore" + }, + { + "TypeValue": "return", + "TypeType": "com" + }, + { + "TypeValue": "b", + "TypeType": "java.nio.ByteBuffer" + }, + { + "TypeValue": "patientMrnIdentifier", + "TypeType": "java.lang.CharSequence" + }, + { + "TypeValue": "field$", + "TypeType": "int" + }, + { + "TypeValue": "throw", + "TypeType": "org" + }, + { + "TypeValue": "value$", + "TypeType": "java.lang.Object" + }, + { + "TypeValue": "value", + "TypeType": "java.lang.CharSequence" + }, + { + "TypeValue": "other", + "TypeType": "com.spartans.schema.SpartansKey" + }, + { + "TypeValue": "record", + "TypeType": "SpartansKey" + }, + { + "TypeValue": "newSpartansKey()", + "TypeType": "SpartansKey" + }, + { + "TypeValue": "out", + "TypeType": "java.io.ObjectOutput" + }, + { + "TypeValue": "in", + "TypeType": "java.io.ObjectInput" + } + ], + "BodyHash": -1459730253, + "Content": "void readExternal(java.io.ObjectInput in) throws java.io.IOException { READER$.read(this, SpecificData.getDecoder(in)); " + }, + { + "Name": "hasCustomCoders", + "ReturnType": "boolean", + "Annotations": [ + { + "Name": "Override", + "Position": { + "StartLine": 280, + "StartLinePosition": 2, + "StopLine": 280, + "StopLinePosition": 3 + } + } + ], + "Position": { + "StartLine": 280, + "StartLinePosition": 22, + "StopLine": 280, + "StopLinePosition": 63 + }, + "LocalVariables": [ + { + "TypeValue": "neworg.apache.avro.Schema.Parser()", + "TypeType": "org" + }, + { + "TypeValue": "newSpecificData()", + "TypeType": "SpecificData" + }, + { + "TypeValue": "newBinaryMessageEncoder(MODEL$,SCHEMA$)", + "TypeType": "BinaryMessageEncoder" + }, + { + "TypeValue": "newBinaryMessageDecoder(MODEL$,SCHEMA$)", + "TypeType": "BinaryMessageDecoder" + }, + { + "TypeValue": "resolver", + "TypeType": "SchemaStore" + }, + { + "TypeValue": "return", + "TypeType": "com" + }, + { + "TypeValue": "b", + "TypeType": "java.nio.ByteBuffer" + }, + { + "TypeValue": "patientMrnIdentifier", + "TypeType": "java.lang.CharSequence" + }, + { + "TypeValue": "field$", + "TypeType": "int" + }, + { + "TypeValue": "throw", + "TypeType": "org" + }, + { + "TypeValue": "value$", + "TypeType": "java.lang.Object" + }, + { + "TypeValue": "value", + "TypeType": "java.lang.CharSequence" + }, + { + "TypeValue": "other", + "TypeType": 
"com.spartans.schema.SpartansKey" + }, + { + "TypeValue": "record", + "TypeType": "SpartansKey" + }, + { + "TypeValue": "newSpartansKey()", + "TypeType": "SpartansKey" + }, + { + "TypeValue": "out", + "TypeType": "java.io.ObjectOutput" + }, + { + "TypeValue": "in", + "TypeType": "java.io.ObjectInput" + } + ], + "BodyHash": 41618139, + "Content": "boolean hasCustomCoders() { return true; }" + }, + { + "Name": "customEncode", + "ReturnType": "void", + "Parameters": [ + { + "TypeValue": "out", + "TypeType": "org.apache.avro.io.Encoder" + } + ], + "FunctionCalls": [ + { + "Package": "com.spartans.schema", + "NodeName": "org.apache.avro.io.Encoder", + "FunctionName": "writeIndex", + "Parameters": [ + { + "TypeValue": "0", + "TypeType": "" + } + ], + "Position": { + "StartLine": 286, + "StartLinePosition": 10, + "StopLine": 286, + "StopLinePosition": 22 + } + }, + { + "Package": "com.spartans.schema", + "NodeName": "org.apache.avro.io.Encoder", + "FunctionName": "writeNull", + "Position": { + "StartLine": 287, + "StartLinePosition": 10, + "StopLine": 287, + "StopLinePosition": 20 + } + }, + { + "Package": "com.spartans.schema", + "NodeName": "org.apache.avro.io.Encoder", + "FunctionName": "writeIndex", + "Parameters": [ + { + "TypeValue": "1", + "TypeType": "" + } + ], + "Position": { + "StartLine": 289, + "StartLinePosition": 10, + "StopLine": 289, + "StopLinePosition": 22 + } + }, + { + "Package": "com.spartans.schema", + "NodeName": "org.apache.avro.io.Encoder", + "FunctionName": "writeString", + "Parameters": [ + { + "TypeValue": "this.patientMrnIdentifier", + "TypeType": "" + } + ], + "Position": { + "StartLine": 290, + "StartLinePosition": 10, + "StopLine": 290, + "StopLinePosition": 47 + } + } + ], + "Annotations": [ + { + "Name": "Override", + "Position": { + "StartLine": 282, + "StartLinePosition": 2, + "StopLine": 282, + "StopLinePosition": 3 + } + } + ], + "Position": { + "StartLine": 282, + "StartLinePosition": 19, + "StopLine": 293, + "StopLinePosition": 2 + }, + "LocalVariables": [ + { + "TypeValue": "neworg.apache.avro.Schema.Parser()", + "TypeType": "org" + }, + { + "TypeValue": "newSpecificData()", + "TypeType": "SpecificData" + }, + { + "TypeValue": "newBinaryMessageEncoder(MODEL$,SCHEMA$)", + "TypeType": "BinaryMessageEncoder" + }, + { + "TypeValue": "newBinaryMessageDecoder(MODEL$,SCHEMA$)", + "TypeType": "BinaryMessageDecoder" + }, + { + "TypeValue": "resolver", + "TypeType": "SchemaStore" + }, + { + "TypeValue": "return", + "TypeType": "com" + }, + { + "TypeValue": "b", + "TypeType": "java.nio.ByteBuffer" + }, + { + "TypeValue": "patientMrnIdentifier", + "TypeType": "java.lang.CharSequence" + }, + { + "TypeValue": "field$", + "TypeType": "int" + }, + { + "TypeValue": "throw", + "TypeType": "org" + }, + { + "TypeValue": "value$", + "TypeType": "java.lang.Object" + }, + { + "TypeValue": "value", + "TypeType": "java.lang.CharSequence" + }, + { + "TypeValue": "other", + "TypeType": "com.spartans.schema.SpartansKey" + }, + { + "TypeValue": "record", + "TypeType": "SpartansKey" + }, + { + "TypeValue": "newSpartansKey()", + "TypeType": "SpartansKey" + }, + { + "TypeValue": "out", + "TypeType": "org.apache.avro.io.Encoder" + }, + { + "TypeValue": "in", + "TypeType": "java.io.ObjectInput" + } + ], + "BodyHash": 374066237, + "Content": "void customEncode(org.apache.avro.io.Encoder out) throws java.io.IOException { if (this.patientMrnIdentifier == null) { out.writeIndex(0); out.writeNull(); } else { out.writeIndex(1); out.writeString(this.patientMrnIdentifier); } " + }, + { + "Name": 
"customDecode", + "ReturnType": "void", + "Parameters": [ + { + "TypeValue": "in", + "TypeType": "org.apache.avro.io.ResolvingDecoder" + } + ], + "FunctionCalls": [ + { + "Package": "com.spartans.schema", + "NodeName": "org.apache.avro.io.ResolvingDecoder", + "FunctionName": "readFieldOrderIfDiff", + "Position": { + "StartLine": 298, + "StartLinePosition": 51, + "StopLine": 298, + "StopLinePosition": 72 + } + }, + { + "Package": "com.spartans.schema", + "NodeName": "org.apache.avro.io.ResolvingDecoder", + "FunctionName": "readIndex", + "Position": { + "StartLine": 300, + "StartLinePosition": 13, + "StopLine": 300, + "StopLinePosition": 23 + } + }, + { + "Package": "com.spartans.schema", + "NodeName": "org.apache.avro.io.ResolvingDecoder", + "FunctionName": "readNull", + "Position": { + "StartLine": 301, + "StartLinePosition": 11, + "StopLine": 301, + "StopLinePosition": 20 + } + }, + { + "Package": "com.spartans.schema", + "NodeName": "org.apache.avro.io.ResolvingDecoder", + "FunctionName": "readString", + "Parameters": [ + { + "TypeValue": "this.patientMrnIdentifierinstanceofUtf8?(Utf8)this.patientMrnIdentifier:null", + "TypeType": "" + } + ], + "Position": { + "StartLine": 304, + "StartLinePosition": 39, + "StopLine": 304, + "StopLinePosition": 132 + } + }, + { + "Package": "com.spartans.schema", + "NodeName": "fieldOrder[i]", + "FunctionName": "pos", + "Position": { + "StartLine": 309, + "StartLinePosition": 30, + "StopLine": 309, + "StopLinePosition": 34 + } + }, + { + "Package": "com.spartans.schema", + "NodeName": "org.apache.avro.io.ResolvingDecoder", + "FunctionName": "readIndex", + "Position": { + "StartLine": 311, + "StartLinePosition": 17, + "StopLine": 311, + "StopLinePosition": 27 + } + }, + { + "Package": "com.spartans.schema", + "NodeName": "org.apache.avro.io.ResolvingDecoder", + "FunctionName": "readNull", + "Position": { + "StartLine": 312, + "StartLinePosition": 15, + "StopLine": 312, + "StopLinePosition": 24 + } + }, + { + "Package": "com.spartans.schema", + "NodeName": "org.apache.avro.io.ResolvingDecoder", + "FunctionName": "readString", + "Parameters": [ + { + "TypeValue": "this.patientMrnIdentifierinstanceofUtf8?(Utf8)this.patientMrnIdentifier:null", + "TypeType": "" + } + ], + "Position": { + "StartLine": 315, + "StartLinePosition": 43, + "StopLine": 315, + "StopLinePosition": 136 + } + } + ], + "Annotations": [ + { + "Name": "Override", + "Position": { + "StartLine": 295, + "StartLinePosition": 2, + "StopLine": 295, + "StopLinePosition": 3 + } + } + ], + "Position": { + "StartLine": 295, + "StartLinePosition": 19, + "StopLine": 324, + "StopLinePosition": 2 + }, + "LocalVariables": [ + { + "TypeValue": "neworg.apache.avro.Schema.Parser()", + "TypeType": "org" + }, + { + "TypeValue": "newSpecificData()", + "TypeType": "SpecificData" + }, + { + "TypeValue": "newBinaryMessageEncoder(MODEL$,SCHEMA$)", + "TypeType": "BinaryMessageEncoder" + }, + { + "TypeValue": "newBinaryMessageDecoder(MODEL$,SCHEMA$)", + "TypeType": "BinaryMessageDecoder" + }, + { + "TypeValue": "resolver", + "TypeType": "SchemaStore" + }, + { + "TypeValue": "return", + "TypeType": "com" + }, + { + "TypeValue": "b", + "TypeType": "java.nio.ByteBuffer" + }, + { + "TypeValue": "patientMrnIdentifier", + "TypeType": "java.lang.CharSequence" + }, + { + "TypeValue": "field$", + "TypeType": "int" + }, + { + "TypeValue": "throw", + "TypeType": "java" + }, + { + "TypeValue": "value$", + "TypeType": "java.lang.Object" + }, + { + "TypeValue": "value", + "TypeType": "java.lang.CharSequence" + }, + { + 
"TypeValue": "other", + "TypeType": "com.spartans.schema.SpartansKey" + }, + { + "TypeValue": "record", + "TypeType": "SpartansKey" + }, + { + "TypeValue": "newSpartansKey()", + "TypeType": "SpartansKey" + }, + { + "TypeValue": "out", + "TypeType": "org.apache.avro.io.Encoder" + }, + { + "TypeValue": "in", + "TypeType": "org.apache.avro.io.ResolvingDecoder" + }, + { + "TypeValue": "fieldOrder", + "TypeType": "org.apache.avro.Schema.Field[]" + }, + { + "TypeValue": "i", + "TypeType": "int" + } + ], + "BodyHash": 669654250, + "Content": "void customDecode(org.apache.avro.io.ResolvingDecoder in) throws java.io.IOException { org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff(); if (fieldOrder == null) { if (in.readIndex() != 1) { in.readNull(); this.patientMrnIdentifier = null; } else { this.patientMrnIdentifier = in.readString(this.patientMrnIdentifier instanceof Utf8 ? (Utf8)this.patientMrnIdentifier : null); } } else { for (int i = 0; i < 1; i++) { switch (fieldOrder[i].pos()) { case 0: if (in.readIndex() != 1) { in.readNull(); this.patientMrnIdentifier = null; } else { this.patientMrnIdentifier = in.readString(this.patientMrnIdentifier instanceof Utf8 ? (Utf8)this.patientMrnIdentifier : null); } break; default: throw new java.io.IOException(\"Corrupt ResolvingDecoder.\"); } } } " + } + ], + "InnerStructures": [ + { + "NodeName": "Builder", + "Type": "INNER_STRUCTURES", + "Package": "com.spartans.schema", + "FilePath": "SpartansKey.java", + "Implements": [ + "org.apache.avro.data.RecordBuilder" + ], + "Extend": "org.apache.avro.specific.SpecificRecordBuilderBase", + "Functions": [ + { + "Name": "Builder", + "FunctionCalls": [ + { + "Package": "com.spartans.schema", + "Type": "SUPER", + "NodeName": "org.apache.avro.specific.SpecificRecordBuilderBase", + "FunctionName": "super", + "Parameters": [ + { + "TypeValue": "SCHEMA$", + "TypeType": "" + } + ], + "Position": { + "StartLine": 176, + "StartLinePosition": 6, + "StopLine": 176, + "StopLinePosition": 19 + } + } + ], + "Position": { + "StartLine": 175, + "StartLinePosition": 12, + "StopLine": 177, + "StopLinePosition": 4 + }, + "IsConstructor": true + }, + { + "Name": "Builder", + "Parameters": [ + { + "TypeValue": "other", + "TypeType": "com.spartans.schema.SpartansKey.Builder" + } + ], + "FunctionCalls": [ + { + "Package": "com.spartans.schema", + "Type": "SUPER", + "NodeName": "org.apache.avro.specific.SpecificRecordBuilderBase", + "FunctionName": "super", + "Parameters": [ + { + "TypeValue": "other", + "TypeType": "" + } + ], + "Position": { + "StartLine": 184, + "StartLinePosition": 6, + "StopLine": 184, + "StopLinePosition": 17 + } + }, + { + "Package": "com.spartans.schema", + "NodeName": "Builder", + "FunctionName": "isValidValue", + "Parameters": [ + { + "TypeValue": "fields()[0]", + "TypeType": "" + }, + { + "TypeValue": "other.patientMrnIdentifier", + "TypeType": "" + } + ], + "Position": { + "StartLine": 185, + "StartLinePosition": 10, + "StopLine": 185, + "StopLinePosition": 62 + } + }, + { + "Package": "com.spartans.schema", + "NodeName": "Builder", + "FunctionName": "fields", + "Position": { + "StartLine": 185, + "StartLinePosition": 23, + "StopLine": 185, + "StopLinePosition": 30 + } + }, + { + "Package": "com.spartans.schema", + "NodeName": "Builder", + "FunctionName": "data", + "Position": { + "StartLine": 186, + "StartLinePosition": 36, + "StopLine": 186, + "StopLinePosition": 41 + } + }, + { + "Package": "com.spartans.schema", + "NodeName": "data", + "FunctionName": "deepCopy", + "Parameters": [ + { + 
"TypeValue": "fields()[0].schema()", + "TypeType": "" + }, + { + "TypeValue": "other.patientMrnIdentifier", + "TypeType": "" + } + ], + "Position": { + "StartLine": 186, + "StartLinePosition": 43, + "StopLine": 186, + "StopLinePosition": 100 + } + }, + { + "Package": "com.spartans.schema", + "NodeName": "Builder", + "FunctionName": "fields", + "Position": { + "StartLine": 186, + "StartLinePosition": 52, + "StopLine": 186, + "StopLinePosition": 59 + } + }, + { + "Package": "com.spartans.schema", + "NodeName": "fields()[0]", + "FunctionName": "schema", + "Position": { + "StartLine": 186, + "StartLinePosition": 64, + "StopLine": 186, + "StopLinePosition": 71 + } + }, + { + "Package": "com.spartans.schema", + "NodeName": "Builder", + "FunctionName": "fieldSetFlags", + "Position": { + "StartLine": 187, + "StartLinePosition": 8, + "StopLine": 187, + "StopLinePosition": 22 + } + }, + { + "Package": "com.spartans.schema", + "NodeName": "com.spartans.schema.SpartansKey.Builder", + "FunctionName": "fieldSetFlags", + "Position": { + "StartLine": 187, + "StartLinePosition": 35, + "StopLine": 187, + "StopLinePosition": 49 + } + } + ], + "Position": { + "StartLine": 183, + "StartLinePosition": 12, + "StopLine": 189, + "StopLinePosition": 4 + }, + "IsConstructor": true + }, + { + "Name": "Builder", + "Parameters": [ + { + "TypeValue": "other", + "TypeType": "com.spartans.schema.SpartansKey" + } + ], + "FunctionCalls": [ + { + "Package": "com.spartans.schema", + "Type": "SUPER", + "NodeName": "org.apache.avro.specific.SpecificRecordBuilderBase", + "FunctionName": "super", + "Parameters": [ + { + "TypeValue": "SCHEMA$", + "TypeType": "" + } + ], + "Position": { + "StartLine": 196, + "StartLinePosition": 6, + "StopLine": 196, + "StopLinePosition": 19 + } + }, + { + "Package": "com.spartans.schema", + "NodeName": "Builder", + "FunctionName": "isValidValue", + "Parameters": [ + { + "TypeValue": "fields()[0]", + "TypeType": "" + }, + { + "TypeValue": "other.patientMrnIdentifier", + "TypeType": "" + } + ], + "Position": { + "StartLine": 197, + "StartLinePosition": 10, + "StopLine": 197, + "StopLinePosition": 62 + } + }, + { + "Package": "com.spartans.schema", + "NodeName": "Builder", + "FunctionName": "fields", + "Position": { + "StartLine": 197, + "StartLinePosition": 23, + "StopLine": 197, + "StopLinePosition": 30 + } + }, + { + "Package": "com.spartans.schema", + "NodeName": "Builder", + "FunctionName": "data", + "Position": { + "StartLine": 198, + "StartLinePosition": 36, + "StopLine": 198, + "StopLinePosition": 41 + } + }, + { + "Package": "com.spartans.schema", + "NodeName": "data", + "FunctionName": "deepCopy", + "Parameters": [ + { + "TypeValue": "fields()[0].schema()", + "TypeType": "" + }, + { + "TypeValue": "other.patientMrnIdentifier", + "TypeType": "" + } + ], + "Position": { + "StartLine": 198, + "StartLinePosition": 43, + "StopLine": 198, + "StopLinePosition": 100 + } + }, + { + "Package": "com.spartans.schema", + "NodeName": "Builder", + "FunctionName": "fields", + "Position": { + "StartLine": 198, + "StartLinePosition": 52, + "StopLine": 198, + "StopLinePosition": 59 + } + }, + { + "Package": "com.spartans.schema", + "NodeName": "fields()[0]", + "FunctionName": "schema", + "Position": { + "StartLine": 198, + "StartLinePosition": 64, + "StopLine": 198, + "StopLinePosition": 71 + } + }, + { + "Package": "com.spartans.schema", + "NodeName": "Builder", + "FunctionName": "fieldSetFlags", + "Position": { + "StartLine": 199, + "StartLinePosition": 8, + "StopLine": 199, + "StopLinePosition": 22 + } + } 
+ ], + "Position": { + "StartLine": 195, + "StartLinePosition": 12, + "StopLine": 201, + "StopLinePosition": 4 + }, + "IsConstructor": true + }, + { + "Name": "getPatientMrnIdentifier", + "ReturnType": "java.lang.CharSequence", + "Position": { + "StartLine": 208, + "StartLinePosition": 11, + "StopLine": 210, + "StopLinePosition": 4 + }, + "LocalVariables": [ + { + "TypeValue": "neworg.apache.avro.Schema.Parser()", + "TypeType": "org" + }, + { + "TypeValue": "newSpecificData()", + "TypeType": "SpecificData" + }, + { + "TypeValue": "newBinaryMessageEncoder(MODEL$,SCHEMA$)", + "TypeType": "BinaryMessageEncoder" + }, + { + "TypeValue": "newBinaryMessageDecoder(MODEL$,SCHEMA$)", + "TypeType": "BinaryMessageDecoder" + }, + { + "TypeValue": "resolver", + "TypeType": "SchemaStore" + }, + { + "TypeValue": "return", + "TypeType": "com" + }, + { + "TypeValue": "b", + "TypeType": "java.nio.ByteBuffer" + }, + { + "TypeValue": "patientMrnIdentifier", + "TypeType": "java.lang.CharSequence" + }, + { + "TypeValue": "field$", + "TypeType": "int" + }, + { + "TypeValue": "throw", + "TypeType": "IndexOutOfBoundsException" + }, + { + "TypeValue": "value$", + "TypeType": "java.lang.Object" + }, + { + "TypeValue": "value", + "TypeType": "java.lang.CharSequence" + }, + { + "TypeValue": "other", + "TypeType": "com.spartans.schema.SpartansKey" + } + ], + "BodyHash": 1832851834 + }, + { + "Name": "setPatientMrnIdentifier", + "ReturnType": "com.spartans.schema.SpartansKey.Builder", + "Parameters": [ + { + "TypeValue": "value", + "TypeType": "java.lang.CharSequence" + } + ], + "FunctionCalls": [ + { + "Package": "com.spartans.schema", + "NodeName": "Builder", + "FunctionName": "validate", + "Parameters": [ + { + "TypeValue": "fields()[0]", + "TypeType": "" + }, + { + "TypeValue": "value", + "TypeType": "" + } + ], + "Position": { + "StartLine": 220, + "StartLinePosition": 6, + "StopLine": 220, + "StopLinePosition": 33 + } + }, + { + "Package": "com.spartans.schema", + "NodeName": "Builder", + "FunctionName": "fields", + "Position": { + "StartLine": 220, + "StartLinePosition": 15, + "StopLine": 220, + "StopLinePosition": 22 + } + }, + { + "Package": "com.spartans.schema", + "NodeName": "Builder", + "FunctionName": "fieldSetFlags", + "Position": { + "StartLine": 222, + "StartLinePosition": 6, + "StopLine": 222, + "StopLinePosition": 20 + } + } + ], + "Position": { + "StartLine": 219, + "StartLinePosition": 11, + "StopLine": 224, + "StopLinePosition": 4 + }, + "LocalVariables": [ + { + "TypeValue": "neworg.apache.avro.Schema.Parser()", + "TypeType": "org" + }, + { + "TypeValue": "newSpecificData()", + "TypeType": "SpecificData" + }, + { + "TypeValue": "newBinaryMessageEncoder(MODEL$,SCHEMA$)", + "TypeType": "BinaryMessageEncoder" + }, + { + "TypeValue": "newBinaryMessageDecoder(MODEL$,SCHEMA$)", + "TypeType": "BinaryMessageDecoder" + }, + { + "TypeValue": "resolver", + "TypeType": "SchemaStore" + }, + { + "TypeValue": "return", + "TypeType": "com" + }, + { + "TypeValue": "b", + "TypeType": "java.nio.ByteBuffer" + }, + { + "TypeValue": "patientMrnIdentifier", + "TypeType": "java.lang.CharSequence" + }, + { + "TypeValue": "field$", + "TypeType": "int" + }, + { + "TypeValue": "throw", + "TypeType": "IndexOutOfBoundsException" + }, + { + "TypeValue": "value$", + "TypeType": "java.lang.Object" + }, + { + "TypeValue": "value", + "TypeType": "java.lang.CharSequence" + }, + { + "TypeValue": "other", + "TypeType": "com.spartans.schema.SpartansKey" + } + ], + "BodyHash": -313762912 + }, + { + "Name": "hasPatientMrnIdentifier", + 
"ReturnType": "boolean", + "FunctionCalls": [ + { + "Package": "com.spartans.schema", + "NodeName": "Builder", + "FunctionName": "fieldSetFlags", + "Position": { + "StartLine": 232, + "StartLinePosition": 13, + "StopLine": 232, + "StopLinePosition": 27 + } + } + ], + "Position": { + "StartLine": 231, + "StartLinePosition": 11, + "StopLine": 233, + "StopLinePosition": 4 + }, + "LocalVariables": [ + { + "TypeValue": "neworg.apache.avro.Schema.Parser()", + "TypeType": "org" + }, + { + "TypeValue": "newSpecificData()", + "TypeType": "SpecificData" + }, + { + "TypeValue": "newBinaryMessageEncoder(MODEL$,SCHEMA$)", + "TypeType": "BinaryMessageEncoder" + }, + { + "TypeValue": "newBinaryMessageDecoder(MODEL$,SCHEMA$)", + "TypeType": "BinaryMessageDecoder" + }, + { + "TypeValue": "resolver", + "TypeType": "SchemaStore" + }, + { + "TypeValue": "return", + "TypeType": "com" + }, + { + "TypeValue": "b", + "TypeType": "java.nio.ByteBuffer" + }, + { + "TypeValue": "patientMrnIdentifier", + "TypeType": "java.lang.CharSequence" + }, + { + "TypeValue": "field$", + "TypeType": "int" + }, + { + "TypeValue": "throw", + "TypeType": "IndexOutOfBoundsException" + }, + { + "TypeValue": "value$", + "TypeType": "java.lang.Object" + }, + { + "TypeValue": "value", + "TypeType": "java.lang.CharSequence" + }, + { + "TypeValue": "other", + "TypeType": "com.spartans.schema.SpartansKey" + } + ], + "BodyHash": -207772747 + }, + { + "Name": "clearPatientMrnIdentifier", + "ReturnType": "com.spartans.schema.SpartansKey.Builder", + "FunctionCalls": [ + { + "Package": "com.spartans.schema", + "NodeName": "Builder", + "FunctionName": "fieldSetFlags", + "Position": { + "StartLine": 243, + "StartLinePosition": 6, + "StopLine": 243, + "StopLinePosition": 20 + } + } + ], + "Position": { + "StartLine": 241, + "StartLinePosition": 11, + "StopLine": 245, + "StopLinePosition": 4 + }, + "LocalVariables": [ + { + "TypeValue": "neworg.apache.avro.Schema.Parser()", + "TypeType": "org" + }, + { + "TypeValue": "newSpecificData()", + "TypeType": "SpecificData" + }, + { + "TypeValue": "newBinaryMessageEncoder(MODEL$,SCHEMA$)", + "TypeType": "BinaryMessageEncoder" + }, + { + "TypeValue": "newBinaryMessageDecoder(MODEL$,SCHEMA$)", + "TypeType": "BinaryMessageDecoder" + }, + { + "TypeValue": "resolver", + "TypeType": "SchemaStore" + }, + { + "TypeValue": "return", + "TypeType": "com" + }, + { + "TypeValue": "b", + "TypeType": "java.nio.ByteBuffer" + }, + { + "TypeValue": "patientMrnIdentifier", + "TypeType": "java.lang.CharSequence" + }, + { + "TypeValue": "field$", + "TypeType": "int" + }, + { + "TypeValue": "throw", + "TypeType": "IndexOutOfBoundsException" + }, + { + "TypeValue": "value$", + "TypeType": "java.lang.Object" + }, + { + "TypeValue": "value", + "TypeType": "java.lang.CharSequence" + }, + { + "TypeValue": "other", + "TypeType": "com.spartans.schema.SpartansKey" + } + ], + "BodyHash": 1900725822 + }, + { + "Name": "build", + "ReturnType": "SpartansKey", + "FunctionCalls": [ + { + "Package": "com.spartans.schema", + "NodeName": "Builder", + "FunctionName": "fieldSetFlags", + "Position": { + "StartLine": 252, + "StartLinePosition": 38, + "StopLine": 252, + "StopLinePosition": 52 + } + }, + { + "Package": "com.spartans.schema", + "NodeName": "Builder", + "FunctionName": "defaultValue", + "Parameters": [ + { + "TypeValue": "fields()[0]", + "TypeType": "" + } + ], + "Position": { + "StartLine": 252, + "StartLinePosition": 112, + "StopLine": 252, + "StopLinePosition": 136 + } + }, + { + "Package": "com.spartans.schema", + "NodeName": 
"Builder", + "FunctionName": "fields", + "Position": { + "StartLine": 252, + "StartLinePosition": 125, + "StopLine": 252, + "StopLinePosition": 132 + } + } + ], + "Annotations": [ + { + "Name": "Override", + "Position": { + "StartLine": 247, + "StartLinePosition": 4, + "StopLine": 247, + "StopLinePosition": 5 + } + }, + { + "Name": "SuppressWarnings", + "KeyValues": [ + { + "Key": "\"unchecked\"", + "Value": "\"unchecked\"" + } + ], + "Position": { + "StartLine": 248, + "StartLinePosition": 4, + "StopLine": 248, + "StopLinePosition": 33 + } + } + ], + "Position": { + "StartLine": 249, + "StartLinePosition": 11, + "StopLine": 259, + "StopLinePosition": 4 + }, + "LocalVariables": [ + { + "TypeValue": "neworg.apache.avro.Schema.Parser()", + "TypeType": "org" + }, + { + "TypeValue": "newSpecificData()", + "TypeType": "SpecificData" + }, + { + "TypeValue": "newBinaryMessageEncoder(MODEL$,SCHEMA$)", + "TypeType": "BinaryMessageEncoder" + }, + { + "TypeValue": "newBinaryMessageDecoder(MODEL$,SCHEMA$)", + "TypeType": "BinaryMessageDecoder" + }, + { + "TypeValue": "resolver", + "TypeType": "SchemaStore" + }, + { + "TypeValue": "return", + "TypeType": "com" + }, + { + "TypeValue": "b", + "TypeType": "java.nio.ByteBuffer" + }, + { + "TypeValue": "patientMrnIdentifier", + "TypeType": "java.lang.CharSequence" + }, + { + "TypeValue": "field$", + "TypeType": "int" + }, + { + "TypeValue": "throw", + "TypeType": "org" + }, + { + "TypeValue": "value$", + "TypeType": "java.lang.Object" + }, + { + "TypeValue": "value", + "TypeType": "java.lang.CharSequence" + }, + { + "TypeValue": "other", + "TypeType": "com.spartans.schema.SpartansKey" + }, + { + "TypeValue": "record", + "TypeType": "SpartansKey" + }, + { + "TypeValue": "newSpartansKey()", + "TypeType": "SpartansKey" + } + ], + "BodyHash": 1176080530 + } + ], + "Position": { + "StartLine": 168, + "StartLinePosition": 16, + "StopLine": 260, + "StopLinePosition": 2 + } + } + ], + "Annotations": [ + { + "Name": "org.apache.avro.specific.AvroGenerated", + "Position": { + "StartLine": 16, + "StopLine": 16, + "StopLinePosition": 26 + } + } + ], + "FunctionCalls": [ + { + "Package": "org.apache.avro.specific", + "Type": "FIELD", + "NodeName": "SpecificData", + "Position": { + "StartLine": 22, + "StartLinePosition": 17, + "StopLine": 22, + "StopLinePosition": 57 + } + }, + { + "Package": "org.apache.avro.message", + "Type": "FIELD", + "NodeName": "BinaryMessageEncoder", + "Position": { + "StartLine": 24, + "StartLinePosition": 23, + "StopLine": 25, + "StopLinePosition": 60 + } + }, + { + "Package": "org.apache.avro.message", + "Type": "FIELD", + "NodeName": "BinaryMessageDecoder", + "Position": { + "StartLine": 27, + "StartLinePosition": 23, + "StopLine": 28, + "StopLinePosition": 60 + } + } + ], + "Imports": [ + { + "Source": "org.apache.avro.generic.GenericArray" + }, + { + "Source": "org.apache.avro.specific.SpecificData" + }, + { + "Source": "org.apache.avro.util.Utf8" + }, + { + "Source": "org.apache.avro.message.BinaryMessageEncoder" + }, + { + "Source": "org.apache.avro.message.BinaryMessageDecoder" + }, + { + "Source": "org.apache.avro.message.SchemaStore" + } + ], + "Position": { + "StartLine": 17, + "StartLinePosition": 7, + "StopLine": 325 + }, + "Content": "class SpartansKey extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord { private static final long serialVersionUID = -5863712184793094632L; public static final org.apache.avro.Schema SCHEMA$ = new 
org.apache.avro.Schema.Parser().parse(\"{\\\"type\\\":\\\"record\\\",\\\"name\\\":\\\"SpartansKey\\\",\\\"namespace\\\":\\\"com.spartans.schema\\\",\\\"doc\\\":\\\"This record contins metadata about the payload\\\",\\\"fields\\\":[{\\\"name\\\":\\\"patientMrnIdentifier\\\",\\\"type\\\":[\\\"null\\\",\\\"string\\\"],\\\"doc\\\":\\\"This field indicates mrn of a paitent if available\\\",\\\"default\\\":null}]}\"); public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; } private static SpecificData MODEL$ = new SpecificData(); private static final BinaryMessageEncoder ENCODER = new BinaryMessageEncoder(MODEL$, SCHEMA$); private static final BinaryMessageDecoder DECODER = new BinaryMessageDecoder(MODEL$, SCHEMA$); /** * Return the BinaryMessageEncoder instance used by this class. * @return the message encoder used by this class */ public static BinaryMessageEncoder getEncoder() { return ENCODER; } /** * Return the BinaryMessageDecoder instance used by this class. * @return the message decoder used by this class */ public static BinaryMessageDecoder getDecoder() { return DECODER; } /** * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link SchemaStore}. * @param resolver a {@link SchemaStore} used to find schemas by fingerprint * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore */ public static BinaryMessageDecoder createDecoder(SchemaStore resolver) { return new BinaryMessageDecoder(MODEL$, SCHEMA$, resolver); } /** * Serializes this SpartansKey to a ByteBuffer. * @return a buffer holding the serialized data for this instance * @throws java.io.IOException if this instance could not be serialized */ public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException { return ENCODER.encode(this); } /** * Deserializes a SpartansKey from a ByteBuffer. * @param b a byte buffer holding serialized data for an instance of this class * @return a SpartansKey instance decoded from the given buffer * @throws java.io.IOException if the given bytes could not be deserialized into an instance of this class */ public static SpartansKey fromByteBuffer( java.nio.ByteBuffer b) throws java.io.IOException { return DECODER.decode(b); } /** This field indicates mrn of a paitent if available */ private java.lang.CharSequence patientMrnIdentifier; /** * Default constructor. Note that this does not initialize fields * to their default values from the schema. If that is desired then * one should use newBuilder(). */ public SpartansKey() {} /** * All-args constructor. * @param patientMrnIdentifier This field indicates mrn of a paitent if available */ public SpartansKey(java.lang.CharSequence patientMrnIdentifier) { this.patientMrnIdentifier = patientMrnIdentifier; } public org.apache.avro.specific.SpecificData getSpecificData() { return MODEL$; } public org.apache.avro.Schema getSchema() { return SCHEMA$; } // Used by DatumWriter. Applications should not call. public java.lang.Object get(int field$) { switch (field$) { case 0: return patientMrnIdentifier; default: throw new IndexOutOfBoundsException(\"Invalid index: \" + field$); } } // Used by DatumReader. Applications should not call. @SuppressWarnings(value=\"unchecked\") public void put(int field$, java.lang.Object value$) { switch (field$) { case 0: patientMrnIdentifier = (java.lang.CharSequence)value$; break; default: throw new IndexOutOfBoundsException(\"Invalid index: \" + field$); } } /** * Gets the value of the 'patientMrnIdentifier' field. 
* @return This field indicates mrn of a paitent if available */ public java.lang.CharSequence getPatientMrnIdentifier() { return patientMrnIdentifier; } /** * Sets the value of the 'patientMrnIdentifier' field. * This field indicates mrn of a paitent if available * @param value the value to set. */ public void setPatientMrnIdentifier(java.lang.CharSequence value) { this.patientMrnIdentifier = value; } /** * Creates a new SpartansKey RecordBuilder. * @return A new SpartansKey RecordBuilder */ public static com.spartans.schema.SpartansKey.Builder newBuilder() { return new com.spartans.schema.SpartansKey.Builder(); } /** * Creates a new SpartansKey RecordBuilder by copying an existing Builder. * @param other The existing builder to copy. * @return A new SpartansKey RecordBuilder */ public static com.spartans.schema.SpartansKey.Builder newBuilder(com.spartans.schema.SpartansKey.Builder other) { if (other == null) { return new com.spartans.schema.SpartansKey.Builder(); } else { return new com.spartans.schema.SpartansKey.Builder(other); } } /** * Creates a new SpartansKey RecordBuilder by copying an existing SpartansKey instance. * @param other The existing instance to copy. * @return A new SpartansKey RecordBuilder */ public static com.spartans.schema.SpartansKey.Builder newBuilder(com.spartans.schema.SpartansKey other) { if (other == null) { return new com.spartans.schema.SpartansKey.Builder(); } else { return new com.spartans.schema.SpartansKey.Builder(other); } } /** * RecordBuilder for SpartansKey instances. */ @org.apache.avro.specific.AvroGenerated public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase implements org.apache.avro.data.RecordBuilder { /** This field indicates mrn of a paitent if available */ private java.lang.CharSequence patientMrnIdentifier; /** Creates a new Builder */ private Builder() { super(SCHEMA$); } /** * Creates a Builder by copying an existing Builder. * @param other The existing Builder to copy. */ private Builder(com.spartans.schema.SpartansKey.Builder other) { super(other); if (isValidValue(fields()[0], other.patientMrnIdentifier)) { this.patientMrnIdentifier = data().deepCopy(fields()[0].schema(), other.patientMrnIdentifier); fieldSetFlags()[0] = other.fieldSetFlags()[0]; } } /** * Creates a Builder by copying an existing SpartansKey instance * @param other The existing instance to copy. */ private Builder(com.spartans.schema.SpartansKey other) { super(SCHEMA$); if (isValidValue(fields()[0], other.patientMrnIdentifier)) { this.patientMrnIdentifier = data().deepCopy(fields()[0].schema(), other.patientMrnIdentifier); fieldSetFlags()[0] = true; } } /** * Gets the value of the 'patientMrnIdentifier' field. * This field indicates mrn of a paitent if available * @return The value. */ public java.lang.CharSequence getPatientMrnIdentifier() { return patientMrnIdentifier; } /** * Sets the value of the 'patientMrnIdentifier' field. * This field indicates mrn of a paitent if available * @param value The value of 'patientMrnIdentifier'. * @return This builder. */ public com.spartans.schema.SpartansKey.Builder setPatientMrnIdentifier(java.lang.CharSequence value) { validate(fields()[0], value); this.patientMrnIdentifier = value; fieldSetFlags()[0] = true; return this; } /** * Checks whether the 'patientMrnIdentifier' field has been set. * This field indicates mrn of a paitent if available * @return True if the 'patientMrnIdentifier' field has been set, false otherwise. 
*/ public boolean hasPatientMrnIdentifier() { return fieldSetFlags()[0]; } /** * Clears the value of the 'patientMrnIdentifier' field. * This field indicates mrn of a paitent if available * @return This builder. */ public com.spartans.schema.SpartansKey.Builder clearPatientMrnIdentifier() { patientMrnIdentifier = null; fieldSetFlags()[0] = false; return this; } @Override @SuppressWarnings(\"unchecked\") public SpartansKey build() { try { SpartansKey record = new SpartansKey(); record.patientMrnIdentifier = fieldSetFlags()[0] ? this.patientMrnIdentifier : (java.lang.CharSequence) defaultValue(fields()[0]); return record; } catch (org.apache.avro.AvroMissingFieldException e) { throw e; } catch (java.lang.Exception e) { throw new org.apache.avro.AvroRuntimeException(e); } } } @SuppressWarnings(\"unchecked\") private static final org.apache.avro.io.DatumWriter WRITER$ = (org.apache.avro.io.DatumWriter)MODEL$.createDatumWriter(SCHEMA$); @Override public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException { WRITER$.write(this, SpecificData.getEncoder(out)); } @SuppressWarnings(\"unchecked\") private static final org.apache.avro.io.DatumReader READER$ = (org.apache.avro.io.DatumReader)MODEL$.createDatumReader(SCHEMA$); @Override public void readExternal(java.io.ObjectInput in) throws java.io.IOException { READER$.read(this, SpecificData.getDecoder(in)); } @Override protected boolean hasCustomCoders() { return true; } @Override public void customEncode(org.apache.avro.io.Encoder out) throws java.io.IOException { if (this.patientMrnIdentifier == null) { out.writeIndex(0); out.writeNull(); } else { out.writeIndex(1); out.writeString(this.patientMrnIdentifier); } } @Override public void customDecode(org.apache.avro.io.ResolvingDecoder in) throws java.io.IOException { org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff(); if (fieldOrder == null) { if (in.readIndex() != 1) { in.readNull(); this.patientMrnIdentifier = null; } else { this.patientMrnIdentifier = in.readString(this.patientMrnIdentifier instanceof Utf8 ? (Utf8)this.patientMrnIdentifier : null); } } else { for (int i = 0; i < 1; i++) { switch (fieldOrder[i].pos()) { case 0: if (in.readIndex() != 1) { in.readNull(); this.patientMrnIdentifier = null; } else { this.patientMrnIdentifier = in.readString(this.patientMrnIdentifier instanceof Utf8 ? 
(Utf8)this.patientMrnIdentifier : null); } break; default: throw new java.io.IOException(\"Corrupt ResolvingDecoder.\"); } } } }" + } +] \ No newline at end of file diff --git a/codebase_understanding/loader/__init__.py b/unoplat-code-confluence/loader/__init__.py similarity index 100% rename from codebase_understanding/loader/__init__.py rename to unoplat-code-confluence/loader/__init__.py diff --git a/codebase_understanding/loader/iload_json.py b/unoplat-code-confluence/loader/iload_json.py similarity index 100% rename from codebase_understanding/loader/iload_json.py rename to unoplat-code-confluence/loader/iload_json.py diff --git a/codebase_understanding/loader/iparse_json.py b/unoplat-code-confluence/loader/iparse_json.py similarity index 100% rename from codebase_understanding/loader/iparse_json.py rename to unoplat-code-confluence/loader/iparse_json.py diff --git a/codebase_understanding/loader/json_loader.py b/unoplat-code-confluence/loader/json_loader.py similarity index 100% rename from codebase_understanding/loader/json_loader.py rename to unoplat-code-confluence/loader/json_loader.py diff --git a/unoplat-code-confluence/loader/parse_json.py b/unoplat-code-confluence/loader/parse_json.py new file mode 100644 index 00000000..c6670830 --- /dev/null +++ b/unoplat-code-confluence/loader/parse_json.py @@ -0,0 +1,62 @@ +from typing import Dict, List +from pydantic import ValidationError +from data_models.chapi_unoplat_codebase import UnoplatCodebase +from data_models.chapi_unoplat_package import UnoplatPackage +from data_models.dspy.dspy_unoplat_fs_annotation_subset import DspyUnoplatAnnotationSubset +from data_models.dspy.dspy_unoplat_fs_function_call_subset import DspyUnoplatFunctionCallSubset +from data_models.dspy.dspy_unoplat_fs_function_subset import DspyUnoplatFunctionSubset +from data_models.dspy.dspy_unoplat_fs_node_subset import DspyUnoplatNodeSubset +from loader.iparse_json import IParseJson +from data_models.chapi_unoplat_node import Node +from loguru import logger +from nodeparser.isummariser import ISummariser + +class JsonParser(IParseJson): + def parse_json_to_nodes(self, json_data: dict, isummariser: ISummariser = None) -> UnoplatCodebase: + """Concrete implementation of the parse_json_to_nodes method.""" + unoplat_codebase = UnoplatCodebase() + + unoplat_package = UnoplatPackage() + + unoplat_package_dict: Dict[str,List[DspyUnoplatNodeSubset]] = {} + + for item in json_data: + try: + node = Node(**item) + # Creating node subset + node_subset = DspyUnoplatNodeSubset( + NodeName=node.node_name, + Imports=node.imports, + Extend=node.extend, + MultipleExtend=node.multiple_extend, + Fields=node.fields, + Annotations=[DspyUnoplatAnnotationSubset(Name=annotation.name,KeyValues=annotation.key_values) for annotation in node.annotations]) + function_subset_list = [] + + # Creating list function subset + + for func in node.functions: + function_subset = DspyUnoplatFunctionSubset( + Name=func.name, + ReturnType=func.return_type, + Annotations=[DspyUnoplatAnnotationSubset(Name=annotation.name,KeyValues=annotation.key_values) for annotation in node.annotations], + LocalVariables=func.local_variables, + Content=func.content, + FunctionCalls=[DspyUnoplatFunctionCallSubset(NodeName=call.node_name, FunctionName=call.function_name, Parameters=call.parameters) for call in func.function_calls]) + function_subset_list.append(function_subset) + + node_subset.functions = function_subset_list + + if node.package in unoplat_package_dict: + print("added package",node.package) + 
unoplat_package_dict[node.package].append(node_subset) + else: + list_node_subset: List[DspyUnoplatNodeSubset] = [] + list_node_subset.append(node_subset) + unoplat_package_dict[node.package] = list_node_subset + except Exception as e: + logger.error(f"Error processing node: {e}") + unoplat_package.package_dict = unoplat_package_dict + unoplat_codebase.packages = unoplat_package + + return unoplat_codebase \ No newline at end of file diff --git a/codebase_understanding/loguru.json b/unoplat-code-confluence/loguru.json similarity index 100% rename from codebase_understanding/loguru.json rename to unoplat-code-confluence/loguru.json diff --git a/codebase_understanding/main_old.py b/unoplat-code-confluence/main_old.py similarity index 100% rename from codebase_understanding/main_old.py rename to unoplat-code-confluence/main_old.py diff --git a/codebase_understanding/mock_node.json b/unoplat-code-confluence/mock_node.json similarity index 100% rename from codebase_understanding/mock_node.json rename to unoplat-code-confluence/mock_node.json diff --git a/codebase_understanding/nodeparser/__init__.py b/unoplat-code-confluence/nodeparser/__init__.py similarity index 100% rename from codebase_understanding/nodeparser/__init__.py rename to unoplat-code-confluence/nodeparser/__init__.py diff --git a/codebase_understanding/nodeparser/isummariser.py b/unoplat-code-confluence/nodeparser/isummariser.py similarity index 100% rename from codebase_understanding/nodeparser/isummariser.py rename to unoplat-code-confluence/nodeparser/isummariser.py diff --git a/codebase_understanding/nodeparser/nodesummariser.py b/unoplat-code-confluence/nodeparser/nodesummariser.py similarity index 96% rename from codebase_understanding/nodeparser/nodesummariser.py rename to unoplat-code-confluence/nodeparser/nodesummariser.py index bc0e8d1c..893551d1 100644 --- a/codebase_understanding/nodeparser/nodesummariser.py +++ b/unoplat-code-confluence/nodeparser/nodesummariser.py @@ -58,7 +58,7 @@ def _create_summary_prompt(self, node: Node) -> str: internal_calls_str = "" external_calls_str = "" for call in function.function_calls: - call_description = f"`{call.function_name}()` to `Fill in description of what the call is for. Focus on call interactions within the class and outside the class using fields`" + call_description = f"`{call.function_name}()` to `Fill in description of what the call is for. Focus on call interactions within the class and outside the class using fields. 
Use function metadata and content of the class`" if call.node_name == node.node_name: # Internal call internal_calls_str += f" - {call_description}\n" else: # External call diff --git a/codebase_understanding/nodeparser/tests/__init__.py b/unoplat-code-confluence/nodeparser/tests/__init__.py similarity index 100% rename from codebase_understanding/nodeparser/tests/__init__.py rename to unoplat-code-confluence/nodeparser/tests/__init__.py diff --git a/codebase_understanding/nodeparser/tests/test_nodesummariser.py b/unoplat-code-confluence/nodeparser/tests/test_nodesummariser.py similarity index 100% rename from codebase_understanding/nodeparser/tests/test_nodesummariser.py rename to unoplat-code-confluence/nodeparser/tests/test_nodesummariser.py diff --git a/codebase_understanding/performance.md b/unoplat-code-confluence/performance.md similarity index 100% rename from codebase_understanding/performance.md rename to unoplat-code-confluence/performance.md diff --git a/codebase_understanding/pyproject.toml b/unoplat-code-confluence/pyproject.toml similarity index 77% rename from codebase_understanding/pyproject.toml rename to unoplat-code-confluence/pyproject.toml index e224dd26..375ecea0 100644 --- a/codebase_understanding/pyproject.toml +++ b/unoplat-code-confluence/pyproject.toml @@ -1,7 +1,7 @@ [tool.poetry] -name = "dspyplayground" +name = "unoplat_code_confluence" version = "0.1.0" -description = "" +description = "codebase understanding" authors = ["JayGhiya "] readme = "README.md" @@ -18,8 +18,12 @@ pydantic-settings = "^2.2.1" litellm = "^1.37.19" pytest = "^8.2.1" crewai = {extras = ["tools"], version = "^0.30.11"} +dspy-ai = "^2.4.9" +[tool.poetry.group.dev.dependencies] +ipykernel = "^6.29.4" + [build-system] requires = ["poetry-core"] build-backend = "poetry.core.masonry.api" diff --git a/codebase_understanding/settings/__init__.py b/unoplat-code-confluence/settings/__init__.py similarity index 100% rename from codebase_understanding/settings/__init__.py rename to unoplat-code-confluence/settings/__init__.py diff --git a/codebase_understanding/settings/appsettings.py b/unoplat-code-confluence/settings/appsettings.py similarity index 100% rename from codebase_understanding/settings/appsettings.py rename to unoplat-code-confluence/settings/appsettings.py diff --git a/codebase_understanding/summarised_node_standard.txt b/unoplat-code-confluence/summarised_node_standard.txt similarity index 100% rename from codebase_understanding/summarised_node_standard.txt rename to unoplat-code-confluence/summarised_node_standard.txt diff --git a/unoplat-code-understanding-mvp.png b/unoplat-code-confluence/unoplat-code-understanding-mvp.png similarity index 100% rename from unoplat-code-understanding-mvp.png rename to unoplat-code-confluence/unoplat-code-understanding-mvp.png diff --git a/unoplat-code-confluence/unoplat_code_confluence/__init__.py b/unoplat-code-confluence/unoplat_code_confluence/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/codebase_understanding/utility/total_file_count.py b/unoplat-code-confluence/utility/total_file_count.py similarity index 100% rename from codebase_understanding/utility/total_file_count.py rename to unoplat-code-confluence/utility/total_file_count.py
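
For reference, here is a minimal sketch (not part of this diff) of how the new JsonParser added in unoplat-code-confluence/loader/parse_json.py could be exercised against a chapi JSON dump such as the mock_node.json fixture renamed above, which is a list of node objects like the SpartansKey/SpartansValue records shown earlier. The import path, the parse_json_to_nodes signature, and the packages/package_dict attributes follow the diff; reading the file with the standard json module (rather than the project's own loader under loader/) and the exact field names printed are assumptions made for illustration only.

# Hypothetical usage sketch; not part of this diff.
import json

from loader.parse_json import JsonParser

# Load a chapi JSON dump. mock_node.json ships with the repo; using the stdlib
# json module here is an assumption, the project also has a JSON loader under loader/.
with open("mock_node.json", "r") as f:
    json_data = json.load(f)  # expected to be a list of chapi node dicts

parser = JsonParser()
codebase = parser.parse_json_to_nodes(json_data)  # isummariser defaults to None

# Per parse_json.py above, UnoplatCodebase.packages holds an UnoplatPackage whose
# package_dict maps package name -> list of DspyUnoplatNodeSubset.
for package_name, node_subsets in codebase.packages.package_dict.items():
    print(f"{package_name}: {len(node_subsets)} node(s)")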