From 4d0c9415a2eafe7eef4933d82f140c34649e9b66 Mon Sep 17 00:00:00 2001 From: speakeasybot Date: Mon, 16 Dec 2024 16:51:04 +0000 Subject: [PATCH] ci: regenerated with OpenAPI Doc , Speakeasy CLI 1.455.6 --- .speakeasy/gen.lock | 114 ++- .speakeasy/workflow.lock | 19 +- .speakeasy/workflow.yaml | 2 +- README.md | 153 +-- RELEASES.md | 12 +- USAGE.md | 24 +- .../addfilesgraphgraphnamefilespostrequest.md | 9 - ...postinsertfilesgraphgraphnamefilespost.md} | 2 +- docs/models/configurerequest.md | 2 + docs/models/configureresponse.md | 2 + .../creategraphgraphgraphnamepostrequest.md | 2 +- ...etegraphgraphgraphnamedeletepostrequest.md | 8 + ...oryrequest.md => downloadgraphresponse.md} | 9 +- ...ongraphgraphnameconfigurationgetrequest.md | 2 +- ...tgraphmlgraphgraphnamegraphmlgetrequest.md | 8 + ...raphgraphnameinsertrequestidgetrequest.md} | 4 +- ...graphgraphnamequeryrequestidgetrequest.md} | 4 +- ...raphgetrequest.md => graphlistresponse.md} | 6 +- docs/models/insertrequest.md | 12 + docs/models/insertresponse.md | 12 +- docs/models/planresponse.md | 13 + ...ngraphgraphnameconfigurationpostrequest.md | 2 +- ...sertfilesgraphgraphnamefilespostrequest.md | 9 + ...stinsertgraphgraphnameinsertpostrequest.md | 4 +- ...postquerygraphgraphnamequerypostrequest.md | 2 +- docs/models/queryrequest.md | 10 +- docs/models/queryresponse.md | 10 +- docs/sdks/circlemindsdk/README.md | 363 ++++--- poetry.lock | 277 ++--- pyproject.toml | 10 +- src/circlemind_sdk/_version.py | 2 +- src/circlemind_sdk/basesdk.py | 20 +- src/circlemind_sdk/httpclient.py | 6 + src/circlemind_sdk/models/__init__.py | 73 +- ...add_files_graph_graph_name_files_postop.py | 60 -- ...sert_files_graph_graph_name_files_post.py} | 6 +- src/circlemind_sdk/models/configurerequest.py | 4 + .../models/configureresponse.py | 4 + .../create_graph_graph_graph_name_postop.py | 38 +- ...te_graph_graph_graph_name_delete_postop.py | 16 + ...oryrequest.py => downloadgraphresponse.py} | 23 +- ...on_graph_graph_name_configuration_getop.py | 38 +- ..._graphml_graph_graph_name_graphml_getop.py | 16 + ...raph_graph_name_insert_request_id_getop.py | 61 -- ...raph_graph_name_insert_request_id_getop.py | 29 + ...graph_graph_name_query_request_id_getop.py | 61 -- ...graph_graph_name_query_request_id_getop.py | 29 + .../models/graphlistresponse.py | 20 + ...h_list_graph_getop.py => insertrequest.py} | 41 +- src/circlemind_sdk/models/insertresponse.py | 18 +- src/circlemind_sdk/models/planresponse.py | 31 + ...n_graph_graph_name_configuration_postop.py | 38 +- ...ert_files_graph_graph_name_files_postop.py | 28 + ...t_insert_graph_graph_name_insert_postop.py | 46 +- ...ost_query_graph_graph_name_query_postop.py | 38 +- src/circlemind_sdk/models/queryrequest.py | 16 + src/circlemind_sdk/models/queryresponse.py | 8 + src/circlemind_sdk/models/validationerror.py | 6 +- src/circlemind_sdk/sdk.py | 960 ++++++++++++++---- src/circlemind_sdk/sdkconfiguration.py | 8 +- src/circlemind_sdk/utils/annotations.py | 59 +- src/circlemind_sdk/utils/eventstreaming.py | 62 +- src/circlemind_sdk/utils/forms.py | 14 +- src/circlemind_sdk/utils/requestbodies.py | 2 +- 64 files changed, 1934 insertions(+), 1053 deletions(-) delete mode 100644 docs/models/addfilesgraphgraphnamefilespostrequest.md rename docs/models/{bodyaddfilesgraphgraphnamefilespost.md => bodypostinsertfilesgraphgraphnamefilespost.md} (92%) create mode 100644 docs/models/deletegraphgraphgraphnamedeletepostrequest.md rename docs/models/{memoryrequest.md => downloadgraphresponse.md} (55%) create mode 100644 
docs/models/getgraphmlgraphgraphnamegraphmlgetrequest.md rename docs/models/{getinserthandlergraphgraphnameinsertrequestidgetrequest.md => getinsertstatusgraphgraphnameinsertrequestidgetrequest.md} (75%) rename docs/models/{getqueryhandlergraphgraphnamequeryrequestidgetrequest.md => getquerystatusgraphgraphnamequeryrequestidgetrequest.md} (75%) rename docs/models/{getgraphlistgraphgetrequest.md => graphlistresponse.md} (58%) create mode 100644 docs/models/insertrequest.md create mode 100644 docs/models/planresponse.md create mode 100644 docs/models/postinsertfilesgraphgraphnamefilespostrequest.md delete mode 100644 src/circlemind_sdk/models/add_files_graph_graph_name_files_postop.py rename src/circlemind_sdk/models/{body_add_files_graph_graph_name_files_post.py => body_post_insert_files_graph_graph_name_files_post.py} (82%) create mode 100644 src/circlemind_sdk/models/delete_graph_graph_graph_name_delete_postop.py rename src/circlemind_sdk/models/{memoryrequest.py => downloadgraphresponse.py} (71%) create mode 100644 src/circlemind_sdk/models/get_graphml_graph_graph_name_graphml_getop.py delete mode 100644 src/circlemind_sdk/models/get_insert_handler_graph_graph_name_insert_request_id_getop.py create mode 100644 src/circlemind_sdk/models/get_insert_status_graph_graph_name_insert_request_id_getop.py delete mode 100644 src/circlemind_sdk/models/get_query_handler_graph_graph_name_query_request_id_getop.py create mode 100644 src/circlemind_sdk/models/get_query_status_graph_graph_name_query_request_id_getop.py create mode 100644 src/circlemind_sdk/models/graphlistresponse.py rename src/circlemind_sdk/models/{get_graph_list_graph_getop.py => insertrequest.py} (52%) create mode 100644 src/circlemind_sdk/models/planresponse.py create mode 100644 src/circlemind_sdk/models/post_insert_files_graph_graph_name_files_postop.py diff --git a/.speakeasy/gen.lock b/.speakeasy/gen.lock index 1f0a470..1c4d943 100644 --- a/.speakeasy/gen.lock +++ b/.speakeasy/gen.lock @@ -1,19 +1,19 @@ lockVersion: 2.0.0 id: e543488a-81b3-4702-862f-2a1cfea780cc management: - docChecksum: 1640a83d51c038c3f5b9343a1ee11f3c - docVersion: 0.3.0 - speakeasyVersion: 1.441.0 - generationVersion: 2.460.1 - releaseVersion: 0.1.0 - configChecksum: dc05d1ee351d2d1b94b8edcca5f479c8 + docChecksum: f7b43ac674bea49b7ce5d016589e3e5b + docVersion: 0.0.4 + speakeasyVersion: 1.455.6 + generationVersion: 2.479.7 + releaseVersion: 0.1.0-post0 + configChecksum: 083bdb03619e296ce1850927beaf1a9c repoURL: https://github.com/circlemind-ai/circlemind-sdk.git installationURL: https://github.com/circlemind-ai/circlemind-sdk.git published: true features: python: additionalDependencies: 1.0.0 - core: 5.6.5 + core: 5.7.3 defaultEnabledRetries: 0.2.0 devContainers: 3.0.0 envVarSecurityUsage: 0.3.2 @@ -22,7 +22,6 @@ features: globalSecurityCallbacks: 1.0.0 globalSecurityFlattening: 1.0.0 globalServerURLs: 3.0.0 - ignores: 3.0.0 methodArguments: 1.0.2 multipartFileContentType: 1.0.0 nameOverrides: 3.0.0 @@ -30,7 +29,7 @@ features: responseFormat: 1.0.1 retries: 3.0.2 sdkHooks: 1.0.0 - unions: 3.0.3 + unions: 3.0.4 uploadStreams: 1.0.0 generatedFiles: - .devcontainer/README.md @@ -40,21 +39,25 @@ generatedFiles: - .python-version - .vscode/settings.json - USAGE.md - - docs/models/addfilesgraphgraphnamefilespostrequest.md - - docs/models/bodyaddfilesgraphgraphnamefilespost.md + - docs/models/bodypostinsertfilesgraphgraphnamefilespost.md - docs/models/configurerequest.md - docs/models/configureresponse.md - docs/models/creategraphgraphgraphnamepostrequest.md + - 
docs/models/deletegraphgraphgraphnamedeletepostrequest.md + - docs/models/downloadgraphresponse.md - docs/models/files.md - docs/models/getgraphconfigurationgraphgraphnameconfigurationgetrequest.md - - docs/models/getgraphlistgraphgetrequest.md - - docs/models/getinserthandlergraphgraphnameinsertrequestidgetrequest.md - - docs/models/getqueryhandlergraphgraphnamequeryrequestidgetrequest.md + - docs/models/getgraphmlgraphgraphnamegraphmlgetrequest.md + - docs/models/getinsertstatusgraphgraphnameinsertrequestidgetrequest.md + - docs/models/getquerystatusgraphgraphnamequeryrequestidgetrequest.md + - docs/models/graphlistresponse.md - docs/models/httpvalidationerror.md + - docs/models/insertrequest.md - docs/models/insertresponse.md - docs/models/loc.md - - docs/models/memoryrequest.md + - docs/models/planresponse.md - docs/models/postgraphconfigurationgraphgraphnameconfigurationpostrequest.md + - docs/models/postinsertfilesgraphgraphnamefilespostrequest.md - docs/models/postinsertgraphgraphnameinsertpostrequest.md - docs/models/postquerygraphgraphnamequerypostrequest.md - docs/models/queryrequest.md @@ -78,19 +81,23 @@ generatedFiles: - src/circlemind_sdk/basesdk.py - src/circlemind_sdk/httpclient.py - src/circlemind_sdk/models/__init__.py - - src/circlemind_sdk/models/add_files_graph_graph_name_files_postop.py - - src/circlemind_sdk/models/body_add_files_graph_graph_name_files_post.py + - src/circlemind_sdk/models/body_post_insert_files_graph_graph_name_files_post.py - src/circlemind_sdk/models/configurerequest.py - src/circlemind_sdk/models/configureresponse.py - src/circlemind_sdk/models/create_graph_graph_graph_name_postop.py + - src/circlemind_sdk/models/delete_graph_graph_graph_name_delete_postop.py + - src/circlemind_sdk/models/downloadgraphresponse.py - src/circlemind_sdk/models/get_graph_configuration_graph_graph_name_configuration_getop.py - - src/circlemind_sdk/models/get_graph_list_graph_getop.py - - src/circlemind_sdk/models/get_insert_handler_graph_graph_name_insert_request_id_getop.py - - src/circlemind_sdk/models/get_query_handler_graph_graph_name_query_request_id_getop.py + - src/circlemind_sdk/models/get_graphml_graph_graph_name_graphml_getop.py + - src/circlemind_sdk/models/get_insert_status_graph_graph_name_insert_request_id_getop.py + - src/circlemind_sdk/models/get_query_status_graph_graph_name_query_request_id_getop.py + - src/circlemind_sdk/models/graphlistresponse.py - src/circlemind_sdk/models/httpvalidationerror.py + - src/circlemind_sdk/models/insertrequest.py - src/circlemind_sdk/models/insertresponse.py - - src/circlemind_sdk/models/memoryrequest.py + - src/circlemind_sdk/models/planresponse.py - src/circlemind_sdk/models/post_graph_configuration_graph_graph_name_configuration_postop.py + - src/circlemind_sdk/models/post_insert_files_graph_graph_name_files_postop.py - src/circlemind_sdk/models/post_insert_graph_graph_name_insert_postop.py - src/circlemind_sdk/models/post_query_graph_graph_name_query_postop.py - src/circlemind_sdk/models/queryrequest.py @@ -144,7 +151,7 @@ examples: speakeasy-default-get-graph-list-graph-get: responses: "200": - application/json: "" + application/json: {"graphs": ["", "", ""]} "422": {} create_graph_graph__graph_id__post: speakeasy-default-create-graph-graph-graph-id-post: @@ -220,7 +227,7 @@ examples: application/json: {"domain": "liquid-godfather.org", "exampleQueries": "", "entityTypes": ["", "", ""]} responses: "200": - application/json: "" + application/json: {"domain": "liquid-godfather.org", "exampleQueries": "", 
"entityTypes": ["", "", ""]} "422": {} post_query_graph__graph_name__query_post: speakeasy-default-post-query-graph-graph-name-query-post: @@ -254,7 +261,7 @@ examples: application/json: {"memory": ""} responses: "200": - application/json: {"memoryId": "", "requestId": "", "requestTime": ""} + application/json: {"requestId": "", "requestTime": "", "memoryId": ""} "422": {} add_files_graph__graph_name__files_post: speakeasy-default-add-files-graph-graph-name-files-post: @@ -279,3 +286,62 @@ examples: "200": application/json: {"status": "", "answer": "", "context": ""} "422": {} + get_user_plan_plan_get: + speakeasy-default-get-user-plan-plan-get: + responses: + "200": + application/json: {"requestsCount": "9154.04", "requestsMax": 971845, "planId": "", "planTTL": 567555} + delete_graph_graph__graph_name__delete_post: + speakeasy-default-delete-graph-graph-graph-name-delete-post: + parameters: + path: + graph_name: "" + responses: + "200": + application/json: "" + "422": {} + get_graphml_graph__graph_name__graphml_get: + speakeasy-default-get-graphml-graph-graph-name-graphml-get: + parameters: + path: + graph_name: "" + responses: + "200": + application/json: {"presignedUrl": "https://amazing-understanding.org/"} + "422": {} + get_query_status_graph__graph_name__query__request_id__get: + speakeasy-default-get-query-status-graph-graph-name-query-request-id-get: + parameters: + path: + graph_name: "" + request_id: "" + query: + requestTime: 816039 + responses: + "200": + application/json: {"status": "", "answer": "", "context": ""} + "422": {} + post_insert_files_graph__graph_name__files_post: + speakeasy-default-post-insert-files-graph-graph-name-files-post: + parameters: + path: + graph_name: "" + requestBody: + multipart/form-data: {"files": [{}, {}]} + responses: + "200": + application/json: {"requestId": "", "requestTime": 296236, "memoryId": ""} + "422": {} + get_insert_status_graph__graph_name__insert__request_id__get: + speakeasy-default-get-insert-status-graph-graph-name-insert-request-id-get: + parameters: + path: + graph_name: "" + request_id: "" + query: + requestTime: 877284 + responses: + "200": + application/json: {"status": "", "answer": "", "context": ""} + "422": {} +generatedTests: {} diff --git a/.speakeasy/workflow.lock b/.speakeasy/workflow.lock index 76a6413..b544cb7 100644 --- a/.speakeasy/workflow.lock +++ b/.speakeasy/workflow.lock @@ -1,20 +1,21 @@ -speakeasyVersion: 1.441.0 +speakeasyVersion: 1.455.6 sources: CirclemindSDK-OAS: sourceNamespace: circlemind-sdk-oas - sourceRevisionDigest: sha256:923be31e0c441aa56b52078d053d818741d896d270cd722de171206549844efa - sourceBlobDigest: sha256:bc632a8eddce0a78d4f3f4e0b623c64c58f0a7d8271b4919de69bb918ac792cc + sourceRevisionDigest: sha256:2758caf5474f484de1a4bfac29c043126cf260fad4633b4d85419cdccdd1f847 + sourceBlobDigest: sha256:16c2e2296dd00719388d62f41b67b54b68c890a4cb2d8c6cf418576dc6a80565 tags: - latest - - speakeasy-sdk-regen-1732061009 + - speakeasy-sdk-regen-1734367818 + - 0.0.4 targets: circlemind-sdk: source: CirclemindSDK-OAS sourceNamespace: circlemind-sdk-oas - sourceRevisionDigest: sha256:923be31e0c441aa56b52078d053d818741d896d270cd722de171206549844efa - sourceBlobDigest: sha256:bc632a8eddce0a78d4f3f4e0b623c64c58f0a7d8271b4919de69bb918ac792cc - codeSamplesNamespace: circlemind-sdk-oas-code-samples - codeSamplesRevisionDigest: sha256:f2c67827b1da134c8646849a7f04c892367d18f71fcd6ee9fbcd2b260f26a6f1 + sourceRevisionDigest: sha256:2758caf5474f484de1a4bfac29c043126cf260fad4633b4d85419cdccdd1f847 + sourceBlobDigest: 
sha256:16c2e2296dd00719388d62f41b67b54b68c890a4cb2d8c6cf418576dc6a80565 + codeSamplesNamespace: circlemind-sdk-oas-python-code-samples + codeSamplesRevisionDigest: sha256:70bb068b0f3014dfd9edfccd4aa5df6fbe8fb7f3c12f35adb26aad151cd00aa7 workflow: workflowVersion: 1.0.0 speakeasyVersion: latest @@ -33,5 +34,5 @@ workflow: token: $pypi_token codeSamples: registry: - location: registry.speakeasyapi.dev/circlemind/circlemind/circlemind-sdk-oas-code-samples + location: registry.speakeasyapi.dev/circlemind/circlemind/circlemind-sdk-oas-python-code-samples blocking: false diff --git a/.speakeasy/workflow.yaml b/.speakeasy/workflow.yaml index 731c801..e4eca98 100644 --- a/.speakeasy/workflow.yaml +++ b/.speakeasy/workflow.yaml @@ -15,5 +15,5 @@ targets: token: $pypi_token codeSamples: registry: - location: registry.speakeasyapi.dev/circlemind/circlemind/circlemind-sdk-oas-code-samples + location: registry.speakeasyapi.dev/circlemind/circlemind/circlemind-sdk-oas-python-code-samples blocking: false diff --git a/README.md b/README.md index eac17cf..02493cb 100644 --- a/README.md +++ b/README.md @@ -20,17 +20,22 @@ Developer-friendly & type-safe Python SDK specifically catered to leverage *circ ## Table of Contents + +* [circlemind-sdk](#circlemind-sdk) + * [SDK Installation](#sdk-installation) + * [IDE Support](#ide-support) + * [SDK Example Usage](#sdk-example-usage) + * [Available Resources and Operations](#available-resources-and-operations) + * [Retries](#retries) + * [Error Handling](#error-handling) + * [Server Selection](#server-selection) + * [Custom HTTP Client](#custom-http-client) + * [Authentication](#authentication) + * [Debugging](#debugging) +* [Development](#development) + * [Maturity](#maturity) + * [Contributions](#contributions) -* [SDK Installation](#sdk-installation) -* [IDE Support](#ide-support) -* [SDK Example Usage](#sdk-example-usage) -* [Available Resources and Operations](#available-resources-and-operations) -* [Retries](#retries) -* [Error Handling](#error-handling) -* [Server Selection](#server-selection) -* [Custom HTTP Client](#custom-http-client) -* [Authentication](#authentication) -* [Debugging](#debugging) @@ -75,15 +80,14 @@ Generally, the SDK will work well with most IDEs out of the box. However, when u from circlemind_sdk import CirclemindSDK import os -s = CirclemindSDK( +with CirclemindSDK( api_key_header=os.getenv("CIRCLEMINDSDK_API_KEY_HEADER", ""), -) +) as circlemind_sdk: -res = s.get_graph_configuration(graph_name="") + res = circlemind_sdk.get_user_plan_plan_get() -if res is not None: - # handle response - pass + # Handle response + print(res) ```
@@ -96,13 +100,14 @@ from circlemind_sdk import CirclemindSDK import os async def main(): - s = CirclemindSDK( + async with CirclemindSDK( api_key_header=os.getenv("CIRCLEMINDSDK_API_KEY_HEADER", ""), - ) - res = await s.get_graph_configuration_async(graph_name="") - if res is not None: - # handle response - pass + ) as circlemind_sdk: + + res = await circlemind_sdk.get_user_plan_plan_get_async() + + # Handle response + print(res) asyncio.run(main()) ``` @@ -116,15 +121,18 @@ asyncio.run(main()) ### [CirclemindSDK](docs/sdks/circlemindsdk/README.md) -* [get_graph_configuration](docs/sdks/circlemindsdk/README.md#get_graph_configuration) - Get Graph Configuration -* [create_graph_configuration](docs/sdks/circlemindsdk/README.md#create_graph_configuration) - Post Graph Configuration -* [get_graph_list](docs/sdks/circlemindsdk/README.md#get_graph_list) - Get Graph List -* [create_graph](docs/sdks/circlemindsdk/README.md#create_graph) - Create Graph -* [create_query](docs/sdks/circlemindsdk/README.md#create_query) - Post Query -* [get_query_handler](docs/sdks/circlemindsdk/README.md#get_query_handler) - Get Query Handler -* [create_insert](docs/sdks/circlemindsdk/README.md#create_insert) - Post Insert -* [create_graph_files](docs/sdks/circlemindsdk/README.md#create_graph_files) - Add Files -* [get_insert_handler](docs/sdks/circlemindsdk/README.md#get_insert_handler) - Get Insert Handler +* [get_user_plan_plan_get](docs/sdks/circlemindsdk/README.md#get_user_plan_plan_get) - User plan +* [get_graph_configuration](docs/sdks/circlemindsdk/README.md#get_graph_configuration) - Graph configuration (get) +* [set_graph_configuration](docs/sdks/circlemindsdk/README.md#set_graph_configuration) - Graph configuration (set) +* [list_graphs](docs/sdks/circlemindsdk/README.md#list_graphs) - List graphs +* [create_graph](docs/sdks/circlemindsdk/README.md#create_graph) - Create new graph +* [delete_graph](docs/sdks/circlemindsdk/README.md#delete_graph) - Delete existing graph +* [download_graphml](docs/sdks/circlemindsdk/README.md#download_graphml) - Download graphml +* [query](docs/sdks/circlemindsdk/README.md#query) - Query memory +* [get_query_status](docs/sdks/circlemindsdk/README.md#get_query_status) - Check query request status +* [add](docs/sdks/circlemindsdk/README.md#add) - Add memory +* [add_from_files](docs/sdks/circlemindsdk/README.md#add_from_files) - Add memory (from files) +* [get_add_status](docs/sdks/circlemindsdk/README.md#get_add_status) - Check add request status @@ -137,38 +145,36 @@ Some of the endpoints in this SDK support retries. 
If you use the SDK without an To change the default retry strategy for a single API call, simply provide a `RetryConfig` object to the call: ```python from circlemind_sdk import CirclemindSDK -from circlemindsdk.utils import BackoffStrategy, RetryConfig +from circlemind_sdk.utils import BackoffStrategy, RetryConfig import os -s = CirclemindSDK( +with CirclemindSDK( api_key_header=os.getenv("CIRCLEMINDSDK_API_KEY_HEADER", ""), -) +) as circlemind_sdk: -res = s.get_graph_configuration(graph_name="", - RetryConfig("backoff", BackoffStrategy(1, 50, 1.1, 100), False)) + res = circlemind_sdk.get_user_plan_plan_get(, + RetryConfig("backoff", BackoffStrategy(1, 50, 1.1, 100), False)) -if res is not None: - # handle response - pass + # Handle response + print(res) ``` If you'd like to override the default retry strategy for all operations that support retries, you can use the `retry_config` optional parameter when initializing the SDK: ```python from circlemind_sdk import CirclemindSDK -from circlemindsdk.utils import BackoffStrategy, RetryConfig +from circlemind_sdk.utils import BackoffStrategy, RetryConfig import os -s = CirclemindSDK( +with CirclemindSDK( retry_config=RetryConfig("backoff", BackoffStrategy(1, 50, 1.1, 100), False), api_key_header=os.getenv("CIRCLEMINDSDK_API_KEY_HEADER", ""), -) +) as circlemind_sdk: -res = s.get_graph_configuration(graph_name="") + res = circlemind_sdk.get_user_plan_plan_get() -if res is not None: - # handle response - pass + # Handle response + print(res) ``` @@ -200,24 +206,23 @@ When custom error responses are specified for an operation, the SDK may also rai from circlemind_sdk import CirclemindSDK, models import os -s = CirclemindSDK( +with CirclemindSDK( api_key_header=os.getenv("CIRCLEMINDSDK_API_KEY_HEADER", ""), -) - -res = None -try: - res = s.get_graph_configuration(graph_name="") - - if res is not None: - # handle response - pass - -except models.HTTPValidationError as e: - # handle e.data: models.HTTPValidationErrorData - raise(e) -except models.SDKError as e: - # handle exception - raise(e) +) as circlemind_sdk: + res = None + try: + + res = circlemind_sdk.get_graph_configuration(graph_name="") + + # Handle response + print(res) + + except models.HTTPValidationError as e: + # handle e.data: models.HTTPValidationErrorData + raise(e) + except models.SDKError as e: + # handle exception + raise(e) ``` @@ -231,16 +236,15 @@ The default server can also be overridden globally by passing a URL to the `serv from circlemind_sdk import CirclemindSDK import os -s = CirclemindSDK( +with CirclemindSDK( server_url="https://api.circlemind.co", api_key_header=os.getenv("CIRCLEMINDSDK_API_KEY_HEADER", ""), -) +) as circlemind_sdk: -res = s.get_graph_configuration(graph_name="") + res = circlemind_sdk.get_user_plan_plan_get() -if res is not None: - # handle response - pass + # Handle response + print(res) ``` @@ -342,15 +346,14 @@ To authenticate with the API the `api_key_header` parameter must be set when ini from circlemind_sdk import CirclemindSDK import os -s = CirclemindSDK( +with CirclemindSDK( api_key_header=os.getenv("CIRCLEMINDSDK_API_KEY_HEADER", ""), -) +) as circlemind_sdk: -res = s.get_graph_configuration(graph_name="") + res = circlemind_sdk.get_user_plan_plan_get() -if res is not None: - # handle response - pass + # Handle response + print(res) ``` diff --git a/RELEASES.md b/RELEASES.md index eddae45..bce7940 100644 --- a/RELEASES.md +++ b/RELEASES.md @@ -18,4 +18,14 @@ Based on: ### Generated - [python v0.1.0] . 
### Releases -- [PyPI v0.1.0] https://pypi.org/project/circlemind-sdk/0.1.0 - . \ No newline at end of file +- [PyPI v0.1.0] https://pypi.org/project/circlemind-sdk/0.1.0 - . + +## 2024-12-16 16:50:13 +### Changes +Based on: +- OpenAPI Doc +- Speakeasy CLI 1.455.6 (2.479.7) https://github.com/speakeasy-api/speakeasy +### Generated +- [python v0.1.0-post0] . +### Releases +- [PyPI v0.1.0-post0] https://pypi.org/project/circlemind-sdk/0.1.0-post0 - . \ No newline at end of file diff --git a/USAGE.md b/USAGE.md index 54fba81..0f4a7e7 100644 --- a/USAGE.md +++ b/USAGE.md @@ -4,15 +4,14 @@ from circlemind_sdk import CirclemindSDK import os -s = CirclemindSDK( +with CirclemindSDK( api_key_header=os.getenv("CIRCLEMINDSDK_API_KEY_HEADER", ""), -) +) as circlemind_sdk: -res = s.get_graph_configuration(graph_name="") + res = circlemind_sdk.get_user_plan_plan_get() -if res is not None: - # handle response - pass + # Handle response + print(res) ```
@@ -25,13 +24,14 @@ from circlemind_sdk import CirclemindSDK import os async def main(): - s = CirclemindSDK( + async with CirclemindSDK( api_key_header=os.getenv("CIRCLEMINDSDK_API_KEY_HEADER", ""), - ) - res = await s.get_graph_configuration_async(graph_name="") - if res is not None: - # handle response - pass + ) as circlemind_sdk: + + res = await circlemind_sdk.get_user_plan_plan_get_async() + + # Handle response + print(res) asyncio.run(main()) ``` diff --git a/docs/models/addfilesgraphgraphnamefilespostrequest.md b/docs/models/addfilesgraphgraphnamefilespostrequest.md deleted file mode 100644 index ac16748..0000000 --- a/docs/models/addfilesgraphgraphnamefilespostrequest.md +++ /dev/null @@ -1,9 +0,0 @@ -# AddFilesGraphGraphNameFilesPostRequest - - -## Fields - -| Field | Type | Required | Description | -| ---------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------- | -| `graph_name` | *Nullable[str]* | :heavy_check_mark: | N/A | -| `body_add_files_graph_graph_name_files_post` | [models.BodyAddFilesGraphGraphNameFilesPost](../models/bodyaddfilesgraphgraphnamefilespost.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/bodyaddfilesgraphgraphnamefilespost.md b/docs/models/bodypostinsertfilesgraphgraphnamefilespost.md similarity index 92% rename from docs/models/bodyaddfilesgraphgraphnamefilespost.md rename to docs/models/bodypostinsertfilesgraphgraphnamefilespost.md index 854a005..9097b9e 100644 --- a/docs/models/bodyaddfilesgraphgraphnamefilespost.md +++ b/docs/models/bodypostinsertfilesgraphgraphnamefilespost.md @@ -1,4 +1,4 @@ -# BodyAddFilesGraphGraphNameFilesPost +# BodyPostInsertFilesGraphGraphNameFilesPost ## Fields diff --git a/docs/models/configurerequest.md b/docs/models/configurerequest.md index 1d4bc57..42f1977 100644 --- a/docs/models/configurerequest.md +++ b/docs/models/configurerequest.md @@ -1,5 +1,7 @@ # ConfigureRequest +Data model for graph configuration request. + ## Fields diff --git a/docs/models/configureresponse.md b/docs/models/configureresponse.md index 7855571..856c08b 100644 --- a/docs/models/configureresponse.md +++ b/docs/models/configureresponse.md @@ -1,5 +1,7 @@ # ConfigureResponse +Data model for graph configuration response. 
+ ## Fields diff --git a/docs/models/creategraphgraphgraphnamepostrequest.md b/docs/models/creategraphgraphgraphnamepostrequest.md index 39a1344..6a05a33 100644 --- a/docs/models/creategraphgraphgraphnamepostrequest.md +++ b/docs/models/creategraphgraphgraphnamepostrequest.md @@ -5,5 +5,5 @@ | Field | Type | Required | Description | | -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -| `graph_name` | *Nullable[str]* | :heavy_check_mark: | N/A | +| `graph_name` | *str* | :heavy_check_mark: | N/A | | `configure_request` | [models.ConfigureRequest](../models/configurerequest.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/deletegraphgraphgraphnamedeletepostrequest.md b/docs/models/deletegraphgraphgraphnamedeletepostrequest.md new file mode 100644 index 0000000..1907d88 --- /dev/null +++ b/docs/models/deletegraphgraphgraphnamedeletepostrequest.md @@ -0,0 +1,8 @@ +# DeleteGraphGraphGraphNameDeletePostRequest + + +## Fields + +| Field | Type | Required | Description | +| ------------------ | ------------------ | ------------------ | ------------------ | +| `graph_name` | *str* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/memoryrequest.md b/docs/models/downloadgraphresponse.md similarity index 55% rename from docs/models/memoryrequest.md rename to docs/models/downloadgraphresponse.md index 5dbaaf3..f1bb23c 100644 --- a/docs/models/memoryrequest.md +++ b/docs/models/downloadgraphresponse.md @@ -1,10 +1,11 @@ -# MemoryRequest +# DownloadGraphResponse + +Data model for graph download response. ## Fields | Field | Type | Required | Description | | ----------------------- | ----------------------- | ----------------------- | ----------------------- | -| `memory` | *str* | :heavy_check_mark: | N/A | -| `memory_id` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | -| `metadata` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | \ No newline at end of file +| `presigned_url` | *str* | :heavy_check_mark: | N/A | +| `last_modified` | *OptionalNullable[int]* | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/getgraphconfigurationgraphgraphnameconfigurationgetrequest.md b/docs/models/getgraphconfigurationgraphgraphnameconfigurationgetrequest.md index b79dc7e..491c9da 100644 --- a/docs/models/getgraphconfigurationgraphgraphnameconfigurationgetrequest.md +++ b/docs/models/getgraphconfigurationgraphgraphnameconfigurationgetrequest.md @@ -5,4 +5,4 @@ | Field | Type | Required | Description | | ------------------ | ------------------ | ------------------ | ------------------ | -| `graph_name` | *Nullable[str]* | :heavy_check_mark: | N/A | \ No newline at end of file +| `graph_name` | *str* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/getgraphmlgraphgraphnamegraphmlgetrequest.md b/docs/models/getgraphmlgraphgraphnamegraphmlgetrequest.md new file mode 100644 index 0000000..dcac8bb --- /dev/null +++ b/docs/models/getgraphmlgraphgraphnamegraphmlgetrequest.md @@ -0,0 +1,8 @@ +# GetGraphmlGraphGraphNameGraphmlGetRequest + + +## Fields + +| Field | Type | Required | Description | +| ------------------ | ------------------ | ------------------ | ------------------ | +| `graph_name` | *str* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git 
a/docs/models/getinserthandlergraphgraphnameinsertrequestidgetrequest.md b/docs/models/getinsertstatusgraphgraphnameinsertrequestidgetrequest.md similarity index 75% rename from docs/models/getinserthandlergraphgraphnameinsertrequestidgetrequest.md rename to docs/models/getinsertstatusgraphgraphnameinsertrequestidgetrequest.md index 5710b5a..acf5a50 100644 --- a/docs/models/getinserthandlergraphgraphnameinsertrequestidgetrequest.md +++ b/docs/models/getinsertstatusgraphgraphnameinsertrequestidgetrequest.md @@ -1,10 +1,10 @@ -# GetInsertHandlerGraphGraphNameInsertRequestIDGetRequest +# GetInsertStatusGraphGraphNameInsertRequestIDGetRequest ## Fields | Field | Type | Required | Description | | ------------------ | ------------------ | ------------------ | ------------------ | -| `graph_name` | *Nullable[str]* | :heavy_check_mark: | N/A | +| `graph_name` | *str* | :heavy_check_mark: | N/A | | `request_id` | *str* | :heavy_check_mark: | N/A | | `request_time` | *int* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/getqueryhandlergraphgraphnamequeryrequestidgetrequest.md b/docs/models/getquerystatusgraphgraphnamequeryrequestidgetrequest.md similarity index 75% rename from docs/models/getqueryhandlergraphgraphnamequeryrequestidgetrequest.md rename to docs/models/getquerystatusgraphgraphnamequeryrequestidgetrequest.md index 997e104..ed47dd2 100644 --- a/docs/models/getqueryhandlergraphgraphnamequeryrequestidgetrequest.md +++ b/docs/models/getquerystatusgraphgraphnamequeryrequestidgetrequest.md @@ -1,10 +1,10 @@ -# GetQueryHandlerGraphGraphNameQueryRequestIDGetRequest +# GetQueryStatusGraphGraphNameQueryRequestIDGetRequest ## Fields | Field | Type | Required | Description | | ------------------ | ------------------ | ------------------ | ------------------ | -| `graph_name` | *Nullable[str]* | :heavy_check_mark: | N/A | +| `graph_name` | *str* | :heavy_check_mark: | N/A | | `request_id` | *str* | :heavy_check_mark: | N/A | | `request_time` | *int* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/getgraphlistgraphgetrequest.md b/docs/models/graphlistresponse.md similarity index 58% rename from docs/models/getgraphlistgraphgetrequest.md rename to docs/models/graphlistresponse.md index 178625a..948cea3 100644 --- a/docs/models/getgraphlistgraphgetrequest.md +++ b/docs/models/graphlistresponse.md @@ -1,8 +1,10 @@ -# GetGraphListGraphGetRequest +# GraphListResponse + +Data model for graph list response. ## Fields | Field | Type | Required | Description | | ----------------------- | ----------------------- | ----------------------- | ----------------------- | -| `graph_name` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | \ No newline at end of file +| `graphs` | List[*str*] | :heavy_check_mark: | List of existing graphs | \ No newline at end of file diff --git a/docs/models/insertrequest.md b/docs/models/insertrequest.md new file mode 100644 index 0000000..d08003a --- /dev/null +++ b/docs/models/insertrequest.md @@ -0,0 +1,12 @@ +# InsertRequest + +Data model for insert request. 
+ + +## Fields + +| Field | Type | Required | Description | +| ---------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `memory` | *str* | :heavy_check_mark: | Raw text to be inserted into the graph | +| `metadata` | *OptionalNullable[str]* | :heavy_minus_sign: | Stringified JSON dictionary containing any metadata to link to the given memory.

(i.e., `'{"id": "asdash-234fdsc-erwer-wqes2", "url": "https://example.com"}'`) | +| `memory_id` | *OptionalNullable[str]* | :heavy_minus_sign: | Reserved | \ No newline at end of file diff --git a/docs/models/insertresponse.md b/docs/models/insertresponse.md index 5f2babd..a120574 100644 --- a/docs/models/insertresponse.md +++ b/docs/models/insertresponse.md @@ -1,10 +1,12 @@ # InsertResponse +Data model for insert response. + ## Fields -| Field | Type | Required | Description | -| ------------------ | ------------------ | ------------------ | ------------------ | -| `memory_id` | *str* | :heavy_check_mark: | N/A | -| `request_id` | *str* | :heavy_check_mark: | N/A | -| `request_time` | *str* | :heavy_check_mark: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | +| `request_id` | *str* | :heavy_check_mark: | Unique request ID to be used to track the request status | +| `request_time` | *int* | :heavy_check_mark: | Further value to provide to track the request status | +| `memory_id` | *str* | :heavy_check_mark: | Reserved | \ No newline at end of file diff --git a/docs/models/planresponse.md b/docs/models/planresponse.md new file mode 100644 index 0000000..334f26e --- /dev/null +++ b/docs/models/planresponse.md @@ -0,0 +1,13 @@ +# PlanResponse + +Data model for plan response. + + +## Fields + +| Field | Type | Required | Description | +| ---------------------------------------------------- | ---------------------------------------------------- | ---------------------------------------------------- | ---------------------------------------------------- | +| `requests_count` | *float* | :heavy_check_mark: | Total number of requests used for the current period | +| `requests_max` | *int* | :heavy_check_mark: | Number of available requests for the active plan | +| `plan_id` | *str* | :heavy_check_mark: | N/A | +| `plan_ttl` | *int* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/postgraphconfigurationgraphgraphnameconfigurationpostrequest.md b/docs/models/postgraphconfigurationgraphgraphnameconfigurationpostrequest.md index 35107d4..74889d3 100644 --- a/docs/models/postgraphconfigurationgraphgraphnameconfigurationpostrequest.md +++ b/docs/models/postgraphconfigurationgraphgraphnameconfigurationpostrequest.md @@ -5,5 +5,5 @@ | Field | Type | Required | Description | | -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -| `graph_name` | *Nullable[str]* | :heavy_check_mark: | N/A | +| `graph_name` | *str* | :heavy_check_mark: | N/A | | `configure_request` | [models.ConfigureRequest](../models/configurerequest.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/postinsertfilesgraphgraphnamefilespostrequest.md b/docs/models/postinsertfilesgraphgraphnamefilespostrequest.md new file mode 100644 index 0000000..a034437 --- /dev/null +++ b/docs/models/postinsertfilesgraphgraphnamefilespostrequest.md @@ -0,0 +1,9 @@ +# PostInsertFilesGraphGraphNameFilesPostRequest + + +## Fields + +| Field | Type | Required | Description | +| 
------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------ | +| `graph_name` | *str* | :heavy_check_mark: | N/A | +| `body_post_insert_files_graph_graph_name_files_post` | [models.BodyPostInsertFilesGraphGraphNameFilesPost](../models/bodypostinsertfilesgraphgraphnamefilespost.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/postinsertgraphgraphnameinsertpostrequest.md b/docs/models/postinsertgraphgraphnameinsertpostrequest.md index 7610106..e111940 100644 --- a/docs/models/postinsertgraphgraphnameinsertpostrequest.md +++ b/docs/models/postinsertgraphgraphnameinsertpostrequest.md @@ -5,5 +5,5 @@ | Field | Type | Required | Description | | -------------------------------------------------- | -------------------------------------------------- | -------------------------------------------------- | -------------------------------------------------- | -| `graph_name` | *Nullable[str]* | :heavy_check_mark: | N/A | -| `memory_request` | [models.MemoryRequest](../models/memoryrequest.md) | :heavy_check_mark: | N/A | \ No newline at end of file +| `graph_name` | *str* | :heavy_check_mark: | N/A | +| `insert_request` | [models.InsertRequest](../models/insertrequest.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/postquerygraphgraphnamequerypostrequest.md b/docs/models/postquerygraphgraphnamequerypostrequest.md index 582eb34..6ab5411 100644 --- a/docs/models/postquerygraphgraphnamequerypostrequest.md +++ b/docs/models/postquerygraphgraphnamequerypostrequest.md @@ -5,5 +5,5 @@ | Field | Type | Required | Description | | ------------------------------------------------ | ------------------------------------------------ | ------------------------------------------------ | ------------------------------------------------ | -| `graph_name` | *Nullable[str]* | :heavy_check_mark: | N/A | +| `graph_name` | *str* | :heavy_check_mark: | N/A | | `query_request` | [models.QueryRequest](../models/queryrequest.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/queryrequest.md b/docs/models/queryrequest.md index 4b3a87a..17aded2 100644 --- a/docs/models/queryrequest.md +++ b/docs/models/queryrequest.md @@ -1,9 +1,11 @@ # QueryRequest +Data model for query request. 
+ ## Fields -| Field | Type | Required | Description | -| ----------------------- | ----------------------- | ----------------------- | ----------------------- | -| `query` | *str* | :heavy_check_mark: | N/A | -| `parameters` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `query` | *str* | :heavy_check_mark: | N/A | +| `parameters` | *OptionalNullable[str]* | :heavy_minus_sign: | Optional stringified JSON parameters:
- only_context (bool = false): return only the context of the query instead of processing it via an LLM;
- with_references (bool = false): include references in the response.

(i.e., `'{"only_context": true}'`) | \ No newline at end of file diff --git a/docs/models/queryresponse.md b/docs/models/queryresponse.md index a1362be..0e52738 100644 --- a/docs/models/queryresponse.md +++ b/docs/models/queryresponse.md @@ -1,9 +1,11 @@ # QueryResponse +Data model for query response. + ## Fields -| Field | Type | Required | Description | -| ------------------ | ------------------ | ------------------ | ------------------ | -| `request_id` | *str* | :heavy_check_mark: | N/A | -| `request_time` | *int* | :heavy_check_mark: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | +| `request_id` | *str* | :heavy_check_mark: | Unique request ID to be used to track the request status | +| `request_time` | *int* | :heavy_check_mark: | Further value to provide to track the request status | \ No newline at end of file diff --git a/docs/sdks/circlemindsdk/README.md b/docs/sdks/circlemindsdk/README.md index 9294f60..e50bfe6 100644 --- a/docs/sdks/circlemindsdk/README.md +++ b/docs/sdks/circlemindsdk/README.md @@ -4,19 +4,59 @@ ### Available Operations -* [get_graph_configuration](#get_graph_configuration) - Get Graph Configuration -* [create_graph_configuration](#create_graph_configuration) - Post Graph Configuration -* [get_graph_list](#get_graph_list) - Get Graph List -* [create_graph](#create_graph) - Create Graph -* [create_query](#create_query) - Post Query -* [get_query_handler](#get_query_handler) - Get Query Handler -* [create_insert](#create_insert) - Post Insert -* [create_graph_files](#create_graph_files) - Add Files -* [get_insert_handler](#get_insert_handler) - Get Insert Handler +* [get_user_plan_plan_get](#get_user_plan_plan_get) - User plan +* [get_graph_configuration](#get_graph_configuration) - Graph configuration (get) +* [set_graph_configuration](#set_graph_configuration) - Graph configuration (set) +* [list_graphs](#list_graphs) - List graphs +* [create_graph](#create_graph) - Create new graph +* [delete_graph](#delete_graph) - Delete existing graph +* [download_graphml](#download_graphml) - Download graphml +* [query](#query) - Query memory +* [get_query_status](#get_query_status) - Check query request status +* [add](#add) - Add memory +* [add_from_files](#add_from_files) - Add memory (from files) +* [get_add_status](#get_add_status) - Check add request status + +## get_user_plan_plan_get + +Return the active plan for the current user and its usage metrics. + +### Example Usage + +```python +from circlemind_sdk import CirclemindSDK +import os + +with CirclemindSDK( + api_key_header=os.getenv("CIRCLEMINDSDK_API_KEY_HEADER", ""), +) as circlemind_sdk: + + res = circlemind_sdk.get_user_plan_plan_get() + + # Handle response + print(res) + +``` + +### Parameters + +| Parameter | Type | Required | Description | +| ------------------------------------------------------------------- | ------------------------------------------------------------------- | ------------------------------------------------------------------- | ------------------------------------------------------------------- | +| `retries` | [Optional[utils.RetryConfig]](../../models/utils/retryconfig.md) | :heavy_minus_sign: | Configuration to override the default retry behavior of the client. 
| + +### Response + +**[models.PlanResponse](../../models/planresponse.md)** + +### Errors + +| Error Type | Status Code | Content Type | +| --------------- | --------------- | --------------- | +| models.SDKError | 4XX, 5XX | \*/\* | ## get_graph_configuration -Get Graph Configuration +Retrieve the configuration details of a specific graph by its name. ### Example Usage @@ -24,15 +64,14 @@ Get Graph Configuration from circlemind_sdk import CirclemindSDK import os -s = CirclemindSDK( +with CirclemindSDK( api_key_header=os.getenv("CIRCLEMINDSDK_API_KEY_HEADER", ""), -) +) as circlemind_sdk: -res = s.get_graph_configuration(graph_name="") + res = circlemind_sdk.get_graph_configuration(graph_name="") -if res is not None: - # handle response - pass + # Handle response + print(res) ``` @@ -40,7 +79,7 @@ if res is not None: | Parameter | Type | Required | Description | | ------------------------------------------------------------------- | ------------------------------------------------------------------- | ------------------------------------------------------------------- | ------------------------------------------------------------------- | -| `graph_name` | *Nullable[str]* | :heavy_check_mark: | N/A | +| `graph_name` | *str* | :heavy_check_mark: | N/A | | `retries` | [Optional[utils.RetryConfig]](../../models/utils/retryconfig.md) | :heavy_minus_sign: | Configuration to override the default retry behavior of the client. | ### Response @@ -54,9 +93,9 @@ if res is not None: | models.HTTPValidationError | 422 | application/json | | models.SDKError | 4XX, 5XX | \*/\* | -## create_graph_configuration +## set_graph_configuration -Post Graph Configuration +Update the configuration details of a specific graph by its name. ### Example Usage @@ -64,21 +103,20 @@ Post Graph Configuration from circlemind_sdk import CirclemindSDK import os -s = CirclemindSDK( +with CirclemindSDK( api_key_header=os.getenv("CIRCLEMINDSDK_API_KEY_HEADER", ""), -) +) as circlemind_sdk: -res = s.create_graph_configuration(graph_name="", configure_request={ - "domain": "agitated-cod.name", - "example_queries": "", - "entity_types": [ - "", - ], -}) + res = circlemind_sdk.set_graph_configuration(graph_name="", configure_request={ + "domain": "agitated-cod.name", + "example_queries": "", + "entity_types": [ + "", + ], + }) -if res is not None: - # handle response - pass + # Handle response + print(res) ``` @@ -86,7 +124,7 @@ if res is not None: | Parameter | Type | Required | Description | | ------------------------------------------------------------------- | ------------------------------------------------------------------- | ------------------------------------------------------------------- | ------------------------------------------------------------------- | -| `graph_name` | *Nullable[str]* | :heavy_check_mark: | N/A | +| `graph_name` | *str* | :heavy_check_mark: | N/A | | `configure_request` | [models.ConfigureRequest](../../models/configurerequest.md) | :heavy_check_mark: | N/A | | `retries` | [Optional[utils.RetryConfig]](../../models/utils/retryconfig.md) | :heavy_minus_sign: | Configuration to override the default retry behavior of the client. | @@ -101,9 +139,9 @@ if res is not None: | models.HTTPValidationError | 422 | application/json | | models.SDKError | 4XX, 5XX | \*/\* | -## get_graph_list +## list_graphs -Get Graph List +Return the list of all existing graphs for the current user. 
### Example Usage @@ -111,15 +149,14 @@ Get Graph List from circlemind_sdk import CirclemindSDK import os -s = CirclemindSDK( +with CirclemindSDK( api_key_header=os.getenv("CIRCLEMINDSDK_API_KEY_HEADER", ""), -) +) as circlemind_sdk: -res = s.get_graph_list() + res = circlemind_sdk.list_graphs() -if res is not None: - # handle response - pass + # Handle response + print(res) ``` @@ -127,12 +164,58 @@ if res is not None: | Parameter | Type | Required | Description | | ------------------------------------------------------------------- | ------------------------------------------------------------------- | ------------------------------------------------------------------- | ------------------------------------------------------------------- | -| `graph_name` | *OptionalNullable[str]* | :heavy_minus_sign: | N/A | | `retries` | [Optional[utils.RetryConfig]](../../models/utils/retryconfig.md) | :heavy_minus_sign: | Configuration to override the default retry behavior of the client. | ### Response -**[Any](../../models/.md)** +**[models.GraphListResponse](../../models/graphlistresponse.md)** + +### Errors + +| Error Type | Status Code | Content Type | +| --------------- | --------------- | --------------- | +| models.SDKError | 4XX, 5XX | \*/\* | + +## create_graph + +Create a new graph + +### Example Usage + +```python +from circlemind_sdk import CirclemindSDK +import os + +with CirclemindSDK( + api_key_header=os.getenv("CIRCLEMINDSDK_API_KEY_HEADER", ""), +) as circlemind_sdk: + + res = circlemind_sdk.create_graph(graph_name="", configure_request={ + "domain": "liquid-godfather.org", + "example_queries": "", + "entity_types": [ + "", + "", + "", + ], + }) + + # Handle response + print(res) + +``` + +### Parameters + +| Parameter | Type | Required | Description | +| ------------------------------------------------------------------- | ------------------------------------------------------------------- | ------------------------------------------------------------------- | ------------------------------------------------------------------- | +| `graph_name` | *str* | :heavy_check_mark: | N/A | +| `configure_request` | [models.ConfigureRequest](../../models/configurerequest.md) | :heavy_check_mark: | N/A | +| `retries` | [Optional[utils.RetryConfig]](../../models/utils/retryconfig.md) | :heavy_minus_sign: | Configuration to override the default retry behavior of the client. | + +### Response + +**[models.ConfigureResponse](../../models/configureresponse.md)** ### Errors @@ -141,9 +224,9 @@ if res is not None: | models.HTTPValidationError | 422 | application/json | | models.SDKError | 4XX, 5XX | \*/\* | -## create_graph +## delete_graph -Create Graph +Delete the selected graph. 
### Example Usage @@ -151,23 +234,14 @@ Create Graph from circlemind_sdk import CirclemindSDK import os -s = CirclemindSDK( +with CirclemindSDK( api_key_header=os.getenv("CIRCLEMINDSDK_API_KEY_HEADER", ""), -) - -res = s.create_graph(graph_name="", configure_request={ - "domain": "liquid-godfather.org", - "example_queries": "", - "entity_types": [ - "", - "", - "", - ], -}) - -if res is not None: - # handle response - pass +) as circlemind_sdk: + + res = circlemind_sdk.delete_graph(graph_name="") + + # Handle response + print(res) ``` @@ -175,8 +249,7 @@ if res is not None: | Parameter | Type | Required | Description | | ------------------------------------------------------------------- | ------------------------------------------------------------------- | ------------------------------------------------------------------- | ------------------------------------------------------------------- | -| `graph_name` | *Nullable[str]* | :heavy_check_mark: | N/A | -| `configure_request` | [models.ConfigureRequest](../../models/configurerequest.md) | :heavy_check_mark: | N/A | +| `graph_name` | *str* | :heavy_check_mark: | N/A | | `retries` | [Optional[utils.RetryConfig]](../../models/utils/retryconfig.md) | :heavy_minus_sign: | Configuration to override the default retry behavior of the client. | ### Response @@ -190,9 +263,48 @@ if res is not None: | models.HTTPValidationError | 422 | application/json | | models.SDKError | 4XX, 5XX | \*/\* | -## create_query +## download_graphml + +Generate a download URL for the graph in graphml format. + +### Example Usage + +```python +from circlemind_sdk import CirclemindSDK +import os + +with CirclemindSDK( + api_key_header=os.getenv("CIRCLEMINDSDK_API_KEY_HEADER", ""), +) as circlemind_sdk: + + res = circlemind_sdk.download_graphml(graph_name="") + + # Handle response + print(res) + +``` + +### Parameters + +| Parameter | Type | Required | Description | +| ------------------------------------------------------------------- | ------------------------------------------------------------------- | ------------------------------------------------------------------- | ------------------------------------------------------------------- | +| `graph_name` | *str* | :heavy_check_mark: | N/A | +| `retries` | [Optional[utils.RetryConfig]](../../models/utils/retryconfig.md) | :heavy_minus_sign: | Configuration to override the default retry behavior of the client. | + +### Response + +**[models.DownloadGraphResponse](../../models/downloadgraphresponse.md)** + +### Errors + +| Error Type | Status Code | Content Type | +| -------------------------- | -------------------------- | -------------------------- | +| models.HTTPValidationError | 422 | application/json | +| models.SDKError | 4XX, 5XX | \*/\* | + +## query -Post Query +Send a query request to the graph. 
### Example Usage @@ -200,17 +312,16 @@ Post Query from circlemind_sdk import CirclemindSDK import os -s = CirclemindSDK( +with CirclemindSDK( api_key_header=os.getenv("CIRCLEMINDSDK_API_KEY_HEADER", ""), -) +) as circlemind_sdk: -res = s.create_query(graph_name="", query_request={ - "query": "", -}) + res = circlemind_sdk.query(graph_name="", query_request={ + "query": "", + }) -if res is not None: - # handle response - pass + # Handle response + print(res) ``` @@ -218,7 +329,7 @@ if res is not None: | Parameter | Type | Required | Description | | ------------------------------------------------------------------- | ------------------------------------------------------------------- | ------------------------------------------------------------------- | ------------------------------------------------------------------- | -| `graph_name` | *Nullable[str]* | :heavy_check_mark: | N/A | +| `graph_name` | *str* | :heavy_check_mark: | N/A | | `query_request` | [models.QueryRequest](../../models/queryrequest.md) | :heavy_check_mark: | N/A | | `retries` | [Optional[utils.RetryConfig]](../../models/utils/retryconfig.md) | :heavy_minus_sign: | Configuration to override the default retry behavior of the client. | @@ -233,9 +344,9 @@ if res is not None: | models.HTTPValidationError | 422 | application/json | | models.SDKError | 4XX, 5XX | \*/\* | -## get_query_handler +## get_query_status -Get Query Handler +Return the status of an existing query request. ### Example Usage @@ -243,15 +354,14 @@ Get Query Handler from circlemind_sdk import CirclemindSDK import os -s = CirclemindSDK( +with CirclemindSDK( api_key_header=os.getenv("CIRCLEMINDSDK_API_KEY_HEADER", ""), -) +) as circlemind_sdk: -res = s.get_query_handler(graph_name="", request_id="", request_time=362783) + res = circlemind_sdk.get_query_status(graph_name="", request_id="", request_time=816039) -if res is not None: - # handle response - pass + # Handle response + print(res) ``` @@ -259,7 +369,7 @@ if res is not None: | Parameter | Type | Required | Description | | ------------------------------------------------------------------- | ------------------------------------------------------------------- | ------------------------------------------------------------------- | ------------------------------------------------------------------- | -| `graph_name` | *Nullable[str]* | :heavy_check_mark: | N/A | +| `graph_name` | *str* | :heavy_check_mark: | N/A | | `request_id` | *str* | :heavy_check_mark: | N/A | | `request_time` | *int* | :heavy_check_mark: | N/A | | `retries` | [Optional[utils.RetryConfig]](../../models/utils/retryconfig.md) | :heavy_minus_sign: | Configuration to override the default retry behavior of the client. | @@ -275,9 +385,9 @@ if res is not None: | models.HTTPValidationError | 422 | application/json | | models.SDKError | 4XX, 5XX | \*/\* | -## create_insert +## add -Post Insert +Create a new memory in the graph using raw text. 
 ### Example Usage
 
@@ -285,17 +395,16 @@ Post Insert
 from circlemind_sdk import CirclemindSDK
 import os
 
-s = CirclemindSDK(
+with CirclemindSDK(
     api_key_header=os.getenv("CIRCLEMINDSDK_API_KEY_HEADER", ""),
-)
+) as circlemind_sdk:
 
-res = s.create_insert(graph_name="", memory_request={
-    "memory": "",
-})
+    res = circlemind_sdk.add(graph_name="", insert_request={
+        "memory": "",
+    })
 
-if res is not None:
-    # handle response
-    pass
+    # Handle response
+    print(res)
 
 ```
 
@@ -303,8 +412,8 @@ if res is not None:
 
 | Parameter | Type | Required | Description |
 | ------------------------------------------------------------------- | ------------------------------------------------------------------- | ------------------------------------------------------------------- | ------------------------------------------------------------------- |
-| `graph_name` | *Nullable[str]* | :heavy_check_mark: | N/A |
-| `memory_request` | [models.MemoryRequest](../../models/memoryrequest.md) | :heavy_check_mark: | N/A |
+| `graph_name` | *str* | :heavy_check_mark: | N/A |
+| `insert_request` | [models.InsertRequest](../../models/insertrequest.md) | :heavy_check_mark: | N/A |
 | `retries` | [Optional[utils.RetryConfig]](../../models/utils/retryconfig.md) | :heavy_minus_sign: | Configuration to override the default retry behavior of the client. |
 
 ### Response
@@ -318,9 +427,9 @@ if res is not None:
 | models.HTTPValidationError | 422 | application/json |
 | models.SDKError | 4XX, 5XX | \*/\* |
 
-## create_graph_files
+## add_from_files
 
-Add Files
+Create a new memory in the graph from files.
 
 ### Example Usage
 
@@ -328,36 +437,39 @@ Add Files
 from circlemind_sdk import CirclemindSDK
 import os
 
-s = CirclemindSDK(
+with CirclemindSDK(
     api_key_header=os.getenv("CIRCLEMINDSDK_API_KEY_HEADER", ""),
-)
-
-res = s.create_graph_files(graph_name="", body_add_files_graph_graph_name_files_post={
-    "files": [
-        {
-            "file_name": "example.file",
-            "content": open("example.file", "rb"),
-        },
-    ],
-})
-
-if res is not None:
-    # handle response
-    pass
+) as circlemind_sdk:
+
+    res = circlemind_sdk.add_from_files(graph_name="", body_post_insert_files_graph_graph_name_files_post={
+        "files": [
+            {
+                "file_name": "example.file",
+                "content": open("example.file", "rb"),
+            },
+            {
+                "file_name": "example.file",
+                "content": open("example.file", "rb"),
+            },
+        ],
+    })
+
+    # Handle response
+    print(res)
 
 ```
 
 ### Parameters
 
-| Parameter | Type | Required | Description |
-| ------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------- |
-| `graph_name` | *Nullable[str]* | :heavy_check_mark: | N/A |
-| `body_add_files_graph_graph_name_files_post` | [models.BodyAddFilesGraphGraphNameFilesPost](../../models/bodyaddfilesgraphgraphnamefilespost.md) | :heavy_check_mark: | N/A |
-| `retries` | [Optional[utils.RetryConfig]](../../models/utils/retryconfig.md) | :heavy_minus_sign: | Configuration to override the default retry behavior of the client. |
+| Parameter | Type | Required | Description |
+| --------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------- |
+| `graph_name` | *str* | :heavy_check_mark: | N/A |
+| `body_post_insert_files_graph_graph_name_files_post` | [models.BodyPostInsertFilesGraphGraphNameFilesPost](../../models/bodypostinsertfilesgraphgraphnamefilespost.md) | :heavy_check_mark: | N/A |
+| `retries` | [Optional[utils.RetryConfig]](../../models/utils/retryconfig.md) | :heavy_minus_sign: | Configuration to override the default retry behavior of the client. |
 
 ### Response
 
-**[Any](../../models/.md)**
+**[models.InsertResponse](../../models/insertresponse.md)**
 
 ### Errors
 
@@ -366,9 +478,9 @@ if res is not None:
 | models.HTTPValidationError | 422 | application/json |
 | models.SDKError | 4XX, 5XX | \*/\* |
 
-## get_insert_handler
+## get_add_status
 
-Get Insert Handler
+Return the status of an existing add request.
 
 ### Example Usage
 
@@ -376,15 +488,14 @@ Get Insert Handler
 from circlemind_sdk import CirclemindSDK
 import os
 
-s = CirclemindSDK(
+with CirclemindSDK(
     api_key_header=os.getenv("CIRCLEMINDSDK_API_KEY_HEADER", ""),
-)
+) as circlemind_sdk:
 
-res = s.get_insert_handler(graph_name="", request_id="", request_time=895985)
+    res = circlemind_sdk.get_add_status(graph_name="", request_id="", request_time=877284)
 
-if res is not None:
-    # handle response
-    pass
+    # Handle response
+    print(res)
 
 ```
 
@@ -392,7 +503,7 @@ if res is not None:
 
 | Parameter | Type | Required | Description |
 | ------------------------------------------------------------------- | ------------------------------------------------------------------- | ------------------------------------------------------------------- | ------------------------------------------------------------------- |
-| `graph_name` | *Nullable[str]* | :heavy_check_mark: | N/A |
+| `graph_name` | *str* | :heavy_check_mark: | N/A |
 | `request_id` | *str* | :heavy_check_mark: | N/A |
 | `request_time` | *int* | :heavy_check_mark: | N/A |
 | `retries` | [Optional[utils.RetryConfig]](../../models/utils/retryconfig.md) | :heavy_minus_sign: | Configuration to override the default retry behavior of the client. |
diff --git a/poetry.lock b/poetry.lock
index fa5aa67..659bfc3 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -149,13 +149,13 @@ trio = ["trio (>=0.22.0,<1.0)"]
 
 [[package]]
 name = "httpx"
-version = "0.27.2"
+version = "0.28.1"
 description = "The next generation HTTP client."
optional = false python-versions = ">=3.8" files = [ - {file = "httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0"}, - {file = "httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2"}, + {file = "httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad"}, + {file = "httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc"}, ] [package.dependencies] @@ -163,7 +163,6 @@ anyio = "*" certifi = "*" httpcore = "==1.*" idna = "*" -sniffio = "*" [package.extras] brotli = ["brotli", "brotlicffi"] @@ -224,47 +223,53 @@ files = [ [[package]] name = "mypy" -version = "1.10.1" +version = "1.13.0" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" files = [ - {file = "mypy-1.10.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e36f229acfe250dc660790840916eb49726c928e8ce10fbdf90715090fe4ae02"}, - {file = "mypy-1.10.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:51a46974340baaa4145363b9e051812a2446cf583dfaeba124af966fa44593f7"}, - {file = "mypy-1.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:901c89c2d67bba57aaaca91ccdb659aa3a312de67f23b9dfb059727cce2e2e0a"}, - {file = "mypy-1.10.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0cd62192a4a32b77ceb31272d9e74d23cd88c8060c34d1d3622db3267679a5d9"}, - {file = "mypy-1.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:a2cbc68cb9e943ac0814c13e2452d2046c2f2b23ff0278e26599224cf164e78d"}, - {file = "mypy-1.10.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bd6f629b67bb43dc0d9211ee98b96d8dabc97b1ad38b9b25f5e4c4d7569a0c6a"}, - {file = "mypy-1.10.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a1bbb3a6f5ff319d2b9d40b4080d46cd639abe3516d5a62c070cf0114a457d84"}, - {file = "mypy-1.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8edd4e9bbbc9d7b79502eb9592cab808585516ae1bcc1446eb9122656c6066f"}, - {file = "mypy-1.10.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6166a88b15f1759f94a46fa474c7b1b05d134b1b61fca627dd7335454cc9aa6b"}, - {file = "mypy-1.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:5bb9cd11c01c8606a9d0b83ffa91d0b236a0e91bc4126d9ba9ce62906ada868e"}, - {file = "mypy-1.10.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d8681909f7b44d0b7b86e653ca152d6dff0eb5eb41694e163c6092124f8246d7"}, - {file = "mypy-1.10.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:378c03f53f10bbdd55ca94e46ec3ba255279706a6aacaecac52ad248f98205d3"}, - {file = "mypy-1.10.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bacf8f3a3d7d849f40ca6caea5c055122efe70e81480c8328ad29c55c69e93e"}, - {file = "mypy-1.10.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:701b5f71413f1e9855566a34d6e9d12624e9e0a8818a5704d74d6b0402e66c04"}, - {file = "mypy-1.10.1-cp312-cp312-win_amd64.whl", hash = "sha256:3c4c2992f6ea46ff7fce0072642cfb62af7a2484efe69017ed8b095f7b39ef31"}, - {file = "mypy-1.10.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:604282c886497645ffb87b8f35a57ec773a4a2721161e709a4422c1636ddde5c"}, - {file = "mypy-1.10.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37fd87cab83f09842653f08de066ee68f1182b9b5282e4634cdb4b407266bade"}, - {file = "mypy-1.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8addf6313777dbb92e9564c5d32ec122bf2c6c39d683ea64de6a1fd98b90fe37"}, - {file = 
"mypy-1.10.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5cc3ca0a244eb9a5249c7c583ad9a7e881aa5d7b73c35652296ddcdb33b2b9c7"}, - {file = "mypy-1.10.1-cp38-cp38-win_amd64.whl", hash = "sha256:1b3a2ffce52cc4dbaeee4df762f20a2905aa171ef157b82192f2e2f368eec05d"}, - {file = "mypy-1.10.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fe85ed6836165d52ae8b88f99527d3d1b2362e0cb90b005409b8bed90e9059b3"}, - {file = "mypy-1.10.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c2ae450d60d7d020d67ab440c6e3fae375809988119817214440033f26ddf7bf"}, - {file = "mypy-1.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6be84c06e6abd72f960ba9a71561c14137a583093ffcf9bbfaf5e613d63fa531"}, - {file = "mypy-1.10.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2189ff1e39db399f08205e22a797383613ce1cb0cb3b13d8bcf0170e45b96cc3"}, - {file = "mypy-1.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:97a131ee36ac37ce9581f4220311247ab6cba896b4395b9c87af0675a13a755f"}, - {file = "mypy-1.10.1-py3-none-any.whl", hash = "sha256:71d8ac0b906354ebda8ef1673e5fde785936ac1f29ff6987c7483cfbd5a4235a"}, - {file = "mypy-1.10.1.tar.gz", hash = "sha256:1f8f492d7db9e3593ef42d4f115f04e556130f2819ad33ab84551403e97dd4c0"}, + {file = "mypy-1.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6607e0f1dd1fb7f0aca14d936d13fd19eba5e17e1cd2a14f808fa5f8f6d8f60a"}, + {file = "mypy-1.13.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8a21be69bd26fa81b1f80a61ee7ab05b076c674d9b18fb56239d72e21d9f4c80"}, + {file = "mypy-1.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7b2353a44d2179846a096e25691d54d59904559f4232519d420d64da6828a3a7"}, + {file = "mypy-1.13.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0730d1c6a2739d4511dc4253f8274cdd140c55c32dfb0a4cf8b7a43f40abfa6f"}, + {file = "mypy-1.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:c5fc54dbb712ff5e5a0fca797e6e0aa25726c7e72c6a5850cfd2adbc1eb0a372"}, + {file = "mypy-1.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:581665e6f3a8a9078f28d5502f4c334c0c8d802ef55ea0e7276a6e409bc0d82d"}, + {file = "mypy-1.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3ddb5b9bf82e05cc9a627e84707b528e5c7caaa1c55c69e175abb15a761cec2d"}, + {file = "mypy-1.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:20c7ee0bc0d5a9595c46f38beb04201f2620065a93755704e141fcac9f59db2b"}, + {file = "mypy-1.13.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3790ded76f0b34bc9c8ba4def8f919dd6a46db0f5a6610fb994fe8efdd447f73"}, + {file = "mypy-1.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:51f869f4b6b538229c1d1bcc1dd7d119817206e2bc54e8e374b3dfa202defcca"}, + {file = "mypy-1.13.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5c7051a3461ae84dfb5dd15eff5094640c61c5f22257c8b766794e6dd85e72d5"}, + {file = "mypy-1.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:39bb21c69a5d6342f4ce526e4584bc5c197fd20a60d14a8624d8743fffb9472e"}, + {file = "mypy-1.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:164f28cb9d6367439031f4c81e84d3ccaa1e19232d9d05d37cb0bd880d3f93c2"}, + {file = "mypy-1.13.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a4c1bfcdbce96ff5d96fc9b08e3831acb30dc44ab02671eca5953eadad07d6d0"}, + {file = "mypy-1.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:a0affb3a79a256b4183ba09811e3577c5163ed06685e4d4b46429a271ba174d2"}, + {file = "mypy-1.13.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = 
"sha256:a7b44178c9760ce1a43f544e595d35ed61ac2c3de306599fa59b38a6048e1aa7"}, + {file = "mypy-1.13.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5d5092efb8516d08440e36626f0153b5006d4088c1d663d88bf79625af3d1d62"}, + {file = "mypy-1.13.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:de2904956dac40ced10931ac967ae63c5089bd498542194b436eb097a9f77bc8"}, + {file = "mypy-1.13.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:7bfd8836970d33c2105562650656b6846149374dc8ed77d98424b40b09340ba7"}, + {file = "mypy-1.13.0-cp313-cp313-win_amd64.whl", hash = "sha256:9f73dba9ec77acb86457a8fc04b5239822df0c14a082564737833d2963677dbc"}, + {file = "mypy-1.13.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:100fac22ce82925f676a734af0db922ecfea991e1d7ec0ceb1e115ebe501301a"}, + {file = "mypy-1.13.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7bcb0bb7f42a978bb323a7c88f1081d1b5dee77ca86f4100735a6f541299d8fb"}, + {file = "mypy-1.13.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bde31fc887c213e223bbfc34328070996061b0833b0a4cfec53745ed61f3519b"}, + {file = "mypy-1.13.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:07de989f89786f62b937851295ed62e51774722e5444a27cecca993fc3f9cd74"}, + {file = "mypy-1.13.0-cp38-cp38-win_amd64.whl", hash = "sha256:4bde84334fbe19bad704b3f5b78c4abd35ff1026f8ba72b29de70dda0916beb6"}, + {file = "mypy-1.13.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0246bcb1b5de7f08f2826451abd947bf656945209b140d16ed317f65a17dc7dc"}, + {file = "mypy-1.13.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7f5b7deae912cf8b77e990b9280f170381fdfbddf61b4ef80927edd813163732"}, + {file = "mypy-1.13.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7029881ec6ffb8bc233a4fa364736789582c738217b133f1b55967115288a2bc"}, + {file = "mypy-1.13.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3e38b980e5681f28f033f3be86b099a247b13c491f14bb8b1e1e134d23bb599d"}, + {file = "mypy-1.13.0-cp39-cp39-win_amd64.whl", hash = "sha256:a6789be98a2017c912ae6ccb77ea553bbaf13d27605d2ca20a76dfbced631b24"}, + {file = "mypy-1.13.0-py3-none-any.whl", hash = "sha256:9c250883f9fd81d212e0952c92dbfcc96fc237f4b7c92f56ac81fd48460b3e5a"}, + {file = "mypy-1.13.0.tar.gz", hash = "sha256:0291a61b6fbf3e6673e3405cfcc0e7650bebc7939659fdca2702958038bd835e"}, ] [package.dependencies] mypy-extensions = ">=1.0.0" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = ">=4.1.0" +typing-extensions = ">=4.6.0" [package.extras] dmypy = ["psutil (>=4.0)"] +faster-cache = ["orjson"] install-types = ["pip"] mypyc = ["setuptools (>=50)"] reports = ["lxml"] @@ -298,22 +303,19 @@ type = ["mypy (>=1.11.2)"] [[package]] name = "pydantic" -version = "2.9.2" +version = "2.10.3" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic-2.9.2-py3-none-any.whl", hash = "sha256:f048cec7b26778210e28a0459867920654d48e5e62db0958433636cde4254f12"}, - {file = "pydantic-2.9.2.tar.gz", hash = "sha256:d155cef71265d1e9807ed1c32b4c8deec042a44a50a4188b25ac67ecd81a9c0f"}, + {file = "pydantic-2.10.3-py3-none-any.whl", hash = "sha256:be04d85bbc7b65651c5f8e6b9976ed9c6f41782a55524cef079a34a0bb82144d"}, + {file = "pydantic-2.10.3.tar.gz", hash = "sha256:cb5ac360ce894ceacd69c403187900a02c4b20b693a9dd1d643e1effab9eadf9"}, ] [package.dependencies] annotated-types = ">=0.6.0" -pydantic-core = "2.23.4" 
-typing-extensions = [ - {version = ">=4.6.1", markers = "python_version < \"3.13\""}, - {version = ">=4.12.2", markers = "python_version >= \"3.13\""}, -] +pydantic-core = "2.27.1" +typing-extensions = ">=4.12.2" [package.extras] email = ["email-validator (>=2.0.0)"] @@ -321,100 +323,111 @@ timezone = ["tzdata"] [[package]] name = "pydantic-core" -version = "2.23.4" +version = "2.27.1" description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.23.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:b10bd51f823d891193d4717448fab065733958bdb6a6b351967bd349d48d5c9b"}, - {file = "pydantic_core-2.23.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4fc714bdbfb534f94034efaa6eadd74e5b93c8fa6315565a222f7b6f42ca1166"}, - {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63e46b3169866bd62849936de036f901a9356e36376079b05efa83caeaa02ceb"}, - {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed1a53de42fbe34853ba90513cea21673481cd81ed1be739f7f2efb931b24916"}, - {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cfdd16ab5e59fc31b5e906d1a3f666571abc367598e3e02c83403acabc092e07"}, - {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:255a8ef062cbf6674450e668482456abac99a5583bbafb73f9ad469540a3a232"}, - {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a7cd62e831afe623fbb7aabbb4fe583212115b3ef38a9f6b71869ba644624a2"}, - {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f09e2ff1f17c2b51f2bc76d1cc33da96298f0a036a137f5440ab3ec5360b624f"}, - {file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e38e63e6f3d1cec5a27e0afe90a085af8b6806ee208b33030e65b6516353f1a3"}, - {file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0dbd8dbed2085ed23b5c04afa29d8fd2771674223135dc9bc937f3c09284d071"}, - {file = "pydantic_core-2.23.4-cp310-none-win32.whl", hash = "sha256:6531b7ca5f951d663c339002e91aaebda765ec7d61b7d1e3991051906ddde119"}, - {file = "pydantic_core-2.23.4-cp310-none-win_amd64.whl", hash = "sha256:7c9129eb40958b3d4500fa2467e6a83356b3b61bfff1b414c7361d9220f9ae8f"}, - {file = "pydantic_core-2.23.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:77733e3892bb0a7fa797826361ce8a9184d25c8dffaec60b7ffe928153680ba8"}, - {file = "pydantic_core-2.23.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b84d168f6c48fabd1f2027a3d1bdfe62f92cade1fb273a5d68e621da0e44e6d"}, - {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df49e7a0861a8c36d089c1ed57d308623d60416dab2647a4a17fe050ba85de0e"}, - {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ff02b6d461a6de369f07ec15e465a88895f3223eb75073ffea56b84d9331f607"}, - {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:996a38a83508c54c78a5f41456b0103c30508fed9abcad0a59b876d7398f25fd"}, - {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d97683ddee4723ae8c95d1eddac7c192e8c552da0c73a925a89fa8649bf13eea"}, - {file = 
"pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:216f9b2d7713eb98cb83c80b9c794de1f6b7e3145eef40400c62e86cee5f4e1e"}, - {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6f783e0ec4803c787bcea93e13e9932edab72068f68ecffdf86a99fd5918878b"}, - {file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d0776dea117cf5272382634bd2a5c1b6eb16767c223c6a5317cd3e2a757c61a0"}, - {file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d5f7a395a8cf1621939692dba2a6b6a830efa6b3cee787d82c7de1ad2930de64"}, - {file = "pydantic_core-2.23.4-cp311-none-win32.whl", hash = "sha256:74b9127ffea03643e998e0c5ad9bd3811d3dac8c676e47db17b0ee7c3c3bf35f"}, - {file = "pydantic_core-2.23.4-cp311-none-win_amd64.whl", hash = "sha256:98d134c954828488b153d88ba1f34e14259284f256180ce659e8d83e9c05eaa3"}, - {file = "pydantic_core-2.23.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f3e0da4ebaef65158d4dfd7d3678aad692f7666877df0002b8a522cdf088f231"}, - {file = "pydantic_core-2.23.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f69a8e0b033b747bb3e36a44e7732f0c99f7edd5cea723d45bc0d6e95377ffee"}, - {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:723314c1d51722ab28bfcd5240d858512ffd3116449c557a1336cbe3919beb87"}, - {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bb2802e667b7051a1bebbfe93684841cc9351004e2badbd6411bf357ab8d5ac8"}, - {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d18ca8148bebe1b0a382a27a8ee60350091a6ddaf475fa05ef50dc35b5df6327"}, - {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33e3d65a85a2a4a0dc3b092b938a4062b1a05f3a9abde65ea93b233bca0e03f2"}, - {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:128585782e5bfa515c590ccee4b727fb76925dd04a98864182b22e89a4e6ed36"}, - {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:68665f4c17edcceecc112dfed5dbe6f92261fb9d6054b47d01bf6371a6196126"}, - {file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:20152074317d9bed6b7a95ade3b7d6054845d70584216160860425f4fbd5ee9e"}, - {file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9261d3ce84fa1d38ed649c3638feefeae23d32ba9182963e465d58d62203bd24"}, - {file = "pydantic_core-2.23.4-cp312-none-win32.whl", hash = "sha256:4ba762ed58e8d68657fc1281e9bb72e1c3e79cc5d464be146e260c541ec12d84"}, - {file = "pydantic_core-2.23.4-cp312-none-win_amd64.whl", hash = "sha256:97df63000f4fea395b2824da80e169731088656d1818a11b95f3b173747b6cd9"}, - {file = "pydantic_core-2.23.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7530e201d10d7d14abce4fb54cfe5b94a0aefc87da539d0346a484ead376c3cc"}, - {file = "pydantic_core-2.23.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:df933278128ea1cd77772673c73954e53a1c95a4fdf41eef97c2b779271bd0bd"}, - {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cb3da3fd1b6a5d0279a01877713dbda118a2a4fc6f0d821a57da2e464793f05"}, - {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c6dcb030aefb668a2b7009c85b27f90e51e6a3b4d5c9bc4c57631292015b0d"}, - {file = 
"pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:696dd8d674d6ce621ab9d45b205df149399e4bb9aa34102c970b721554828510"}, - {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2971bb5ffe72cc0f555c13e19b23c85b654dd2a8f7ab493c262071377bfce9f6"}, - {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8394d940e5d400d04cad4f75c0598665cbb81aecefaca82ca85bd28264af7f9b"}, - {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0dff76e0602ca7d4cdaacc1ac4c005e0ce0dcfe095d5b5259163a80d3a10d327"}, - {file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7d32706badfe136888bdea71c0def994644e09fff0bfe47441deaed8e96fdbc6"}, - {file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ed541d70698978a20eb63d8c5d72f2cc6d7079d9d90f6b50bad07826f1320f5f"}, - {file = "pydantic_core-2.23.4-cp313-none-win32.whl", hash = "sha256:3d5639516376dce1940ea36edf408c554475369f5da2abd45d44621cb616f769"}, - {file = "pydantic_core-2.23.4-cp313-none-win_amd64.whl", hash = "sha256:5a1504ad17ba4210df3a045132a7baeeba5a200e930f57512ee02909fc5c4cb5"}, - {file = "pydantic_core-2.23.4-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d4488a93b071c04dc20f5cecc3631fc78b9789dd72483ba15d423b5b3689b555"}, - {file = "pydantic_core-2.23.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:81965a16b675b35e1d09dd14df53f190f9129c0202356ed44ab2728b1c905658"}, - {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ffa2ebd4c8530079140dd2d7f794a9d9a73cbb8e9d59ffe24c63436efa8f271"}, - {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:61817945f2fe7d166e75fbfb28004034b48e44878177fc54d81688e7b85a3665"}, - {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:29d2c342c4bc01b88402d60189f3df065fb0dda3654744d5a165a5288a657368"}, - {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5e11661ce0fd30a6790e8bcdf263b9ec5988e95e63cf901972107efc49218b13"}, - {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d18368b137c6295db49ce7218b1a9ba15c5bc254c96d7c9f9e924a9bc7825ad"}, - {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ec4e55f79b1c4ffb2eecd8a0cfba9955a2588497d96851f4c8f99aa4a1d39b12"}, - {file = "pydantic_core-2.23.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:374a5e5049eda9e0a44c696c7ade3ff355f06b1fe0bb945ea3cac2bc336478a2"}, - {file = "pydantic_core-2.23.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5c364564d17da23db1106787675fc7af45f2f7b58b4173bfdd105564e132e6fb"}, - {file = "pydantic_core-2.23.4-cp38-none-win32.whl", hash = "sha256:d7a80d21d613eec45e3d41eb22f8f94ddc758a6c4720842dc74c0581f54993d6"}, - {file = "pydantic_core-2.23.4-cp38-none-win_amd64.whl", hash = "sha256:5f5ff8d839f4566a474a969508fe1c5e59c31c80d9e140566f9a37bba7b8d556"}, - {file = "pydantic_core-2.23.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a4fa4fc04dff799089689f4fd502ce7d59de529fc2f40a2c8836886c03e0175a"}, - {file = "pydantic_core-2.23.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0a7df63886be5e270da67e0966cf4afbae86069501d35c8c1b3b6c168f42cb36"}, - {file = 
"pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcedcd19a557e182628afa1d553c3895a9f825b936415d0dbd3cd0bbcfd29b4b"}, - {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f54b118ce5de9ac21c363d9b3caa6c800341e8c47a508787e5868c6b79c9323"}, - {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86d2f57d3e1379a9525c5ab067b27dbb8a0642fb5d454e17a9ac434f9ce523e3"}, - {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de6d1d1b9e5101508cb37ab0d972357cac5235f5c6533d1071964c47139257df"}, - {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1278e0d324f6908e872730c9102b0112477a7f7cf88b308e4fc36ce1bdb6d58c"}, - {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9a6b5099eeec78827553827f4c6b8615978bb4b6a88e5d9b93eddf8bb6790f55"}, - {file = "pydantic_core-2.23.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e55541f756f9b3ee346b840103f32779c695a19826a4c442b7954550a0972040"}, - {file = "pydantic_core-2.23.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a5c7ba8ffb6d6f8f2ab08743be203654bb1aaa8c9dcb09f82ddd34eadb695605"}, - {file = "pydantic_core-2.23.4-cp39-none-win32.whl", hash = "sha256:37b0fe330e4a58d3c58b24d91d1eb102aeec675a3db4c292ec3928ecd892a9a6"}, - {file = "pydantic_core-2.23.4-cp39-none-win_amd64.whl", hash = "sha256:1498bec4c05c9c787bde9125cfdcc63a41004ff167f495063191b863399b1a29"}, - {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f455ee30a9d61d3e1a15abd5068827773d6e4dc513e795f380cdd59932c782d5"}, - {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1e90d2e3bd2c3863d48525d297cd143fe541be8bbf6f579504b9712cb6b643ec"}, - {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e203fdf807ac7e12ab59ca2bfcabb38c7cf0b33c41efeb00f8e5da1d86af480"}, - {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e08277a400de01bc72436a0ccd02bdf596631411f592ad985dcee21445bd0068"}, - {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f220b0eea5965dec25480b6333c788fb72ce5f9129e8759ef876a1d805d00801"}, - {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d06b0c8da4f16d1d1e352134427cb194a0a6e19ad5db9161bf32b2113409e728"}, - {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ba1a0996f6c2773bd83e63f18914c1de3c9dd26d55f4ac302a7efe93fb8e7433"}, - {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:9a5bce9d23aac8f0cf0836ecfc033896aa8443b501c58d0602dbfd5bd5b37753"}, - {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:78ddaaa81421a29574a682b3179d4cf9e6d405a09b99d93ddcf7e5239c742e21"}, - {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:883a91b5dd7d26492ff2f04f40fbb652de40fcc0afe07e8129e8ae779c2110eb"}, - {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88ad334a15b32a791ea935af224b9de1bf99bcd62fabf745d5f3442199d86d59"}, - {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:233710f069d251feb12a56da21e14cca67994eab08362207785cf8c598e74577"}, - {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:19442362866a753485ba5e4be408964644dd6a09123d9416c54cd49171f50744"}, - {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:624e278a7d29b6445e4e813af92af37820fafb6dcc55c012c834f9e26f9aaaef"}, - {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f5ef8f42bec47f21d07668a043f077d507e5bf4e668d5c6dfe6aaba89de1a5b8"}, - {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:aea443fffa9fbe3af1a9ba721a87f926fe548d32cab71d188a6ede77d0ff244e"}, - {file = "pydantic_core-2.23.4.tar.gz", hash = "sha256:2584f7cf844ac4d970fba483a717dbe10c1c1c96a969bf65d61ffe94df1b2863"}, + {file = "pydantic_core-2.27.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:71a5e35c75c021aaf400ac048dacc855f000bdfed91614b4a726f7432f1f3d6a"}, + {file = "pydantic_core-2.27.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f82d068a2d6ecfc6e054726080af69a6764a10015467d7d7b9f66d6ed5afa23b"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:121ceb0e822f79163dd4699e4c54f5ad38b157084d97b34de8b232bcaad70278"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4603137322c18eaf2e06a4495f426aa8d8388940f3c457e7548145011bb68e05"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a33cd6ad9017bbeaa9ed78a2e0752c5e250eafb9534f308e7a5f7849b0b1bfb4"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15cc53a3179ba0fcefe1e3ae50beb2784dede4003ad2dfd24f81bba4b23a454f"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45d9c5eb9273aa50999ad6adc6be5e0ecea7e09dbd0d31bd0c65a55a2592ca08"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8bf7b66ce12a2ac52d16f776b31d16d91033150266eb796967a7e4621707e4f6"}, + {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:655d7dd86f26cb15ce8a431036f66ce0318648f8853d709b4167786ec2fa4807"}, + {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:5556470f1a2157031e676f776c2bc20acd34c1990ca5f7e56f1ebf938b9ab57c"}, + {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f69ed81ab24d5a3bd93861c8c4436f54afdf8e8cc421562b0c7504cf3be58206"}, + {file = "pydantic_core-2.27.1-cp310-none-win32.whl", hash = "sha256:f5a823165e6d04ccea61a9f0576f345f8ce40ed533013580e087bd4d7442b52c"}, + {file = "pydantic_core-2.27.1-cp310-none-win_amd64.whl", hash = "sha256:57866a76e0b3823e0b56692d1a0bf722bffb324839bb5b7226a7dbd6c9a40b17"}, + {file = "pydantic_core-2.27.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:ac3b20653bdbe160febbea8aa6c079d3df19310d50ac314911ed8cc4eb7f8cb8"}, + {file = "pydantic_core-2.27.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a5a8e19d7c707c4cadb8c18f5f60c843052ae83c20fa7d44f41594c644a1d330"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f7059ca8d64fea7f238994c97d91f75965216bcbe5f695bb44f354893f11d52"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:bed0f8a0eeea9fb72937ba118f9db0cb7e90773462af7962d382445f3005e5a4"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a3cb37038123447cf0f3ea4c74751f6a9d7afef0eb71aa07bf5f652b5e6a132c"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:84286494f6c5d05243456e04223d5a9417d7f443c3b76065e75001beb26f88de"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acc07b2cfc5b835444b44a9956846b578d27beeacd4b52e45489e93276241025"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4fefee876e07a6e9aad7a8c8c9f85b0cdbe7df52b8a9552307b09050f7512c7e"}, + {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:258c57abf1188926c774a4c94dd29237e77eda19462e5bb901d88adcab6af919"}, + {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:35c14ac45fcfdf7167ca76cc80b2001205a8d5d16d80524e13508371fb8cdd9c"}, + {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d1b26e1dff225c31897696cab7d4f0a315d4c0d9e8666dbffdb28216f3b17fdc"}, + {file = "pydantic_core-2.27.1-cp311-none-win32.whl", hash = "sha256:2cdf7d86886bc6982354862204ae3b2f7f96f21a3eb0ba5ca0ac42c7b38598b9"}, + {file = "pydantic_core-2.27.1-cp311-none-win_amd64.whl", hash = "sha256:3af385b0cee8df3746c3f406f38bcbfdc9041b5c2d5ce3e5fc6637256e60bbc5"}, + {file = "pydantic_core-2.27.1-cp311-none-win_arm64.whl", hash = "sha256:81f2ec23ddc1b476ff96563f2e8d723830b06dceae348ce02914a37cb4e74b89"}, + {file = "pydantic_core-2.27.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9cbd94fc661d2bab2bc702cddd2d3370bbdcc4cd0f8f57488a81bcce90c7a54f"}, + {file = "pydantic_core-2.27.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5f8c4718cd44ec1580e180cb739713ecda2bdee1341084c1467802a417fe0f02"}, + {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15aae984e46de8d376df515f00450d1522077254ef6b7ce189b38ecee7c9677c"}, + {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1ba5e3963344ff25fc8c40da90f44b0afca8cfd89d12964feb79ac1411a260ac"}, + {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:992cea5f4f3b29d6b4f7f1726ed8ee46c8331c6b4eed6db5b40134c6fe1768bb"}, + {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0325336f348dbee6550d129b1627cb8f5351a9dc91aad141ffb96d4937bd9529"}, + {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7597c07fbd11515f654d6ece3d0e4e5093edc30a436c63142d9a4b8e22f19c35"}, + {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3bbd5d8cc692616d5ef6fbbbd50dbec142c7e6ad9beb66b78a96e9c16729b089"}, + {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:dc61505e73298a84a2f317255fcc72b710b72980f3a1f670447a21efc88f8381"}, + {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:e1f735dc43da318cad19b4173dd1ffce1d84aafd6c9b782b3abc04a0d5a6f5bb"}, + {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f4e5658dbffe8843a0f12366a4c2d1c316dbe09bb4dfbdc9d2d9cd6031de8aae"}, + {file = "pydantic_core-2.27.1-cp312-none-win32.whl", hash = 
"sha256:672ebbe820bb37988c4d136eca2652ee114992d5d41c7e4858cdd90ea94ffe5c"}, + {file = "pydantic_core-2.27.1-cp312-none-win_amd64.whl", hash = "sha256:66ff044fd0bb1768688aecbe28b6190f6e799349221fb0de0e6f4048eca14c16"}, + {file = "pydantic_core-2.27.1-cp312-none-win_arm64.whl", hash = "sha256:9a3b0793b1bbfd4146304e23d90045f2a9b5fd5823aa682665fbdaf2a6c28f3e"}, + {file = "pydantic_core-2.27.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:f216dbce0e60e4d03e0c4353c7023b202d95cbaeff12e5fd2e82ea0a66905073"}, + {file = "pydantic_core-2.27.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a2e02889071850bbfd36b56fd6bc98945e23670773bc7a76657e90e6b6603c08"}, + {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42b0e23f119b2b456d07ca91b307ae167cc3f6c846a7b169fca5326e32fdc6cf"}, + {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:764be71193f87d460a03f1f7385a82e226639732214b402f9aa61f0d025f0737"}, + {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1c00666a3bd2f84920a4e94434f5974d7bbc57e461318d6bb34ce9cdbbc1f6b2"}, + {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ccaa88b24eebc0f849ce0a4d09e8a408ec5a94afff395eb69baf868f5183107"}, + {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c65af9088ac534313e1963443d0ec360bb2b9cba6c2909478d22c2e363d98a51"}, + {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:206b5cf6f0c513baffaeae7bd817717140770c74528f3e4c3e1cec7871ddd61a"}, + {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:062f60e512fc7fff8b8a9d680ff0ddaaef0193dba9fa83e679c0c5f5fbd018bc"}, + {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:a0697803ed7d4af5e4c1adf1670af078f8fcab7a86350e969f454daf598c4960"}, + {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:58ca98a950171f3151c603aeea9303ef6c235f692fe555e883591103da709b23"}, + {file = "pydantic_core-2.27.1-cp313-none-win32.whl", hash = "sha256:8065914ff79f7eab1599bd80406681f0ad08f8e47c880f17b416c9f8f7a26d05"}, + {file = "pydantic_core-2.27.1-cp313-none-win_amd64.whl", hash = "sha256:ba630d5e3db74c79300d9a5bdaaf6200172b107f263c98a0539eeecb857b2337"}, + {file = "pydantic_core-2.27.1-cp313-none-win_arm64.whl", hash = "sha256:45cf8588c066860b623cd11c4ba687f8d7175d5f7ef65f7129df8a394c502de5"}, + {file = "pydantic_core-2.27.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:5897bec80a09b4084aee23f9b73a9477a46c3304ad1d2d07acca19723fb1de62"}, + {file = "pydantic_core-2.27.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d0165ab2914379bd56908c02294ed8405c252250668ebcb438a55494c69f44ab"}, + {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b9af86e1d8e4cfc82c2022bfaa6f459381a50b94a29e95dcdda8442d6d83864"}, + {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f6c8a66741c5f5447e047ab0ba7a1c61d1e95580d64bce852e3df1f895c4067"}, + {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a42d6a8156ff78981f8aa56eb6394114e0dedb217cf8b729f438f643608cbcd"}, + {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:64c65f40b4cd8b0e049a8edde07e38b476da7e3aaebe63287c899d2cff253fa5"}, + {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdcf339322a3fae5cbd504edcefddd5a50d9ee00d968696846f089b4432cf78"}, + {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bf99c8404f008750c846cb4ac4667b798a9f7de673ff719d705d9b2d6de49c5f"}, + {file = "pydantic_core-2.27.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8f1edcea27918d748c7e5e4d917297b2a0ab80cad10f86631e488b7cddf76a36"}, + {file = "pydantic_core-2.27.1-cp38-cp38-musllinux_1_1_armv7l.whl", hash = "sha256:159cac0a3d096f79ab6a44d77a961917219707e2a130739c64d4dd46281f5c2a"}, + {file = "pydantic_core-2.27.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:029d9757eb621cc6e1848fa0b0310310de7301057f623985698ed7ebb014391b"}, + {file = "pydantic_core-2.27.1-cp38-none-win32.whl", hash = "sha256:a28af0695a45f7060e6f9b7092558a928a28553366519f64083c63a44f70e618"}, + {file = "pydantic_core-2.27.1-cp38-none-win_amd64.whl", hash = "sha256:2d4567c850905d5eaaed2f7a404e61012a51caf288292e016360aa2b96ff38d4"}, + {file = "pydantic_core-2.27.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:e9386266798d64eeb19dd3677051f5705bf873e98e15897ddb7d76f477131967"}, + {file = "pydantic_core-2.27.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4228b5b646caa73f119b1ae756216b59cc6e2267201c27d3912b592c5e323b60"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b3dfe500de26c52abe0477dde16192ac39c98f05bf2d80e76102d394bd13854"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:aee66be87825cdf72ac64cb03ad4c15ffef4143dbf5c113f64a5ff4f81477bf9"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b748c44bb9f53031c8cbc99a8a061bc181c1000c60a30f55393b6e9c45cc5bd"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ca038c7f6a0afd0b2448941b6ef9d5e1949e999f9e5517692eb6da58e9d44be"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e0bd57539da59a3e4671b90a502da9a28c72322a4f17866ba3ac63a82c4498e"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ac6c2c45c847bbf8f91930d88716a0fb924b51e0c6dad329b793d670ec5db792"}, + {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b94d4ba43739bbe8b0ce4262bcc3b7b9f31459ad120fb595627eaeb7f9b9ca01"}, + {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:00e6424f4b26fe82d44577b4c842d7df97c20be6439e8e685d0d715feceb9fb9"}, + {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:38de0a70160dd97540335b7ad3a74571b24f1dc3ed33f815f0880682e6880131"}, + {file = "pydantic_core-2.27.1-cp39-none-win32.whl", hash = "sha256:7ccebf51efc61634f6c2344da73e366c75e735960b5654b63d7e6f69a5885fa3"}, + {file = "pydantic_core-2.27.1-cp39-none-win_amd64.whl", hash = "sha256:a57847b090d7892f123726202b7daa20df6694cbd583b67a592e856bff603d6c"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3fa80ac2bd5856580e242dbc202db873c60a01b20309c8319b5c5986fbe53ce6"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d950caa237bb1954f1b8c9227b5065ba6875ac9771bb8ec790d956a699b78676"}, + {file = 
"pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e4216e64d203e39c62df627aa882f02a2438d18a5f21d7f721621f7a5d3611d"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02a3d637bd387c41d46b002f0e49c52642281edacd2740e5a42f7017feea3f2c"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:161c27ccce13b6b0c8689418da3885d3220ed2eae2ea5e9b2f7f3d48f1d52c27"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:19910754e4cc9c63bc1c7f6d73aa1cfee82f42007e407c0f413695c2f7ed777f"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:e173486019cc283dc9778315fa29a363579372fe67045e971e89b6365cc035ed"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:af52d26579b308921b73b956153066481f064875140ccd1dfd4e77db89dbb12f"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:981fb88516bd1ae8b0cbbd2034678a39dedc98752f264ac9bc5839d3923fa04c"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5fde892e6c697ce3e30c61b239330fc5d569a71fefd4eb6512fc6caec9dd9e2f"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:816f5aa087094099fff7edabb5e01cc370eb21aa1a1d44fe2d2aefdfb5599b31"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c10c309e18e443ddb108f0ef64e8729363adbfd92d6d57beec680f6261556f3"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98476c98b02c8e9b2eec76ac4156fd006628b1b2d0ef27e548ffa978393fd154"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c3027001c28434e7ca5a6e1e527487051136aa81803ac812be51802150d880dd"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:7699b1df36a48169cdebda7ab5a2bac265204003f153b4bd17276153d997670a"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1c39b07d90be6b48968ddc8c19e7585052088fd7ec8d568bb31ff64c70ae3c97"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:46ccfe3032b3915586e469d4972973f893c0a2bb65669194a5bdea9bacc088c2"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:62ba45e21cf6571d7f716d903b5b7b6d2617e2d5d67c0923dc47b9d41369f840"}, + {file = "pydantic_core-2.27.1.tar.gz", hash = "sha256:62a763352879b84aa31058fc931884055fd75089cccbd9d58bb6afd01141b235"}, ] [package.dependencies] @@ -548,4 +561,4 @@ typing-extensions = ">=3.7.4" [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "9e57d395164aeb8637702f9d37b29cff9bdc5ebcbd44305b91808c122428bacb" +content-hash = "231d09484040ca8e2e4ea801ceedb0b672113dd483caa7cb13d217c3e92d7655" diff --git a/pyproject.toml b/pyproject.toml index d31ead1..0ead382 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "circlemind-sdk" -version = "0.1.0" +version = "0.1.0-post0" description = "Python Client SDK Generated by Speakeasy." 
authors = ["Speakeasy",] readme = "README-PYPI.md" @@ -19,14 +19,14 @@ in-project = true [tool.poetry.dependencies] python = "^3.8" eval-type-backport = "^0.2.0" -httpx = "^0.27.0" +httpx = "^0.28.1" jsonpath-python = "^1.0.6" -pydantic = "~2.9.2" -python-dateutil = "2.8.2" +pydantic = "~2.10.3" +python-dateutil = "^2.8.2" typing-inspect = "^0.9.0" [tool.poetry.group.dev.dependencies] -mypy = "==1.10.1" +mypy = "==1.13.0" pylint = "==3.2.3" types-python-dateutil = "^2.9.0.20240316" diff --git a/src/circlemind_sdk/_version.py b/src/circlemind_sdk/_version.py index 5e14df6..6ecc809 100644 --- a/src/circlemind_sdk/_version.py +++ b/src/circlemind_sdk/_version.py @@ -3,7 +3,7 @@ import importlib.metadata __title__: str = "circlemind-sdk" -__version__: str = "0.1.0" +__version__: str = "0.1.0-post0" try: if __package__ is not None: diff --git a/src/circlemind_sdk/basesdk.py b/src/circlemind_sdk/basesdk.py index 7b93217..2f68ec1 100644 --- a/src/circlemind_sdk/basesdk.py +++ b/src/circlemind_sdk/basesdk.py @@ -9,7 +9,8 @@ ) from circlemind_sdk.utils import RetryConfig, SerializedRequestBody, get_body_content import httpx -from typing import Callable, List, Optional, Tuple +from typing import Callable, List, Mapping, Optional, Tuple +from urllib.parse import parse_qs, urlparse class BaseSDK: @@ -48,6 +49,7 @@ def build_request_async( Callable[[], Optional[SerializedRequestBody]] ] = None, url_override: Optional[str] = None, + http_headers: Optional[Mapping[str, str]] = None, ) -> httpx.Request: client = self.sdk_configuration.async_client return self.build_request_with_client( @@ -67,6 +69,7 @@ def build_request_async( timeout_ms, get_serialized_body, url_override, + http_headers, ) def build_request( @@ -88,6 +91,7 @@ def build_request( Callable[[], Optional[SerializedRequestBody]] ] = None, url_override: Optional[str] = None, + http_headers: Optional[Mapping[str, str]] = None, ) -> httpx.Request: client = self.sdk_configuration.client return self.build_request_with_client( @@ -107,6 +111,7 @@ def build_request( timeout_ms, get_serialized_body, url_override, + http_headers, ) def build_request_with_client( @@ -129,6 +134,7 @@ def build_request_with_client( Callable[[], Optional[SerializedRequestBody]] ] = None, url_override: Optional[str] = None, + http_headers: Optional[Mapping[str, str]] = None, ) -> httpx.Request: query_params = {} @@ -145,6 +151,12 @@ def build_request_with_client( request if request_has_query_params else None, _globals if request_has_query_params else None, ) + else: + # Pick up the query parameter from the override so they can be + # preserved when building the request later on (necessary as of + # httpx 0.28). 
+ parsed_override = urlparse(str(url_override)) + query_params = parse_qs(parsed_override.query, keep_blank_values=True) headers = utils.get_headers(request, _globals) headers["Accept"] = accept_header_value @@ -159,7 +171,7 @@ def build_request_with_client( headers = {**headers, **security_headers} query_params = {**query_params, **security_query_params} - serialized_request_body = SerializedRequestBody("application/octet-stream") + serialized_request_body = SerializedRequestBody() if get_serialized_body is not None: rb = get_serialized_body() if request_body_required and rb is None: @@ -178,6 +190,10 @@ def build_request_with_client( ): headers["content-type"] = serialized_request_body.media_type + if http_headers is not None: + for header, value in http_headers.items(): + headers[header] = value + timeout = timeout_ms / 1000 if timeout_ms is not None else None return client.build_request( diff --git a/src/circlemind_sdk/httpclient.py b/src/circlemind_sdk/httpclient.py index 36b642a..167cea4 100644 --- a/src/circlemind_sdk/httpclient.py +++ b/src/circlemind_sdk/httpclient.py @@ -41,6 +41,9 @@ def build_request( ) -> httpx.Request: pass + def close(self) -> None: + pass + @runtime_checkable class AsyncHttpClient(Protocol): @@ -76,3 +79,6 @@ def build_request( extensions: Optional[httpx._types.RequestExtensions] = None, ) -> httpx.Request: pass + + async def aclose(self) -> None: + pass diff --git a/src/circlemind_sdk/models/__init__.py b/src/circlemind_sdk/models/__init__.py index 9256e23..c1f92d2 100644 --- a/src/circlemind_sdk/models/__init__.py +++ b/src/circlemind_sdk/models/__init__.py @@ -1,12 +1,8 @@ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" -from .add_files_graph_graph_name_files_postop import ( - AddFilesGraphGraphNameFilesPostRequest, - AddFilesGraphGraphNameFilesPostRequestTypedDict, -) -from .body_add_files_graph_graph_name_files_post import ( - BodyAddFilesGraphGraphNameFilesPost, - BodyAddFilesGraphGraphNameFilesPostTypedDict, +from .body_post_insert_files_graph_graph_name_files_post import ( + BodyPostInsertFilesGraphGraphNameFilesPost, + BodyPostInsertFilesGraphGraphNameFilesPostTypedDict, Files, FilesTypedDict, ) @@ -16,29 +12,40 @@ CreateGraphGraphGraphNamePostRequest, CreateGraphGraphGraphNamePostRequestTypedDict, ) +from .delete_graph_graph_graph_name_delete_postop import ( + DeleteGraphGraphGraphNameDeletePostRequest, + DeleteGraphGraphGraphNameDeletePostRequestTypedDict, +) +from .downloadgraphresponse import DownloadGraphResponse, DownloadGraphResponseTypedDict from .get_graph_configuration_graph_graph_name_configuration_getop import ( GetGraphConfigurationGraphGraphNameConfigurationGetRequest, GetGraphConfigurationGraphGraphNameConfigurationGetRequestTypedDict, ) -from .get_graph_list_graph_getop import ( - GetGraphListGraphGetRequest, - GetGraphListGraphGetRequestTypedDict, +from .get_graphml_graph_graph_name_graphml_getop import ( + GetGraphmlGraphGraphNameGraphmlGetRequest, + GetGraphmlGraphGraphNameGraphmlGetRequestTypedDict, ) -from .get_insert_handler_graph_graph_name_insert_request_id_getop import ( - GetInsertHandlerGraphGraphNameInsertRequestIDGetRequest, - GetInsertHandlerGraphGraphNameInsertRequestIDGetRequestTypedDict, +from .get_insert_status_graph_graph_name_insert_request_id_getop import ( + GetInsertStatusGraphGraphNameInsertRequestIDGetRequest, + GetInsertStatusGraphGraphNameInsertRequestIDGetRequestTypedDict, ) -from .get_query_handler_graph_graph_name_query_request_id_getop import ( - 
GetQueryHandlerGraphGraphNameQueryRequestIDGetRequest, - GetQueryHandlerGraphGraphNameQueryRequestIDGetRequestTypedDict, +from .get_query_status_graph_graph_name_query_request_id_getop import ( + GetQueryStatusGraphGraphNameQueryRequestIDGetRequest, + GetQueryStatusGraphGraphNameQueryRequestIDGetRequestTypedDict, ) +from .graphlistresponse import GraphListResponse, GraphListResponseTypedDict from .httpvalidationerror import HTTPValidationError, HTTPValidationErrorData +from .insertrequest import InsertRequest, InsertRequestTypedDict from .insertresponse import InsertResponse, InsertResponseTypedDict -from .memoryrequest import MemoryRequest, MemoryRequestTypedDict +from .planresponse import PlanResponse, PlanResponseTypedDict from .post_graph_configuration_graph_graph_name_configuration_postop import ( PostGraphConfigurationGraphGraphNameConfigurationPostRequest, PostGraphConfigurationGraphGraphNameConfigurationPostRequestTypedDict, ) +from .post_insert_files_graph_graph_name_files_postop import ( + PostInsertFilesGraphGraphNameFilesPostRequest, + PostInsertFilesGraphGraphNameFilesPostRequestTypedDict, +) from .post_insert_graph_graph_name_insert_postop import ( PostInsertGraphGraphNameInsertPostRequest, PostInsertGraphGraphNameInsertPostRequestTypedDict, @@ -60,36 +67,44 @@ ) __all__ = [ - "AddFilesGraphGraphNameFilesPostRequest", - "AddFilesGraphGraphNameFilesPostRequestTypedDict", - "BodyAddFilesGraphGraphNameFilesPost", - "BodyAddFilesGraphGraphNameFilesPostTypedDict", + "BodyPostInsertFilesGraphGraphNameFilesPost", + "BodyPostInsertFilesGraphGraphNameFilesPostTypedDict", "ConfigureRequest", "ConfigureRequestTypedDict", "ConfigureResponse", "ConfigureResponseTypedDict", "CreateGraphGraphGraphNamePostRequest", "CreateGraphGraphGraphNamePostRequestTypedDict", + "DeleteGraphGraphGraphNameDeletePostRequest", + "DeleteGraphGraphGraphNameDeletePostRequestTypedDict", + "DownloadGraphResponse", + "DownloadGraphResponseTypedDict", "Files", "FilesTypedDict", "GetGraphConfigurationGraphGraphNameConfigurationGetRequest", "GetGraphConfigurationGraphGraphNameConfigurationGetRequestTypedDict", - "GetGraphListGraphGetRequest", - "GetGraphListGraphGetRequestTypedDict", - "GetInsertHandlerGraphGraphNameInsertRequestIDGetRequest", - "GetInsertHandlerGraphGraphNameInsertRequestIDGetRequestTypedDict", - "GetQueryHandlerGraphGraphNameQueryRequestIDGetRequest", - "GetQueryHandlerGraphGraphNameQueryRequestIDGetRequestTypedDict", + "GetGraphmlGraphGraphNameGraphmlGetRequest", + "GetGraphmlGraphGraphNameGraphmlGetRequestTypedDict", + "GetInsertStatusGraphGraphNameInsertRequestIDGetRequest", + "GetInsertStatusGraphGraphNameInsertRequestIDGetRequestTypedDict", + "GetQueryStatusGraphGraphNameQueryRequestIDGetRequest", + "GetQueryStatusGraphGraphNameQueryRequestIDGetRequestTypedDict", + "GraphListResponse", + "GraphListResponseTypedDict", "HTTPValidationError", "HTTPValidationErrorData", + "InsertRequest", + "InsertRequestTypedDict", "InsertResponse", "InsertResponseTypedDict", "Loc", "LocTypedDict", - "MemoryRequest", - "MemoryRequestTypedDict", + "PlanResponse", + "PlanResponseTypedDict", "PostGraphConfigurationGraphGraphNameConfigurationPostRequest", "PostGraphConfigurationGraphGraphNameConfigurationPostRequestTypedDict", + "PostInsertFilesGraphGraphNameFilesPostRequest", + "PostInsertFilesGraphGraphNameFilesPostRequestTypedDict", "PostInsertGraphGraphNameInsertPostRequest", "PostInsertGraphGraphNameInsertPostRequestTypedDict", "PostQueryGraphGraphNameQueryPostRequest", diff --git 
a/src/circlemind_sdk/models/add_files_graph_graph_name_files_postop.py b/src/circlemind_sdk/models/add_files_graph_graph_name_files_postop.py deleted file mode 100644 index 8a874ad..0000000 --- a/src/circlemind_sdk/models/add_files_graph_graph_name_files_postop.py +++ /dev/null @@ -1,60 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" - -from __future__ import annotations -from .body_add_files_graph_graph_name_files_post import ( - BodyAddFilesGraphGraphNameFilesPost, - BodyAddFilesGraphGraphNameFilesPostTypedDict, -) -from circlemind_sdk.types import BaseModel, Nullable, UNSET_SENTINEL -from circlemind_sdk.utils import FieldMetadata, PathParamMetadata, RequestMetadata -from pydantic import model_serializer -from typing_extensions import Annotated, TypedDict - - -class AddFilesGraphGraphNameFilesPostRequestTypedDict(TypedDict): - graph_name: Nullable[str] - body_add_files_graph_graph_name_files_post: ( - BodyAddFilesGraphGraphNameFilesPostTypedDict - ) - - -class AddFilesGraphGraphNameFilesPostRequest(BaseModel): - graph_name: Annotated[ - Nullable[str], - FieldMetadata(path=PathParamMetadata(style="simple", explode=False)), - ] - - body_add_files_graph_graph_name_files_post: Annotated[ - BodyAddFilesGraphGraphNameFilesPost, - FieldMetadata(request=RequestMetadata(media_type="multipart/form-data")), - ] - - @model_serializer(mode="wrap") - def serialize_model(self, handler): - optional_fields = [] - nullable_fields = ["graph_name"] - null_default_fields = [] - - serialized = handler(self) - - m = {} - - for n, f in self.model_fields.items(): - k = f.alias or n - val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val - - return m diff --git a/src/circlemind_sdk/models/body_add_files_graph_graph_name_files_post.py b/src/circlemind_sdk/models/body_post_insert_files_graph_graph_name_files_post.py similarity index 82% rename from src/circlemind_sdk/models/body_add_files_graph_graph_name_files_post.py rename to src/circlemind_sdk/models/body_post_insert_files_graph_graph_name_files_post.py index a496405..52dc5c8 100644 --- a/src/circlemind_sdk/models/body_add_files_graph_graph_name_files_post.py +++ b/src/circlemind_sdk/models/body_post_insert_files_graph_graph_name_files_post.py @@ -17,7 +17,7 @@ class FilesTypedDict(TypedDict): class Files(BaseModel): file_name: Annotated[ - str, pydantic.Field(alias="files"), FieldMetadata(multipart=True) + str, pydantic.Field(alias="fileName"), FieldMetadata(multipart=True) ] content: Annotated[ @@ -33,9 +33,9 @@ class Files(BaseModel): ] = None -class BodyAddFilesGraphGraphNameFilesPostTypedDict(TypedDict): +class BodyPostInsertFilesGraphGraphNameFilesPostTypedDict(TypedDict): files: List[FilesTypedDict] -class BodyAddFilesGraphGraphNameFilesPost(BaseModel): +class BodyPostInsertFilesGraphGraphNameFilesPost(BaseModel): files: Annotated[List[Files], FieldMetadata(multipart=True)] diff --git a/src/circlemind_sdk/models/configurerequest.py b/src/circlemind_sdk/models/configurerequest.py index cc2689f..c87ef14 100644 --- a/src/circlemind_sdk/models/configurerequest.py +++ b/src/circlemind_sdk/models/configurerequest.py @@ -8,12 +8,16 @@ class 
ConfigureRequestTypedDict(TypedDict): + r"""Data model for graph configuration request.""" + domain: str example_queries: str entity_types: List[str] class ConfigureRequest(BaseModel): + r"""Data model for graph configuration request.""" + domain: str example_queries: Annotated[str, pydantic.Field(alias="exampleQueries")] diff --git a/src/circlemind_sdk/models/configureresponse.py b/src/circlemind_sdk/models/configureresponse.py index d422ec1..e987d20 100644 --- a/src/circlemind_sdk/models/configureresponse.py +++ b/src/circlemind_sdk/models/configureresponse.py @@ -8,12 +8,16 @@ class ConfigureResponseTypedDict(TypedDict): + r"""Data model for graph configuration response.""" + domain: str example_queries: str entity_types: List[str] class ConfigureResponse(BaseModel): + r"""Data model for graph configuration response.""" + domain: str example_queries: Annotated[str, pydantic.Field(alias="exampleQueries")] diff --git a/src/circlemind_sdk/models/create_graph_graph_graph_name_postop.py b/src/circlemind_sdk/models/create_graph_graph_graph_name_postop.py index 57b8d04..6703c46 100644 --- a/src/circlemind_sdk/models/create_graph_graph_graph_name_postop.py +++ b/src/circlemind_sdk/models/create_graph_graph_graph_name_postop.py @@ -2,54 +2,22 @@ from __future__ import annotations from .configurerequest import ConfigureRequest, ConfigureRequestTypedDict -from circlemind_sdk.types import BaseModel, Nullable, UNSET_SENTINEL +from circlemind_sdk.types import BaseModel from circlemind_sdk.utils import FieldMetadata, PathParamMetadata, RequestMetadata -from pydantic import model_serializer from typing_extensions import Annotated, TypedDict class CreateGraphGraphGraphNamePostRequestTypedDict(TypedDict): - graph_name: Nullable[str] + graph_name: str configure_request: ConfigureRequestTypedDict class CreateGraphGraphGraphNamePostRequest(BaseModel): graph_name: Annotated[ - Nullable[str], - FieldMetadata(path=PathParamMetadata(style="simple", explode=False)), + str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) ] configure_request: Annotated[ ConfigureRequest, FieldMetadata(request=RequestMetadata(media_type="application/json")), ] - - @model_serializer(mode="wrap") - def serialize_model(self, handler): - optional_fields = [] - nullable_fields = ["graph_name"] - null_default_fields = [] - - serialized = handler(self) - - m = {} - - for n, f in self.model_fields.items(): - k = f.alias or n - val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val - - return m diff --git a/src/circlemind_sdk/models/delete_graph_graph_graph_name_delete_postop.py b/src/circlemind_sdk/models/delete_graph_graph_graph_name_delete_postop.py new file mode 100644 index 0000000..e503fbc --- /dev/null +++ b/src/circlemind_sdk/models/delete_graph_graph_graph_name_delete_postop.py @@ -0,0 +1,16 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" + +from __future__ import annotations +from circlemind_sdk.types import BaseModel +from circlemind_sdk.utils import FieldMetadata, PathParamMetadata +from typing_extensions import Annotated, TypedDict + + +class DeleteGraphGraphGraphNameDeletePostRequestTypedDict(TypedDict): + graph_name: str + + +class DeleteGraphGraphGraphNameDeletePostRequest(BaseModel): + graph_name: Annotated[ + str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) + ] diff --git a/src/circlemind_sdk/models/memoryrequest.py b/src/circlemind_sdk/models/downloadgraphresponse.py similarity index 71% rename from src/circlemind_sdk/models/memoryrequest.py rename to src/circlemind_sdk/models/downloadgraphresponse.py index 413803f..dd239f1 100644 --- a/src/circlemind_sdk/models/memoryrequest.py +++ b/src/circlemind_sdk/models/downloadgraphresponse.py @@ -13,25 +13,24 @@ from typing_extensions import Annotated, NotRequired, TypedDict -class MemoryRequestTypedDict(TypedDict): - memory: str - memory_id: NotRequired[Nullable[str]] - metadata: NotRequired[Nullable[str]] +class DownloadGraphResponseTypedDict(TypedDict): + r"""Data model for graph download response.""" + presigned_url: str + last_modified: NotRequired[Nullable[int]] -class MemoryRequest(BaseModel): - memory: str - memory_id: Annotated[OptionalNullable[str], pydantic.Field(alias="memoryId")] = ( - UNSET - ) +class DownloadGraphResponse(BaseModel): + r"""Data model for graph download response.""" - metadata: OptionalNullable[str] = UNSET + presigned_url: Annotated[str, pydantic.Field(alias="presignedUrl")] + + last_modified: OptionalNullable[int] = UNSET @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = ["memoryId", "metadata"] - nullable_fields = ["memoryId", "metadata"] + optional_fields = ["last_modified"] + nullable_fields = ["last_modified"] null_default_fields = [] serialized = handler(self) diff --git a/src/circlemind_sdk/models/get_graph_configuration_graph_graph_name_configuration_getop.py b/src/circlemind_sdk/models/get_graph_configuration_graph_graph_name_configuration_getop.py index bb6c5f7..e4395f8 100644 --- a/src/circlemind_sdk/models/get_graph_configuration_graph_graph_name_configuration_getop.py +++ b/src/circlemind_sdk/models/get_graph_configuration_graph_graph_name_configuration_getop.py @@ -1,48 +1,16 @@ """Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" from __future__ import annotations -from circlemind_sdk.types import BaseModel, Nullable, UNSET_SENTINEL +from circlemind_sdk.types import BaseModel from circlemind_sdk.utils import FieldMetadata, PathParamMetadata -from pydantic import model_serializer from typing_extensions import Annotated, TypedDict class GetGraphConfigurationGraphGraphNameConfigurationGetRequestTypedDict(TypedDict): - graph_name: Nullable[str] + graph_name: str class GetGraphConfigurationGraphGraphNameConfigurationGetRequest(BaseModel): graph_name: Annotated[ - Nullable[str], - FieldMetadata(path=PathParamMetadata(style="simple", explode=False)), + str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) ] - - @model_serializer(mode="wrap") - def serialize_model(self, handler): - optional_fields = [] - nullable_fields = ["graph_name"] - null_default_fields = [] - - serialized = handler(self) - - m = {} - - for n, f in self.model_fields.items(): - k = f.alias or n - val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val - - return m diff --git a/src/circlemind_sdk/models/get_graphml_graph_graph_name_graphml_getop.py b/src/circlemind_sdk/models/get_graphml_graph_graph_name_graphml_getop.py new file mode 100644 index 0000000..cddd349 --- /dev/null +++ b/src/circlemind_sdk/models/get_graphml_graph_graph_name_graphml_getop.py @@ -0,0 +1,16 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +from circlemind_sdk.types import BaseModel +from circlemind_sdk.utils import FieldMetadata, PathParamMetadata +from typing_extensions import Annotated, TypedDict + + +class GetGraphmlGraphGraphNameGraphmlGetRequestTypedDict(TypedDict): + graph_name: str + + +class GetGraphmlGraphGraphNameGraphmlGetRequest(BaseModel): + graph_name: Annotated[ + str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) + ] diff --git a/src/circlemind_sdk/models/get_insert_handler_graph_graph_name_insert_request_id_getop.py b/src/circlemind_sdk/models/get_insert_handler_graph_graph_name_insert_request_id_getop.py deleted file mode 100644 index 451d34e..0000000 --- a/src/circlemind_sdk/models/get_insert_handler_graph_graph_name_insert_request_id_getop.py +++ /dev/null @@ -1,61 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" - -from __future__ import annotations -from circlemind_sdk.types import BaseModel, Nullable, UNSET_SENTINEL -from circlemind_sdk.utils import FieldMetadata, PathParamMetadata, QueryParamMetadata -import pydantic -from pydantic import model_serializer -from typing_extensions import Annotated, TypedDict - - -class GetInsertHandlerGraphGraphNameInsertRequestIDGetRequestTypedDict(TypedDict): - graph_name: Nullable[str] - request_id: str - request_time: int - - -class GetInsertHandlerGraphGraphNameInsertRequestIDGetRequest(BaseModel): - graph_name: Annotated[ - Nullable[str], - FieldMetadata(path=PathParamMetadata(style="simple", explode=False)), - ] - - request_id: Annotated[ - str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) - ] - - request_time: Annotated[ - int, - pydantic.Field(alias="requestTime"), - FieldMetadata(query=QueryParamMetadata(style="form", explode=True)), - ] - - @model_serializer(mode="wrap") - def serialize_model(self, handler): - optional_fields = [] - nullable_fields = ["graph_name"] - null_default_fields = [] - - serialized = handler(self) - - m = {} - - for n, f in self.model_fields.items(): - k = f.alias or n - val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val - - return m diff --git a/src/circlemind_sdk/models/get_insert_status_graph_graph_name_insert_request_id_getop.py b/src/circlemind_sdk/models/get_insert_status_graph_graph_name_insert_request_id_getop.py new file mode 100644 index 0000000..638abd4 --- /dev/null +++ b/src/circlemind_sdk/models/get_insert_status_graph_graph_name_insert_request_id_getop.py @@ -0,0 +1,29 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +from circlemind_sdk.types import BaseModel +from circlemind_sdk.utils import FieldMetadata, PathParamMetadata, QueryParamMetadata +import pydantic +from typing_extensions import Annotated, TypedDict + + +class GetInsertStatusGraphGraphNameInsertRequestIDGetRequestTypedDict(TypedDict): + graph_name: str + request_id: str + request_time: int + + +class GetInsertStatusGraphGraphNameInsertRequestIDGetRequest(BaseModel): + graph_name: Annotated[ + str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) + ] + + request_id: Annotated[ + str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) + ] + + request_time: Annotated[ + int, + pydantic.Field(alias="requestTime"), + FieldMetadata(query=QueryParamMetadata(style="form", explode=True)), + ] diff --git a/src/circlemind_sdk/models/get_query_handler_graph_graph_name_query_request_id_getop.py b/src/circlemind_sdk/models/get_query_handler_graph_graph_name_query_request_id_getop.py deleted file mode 100644 index 83f2fc9..0000000 --- a/src/circlemind_sdk/models/get_query_handler_graph_graph_name_query_request_id_getop.py +++ /dev/null @@ -1,61 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" - -from __future__ import annotations -from circlemind_sdk.types import BaseModel, Nullable, UNSET_SENTINEL -from circlemind_sdk.utils import FieldMetadata, PathParamMetadata, QueryParamMetadata -import pydantic -from pydantic import model_serializer -from typing_extensions import Annotated, TypedDict - - -class GetQueryHandlerGraphGraphNameQueryRequestIDGetRequestTypedDict(TypedDict): - graph_name: Nullable[str] - request_id: str - request_time: int - - -class GetQueryHandlerGraphGraphNameQueryRequestIDGetRequest(BaseModel): - graph_name: Annotated[ - Nullable[str], - FieldMetadata(path=PathParamMetadata(style="simple", explode=False)), - ] - - request_id: Annotated[ - str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) - ] - - request_time: Annotated[ - int, - pydantic.Field(alias="requestTime"), - FieldMetadata(query=QueryParamMetadata(style="form", explode=True)), - ] - - @model_serializer(mode="wrap") - def serialize_model(self, handler): - optional_fields = [] - nullable_fields = ["graph_name"] - null_default_fields = [] - - serialized = handler(self) - - m = {} - - for n, f in self.model_fields.items(): - k = f.alias or n - val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val - - return m diff --git a/src/circlemind_sdk/models/get_query_status_graph_graph_name_query_request_id_getop.py b/src/circlemind_sdk/models/get_query_status_graph_graph_name_query_request_id_getop.py new file mode 100644 index 0000000..e712e65 --- /dev/null +++ b/src/circlemind_sdk/models/get_query_status_graph_graph_name_query_request_id_getop.py @@ -0,0 +1,29 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +from circlemind_sdk.types import BaseModel +from circlemind_sdk.utils import FieldMetadata, PathParamMetadata, QueryParamMetadata +import pydantic +from typing_extensions import Annotated, TypedDict + + +class GetQueryStatusGraphGraphNameQueryRequestIDGetRequestTypedDict(TypedDict): + graph_name: str + request_id: str + request_time: int + + +class GetQueryStatusGraphGraphNameQueryRequestIDGetRequest(BaseModel): + graph_name: Annotated[ + str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) + ] + + request_id: Annotated[ + str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) + ] + + request_time: Annotated[ + int, + pydantic.Field(alias="requestTime"), + FieldMetadata(query=QueryParamMetadata(style="form", explode=True)), + ] diff --git a/src/circlemind_sdk/models/graphlistresponse.py b/src/circlemind_sdk/models/graphlistresponse.py new file mode 100644 index 0000000..998de30 --- /dev/null +++ b/src/circlemind_sdk/models/graphlistresponse.py @@ -0,0 +1,20 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" + +from __future__ import annotations +from circlemind_sdk.types import BaseModel +from typing import List +from typing_extensions import TypedDict + + +class GraphListResponseTypedDict(TypedDict): + r"""Data model for graph list response.""" + + graphs: List[str] + r"""List of existing graphs""" + + +class GraphListResponse(BaseModel): + r"""Data model for graph list response.""" + + graphs: List[str] + r"""List of existing graphs""" diff --git a/src/circlemind_sdk/models/get_graph_list_graph_getop.py b/src/circlemind_sdk/models/insertrequest.py similarity index 52% rename from src/circlemind_sdk/models/get_graph_list_graph_getop.py rename to src/circlemind_sdk/models/insertrequest.py index ed5199f..77f83b1 100644 --- a/src/circlemind_sdk/models/get_graph_list_graph_getop.py +++ b/src/circlemind_sdk/models/insertrequest.py @@ -8,25 +8,46 @@ UNSET, UNSET_SENTINEL, ) -from circlemind_sdk.utils import FieldMetadata, QueryParamMetadata +import pydantic from pydantic import model_serializer from typing_extensions import Annotated, NotRequired, TypedDict -class GetGraphListGraphGetRequestTypedDict(TypedDict): - graph_name: NotRequired[Nullable[str]] +class InsertRequestTypedDict(TypedDict): + r"""Data model for insert request.""" + memory: str + r"""Raw text to be inserted into the graph""" + metadata: NotRequired[Nullable[str]] + r"""Stringified JSON dictionary containing any metadata to link to the given memory. -class GetGraphListGraphGetRequest(BaseModel): - graph_name: Annotated[ - OptionalNullable[str], - FieldMetadata(query=QueryParamMetadata(style="form", explode=True)), - ] = UNSET + (i.e., `'{\"id\": \"asdash-234fdsc-erwer-wqes2\", \"url\": \"https://example.com\"}'`) + """ + memory_id: NotRequired[Nullable[str]] + r"""Reserved""" + + +class InsertRequest(BaseModel): + r"""Data model for insert request.""" + + memory: str + r"""Raw text to be inserted into the graph""" + + metadata: OptionalNullable[str] = UNSET + r"""Stringified JSON dictionary containing any metadata to link to the given memory. 
+ + (i.e., `'{\"id\": \"asdash-234fdsc-erwer-wqes2\", \"url\": \"https://example.com\"}'`) + """ + + memory_id: Annotated[OptionalNullable[str], pydantic.Field(alias="memoryId")] = ( + UNSET + ) + r"""Reserved""" @model_serializer(mode="wrap") def serialize_model(self, handler): - optional_fields = ["graph_name"] - nullable_fields = ["graph_name"] + optional_fields = ["metadata", "memoryId"] + nullable_fields = ["metadata", "memoryId"] null_default_fields = [] serialized = handler(self) diff --git a/src/circlemind_sdk/models/insertresponse.py b/src/circlemind_sdk/models/insertresponse.py index 48a92e7..adf0500 100644 --- a/src/circlemind_sdk/models/insertresponse.py +++ b/src/circlemind_sdk/models/insertresponse.py @@ -7,14 +7,24 @@ class InsertResponseTypedDict(TypedDict): - memory_id: str + r"""Data model for insert response.""" + request_id: str - request_time: str + r"""Unique request ID to be used to track the request status""" + request_time: int + r"""Further value to provide to track the request status""" + memory_id: str + r"""Reserved""" class InsertResponse(BaseModel): - memory_id: Annotated[str, pydantic.Field(alias="memoryId")] + r"""Data model for insert response.""" request_id: Annotated[str, pydantic.Field(alias="requestId")] + r"""Unique request ID to be used to track the request status""" + + request_time: Annotated[int, pydantic.Field(alias="requestTime")] + r"""Further value to provide to track the request status""" - request_time: Annotated[str, pydantic.Field(alias="requestTime")] + memory_id: Annotated[str, pydantic.Field(alias="memoryId")] + r"""Reserved""" diff --git a/src/circlemind_sdk/models/planresponse.py b/src/circlemind_sdk/models/planresponse.py new file mode 100644 index 0000000..ba6cfc6 --- /dev/null +++ b/src/circlemind_sdk/models/planresponse.py @@ -0,0 +1,31 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" + +from __future__ import annotations +from circlemind_sdk.types import BaseModel +import pydantic +from typing_extensions import Annotated, TypedDict + + +class PlanResponseTypedDict(TypedDict): + r"""Data model for plan response.""" + + requests_count: float + r"""Total number of requests used for the current period""" + requests_max: int + r"""Number of available requests for the active plan""" + plan_id: str + plan_ttl: int + + +class PlanResponse(BaseModel): + r"""Data model for plan response.""" + + requests_count: Annotated[float, pydantic.Field(alias="requestsCount")] + r"""Total number of requests used for the current period""" + + requests_max: Annotated[int, pydantic.Field(alias="requestsMax")] + r"""Number of available requests for the active plan""" + + plan_id: Annotated[str, pydantic.Field(alias="planId")] + + plan_ttl: Annotated[int, pydantic.Field(alias="planTTL")] diff --git a/src/circlemind_sdk/models/post_graph_configuration_graph_graph_name_configuration_postop.py b/src/circlemind_sdk/models/post_graph_configuration_graph_graph_name_configuration_postop.py index ba8ae3c..95fda6b 100644 --- a/src/circlemind_sdk/models/post_graph_configuration_graph_graph_name_configuration_postop.py +++ b/src/circlemind_sdk/models/post_graph_configuration_graph_graph_name_configuration_postop.py @@ -2,54 +2,22 @@ from __future__ import annotations from .configurerequest import ConfigureRequest, ConfigureRequestTypedDict -from circlemind_sdk.types import BaseModel, Nullable, UNSET_SENTINEL +from circlemind_sdk.types import BaseModel from circlemind_sdk.utils import FieldMetadata, PathParamMetadata, RequestMetadata -from pydantic import model_serializer from typing_extensions import Annotated, TypedDict class PostGraphConfigurationGraphGraphNameConfigurationPostRequestTypedDict(TypedDict): - graph_name: Nullable[str] + graph_name: str configure_request: ConfigureRequestTypedDict class PostGraphConfigurationGraphGraphNameConfigurationPostRequest(BaseModel): graph_name: Annotated[ - Nullable[str], - FieldMetadata(path=PathParamMetadata(style="simple", explode=False)), + str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) ] configure_request: Annotated[ ConfigureRequest, FieldMetadata(request=RequestMetadata(media_type="application/json")), ] - - @model_serializer(mode="wrap") - def serialize_model(self, handler): - optional_fields = [] - nullable_fields = ["graph_name"] - null_default_fields = [] - - serialized = handler(self) - - m = {} - - for n, f in self.model_fields.items(): - k = f.alias or n - val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val - - return m diff --git a/src/circlemind_sdk/models/post_insert_files_graph_graph_name_files_postop.py b/src/circlemind_sdk/models/post_insert_files_graph_graph_name_files_postop.py new file mode 100644 index 0000000..5048590 --- /dev/null +++ b/src/circlemind_sdk/models/post_insert_files_graph_graph_name_files_postop.py @@ -0,0 +1,28 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" + +from __future__ import annotations +from .body_post_insert_files_graph_graph_name_files_post import ( + BodyPostInsertFilesGraphGraphNameFilesPost, + BodyPostInsertFilesGraphGraphNameFilesPostTypedDict, +) +from circlemind_sdk.types import BaseModel +from circlemind_sdk.utils import FieldMetadata, PathParamMetadata, RequestMetadata +from typing_extensions import Annotated, TypedDict + + +class PostInsertFilesGraphGraphNameFilesPostRequestTypedDict(TypedDict): + graph_name: str + body_post_insert_files_graph_graph_name_files_post: ( + BodyPostInsertFilesGraphGraphNameFilesPostTypedDict + ) + + +class PostInsertFilesGraphGraphNameFilesPostRequest(BaseModel): + graph_name: Annotated[ + str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) + ] + + body_post_insert_files_graph_graph_name_files_post: Annotated[ + BodyPostInsertFilesGraphGraphNameFilesPost, + FieldMetadata(request=RequestMetadata(media_type="multipart/form-data")), + ] diff --git a/src/circlemind_sdk/models/post_insert_graph_graph_name_insert_postop.py b/src/circlemind_sdk/models/post_insert_graph_graph_name_insert_postop.py index 3d778cc..b9f1b46 100644 --- a/src/circlemind_sdk/models/post_insert_graph_graph_name_insert_postop.py +++ b/src/circlemind_sdk/models/post_insert_graph_graph_name_insert_postop.py @@ -1,55 +1,23 @@ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" from __future__ import annotations -from .memoryrequest import MemoryRequest, MemoryRequestTypedDict -from circlemind_sdk.types import BaseModel, Nullable, UNSET_SENTINEL +from .insertrequest import InsertRequest, InsertRequestTypedDict +from circlemind_sdk.types import BaseModel from circlemind_sdk.utils import FieldMetadata, PathParamMetadata, RequestMetadata -from pydantic import model_serializer from typing_extensions import Annotated, TypedDict class PostInsertGraphGraphNameInsertPostRequestTypedDict(TypedDict): - graph_name: Nullable[str] - memory_request: MemoryRequestTypedDict + graph_name: str + insert_request: InsertRequestTypedDict class PostInsertGraphGraphNameInsertPostRequest(BaseModel): graph_name: Annotated[ - Nullable[str], - FieldMetadata(path=PathParamMetadata(style="simple", explode=False)), + str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) ] - memory_request: Annotated[ - MemoryRequest, + insert_request: Annotated[ + InsertRequest, FieldMetadata(request=RequestMetadata(media_type="application/json")), ] - - @model_serializer(mode="wrap") - def serialize_model(self, handler): - optional_fields = [] - nullable_fields = ["graph_name"] - null_default_fields = [] - - serialized = handler(self) - - m = {} - - for n, f in self.model_fields.items(): - k = f.alias or n - val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val - - return m diff --git a/src/circlemind_sdk/models/post_query_graph_graph_name_query_postop.py b/src/circlemind_sdk/models/post_query_graph_graph_name_query_postop.py index 4e77e54..e66065f 100644 --- a/src/circlemind_sdk/models/post_query_graph_graph_name_query_postop.py +++ b/src/circlemind_sdk/models/post_query_graph_graph_name_query_postop.py @@ -2,54 +2,22 @@ from __future__ 
import annotations from .queryrequest import QueryRequest, QueryRequestTypedDict -from circlemind_sdk.types import BaseModel, Nullable, UNSET_SENTINEL +from circlemind_sdk.types import BaseModel from circlemind_sdk.utils import FieldMetadata, PathParamMetadata, RequestMetadata -from pydantic import model_serializer from typing_extensions import Annotated, TypedDict class PostQueryGraphGraphNameQueryPostRequestTypedDict(TypedDict): - graph_name: Nullable[str] + graph_name: str query_request: QueryRequestTypedDict class PostQueryGraphGraphNameQueryPostRequest(BaseModel): graph_name: Annotated[ - Nullable[str], - FieldMetadata(path=PathParamMetadata(style="simple", explode=False)), + str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False)) ] query_request: Annotated[ QueryRequest, FieldMetadata(request=RequestMetadata(media_type="application/json")), ] - - @model_serializer(mode="wrap") - def serialize_model(self, handler): - optional_fields = [] - nullable_fields = ["graph_name"] - null_default_fields = [] - - serialized = handler(self) - - m = {} - - for n, f in self.model_fields.items(): - k = f.alias or n - val = serialized.get(k) - serialized.pop(k, None) - - optional_nullable = k in optional_fields and k in nullable_fields - is_set = ( - self.__pydantic_fields_set__.intersection({n}) - or k in null_default_fields - ) # pylint: disable=no-member - - if val is not None and val != UNSET_SENTINEL: - m[k] = val - elif val != UNSET_SENTINEL and ( - not k in optional_fields or (optional_nullable and is_set) - ): - m[k] = val - - return m diff --git a/src/circlemind_sdk/models/queryrequest.py b/src/circlemind_sdk/models/queryrequest.py index 02ee1f1..deeab0a 100644 --- a/src/circlemind_sdk/models/queryrequest.py +++ b/src/circlemind_sdk/models/queryrequest.py @@ -13,14 +13,30 @@ class QueryRequestTypedDict(TypedDict): + r"""Data model for query request.""" + query: str parameters: NotRequired[Nullable[str]] + r"""Optional stringified JSON parameters: + - only_context (bool = false): return only the context of the query instead of processing it via an LLM; + - with_references (bool = false): include references in the response. + + (i.e., `'{\"only_context\": true}'`) + """ class QueryRequest(BaseModel): + r"""Data model for query request.""" + query: str parameters: OptionalNullable[str] = UNSET + r"""Optional stringified JSON parameters: + - only_context (bool = false): return only the context of the query instead of processing it via an LLM; + - with_references (bool = false): include references in the response. 
+ + (i.e., `'{\"only_context\": true}'`) + """ @model_serializer(mode="wrap") def serialize_model(self, handler): diff --git a/src/circlemind_sdk/models/queryresponse.py b/src/circlemind_sdk/models/queryresponse.py index 21f91b5..1f85cae 100644 --- a/src/circlemind_sdk/models/queryresponse.py +++ b/src/circlemind_sdk/models/queryresponse.py @@ -7,11 +7,19 @@ class QueryResponseTypedDict(TypedDict): + r"""Data model for query response.""" + request_id: str + r"""Unique request ID to be used to track the request status""" request_time: int + r"""Further value to provide to track the request status""" class QueryResponse(BaseModel): + r"""Data model for query response.""" + request_id: Annotated[str, pydantic.Field(alias="requestId")] + r"""Unique request ID to be used to track the request status""" request_time: Annotated[int, pydantic.Field(alias="requestTime")] + r"""Further value to provide to track the request status""" diff --git a/src/circlemind_sdk/models/validationerror.py b/src/circlemind_sdk/models/validationerror.py index 1b2cb1e..e0cc3dd 100644 --- a/src/circlemind_sdk/models/validationerror.py +++ b/src/circlemind_sdk/models/validationerror.py @@ -3,13 +3,13 @@ from __future__ import annotations from circlemind_sdk.types import BaseModel from typing import List, Union -from typing_extensions import TypedDict +from typing_extensions import TypeAliasType, TypedDict -LocTypedDict = Union[str, int] +LocTypedDict = TypeAliasType("LocTypedDict", Union[str, int]) -Loc = Union[str, int] +Loc = TypeAliasType("Loc", Union[str, int]) class ValidationErrorTypedDict(TypedDict): diff --git a/src/circlemind_sdk/sdk.py b/src/circlemind_sdk/sdk.py index 8edb339..01ba34e 100644 --- a/src/circlemind_sdk/sdk.py +++ b/src/circlemind_sdk/sdk.py @@ -7,10 +7,10 @@ from .utils.retries import RetryConfig from circlemind_sdk import models, utils from circlemind_sdk._hooks import HookContext, SDKHooks -from circlemind_sdk.types import Nullable, OptionalNullable, UNSET +from circlemind_sdk.types import OptionalNullable, UNSET from circlemind_sdk.utils import get_security_from_env import httpx -from typing import Any, Callable, Dict, Optional, Union +from typing import Any, Callable, Dict, Mapping, Optional, Union class CirclemindSDK(BaseSDK): @@ -92,20 +92,495 @@ def __init__( # pylint: disable=protected-access self.sdk_configuration.__dict__["_hooks"] = hooks + def __enter__(self): + return self + + async def __aenter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + if self.sdk_configuration.client is not None: + self.sdk_configuration.client.close() + + async def __aexit__(self, exc_type, exc_val, exc_tb): + if self.sdk_configuration.async_client is not None: + await self.sdk_configuration.async_client.aclose() + + def get_user_plan_plan_get( + self, + *, + retries: OptionalNullable[utils.RetryConfig] = UNSET, + server_url: Optional[str] = None, + timeout_ms: Optional[int] = None, + http_headers: Optional[Mapping[str, str]] = None, + ) -> models.PlanResponse: + r"""User plan + + Return the active plan for the current user and its usage metrics. + + :param retries: Override the default retry configuration for this method + :param server_url: Override the default server URL for this method + :param timeout_ms: Override the default request timeout configuration for this method in milliseconds + :param http_headers: Additional headers to set or replace on requests. 
+ """ + base_url = None + url_variables = None + if timeout_ms is None: + timeout_ms = self.sdk_configuration.timeout_ms + + if server_url is not None: + base_url = server_url + req = self.build_request( + method="GET", + path="/plan", + base_url=base_url, + url_variables=url_variables, + request=None, + request_body_required=False, + request_has_path_params=False, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + http_headers=http_headers, + security=self.sdk_configuration.security, + timeout_ms=timeout_ms, + ) + + if retries == UNSET: + if self.sdk_configuration.retry_config is not UNSET: + retries = self.sdk_configuration.retry_config + else: + retries = utils.RetryConfig( + "backoff", utils.BackoffStrategy(500, 60000, 1.5, 3600000), True + ) + + retry_config = None + if isinstance(retries, utils.RetryConfig): + retry_config = (retries, ["5XX"]) + + http_res = self.do_request( + hook_ctx=HookContext( + operation_id="get_user_plan_plan_get", + oauth2_scopes=[], + security_source=get_security_from_env( + self.sdk_configuration.security, models.Security + ), + ), + request=req, + error_status_codes=["4XX", "5XX"], + retry_config=retry_config, + ) + + if utils.match_response(http_res, "200", "application/json"): + return utils.unmarshal_json(http_res.text, models.PlanResponse) + if utils.match_response(http_res, ["4XX", "5XX"], "*"): + http_res_text = utils.stream_to_text(http_res) + raise models.SDKError( + "API error occurred", http_res.status_code, http_res_text, http_res + ) + + content_type = http_res.headers.get("Content-Type") + http_res_text = utils.stream_to_text(http_res) + raise models.SDKError( + f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", + http_res.status_code, + http_res_text, + http_res, + ) + + async def get_user_plan_plan_get_async( + self, + *, + retries: OptionalNullable[utils.RetryConfig] = UNSET, + server_url: Optional[str] = None, + timeout_ms: Optional[int] = None, + http_headers: Optional[Mapping[str, str]] = None, + ) -> models.PlanResponse: + r"""User plan + + Return the active plan for the current user and its usage metrics. + + :param retries: Override the default retry configuration for this method + :param server_url: Override the default server URL for this method + :param timeout_ms: Override the default request timeout configuration for this method in milliseconds + :param http_headers: Additional headers to set or replace on requests. 
+ """ + base_url = None + url_variables = None + if timeout_ms is None: + timeout_ms = self.sdk_configuration.timeout_ms + + if server_url is not None: + base_url = server_url + req = self.build_request_async( + method="GET", + path="/plan", + base_url=base_url, + url_variables=url_variables, + request=None, + request_body_required=False, + request_has_path_params=False, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + http_headers=http_headers, + security=self.sdk_configuration.security, + timeout_ms=timeout_ms, + ) + + if retries == UNSET: + if self.sdk_configuration.retry_config is not UNSET: + retries = self.sdk_configuration.retry_config + else: + retries = utils.RetryConfig( + "backoff", utils.BackoffStrategy(500, 60000, 1.5, 3600000), True + ) + + retry_config = None + if isinstance(retries, utils.RetryConfig): + retry_config = (retries, ["5XX"]) + + http_res = await self.do_request_async( + hook_ctx=HookContext( + operation_id="get_user_plan_plan_get", + oauth2_scopes=[], + security_source=get_security_from_env( + self.sdk_configuration.security, models.Security + ), + ), + request=req, + error_status_codes=["4XX", "5XX"], + retry_config=retry_config, + ) + + if utils.match_response(http_res, "200", "application/json"): + return utils.unmarshal_json(http_res.text, models.PlanResponse) + if utils.match_response(http_res, ["4XX", "5XX"], "*"): + http_res_text = await utils.stream_to_text_async(http_res) + raise models.SDKError( + "API error occurred", http_res.status_code, http_res_text, http_res + ) + + content_type = http_res.headers.get("Content-Type") + http_res_text = await utils.stream_to_text_async(http_res) + raise models.SDKError( + f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", + http_res.status_code, + http_res_text, + http_res, + ) + def get_graph_configuration( self, *, - graph_name: Nullable[str], + graph_name: str, + retries: OptionalNullable[utils.RetryConfig] = UNSET, + server_url: Optional[str] = None, + timeout_ms: Optional[int] = None, + http_headers: Optional[Mapping[str, str]] = None, + ) -> models.ConfigureResponse: + r"""Graph configuration (get) + + Retrieve the configuration details of a specific graph by its name. + + :param graph_name: + :param retries: Override the default retry configuration for this method + :param server_url: Override the default server URL for this method + :param timeout_ms: Override the default request timeout configuration for this method in milliseconds + :param http_headers: Additional headers to set or replace on requests. 
+ """ + base_url = None + url_variables = None + if timeout_ms is None: + timeout_ms = self.sdk_configuration.timeout_ms + + if server_url is not None: + base_url = server_url + + request = models.GetGraphConfigurationGraphGraphNameConfigurationGetRequest( + graph_name=graph_name, + ) + + req = self.build_request( + method="GET", + path="/graph/{graph_name}/configuration", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=False, + request_has_path_params=True, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + http_headers=http_headers, + security=self.sdk_configuration.security, + timeout_ms=timeout_ms, + ) + + if retries == UNSET: + if self.sdk_configuration.retry_config is not UNSET: + retries = self.sdk_configuration.retry_config + else: + retries = utils.RetryConfig( + "backoff", utils.BackoffStrategy(500, 60000, 1.5, 3600000), True + ) + + retry_config = None + if isinstance(retries, utils.RetryConfig): + retry_config = (retries, ["5XX"]) + + http_res = self.do_request( + hook_ctx=HookContext( + operation_id="get_graph_configuration_graph__graph_name__configuration_get", + oauth2_scopes=[], + security_source=get_security_from_env( + self.sdk_configuration.security, models.Security + ), + ), + request=req, + error_status_codes=["422", "4XX", "5XX"], + retry_config=retry_config, + ) + + data: Any = None + if utils.match_response(http_res, "200", "application/json"): + return utils.unmarshal_json(http_res.text, models.ConfigureResponse) + if utils.match_response(http_res, "422", "application/json"): + data = utils.unmarshal_json(http_res.text, models.HTTPValidationErrorData) + raise models.HTTPValidationError(data=data) + if utils.match_response(http_res, ["4XX", "5XX"], "*"): + http_res_text = utils.stream_to_text(http_res) + raise models.SDKError( + "API error occurred", http_res.status_code, http_res_text, http_res + ) + + content_type = http_res.headers.get("Content-Type") + http_res_text = utils.stream_to_text(http_res) + raise models.SDKError( + f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", + http_res.status_code, + http_res_text, + http_res, + ) + + async def get_graph_configuration_async( + self, + *, + graph_name: str, + retries: OptionalNullable[utils.RetryConfig] = UNSET, + server_url: Optional[str] = None, + timeout_ms: Optional[int] = None, + http_headers: Optional[Mapping[str, str]] = None, + ) -> models.ConfigureResponse: + r"""Graph configuration (get) + + Retrieve the configuration details of a specific graph by its name. + + :param graph_name: + :param retries: Override the default retry configuration for this method + :param server_url: Override the default server URL for this method + :param timeout_ms: Override the default request timeout configuration for this method in milliseconds + :param http_headers: Additional headers to set or replace on requests. 
+ """ + base_url = None + url_variables = None + if timeout_ms is None: + timeout_ms = self.sdk_configuration.timeout_ms + + if server_url is not None: + base_url = server_url + + request = models.GetGraphConfigurationGraphGraphNameConfigurationGetRequest( + graph_name=graph_name, + ) + + req = self.build_request_async( + method="GET", + path="/graph/{graph_name}/configuration", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=False, + request_has_path_params=True, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + http_headers=http_headers, + security=self.sdk_configuration.security, + timeout_ms=timeout_ms, + ) + + if retries == UNSET: + if self.sdk_configuration.retry_config is not UNSET: + retries = self.sdk_configuration.retry_config + else: + retries = utils.RetryConfig( + "backoff", utils.BackoffStrategy(500, 60000, 1.5, 3600000), True + ) + + retry_config = None + if isinstance(retries, utils.RetryConfig): + retry_config = (retries, ["5XX"]) + + http_res = await self.do_request_async( + hook_ctx=HookContext( + operation_id="get_graph_configuration_graph__graph_name__configuration_get", + oauth2_scopes=[], + security_source=get_security_from_env( + self.sdk_configuration.security, models.Security + ), + ), + request=req, + error_status_codes=["422", "4XX", "5XX"], + retry_config=retry_config, + ) + + data: Any = None + if utils.match_response(http_res, "200", "application/json"): + return utils.unmarshal_json(http_res.text, models.ConfigureResponse) + if utils.match_response(http_res, "422", "application/json"): + data = utils.unmarshal_json(http_res.text, models.HTTPValidationErrorData) + raise models.HTTPValidationError(data=data) + if utils.match_response(http_res, ["4XX", "5XX"], "*"): + http_res_text = await utils.stream_to_text_async(http_res) + raise models.SDKError( + "API error occurred", http_res.status_code, http_res_text, http_res + ) + + content_type = http_res.headers.get("Content-Type") + http_res_text = await utils.stream_to_text_async(http_res) + raise models.SDKError( + f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", + http_res.status_code, + http_res_text, + http_res, + ) + + def set_graph_configuration( + self, + *, + graph_name: str, + configure_request: Union[ + models.ConfigureRequest, models.ConfigureRequestTypedDict + ], + retries: OptionalNullable[utils.RetryConfig] = UNSET, + server_url: Optional[str] = None, + timeout_ms: Optional[int] = None, + http_headers: Optional[Mapping[str, str]] = None, + ) -> models.ConfigureResponse: + r"""Graph configuration (set) + + Update the configuration details of a specific graph by its name. + + :param graph_name: + :param configure_request: + :param retries: Override the default retry configuration for this method + :param server_url: Override the default server URL for this method + :param timeout_ms: Override the default request timeout configuration for this method in milliseconds + :param http_headers: Additional headers to set or replace on requests. 
+ """ + base_url = None + url_variables = None + if timeout_ms is None: + timeout_ms = self.sdk_configuration.timeout_ms + + if server_url is not None: + base_url = server_url + + request = models.PostGraphConfigurationGraphGraphNameConfigurationPostRequest( + graph_name=graph_name, + configure_request=utils.get_pydantic_model( + configure_request, models.ConfigureRequest + ), + ) + + req = self.build_request( + method="POST", + path="/graph/{graph_name}/configuration", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=True, + request_has_path_params=True, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + http_headers=http_headers, + security=self.sdk_configuration.security, + get_serialized_body=lambda: utils.serialize_request_body( + request.configure_request, False, False, "json", models.ConfigureRequest + ), + timeout_ms=timeout_ms, + ) + + if retries == UNSET: + if self.sdk_configuration.retry_config is not UNSET: + retries = self.sdk_configuration.retry_config + else: + retries = utils.RetryConfig( + "backoff", utils.BackoffStrategy(500, 60000, 1.5, 3600000), True + ) + + retry_config = None + if isinstance(retries, utils.RetryConfig): + retry_config = (retries, ["5XX"]) + + http_res = self.do_request( + hook_ctx=HookContext( + operation_id="post_graph_configuration_graph__graph_name__configuration_post", + oauth2_scopes=[], + security_source=get_security_from_env( + self.sdk_configuration.security, models.Security + ), + ), + request=req, + error_status_codes=["422", "4XX", "5XX"], + retry_config=retry_config, + ) + + data: Any = None + if utils.match_response(http_res, "200", "application/json"): + return utils.unmarshal_json(http_res.text, models.ConfigureResponse) + if utils.match_response(http_res, "422", "application/json"): + data = utils.unmarshal_json(http_res.text, models.HTTPValidationErrorData) + raise models.HTTPValidationError(data=data) + if utils.match_response(http_res, ["4XX", "5XX"], "*"): + http_res_text = utils.stream_to_text(http_res) + raise models.SDKError( + "API error occurred", http_res.status_code, http_res_text, http_res + ) + + content_type = http_res.headers.get("Content-Type") + http_res_text = utils.stream_to_text(http_res) + raise models.SDKError( + f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", + http_res.status_code, + http_res_text, + http_res, + ) + + async def set_graph_configuration_async( + self, + *, + graph_name: str, + configure_request: Union[ + models.ConfigureRequest, models.ConfigureRequestTypedDict + ], retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, timeout_ms: Optional[int] = None, + http_headers: Optional[Mapping[str, str]] = None, ) -> models.ConfigureResponse: - r"""Get Graph Configuration + r"""Graph configuration (set) + + Update the configuration details of a specific graph by its name. :param graph_name: + :param configure_request: :param retries: Override the default retry configuration for this method :param server_url: Override the default server URL for this method :param timeout_ms: Override the default request timeout configuration for this method in milliseconds + :param http_headers: Additional headers to set or replace on requests. 
""" base_url = None url_variables = None @@ -115,22 +590,29 @@ def get_graph_configuration( if server_url is not None: base_url = server_url - request = models.GetGraphConfigurationGraphGraphNameConfigurationGetRequest( + request = models.PostGraphConfigurationGraphGraphNameConfigurationPostRequest( graph_name=graph_name, + configure_request=utils.get_pydantic_model( + configure_request, models.ConfigureRequest + ), ) - req = self.build_request( - method="GET", + req = self.build_request_async( + method="POST", path="/graph/{graph_name}/configuration", base_url=base_url, url_variables=url_variables, request=request, - request_body_required=False, + request_body_required=True, request_has_path_params=True, request_has_query_params=True, user_agent_header="user-agent", accept_header_value="application/json", + http_headers=http_headers, security=self.sdk_configuration.security, + get_serialized_body=lambda: utils.serialize_request_body( + request.configure_request, False, False, "json", models.ConfigureRequest + ), timeout_ms=timeout_ms, ) @@ -146,9 +628,9 @@ def get_graph_configuration( if isinstance(retries, utils.RetryConfig): retry_config = (retries, ["5XX"]) - http_res = self.do_request( + http_res = await self.do_request_async( hook_ctx=HookContext( - operation_id="get_graph_configuration_graph__graph_name__configuration_get", + operation_id="post_graph_configuration_graph__graph_name__configuration_post", oauth2_scopes=[], security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -166,13 +648,13 @@ def get_graph_configuration( data = utils.unmarshal_json(http_res.text, models.HTTPValidationErrorData) raise models.HTTPValidationError(data=data) if utils.match_response(http_res, ["4XX", "5XX"], "*"): - http_res_text = utils.stream_to_text(http_res) + http_res_text = await utils.stream_to_text_async(http_res) raise models.SDKError( "API error occurred", http_res.status_code, http_res_text, http_res ) content_type = http_res.headers.get("Content-Type") - http_res_text = utils.stream_to_text(http_res) + http_res_text = await utils.stream_to_text_async(http_res) raise models.SDKError( f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, @@ -180,20 +662,22 @@ def get_graph_configuration( http_res, ) - async def get_graph_configuration_async( + def list_graphs( self, *, - graph_name: Nullable[str], retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, timeout_ms: Optional[int] = None, - ) -> models.ConfigureResponse: - r"""Get Graph Configuration + http_headers: Optional[Mapping[str, str]] = None, + ) -> models.GraphListResponse: + r"""List graphs + + Return the list of all existing graphs for the current user. - :param graph_name: :param retries: Override the default retry configuration for this method :param server_url: Override the default server URL for this method :param timeout_ms: Override the default request timeout configuration for this method in milliseconds + :param http_headers: Additional headers to set or replace on requests. 
""" base_url = None url_variables = None @@ -202,22 +686,100 @@ async def get_graph_configuration_async( if server_url is not None: base_url = server_url + req = self.build_request( + method="GET", + path="/graph", + base_url=base_url, + url_variables=url_variables, + request=None, + request_body_required=False, + request_has_path_params=False, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + http_headers=http_headers, + security=self.sdk_configuration.security, + timeout_ms=timeout_ms, + ) - request = models.GetGraphConfigurationGraphGraphNameConfigurationGetRequest( - graph_name=graph_name, + if retries == UNSET: + if self.sdk_configuration.retry_config is not UNSET: + retries = self.sdk_configuration.retry_config + else: + retries = utils.RetryConfig( + "backoff", utils.BackoffStrategy(500, 60000, 1.5, 3600000), True + ) + + retry_config = None + if isinstance(retries, utils.RetryConfig): + retry_config = (retries, ["5XX"]) + + http_res = self.do_request( + hook_ctx=HookContext( + operation_id="get_graph_list_graph_get", + oauth2_scopes=[], + security_source=get_security_from_env( + self.sdk_configuration.security, models.Security + ), + ), + request=req, + error_status_codes=["4XX", "5XX"], + retry_config=retry_config, + ) + + if utils.match_response(http_res, "200", "application/json"): + return utils.unmarshal_json(http_res.text, models.GraphListResponse) + if utils.match_response(http_res, ["4XX", "5XX"], "*"): + http_res_text = utils.stream_to_text(http_res) + raise models.SDKError( + "API error occurred", http_res.status_code, http_res_text, http_res + ) + + content_type = http_res.headers.get("Content-Type") + http_res_text = utils.stream_to_text(http_res) + raise models.SDKError( + f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", + http_res.status_code, + http_res_text, + http_res, ) + async def list_graphs_async( + self, + *, + retries: OptionalNullable[utils.RetryConfig] = UNSET, + server_url: Optional[str] = None, + timeout_ms: Optional[int] = None, + http_headers: Optional[Mapping[str, str]] = None, + ) -> models.GraphListResponse: + r"""List graphs + + Return the list of all existing graphs for the current user. + + :param retries: Override the default retry configuration for this method + :param server_url: Override the default server URL for this method + :param timeout_ms: Override the default request timeout configuration for this method in milliseconds + :param http_headers: Additional headers to set or replace on requests. 
+ """ + base_url = None + url_variables = None + if timeout_ms is None: + timeout_ms = self.sdk_configuration.timeout_ms + + if server_url is not None: + base_url = server_url req = self.build_request_async( method="GET", - path="/graph/{graph_name}/configuration", + path="/graph", base_url=base_url, url_variables=url_variables, - request=request, + request=None, request_body_required=False, - request_has_path_params=True, + request_has_path_params=False, request_has_query_params=True, user_agent_header="user-agent", accept_header_value="application/json", + http_headers=http_headers, security=self.sdk_configuration.security, timeout_ms=timeout_ms, ) @@ -236,23 +798,19 @@ async def get_graph_configuration_async( http_res = await self.do_request_async( hook_ctx=HookContext( - operation_id="get_graph_configuration_graph__graph_name__configuration_get", + operation_id="get_graph_list_graph_get", oauth2_scopes=[], security_source=get_security_from_env( self.sdk_configuration.security, models.Security ), ), request=req, - error_status_codes=["422", "4XX", "5XX"], + error_status_codes=["4XX", "5XX"], retry_config=retry_config, ) - data: Any = None if utils.match_response(http_res, "200", "application/json"): - return utils.unmarshal_json(http_res.text, models.ConfigureResponse) - if utils.match_response(http_res, "422", "application/json"): - data = utils.unmarshal_json(http_res.text, models.HTTPValidationErrorData) - raise models.HTTPValidationError(data=data) + return utils.unmarshal_json(http_res.text, models.GraphListResponse) if utils.match_response(http_res, ["4XX", "5XX"], "*"): http_res_text = await utils.stream_to_text_async(http_res) raise models.SDKError( @@ -268,24 +826,28 @@ async def get_graph_configuration_async( http_res, ) - def create_graph_configuration( + def create_graph( self, *, - graph_name: Nullable[str], + graph_name: str, configure_request: Union[ models.ConfigureRequest, models.ConfigureRequestTypedDict ], retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, timeout_ms: Optional[int] = None, + http_headers: Optional[Mapping[str, str]] = None, ) -> models.ConfigureResponse: - r"""Post Graph Configuration + r"""Create new graph + + Create a new graph :param graph_name: :param configure_request: :param retries: Override the default retry configuration for this method :param server_url: Override the default server URL for this method :param timeout_ms: Override the default request timeout configuration for this method in milliseconds + :param http_headers: Additional headers to set or replace on requests. 
""" base_url = None url_variables = None @@ -295,7 +857,7 @@ def create_graph_configuration( if server_url is not None: base_url = server_url - request = models.PostGraphConfigurationGraphGraphNameConfigurationPostRequest( + request = models.CreateGraphGraphGraphNamePostRequest( graph_name=graph_name, configure_request=utils.get_pydantic_model( configure_request, models.ConfigureRequest @@ -304,7 +866,7 @@ def create_graph_configuration( req = self.build_request( method="POST", - path="/graph/{graph_name}/configuration", + path="/graph/{graph_name}", base_url=base_url, url_variables=url_variables, request=request, @@ -313,6 +875,7 @@ def create_graph_configuration( request_has_query_params=True, user_agent_header="user-agent", accept_header_value="application/json", + http_headers=http_headers, security=self.sdk_configuration.security, get_serialized_body=lambda: utils.serialize_request_body( request.configure_request, False, False, "json", models.ConfigureRequest @@ -334,7 +897,7 @@ def create_graph_configuration( http_res = self.do_request( hook_ctx=HookContext( - operation_id="post_graph_configuration_graph__graph_name__configuration_post", + operation_id="create_graph_graph__graph_name__post", oauth2_scopes=[], security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -366,24 +929,28 @@ def create_graph_configuration( http_res, ) - async def create_graph_configuration_async( + async def create_graph_async( self, *, - graph_name: Nullable[str], + graph_name: str, configure_request: Union[ models.ConfigureRequest, models.ConfigureRequestTypedDict ], retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, timeout_ms: Optional[int] = None, + http_headers: Optional[Mapping[str, str]] = None, ) -> models.ConfigureResponse: - r"""Post Graph Configuration + r"""Create new graph + + Create a new graph :param graph_name: :param configure_request: :param retries: Override the default retry configuration for this method :param server_url: Override the default server URL for this method :param timeout_ms: Override the default request timeout configuration for this method in milliseconds + :param http_headers: Additional headers to set or replace on requests. 
""" base_url = None url_variables = None @@ -393,7 +960,7 @@ async def create_graph_configuration_async( if server_url is not None: base_url = server_url - request = models.PostGraphConfigurationGraphGraphNameConfigurationPostRequest( + request = models.CreateGraphGraphGraphNamePostRequest( graph_name=graph_name, configure_request=utils.get_pydantic_model( configure_request, models.ConfigureRequest @@ -402,7 +969,7 @@ async def create_graph_configuration_async( req = self.build_request_async( method="POST", - path="/graph/{graph_name}/configuration", + path="/graph/{graph_name}", base_url=base_url, url_variables=url_variables, request=request, @@ -411,6 +978,7 @@ async def create_graph_configuration_async( request_has_query_params=True, user_agent_header="user-agent", accept_header_value="application/json", + http_headers=http_headers, security=self.sdk_configuration.security, get_serialized_body=lambda: utils.serialize_request_body( request.configure_request, False, False, "json", models.ConfigureRequest @@ -432,7 +1000,7 @@ async def create_graph_configuration_async( http_res = await self.do_request_async( hook_ctx=HookContext( - operation_id="post_graph_configuration_graph__graph_name__configuration_post", + operation_id="create_graph_graph__graph_name__post", oauth2_scopes=[], security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -464,20 +1032,24 @@ async def create_graph_configuration_async( http_res, ) - def get_graph_list( + def delete_graph( self, *, - graph_name: OptionalNullable[str] = UNSET, + graph_name: str, retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, timeout_ms: Optional[int] = None, + http_headers: Optional[Mapping[str, str]] = None, ) -> Any: - r"""Get Graph List + r"""Delete existing graph + + Delete the selected graph. :param graph_name: :param retries: Override the default retry configuration for this method :param server_url: Override the default server URL for this method :param timeout_ms: Override the default request timeout configuration for this method in milliseconds + :param http_headers: Additional headers to set or replace on requests. 
""" base_url = None url_variables = None @@ -487,21 +1059,22 @@ def get_graph_list( if server_url is not None: base_url = server_url - request = models.GetGraphListGraphGetRequest( + request = models.DeleteGraphGraphGraphNameDeletePostRequest( graph_name=graph_name, ) req = self.build_request( - method="GET", - path="/graph", + method="POST", + path="/graph/{graph_name}/delete", base_url=base_url, url_variables=url_variables, request=request, request_body_required=False, - request_has_path_params=False, + request_has_path_params=True, request_has_query_params=True, user_agent_header="user-agent", accept_header_value="application/json", + http_headers=http_headers, security=self.sdk_configuration.security, timeout_ms=timeout_ms, ) @@ -520,7 +1093,7 @@ def get_graph_list( http_res = self.do_request( hook_ctx=HookContext( - operation_id="get_graph_list_graph_get", + operation_id="delete_graph_graph__graph_name__delete_post", oauth2_scopes=[], security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -552,20 +1125,24 @@ def get_graph_list( http_res, ) - async def get_graph_list_async( + async def delete_graph_async( self, *, - graph_name: OptionalNullable[str] = UNSET, + graph_name: str, retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, timeout_ms: Optional[int] = None, + http_headers: Optional[Mapping[str, str]] = None, ) -> Any: - r"""Get Graph List + r"""Delete existing graph + + Delete the selected graph. :param graph_name: :param retries: Override the default retry configuration for this method :param server_url: Override the default server URL for this method :param timeout_ms: Override the default request timeout configuration for this method in milliseconds + :param http_headers: Additional headers to set or replace on requests. """ base_url = None url_variables = None @@ -575,21 +1152,22 @@ async def get_graph_list_async( if server_url is not None: base_url = server_url - request = models.GetGraphListGraphGetRequest( + request = models.DeleteGraphGraphGraphNameDeletePostRequest( graph_name=graph_name, ) req = self.build_request_async( - method="GET", - path="/graph", + method="POST", + path="/graph/{graph_name}/delete", base_url=base_url, url_variables=url_variables, request=request, request_body_required=False, - request_has_path_params=False, + request_has_path_params=True, request_has_query_params=True, user_agent_header="user-agent", accept_header_value="application/json", + http_headers=http_headers, security=self.sdk_configuration.security, timeout_ms=timeout_ms, ) @@ -608,7 +1186,7 @@ async def get_graph_list_async( http_res = await self.do_request_async( hook_ctx=HookContext( - operation_id="get_graph_list_graph_get", + operation_id="delete_graph_graph__graph_name__delete_post", oauth2_scopes=[], security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -640,24 +1218,24 @@ async def get_graph_list_async( http_res, ) - def create_graph( + def download_graphml( self, *, - graph_name: Nullable[str], - configure_request: Union[ - models.ConfigureRequest, models.ConfigureRequestTypedDict - ], + graph_name: str, retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, timeout_ms: Optional[int] = None, - ) -> Any: - r"""Create Graph + http_headers: Optional[Mapping[str, str]] = None, + ) -> models.DownloadGraphResponse: + r"""Download graphml + + Generate a download URL for the graph in graphml format. 
:param graph_name: - :param configure_request: :param retries: Override the default retry configuration for this method :param server_url: Override the default server URL for this method :param timeout_ms: Override the default request timeout configuration for this method in milliseconds + :param http_headers: Additional headers to set or replace on requests. """ base_url = None url_variables = None @@ -667,28 +1245,23 @@ def create_graph( if server_url is not None: base_url = server_url - request = models.CreateGraphGraphGraphNamePostRequest( + request = models.GetGraphmlGraphGraphNameGraphmlGetRequest( graph_name=graph_name, - configure_request=utils.get_pydantic_model( - configure_request, models.ConfigureRequest - ), ) req = self.build_request( - method="POST", - path="/graph/{graph_name}", + method="GET", + path="/graph/{graph_name}/graphml", base_url=base_url, url_variables=url_variables, request=request, - request_body_required=True, + request_body_required=False, request_has_path_params=True, request_has_query_params=True, user_agent_header="user-agent", accept_header_value="application/json", + http_headers=http_headers, security=self.sdk_configuration.security, - get_serialized_body=lambda: utils.serialize_request_body( - request.configure_request, False, False, "json", models.ConfigureRequest - ), timeout_ms=timeout_ms, ) @@ -706,7 +1279,7 @@ def create_graph( http_res = self.do_request( hook_ctx=HookContext( - operation_id="create_graph_graph__graph_name__post", + operation_id="get_graphml_graph__graph_name__graphml_get", oauth2_scopes=[], security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -719,7 +1292,7 @@ def create_graph( data: Any = None if utils.match_response(http_res, "200", "application/json"): - return utils.unmarshal_json(http_res.text, Any) + return utils.unmarshal_json(http_res.text, models.DownloadGraphResponse) if utils.match_response(http_res, "422", "application/json"): data = utils.unmarshal_json(http_res.text, models.HTTPValidationErrorData) raise models.HTTPValidationError(data=data) @@ -738,24 +1311,24 @@ def create_graph( http_res, ) - async def create_graph_async( + async def download_graphml_async( self, *, - graph_name: Nullable[str], - configure_request: Union[ - models.ConfigureRequest, models.ConfigureRequestTypedDict - ], + graph_name: str, retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, timeout_ms: Optional[int] = None, - ) -> Any: - r"""Create Graph + http_headers: Optional[Mapping[str, str]] = None, + ) -> models.DownloadGraphResponse: + r"""Download graphml + + Generate a download URL for the graph in graphml format. :param graph_name: - :param configure_request: :param retries: Override the default retry configuration for this method :param server_url: Override the default server URL for this method :param timeout_ms: Override the default request timeout configuration for this method in milliseconds + :param http_headers: Additional headers to set or replace on requests. 
""" base_url = None url_variables = None @@ -765,28 +1338,23 @@ async def create_graph_async( if server_url is not None: base_url = server_url - request = models.CreateGraphGraphGraphNamePostRequest( + request = models.GetGraphmlGraphGraphNameGraphmlGetRequest( graph_name=graph_name, - configure_request=utils.get_pydantic_model( - configure_request, models.ConfigureRequest - ), ) req = self.build_request_async( - method="POST", - path="/graph/{graph_name}", + method="GET", + path="/graph/{graph_name}/graphml", base_url=base_url, url_variables=url_variables, request=request, - request_body_required=True, + request_body_required=False, request_has_path_params=True, request_has_query_params=True, user_agent_header="user-agent", accept_header_value="application/json", + http_headers=http_headers, security=self.sdk_configuration.security, - get_serialized_body=lambda: utils.serialize_request_body( - request.configure_request, False, False, "json", models.ConfigureRequest - ), timeout_ms=timeout_ms, ) @@ -804,7 +1372,7 @@ async def create_graph_async( http_res = await self.do_request_async( hook_ctx=HookContext( - operation_id="create_graph_graph__graph_name__post", + operation_id="get_graphml_graph__graph_name__graphml_get", oauth2_scopes=[], security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -817,7 +1385,7 @@ async def create_graph_async( data: Any = None if utils.match_response(http_res, "200", "application/json"): - return utils.unmarshal_json(http_res.text, Any) + return utils.unmarshal_json(http_res.text, models.DownloadGraphResponse) if utils.match_response(http_res, "422", "application/json"): data = utils.unmarshal_json(http_res.text, models.HTTPValidationErrorData) raise models.HTTPValidationError(data=data) @@ -836,22 +1404,26 @@ async def create_graph_async( http_res, ) - def create_query( + def query( self, *, - graph_name: Nullable[str], + graph_name: str, query_request: Union[models.QueryRequest, models.QueryRequestTypedDict], retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, timeout_ms: Optional[int] = None, + http_headers: Optional[Mapping[str, str]] = None, ) -> models.QueryResponse: - r"""Post Query + r"""Query memory + + Send a query request to the graph. :param graph_name: :param query_request: :param retries: Override the default retry configuration for this method :param server_url: Override the default server URL for this method :param timeout_ms: Override the default request timeout configuration for this method in milliseconds + :param http_headers: Additional headers to set or replace on requests. """ base_url = None url_variables = None @@ -877,6 +1449,7 @@ def create_query( request_has_query_params=True, user_agent_header="user-agent", accept_header_value="application/json", + http_headers=http_headers, security=self.sdk_configuration.security, get_serialized_body=lambda: utils.serialize_request_body( request.query_request, False, False, "json", models.QueryRequest @@ -930,22 +1503,26 @@ def create_query( http_res, ) - async def create_query_async( + async def query_async( self, *, - graph_name: Nullable[str], + graph_name: str, query_request: Union[models.QueryRequest, models.QueryRequestTypedDict], retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, timeout_ms: Optional[int] = None, + http_headers: Optional[Mapping[str, str]] = None, ) -> models.QueryResponse: - r"""Post Query + r"""Query memory + + Send a query request to the graph. 
:param graph_name: :param query_request: :param retries: Override the default retry configuration for this method :param server_url: Override the default server URL for this method :param timeout_ms: Override the default request timeout configuration for this method in milliseconds + :param http_headers: Additional headers to set or replace on requests. """ base_url = None url_variables = None @@ -971,6 +1548,7 @@ async def create_query_async( request_has_query_params=True, user_agent_header="user-agent", accept_header_value="application/json", + http_headers=http_headers, security=self.sdk_configuration.security, get_serialized_body=lambda: utils.serialize_request_body( request.query_request, False, False, "json", models.QueryRequest @@ -1024,17 +1602,20 @@ async def create_query_async( http_res, ) - def get_query_handler( + def get_query_status( self, *, - graph_name: Nullable[str], + graph_name: str, request_id: str, request_time: int, retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, timeout_ms: Optional[int] = None, + http_headers: Optional[Mapping[str, str]] = None, ) -> models.RequestStatus: - r"""Get Query Handler + r"""Check query request status + + Return the status of an existing query request. :param graph_name: :param request_id: @@ -1042,6 +1623,7 @@ def get_query_handler( :param retries: Override the default retry configuration for this method :param server_url: Override the default server URL for this method :param timeout_ms: Override the default request timeout configuration for this method in milliseconds + :param http_headers: Additional headers to set or replace on requests. """ base_url = None url_variables = None @@ -1051,7 +1633,7 @@ def get_query_handler( if server_url is not None: base_url = server_url - request = models.GetQueryHandlerGraphGraphNameQueryRequestIDGetRequest( + request = models.GetQueryStatusGraphGraphNameQueryRequestIDGetRequest( graph_name=graph_name, request_id=request_id, request_time=request_time, @@ -1068,6 +1650,7 @@ def get_query_handler( request_has_query_params=True, user_agent_header="user-agent", accept_header_value="application/json", + http_headers=http_headers, security=self.sdk_configuration.security, timeout_ms=timeout_ms, ) @@ -1086,7 +1669,7 @@ def get_query_handler( http_res = self.do_request( hook_ctx=HookContext( - operation_id="get_query_handler_graph__graph_name__query__request_id__get", + operation_id="get_query_status_graph__graph_name__query__request_id__get", oauth2_scopes=[], security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -1118,17 +1701,20 @@ def get_query_handler( http_res, ) - async def get_query_handler_async( + async def get_query_status_async( self, *, - graph_name: Nullable[str], + graph_name: str, request_id: str, request_time: int, retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, timeout_ms: Optional[int] = None, + http_headers: Optional[Mapping[str, str]] = None, ) -> models.RequestStatus: - r"""Get Query Handler + r"""Check query request status + + Return the status of an existing query request. 
:param graph_name: :param request_id: @@ -1136,6 +1722,7 @@ async def get_query_handler_async( :param retries: Override the default retry configuration for this method :param server_url: Override the default server URL for this method :param timeout_ms: Override the default request timeout configuration for this method in milliseconds + :param http_headers: Additional headers to set or replace on requests. """ base_url = None url_variables = None @@ -1145,7 +1732,7 @@ async def get_query_handler_async( if server_url is not None: base_url = server_url - request = models.GetQueryHandlerGraphGraphNameQueryRequestIDGetRequest( + request = models.GetQueryStatusGraphGraphNameQueryRequestIDGetRequest( graph_name=graph_name, request_id=request_id, request_time=request_time, @@ -1162,6 +1749,7 @@ async def get_query_handler_async( request_has_query_params=True, user_agent_header="user-agent", accept_header_value="application/json", + http_headers=http_headers, security=self.sdk_configuration.security, timeout_ms=timeout_ms, ) @@ -1180,7 +1768,7 @@ async def get_query_handler_async( http_res = await self.do_request_async( hook_ctx=HookContext( - operation_id="get_query_handler_graph__graph_name__query__request_id__get", + operation_id="get_query_status_graph__graph_name__query__request_id__get", oauth2_scopes=[], security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -1212,22 +1800,26 @@ async def get_query_handler_async( http_res, ) - def create_insert( + def add( self, *, - graph_name: Nullable[str], - memory_request: Union[models.MemoryRequest, models.MemoryRequestTypedDict], + graph_name: str, + insert_request: Union[models.InsertRequest, models.InsertRequestTypedDict], retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, timeout_ms: Optional[int] = None, + http_headers: Optional[Mapping[str, str]] = None, ) -> models.InsertResponse: - r"""Post Insert + r"""Add memory + + Create a new memory in the graph using raw text. :param graph_name: - :param memory_request: + :param insert_request: :param retries: Override the default retry configuration for this method :param server_url: Override the default server URL for this method :param timeout_ms: Override the default request timeout configuration for this method in milliseconds + :param http_headers: Additional headers to set or replace on requests. 
""" base_url = None url_variables = None @@ -1239,8 +1831,8 @@ def create_insert( request = models.PostInsertGraphGraphNameInsertPostRequest( graph_name=graph_name, - memory_request=utils.get_pydantic_model( - memory_request, models.MemoryRequest + insert_request=utils.get_pydantic_model( + insert_request, models.InsertRequest ), ) @@ -1255,9 +1847,10 @@ def create_insert( request_has_query_params=True, user_agent_header="user-agent", accept_header_value="application/json", + http_headers=http_headers, security=self.sdk_configuration.security, get_serialized_body=lambda: utils.serialize_request_body( - request.memory_request, False, False, "json", models.MemoryRequest + request.insert_request, False, False, "json", models.InsertRequest ), timeout_ms=timeout_ms, ) @@ -1308,22 +1901,26 @@ def create_insert( http_res, ) - async def create_insert_async( + async def add_async( self, *, - graph_name: Nullable[str], - memory_request: Union[models.MemoryRequest, models.MemoryRequestTypedDict], + graph_name: str, + insert_request: Union[models.InsertRequest, models.InsertRequestTypedDict], retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, timeout_ms: Optional[int] = None, + http_headers: Optional[Mapping[str, str]] = None, ) -> models.InsertResponse: - r"""Post Insert + r"""Add memory + + Create a new memory in the graph using raw text. :param graph_name: - :param memory_request: + :param insert_request: :param retries: Override the default retry configuration for this method :param server_url: Override the default server URL for this method :param timeout_ms: Override the default request timeout configuration for this method in milliseconds + :param http_headers: Additional headers to set or replace on requests. """ base_url = None url_variables = None @@ -1335,8 +1932,8 @@ async def create_insert_async( request = models.PostInsertGraphGraphNameInsertPostRequest( graph_name=graph_name, - memory_request=utils.get_pydantic_model( - memory_request, models.MemoryRequest + insert_request=utils.get_pydantic_model( + insert_request, models.InsertRequest ), ) @@ -1351,9 +1948,10 @@ async def create_insert_async( request_has_query_params=True, user_agent_header="user-agent", accept_header_value="application/json", + http_headers=http_headers, security=self.sdk_configuration.security, get_serialized_body=lambda: utils.serialize_request_body( - request.memory_request, False, False, "json", models.MemoryRequest + request.insert_request, False, False, "json", models.InsertRequest ), timeout_ms=timeout_ms, ) @@ -1404,25 +2002,29 @@ async def create_insert_async( http_res, ) - def create_graph_files( + def add_from_files( self, *, - graph_name: Nullable[str], - body_add_files_graph_graph_name_files_post: Union[ - models.BodyAddFilesGraphGraphNameFilesPost, - models.BodyAddFilesGraphGraphNameFilesPostTypedDict, + graph_name: str, + body_post_insert_files_graph_graph_name_files_post: Union[ + models.BodyPostInsertFilesGraphGraphNameFilesPost, + models.BodyPostInsertFilesGraphGraphNameFilesPostTypedDict, ], retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, timeout_ms: Optional[int] = None, - ) -> Any: - r"""Add Files + http_headers: Optional[Mapping[str, str]] = None, + ) -> models.InsertResponse: + r"""Add memory (from files) + + Create a new memory in the graph from files. 
:param graph_name: - :param body_add_files_graph_graph_name_files_post: + :param body_post_insert_files_graph_graph_name_files_post: :param retries: Override the default retry configuration for this method :param server_url: Override the default server URL for this method :param timeout_ms: Override the default request timeout configuration for this method in milliseconds + :param http_headers: Additional headers to set or replace on requests. """ base_url = None url_variables = None @@ -1432,11 +2034,11 @@ def create_graph_files( if server_url is not None: base_url = server_url - request = models.AddFilesGraphGraphNameFilesPostRequest( + request = models.PostInsertFilesGraphGraphNameFilesPostRequest( graph_name=graph_name, - body_add_files_graph_graph_name_files_post=utils.get_pydantic_model( - body_add_files_graph_graph_name_files_post, - models.BodyAddFilesGraphGraphNameFilesPost, + body_post_insert_files_graph_graph_name_files_post=utils.get_pydantic_model( + body_post_insert_files_graph_graph_name_files_post, + models.BodyPostInsertFilesGraphGraphNameFilesPost, ), ) @@ -1451,13 +2053,14 @@ def create_graph_files( request_has_query_params=True, user_agent_header="user-agent", accept_header_value="application/json", + http_headers=http_headers, security=self.sdk_configuration.security, get_serialized_body=lambda: utils.serialize_request_body( - request.body_add_files_graph_graph_name_files_post, + request.body_post_insert_files_graph_graph_name_files_post, False, False, "multipart", - models.BodyAddFilesGraphGraphNameFilesPost, + models.BodyPostInsertFilesGraphGraphNameFilesPost, ), timeout_ms=timeout_ms, ) @@ -1476,7 +2079,7 @@ def create_graph_files( http_res = self.do_request( hook_ctx=HookContext( - operation_id="add_files_graph__graph_name__files_post", + operation_id="post_insert_files_graph__graph_name__files_post", oauth2_scopes=[], security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -1489,7 +2092,7 @@ def create_graph_files( data: Any = None if utils.match_response(http_res, "200", "application/json"): - return utils.unmarshal_json(http_res.text, Any) + return utils.unmarshal_json(http_res.text, models.InsertResponse) if utils.match_response(http_res, "422", "application/json"): data = utils.unmarshal_json(http_res.text, models.HTTPValidationErrorData) raise models.HTTPValidationError(data=data) @@ -1508,25 +2111,29 @@ def create_graph_files( http_res, ) - async def create_graph_files_async( + async def add_from_files_async( self, *, - graph_name: Nullable[str], - body_add_files_graph_graph_name_files_post: Union[ - models.BodyAddFilesGraphGraphNameFilesPost, - models.BodyAddFilesGraphGraphNameFilesPostTypedDict, + graph_name: str, + body_post_insert_files_graph_graph_name_files_post: Union[ + models.BodyPostInsertFilesGraphGraphNameFilesPost, + models.BodyPostInsertFilesGraphGraphNameFilesPostTypedDict, ], retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, timeout_ms: Optional[int] = None, - ) -> Any: - r"""Add Files + http_headers: Optional[Mapping[str, str]] = None, + ) -> models.InsertResponse: + r"""Add memory (from files) + + Create a new memory in the graph from files. 
:param graph_name: - :param body_add_files_graph_graph_name_files_post: + :param body_post_insert_files_graph_graph_name_files_post: :param retries: Override the default retry configuration for this method :param server_url: Override the default server URL for this method :param timeout_ms: Override the default request timeout configuration for this method in milliseconds + :param http_headers: Additional headers to set or replace on requests. """ base_url = None url_variables = None @@ -1536,11 +2143,11 @@ async def create_graph_files_async( if server_url is not None: base_url = server_url - request = models.AddFilesGraphGraphNameFilesPostRequest( + request = models.PostInsertFilesGraphGraphNameFilesPostRequest( graph_name=graph_name, - body_add_files_graph_graph_name_files_post=utils.get_pydantic_model( - body_add_files_graph_graph_name_files_post, - models.BodyAddFilesGraphGraphNameFilesPost, + body_post_insert_files_graph_graph_name_files_post=utils.get_pydantic_model( + body_post_insert_files_graph_graph_name_files_post, + models.BodyPostInsertFilesGraphGraphNameFilesPost, ), ) @@ -1555,13 +2162,14 @@ async def create_graph_files_async( request_has_query_params=True, user_agent_header="user-agent", accept_header_value="application/json", + http_headers=http_headers, security=self.sdk_configuration.security, get_serialized_body=lambda: utils.serialize_request_body( - request.body_add_files_graph_graph_name_files_post, + request.body_post_insert_files_graph_graph_name_files_post, False, False, "multipart", - models.BodyAddFilesGraphGraphNameFilesPost, + models.BodyPostInsertFilesGraphGraphNameFilesPost, ), timeout_ms=timeout_ms, ) @@ -1580,7 +2188,7 @@ async def create_graph_files_async( http_res = await self.do_request_async( hook_ctx=HookContext( - operation_id="add_files_graph__graph_name__files_post", + operation_id="post_insert_files_graph__graph_name__files_post", oauth2_scopes=[], security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -1593,7 +2201,7 @@ async def create_graph_files_async( data: Any = None if utils.match_response(http_res, "200", "application/json"): - return utils.unmarshal_json(http_res.text, Any) + return utils.unmarshal_json(http_res.text, models.InsertResponse) if utils.match_response(http_res, "422", "application/json"): data = utils.unmarshal_json(http_res.text, models.HTTPValidationErrorData) raise models.HTTPValidationError(data=data) @@ -1612,17 +2220,20 @@ async def create_graph_files_async( http_res, ) - def get_insert_handler( + def get_add_status( self, *, - graph_name: Nullable[str], + graph_name: str, request_id: str, request_time: int, retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, timeout_ms: Optional[int] = None, + http_headers: Optional[Mapping[str, str]] = None, ) -> models.RequestStatus: - r"""Get Insert Handler + r"""Check add request status + + Return the status of an existing add request. :param graph_name: :param request_id: @@ -1630,6 +2241,7 @@ def get_insert_handler( :param retries: Override the default retry configuration for this method :param server_url: Override the default server URL for this method :param timeout_ms: Override the default request timeout configuration for this method in milliseconds + :param http_headers: Additional headers to set or replace on requests. 
""" base_url = None url_variables = None @@ -1639,7 +2251,7 @@ def get_insert_handler( if server_url is not None: base_url = server_url - request = models.GetInsertHandlerGraphGraphNameInsertRequestIDGetRequest( + request = models.GetInsertStatusGraphGraphNameInsertRequestIDGetRequest( graph_name=graph_name, request_id=request_id, request_time=request_time, @@ -1656,6 +2268,7 @@ def get_insert_handler( request_has_query_params=True, user_agent_header="user-agent", accept_header_value="application/json", + http_headers=http_headers, security=self.sdk_configuration.security, timeout_ms=timeout_ms, ) @@ -1674,7 +2287,7 @@ def get_insert_handler( http_res = self.do_request( hook_ctx=HookContext( - operation_id="get_insert_handler_graph__graph_name__insert__request_id__get", + operation_id="get_insert_status_graph__graph_name__insert__request_id__get", oauth2_scopes=[], security_source=get_security_from_env( self.sdk_configuration.security, models.Security @@ -1706,17 +2319,20 @@ def get_insert_handler( http_res, ) - async def get_insert_handler_async( + async def get_add_status_async( self, *, - graph_name: Nullable[str], + graph_name: str, request_id: str, request_time: int, retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, timeout_ms: Optional[int] = None, + http_headers: Optional[Mapping[str, str]] = None, ) -> models.RequestStatus: - r"""Get Insert Handler + r"""Check add request status + + Return the status of an existing add request. :param graph_name: :param request_id: @@ -1724,6 +2340,7 @@ async def get_insert_handler_async( :param retries: Override the default retry configuration for this method :param server_url: Override the default server URL for this method :param timeout_ms: Override the default request timeout configuration for this method in milliseconds + :param http_headers: Additional headers to set or replace on requests. 
""" base_url = None url_variables = None @@ -1733,7 +2350,7 @@ async def get_insert_handler_async( if server_url is not None: base_url = server_url - request = models.GetInsertHandlerGraphGraphNameInsertRequestIDGetRequest( + request = models.GetInsertStatusGraphGraphNameInsertRequestIDGetRequest( graph_name=graph_name, request_id=request_id, request_time=request_time, @@ -1750,6 +2367,7 @@ async def get_insert_handler_async( request_has_query_params=True, user_agent_header="user-agent", accept_header_value="application/json", + http_headers=http_headers, security=self.sdk_configuration.security, timeout_ms=timeout_ms, ) @@ -1768,7 +2386,7 @@ async def get_insert_handler_async( http_res = await self.do_request_async( hook_ctx=HookContext( - operation_id="get_insert_handler_graph__graph_name__insert__request_id__get", + operation_id="get_insert_status_graph__graph_name__insert__request_id__get", oauth2_scopes=[], security_source=get_security_from_env( self.sdk_configuration.security, models.Security diff --git a/src/circlemind_sdk/sdkconfiguration.py b/src/circlemind_sdk/sdkconfiguration.py index 445e530..7ccf152 100644 --- a/src/circlemind_sdk/sdkconfiguration.py +++ b/src/circlemind_sdk/sdkconfiguration.py @@ -25,10 +25,10 @@ class SDKConfiguration: server_url: Optional[str] = "" server_idx: Optional[int] = 0 language: str = "python" - openapi_doc_version: str = "0.3.0" - sdk_version: str = "0.1.0" - gen_version: str = "2.460.1" - user_agent: str = "speakeasy-sdk/python 0.1.0 2.460.1 0.3.0 circlemind-sdk" + openapi_doc_version: str = "0.0.4" + sdk_version: str = "0.1.0-post0" + gen_version: str = "2.479.7" + user_agent: str = "speakeasy-sdk/python 0.1.0-post0 2.479.7 0.0.4 circlemind-sdk" retry_config: OptionalNullable[RetryConfig] = Field(default_factory=lambda: UNSET) timeout_ms: Optional[int] = None diff --git a/src/circlemind_sdk/utils/annotations.py b/src/circlemind_sdk/utils/annotations.py index 5b3bbb0..387874e 100644 --- a/src/circlemind_sdk/utils/annotations.py +++ b/src/circlemind_sdk/utils/annotations.py @@ -1,30 +1,55 @@ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" from enum import Enum -from typing import Any +from typing import Any, Optional def get_discriminator(model: Any, fieldname: str, key: str) -> str: - if isinstance(model, dict): - try: - return f'{model.get(key)}' - except AttributeError as e: - raise ValueError(f'Could not find discriminator key {key} in {model}') from e + """ + Recursively search for the discriminator attribute in a model. - if hasattr(model, fieldname): - attr = getattr(model, fieldname) + Args: + model (Any): The model to search within. + fieldname (str): The name of the field to search for. + key (str): The key to search for in dictionaries. - if isinstance(attr, Enum): - return f'{attr.value}' + Returns: + str: The name of the discriminator attribute. - return f'{attr}' + Raises: + ValueError: If the discriminator attribute is not found. 
+ """ + upper_fieldname = fieldname.upper() - fieldname = fieldname.upper() - if hasattr(model, fieldname): - attr = getattr(model, fieldname) + def get_field_discriminator(field: Any) -> Optional[str]: + """Search for the discriminator attribute in a given field.""" - if isinstance(attr, Enum): - return f'{attr.value}' + if isinstance(field, dict): + if key in field: + return f'{field[key]}' - return f'{attr}' + if hasattr(field, fieldname): + attr = getattr(field, fieldname) + if isinstance(attr, Enum): + return f'{attr.value}' + return f'{attr}' + + if hasattr(field, upper_fieldname): + attr = getattr(field, upper_fieldname) + if isinstance(attr, Enum): + return f'{attr.value}' + return f'{attr}' + + return None + + + if isinstance(model, list): + for field in model: + discriminator = get_field_discriminator(field) + if discriminator is not None: + return discriminator + + discriminator = get_field_discriminator(model) + if discriminator is not None: + return discriminator raise ValueError(f'Could not find discriminator field {fieldname} in {model}') diff --git a/src/circlemind_sdk/utils/eventstreaming.py b/src/circlemind_sdk/utils/eventstreaming.py index 553b386..74a63f7 100644 --- a/src/circlemind_sdk/utils/eventstreaming.py +++ b/src/circlemind_sdk/utils/eventstreaming.py @@ -2,12 +2,72 @@ import re import json -from typing import Callable, TypeVar, Optional, Generator, AsyncGenerator, Tuple +from typing import ( + Callable, + Generic, + TypeVar, + Optional, + Generator, + AsyncGenerator, + Tuple, +) import httpx T = TypeVar("T") +class EventStream(Generic[T]): + response: httpx.Response + generator: Generator[T, None, None] + + def __init__( + self, + response: httpx.Response, + decoder: Callable[[str], T], + sentinel: Optional[str] = None, + ): + self.response = response + self.generator = stream_events(response, decoder, sentinel) + + def __iter__(self): + return self + + def __next__(self): + return next(self.generator) + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + self.response.close() + + +class EventStreamAsync(Generic[T]): + response: httpx.Response + generator: AsyncGenerator[T, None] + + def __init__( + self, + response: httpx.Response, + decoder: Callable[[str], T], + sentinel: Optional[str] = None, + ): + self.response = response + self.generator = stream_events_async(response, decoder, sentinel) + + def __aiter__(self): + return self + + async def __anext__(self): + return await self.generator.__anext__() + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc_val, exc_tb): + await self.response.aclose() + + class ServerEvent: id: Optional[str] = None event: Optional[str] = None diff --git a/src/circlemind_sdk/utils/forms.py b/src/circlemind_sdk/utils/forms.py index 9f5a731..0472aba 100644 --- a/src/circlemind_sdk/utils/forms.py +++ b/src/circlemind_sdk/utils/forms.py @@ -109,13 +109,12 @@ def serialize_multipart_form( if not field_metadata: continue - f_name = field.alias if field.alias is not None else name + f_name = field.alias if field.alias else name if field_metadata.file: file_fields: Dict[str, FieldInfo] = val.__class__.model_fields file_name = "" - field_name = "" content = None content_type = None @@ -131,20 +130,15 @@ def serialize_multipart_form( elif file_field_name == "content_type": content_type = getattr(val, file_field_name, None) else: - field_name = ( - file_field.alias - if file_field.alias is not None - else file_field_name - ) file_name = getattr(val, file_field_name) - 
if field_name == "" or file_name == "" or content is None: + if file_name == "" or content is None: raise ValueError("invalid multipart/form-data file") if content_type is not None: - files[field_name] = (file_name, content, content_type) + files[f_name] = (file_name, content, content_type) else: - files[field_name] = (file_name, content) + files[f_name] = (file_name, content) elif field_metadata.json: files[f_name] = ( None, diff --git a/src/circlemind_sdk/utils/requestbodies.py b/src/circlemind_sdk/utils/requestbodies.py index 4f586ae..d5240dd 100644 --- a/src/circlemind_sdk/utils/requestbodies.py +++ b/src/circlemind_sdk/utils/requestbodies.py @@ -23,7 +23,7 @@ @dataclass class SerializedRequestBody: - media_type: str + media_type: Optional[str] = None content: Optional[Any] = None data: Optional[Any] = None files: Optional[Any] = None
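
For reference, a minimal usage sketch of the regenerated surface in src/circlemind_sdk/sdk.py (list_graphs, query, get_query_status, add). The client class name CirclemindSDK, the api_key constructor argument, the QueryRequest/InsertRequest field names, and the request_id/request_time attributes on QueryResponse are assumptions not confirmed by this patch; treat them as hypothetical placeholders rather than the documented API.

import os
from circlemind_sdk import CirclemindSDK, models

# Client class name and the `api_key` keyword are assumed, not confirmed by the patch.
sdk = CirclemindSDK(api_key=os.environ["CIRCLEMIND_API_KEY"])

# GET /graph -> models.GraphListResponse
graphs = sdk.list_graphs()

# POST /graph/{graph_name}/query -> models.QueryResponse
res = sdk.query(
    graph_name="default",                               # hypothetical graph name
    query_request=models.QueryRequest(query="..."),     # `query` field name assumed
)

# GET /graph/{graph_name}/query/{request_id} -> models.RequestStatus
status = sdk.get_query_status(
    graph_name="default",
    request_id=res.request_id,                           # attribute names on QueryResponse assumed
    request_time=res.request_time,
)

# POST /graph/{graph_name}/insert -> models.InsertResponse
job = sdk.add(
    graph_name="default",
    insert_request=models.InsertRequest(memory="..."),   # `memory` field name assumed
)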
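
The regenerated utils/eventstreaming.py wraps the event generators in EventStream and EventStreamAsync context managers that close the underlying response on exit. A sketch of how the synchronous wrapper can be driven; the endpoint URL and the use of json.loads as the decoder are illustrative assumptions, not part of the patch.

import json
import httpx
from circlemind_sdk.utils.eventstreaming import EventStream

client = httpx.Client()
# Hypothetical server-sent-events endpoint, used only to exercise the wrapper.
with client.stream("GET", "https://example.com/events") as response:
    with EventStream(response, decoder=json.loads) as stream:
        for event in stream:   # __next__ delegates to stream_events(response, decoder, sentinel)
            print(event)       # the response is closed by EventStream.__exit__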
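
The rewritten get_discriminator in utils/annotations.py now also accepts a list of candidates and checks dict keys before falling back to attribute access (including the upper-cased field name). A small illustration under made-up payloads:

from circlemind_sdk.utils.annotations import get_discriminator

# A list is searched in order; the first candidate exposing the key or field wins.
payload = [{"other": 1}, {"type": "graph"}]
print(get_discriminator(payload, "type", "type"))  # -> "graph"

# Attribute access with the upper-cased fallback is also supported.
class Node:
    TYPE = "node"

print(get_discriminator(Node(), "type", "type"))   # -> "node"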