From f1d0d3457bcf0c44498e120f3fd668f4eeb90833 Mon Sep 17 00:00:00 2001 From: Claus Holbech Date: Thu, 20 Jun 2024 12:48:00 +0200 Subject: [PATCH 01/11] Refactor(plugins): Move internal cv_client code to PyAVD --- .flake8 | 2 +- .pre-commit-config.yaml | 8 +- .../arista/avd/plugins/action/cv_workflow.py | 35 +- .../plugin_utils/cv_client/__init__.py | 7 - .../api/arista/inventory/v1/__init__.py | 1985 -------- .../cv_client/api/arista/tag/v1/__init__.py | 2130 --------- .../plugin_utils/cv_client/mocked_classes.py | 104 - ansible_collections/arista/avd/pylintrc | 5 - pylintrc | 5 +- pyproject.toml | 4 +- .../pyavd/_cv}/Makefile | 14 +- .../pyavd/_cv}/README.txt | 4 +- .../pyavd/_cv}/__init__.py | 3 - .../pyavd/_cv/api}/__init__.py | 0 .../pyavd/_cv/api/arista}/__init__.py | 0 .../pyavd/_cv/api/arista/alert}/__init__.py | 0 .../_cv}/api/arista/alert/v1/__init__.py | 521 ++- .../_cv/api/arista/bugexposure}/__init__.py | 0 .../api/arista/bugexposure/v1/__init__.py | 14 +- .../_cv/api/arista/changecontrol}/__init__.py | 0 .../api/arista/changecontrol/v1/__init__.py | 416 +- .../_cv/api/arista/configlet}/__init__.py | 0 .../_cv}/api/arista/configlet/v1/__init__.py | 578 ++- .../_cv/api/arista/configstatus}/__init__.py | 0 .../api/arista/configstatus/v1/__init__.py | 822 +++- .../arista/connectivitymonitor}/__init__.py | 0 .../arista/connectivitymonitor/v1/__init__.py | 276 +- .../_cv/api/arista/dashboard}/__init__.py | 0 .../_cv}/api/arista/dashboard/v1/__init__.py | 420 +- .../api/arista/endpointlocation}/__init__.py | 0 .../arista/endpointlocation/v1/__init__.py | 147 +- .../pyavd/_cv/api/arista/event}/__init__.py | 0 .../_cv}/api/arista/event/v1/__init__.py | 14 +- .../api/arista/identityprovider}/__init__.py | 0 .../arista/identityprovider/v1/__init__.py | 276 +- .../_cv/api/arista/imagestatus}/__init__.py | 0 .../api/arista/imagestatus/v1/__init__.py | 326 +- .../_cv/api/arista/inventory}/__init__.py | 0 .../_cv/api/arista/inventory/v1/__init__.py | 3994 
+++++++++++++++++ .../_cv/api/arista/lifecycle}/__init__.py | 0 .../_cv}/api/arista/lifecycle/v1/__init__.py | 14 +- .../_cv/api/arista/redirector}/__init__.py | 0 .../_cv}/api/arista/redirector/v1/__init__.py | 145 +- .../api/arista/serviceaccount}/__init__.py | 0 .../api/arista/serviceaccount/v1/__init__.py | 38 +- .../pyavd/_cv/api/arista/studio}/__init__.py | 0 .../_cv}/api/arista/studio/v1/__init__.py | 1364 +++++- .../_cv}/api/arista/subscriptions/__init__.py | 8 +- .../pyavd/_cv/api/arista/swg}/__init__.py | 0 .../pyavd/_cv}/api/arista/swg/v1/__init__.py | 14 +- .../pyavd/_cv/api/arista/tag}/__init__.py | 0 .../pyavd/_cv}/api/arista/tag/v2/__init__.py | 14 +- .../pyavd/_cv}/api/arista/time/__init__.py | 8 +- .../_cv/api/arista/workspace/__init__.py | 3 + .../_cv}/api/arista/workspace/v1/__init__.py | 14 +- .../pyavd/_cv}/api/fmp/__init__.py | 8 +- .../pyavd/_cv/client/__init__.py | 15 +- .../pyavd/_cv}/client/change_control.py | 2 +- .../pyavd/_cv}/client/configlet.py | 2 +- .../pyavd/_cv}/client/constants.py | 0 .../pyavd/_cv}/client/exceptions.py | 14 +- .../pyavd/_cv}/client/inventory.py | 2 +- .../pyavd/_cv}/client/studio.py | 17 +- .../pyavd/_cv}/client/swg.py | 2 +- .../pyavd/_cv}/client/tag.py | 2 +- .../pyavd/_cv}/client/utils.py | 13 +- .../pyavd/_cv}/client/workspace.py | 2 +- .../arista/swg.v1/services.gen.proto | 0 .../extra_cv_protos/arista/swg.v1/swg.proto | 0 python-avd/pyavd/_cv/workflows/__init__.py | 3 + .../_cv}/workflows/create_workspace_on_cv.py | 0 .../_cv}/workflows/deploy_configs_to_cv.py | 0 .../deploy_cv_pathfinder_metadata_to_cv.py | 0 .../workflows/deploy_studio_inputs_to_cv.py | 0 .../pyavd/_cv}/workflows/deploy_tags_to_cv.py | 0 .../pyavd/_cv}/workflows/deploy_to_cv.py | 2 +- .../finalize_change_control_on_cv.py | 0 .../workflows/finalize_workspace_on_cv.py | 2 +- .../pyavd/_cv}/workflows/models.py | 0 .../_cv}/workflows/verify_devices_on_cv.py | 0 .../network_services/utils_zscaler.py | 8 +- 81 files changed, 9250 
insertions(+), 4576 deletions(-) delete mode 100644 ansible_collections/arista/avd/plugins/plugin_utils/cv_client/__init__.py delete mode 100644 ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/inventory/v1/__init__.py delete mode 100644 ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/tag/v1/__init__.py delete mode 100644 ansible_collections/arista/avd/plugins/plugin_utils/cv_client/mocked_classes.py rename {ansible_collections/arista/avd/plugins/plugin_utils/cv_client => python-avd/pyavd/_cv}/Makefile (51%) rename {ansible_collections/arista/avd/plugins/plugin_utils/cv_client => python-avd/pyavd/_cv}/README.txt (64%) rename {ansible_collections/arista/avd/plugins/plugin_utils/cv_client/client => python-avd/pyavd/_cv}/__init__.py (73%) rename {ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista => python-avd/pyavd/_cv/api}/__init__.py (100%) rename {ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/alert => python-avd/pyavd/_cv/api/arista}/__init__.py (100%) rename {ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/bugexposure => python-avd/pyavd/_cv/api/arista/alert}/__init__.py (100%) rename {ansible_collections/arista/avd/plugins/plugin_utils/cv_client => python-avd/pyavd/_cv}/api/arista/alert/v1/__init__.py (85%) rename {ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/changecontrol => python-avd/pyavd/_cv/api/arista/bugexposure}/__init__.py (100%) rename {ansible_collections/arista/avd/plugins/plugin_utils/cv_client => python-avd/pyavd/_cv}/api/arista/bugexposure/v1/__init__.py (96%) rename {ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/configlet => python-avd/pyavd/_cv/api/arista/changecontrol}/__init__.py (100%) rename {ansible_collections/arista/avd/plugins/plugin_utils/cv_client => python-avd/pyavd/_cv}/api/arista/changecontrol/v1/__init__.py (82%) rename 
{ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/configstatus => python-avd/pyavd/_cv/api/arista/configlet}/__init__.py (100%) rename {ansible_collections/arista/avd/plugins/plugin_utils/cv_client => python-avd/pyavd/_cv}/api/arista/configlet/v1/__init__.py (78%) rename {ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/connectivitymonitor => python-avd/pyavd/_cv/api/arista/configstatus}/__init__.py (100%) rename {ansible_collections/arista/avd/plugins/plugin_utils/cv_client => python-avd/pyavd/_cv}/api/arista/configstatus/v1/__init__.py (73%) rename {ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/dashboard => python-avd/pyavd/_cv/api/arista/connectivitymonitor}/__init__.py (100%) rename {ansible_collections/arista/avd/plugins/plugin_utils/cv_client => python-avd/pyavd/_cv}/api/arista/connectivitymonitor/v1/__init__.py (74%) rename {ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/endpointlocation => python-avd/pyavd/_cv/api/arista/dashboard}/__init__.py (100%) rename {ansible_collections/arista/avd/plugins/plugin_utils/cv_client => python-avd/pyavd/_cv}/api/arista/dashboard/v1/__init__.py (77%) rename {ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/event => python-avd/pyavd/_cv/api/arista/endpointlocation}/__init__.py (100%) rename {ansible_collections/arista/avd/plugins/plugin_utils/cv_client => python-avd/pyavd/_cv}/api/arista/endpointlocation/v1/__init__.py (84%) rename {ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/identityprovider => python-avd/pyavd/_cv/api/arista/event}/__init__.py (100%) rename {ansible_collections/arista/avd/plugins/plugin_utils/cv_client => python-avd/pyavd/_cv}/api/arista/event/v1/__init__.py (99%) rename {ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/imagestatus => python-avd/pyavd/_cv/api/arista/identityprovider}/__init__.py (100%) rename 
{ansible_collections/arista/avd/plugins/plugin_utils/cv_client => python-avd/pyavd/_cv}/api/arista/identityprovider/v1/__init__.py (84%) rename {ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/inventory => python-avd/pyavd/_cv/api/arista/imagestatus}/__init__.py (100%) rename {ansible_collections/arista/avd/plugins/plugin_utils/cv_client => python-avd/pyavd/_cv}/api/arista/imagestatus/v1/__init__.py (71%) rename {ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/lifecycle => python-avd/pyavd/_cv/api/arista/inventory}/__init__.py (100%) create mode 100644 python-avd/pyavd/_cv/api/arista/inventory/v1/__init__.py rename {ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/redirector => python-avd/pyavd/_cv/api/arista/lifecycle}/__init__.py (100%) rename {ansible_collections/arista/avd/plugins/plugin_utils/cv_client => python-avd/pyavd/_cv}/api/arista/lifecycle/v1/__init__.py (97%) rename {ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/serviceaccount => python-avd/pyavd/_cv/api/arista/redirector}/__init__.py (100%) rename {ansible_collections/arista/avd/plugins/plugin_utils/cv_client => python-avd/pyavd/_cv}/api/arista/redirector/v1/__init__.py (74%) rename {ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/studio => python-avd/pyavd/_cv/api/arista/serviceaccount}/__init__.py (100%) rename {ansible_collections/arista/avd/plugins/plugin_utils/cv_client => python-avd/pyavd/_cv}/api/arista/serviceaccount/v1/__init__.py (98%) rename {ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/swg => python-avd/pyavd/_cv/api/arista/studio}/__init__.py (100%) rename {ansible_collections/arista/avd/plugins/plugin_utils/cv_client => python-avd/pyavd/_cv}/api/arista/studio/v1/__init__.py (80%) rename {ansible_collections/arista/avd/plugins/plugin_utils/cv_client => python-avd/pyavd/_cv}/api/arista/subscriptions/__init__.py (84%) rename 
{ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/tag => python-avd/pyavd/_cv/api/arista/swg}/__init__.py (100%) rename {ansible_collections/arista/avd/plugins/plugin_utils/cv_client => python-avd/pyavd/_cv}/api/arista/swg/v1/__init__.py (99%) rename {ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/workspace => python-avd/pyavd/_cv/api/arista/tag}/__init__.py (100%) rename {ansible_collections/arista/avd/plugins/plugin_utils/cv_client => python-avd/pyavd/_cv}/api/arista/tag/v2/__init__.py (99%) rename {ansible_collections/arista/avd/plugins/plugin_utils/cv_client => python-avd/pyavd/_cv}/api/arista/time/__init__.py (69%) create mode 100644 python-avd/pyavd/_cv/api/arista/workspace/__init__.py rename {ansible_collections/arista/avd/plugins/plugin_utils/cv_client => python-avd/pyavd/_cv}/api/arista/workspace/v1/__init__.py (99%) rename {ansible_collections/arista/avd/plugins/plugin_utils/cv_client => python-avd/pyavd/_cv}/api/fmp/__init__.py (98%) rename ansible_collections/arista/avd/plugins/plugin_utils/cv_client/client/cv_client.py => python-avd/pyavd/_cv/client/__init__.py (95%) rename {ansible_collections/arista/avd/plugins/plugin_utils/cv_client => python-avd/pyavd/_cv}/client/change_control.py (99%) rename {ansible_collections/arista/avd/plugins/plugin_utils/cv_client => python-avd/pyavd/_cv}/client/configlet.py (99%) rename {ansible_collections/arista/avd/plugins/plugin_utils/cv_client => python-avd/pyavd/_cv}/client/constants.py (100%) rename {ansible_collections/arista/avd/plugins/plugin_utils/cv_client => python-avd/pyavd/_cv}/client/exceptions.py (87%) rename {ansible_collections/arista/avd/plugins/plugin_utils/cv_client => python-avd/pyavd/_cv}/client/inventory.py (98%) rename {ansible_collections/arista/avd/plugins/plugin_utils/cv_client => python-avd/pyavd/_cv}/client/studio.py (98%) rename {ansible_collections/arista/avd/plugins/plugin_utils/cv_client => python-avd/pyavd/_cv}/client/swg.py (99%) rename 
{ansible_collections/arista/avd/plugins/plugin_utils/cv_client => python-avd/pyavd/_cv}/client/tag.py (99%) rename {ansible_collections/arista/avd/plugins/plugin_utils/cv_client => python-avd/pyavd/_cv}/client/utils.py (93%) rename {ansible_collections/arista/avd/plugins/plugin_utils/cv_client => python-avd/pyavd/_cv}/client/workspace.py (99%) rename {ansible_collections/arista/avd/plugins/plugin_utils/cv_client => python-avd/pyavd/_cv}/extra_cv_protos/arista/swg.v1/services.gen.proto (100%) rename {ansible_collections/arista/avd/plugins/plugin_utils/cv_client => python-avd/pyavd/_cv}/extra_cv_protos/arista/swg.v1/swg.proto (100%) create mode 100644 python-avd/pyavd/_cv/workflows/__init__.py rename {ansible_collections/arista/avd/plugins/plugin_utils/cv_client => python-avd/pyavd/_cv}/workflows/create_workspace_on_cv.py (100%) rename {ansible_collections/arista/avd/plugins/plugin_utils/cv_client => python-avd/pyavd/_cv}/workflows/deploy_configs_to_cv.py (100%) rename {ansible_collections/arista/avd/plugins/plugin_utils/cv_client => python-avd/pyavd/_cv}/workflows/deploy_cv_pathfinder_metadata_to_cv.py (100%) rename {ansible_collections/arista/avd/plugins/plugin_utils/cv_client => python-avd/pyavd/_cv}/workflows/deploy_studio_inputs_to_cv.py (100%) rename {ansible_collections/arista/avd/plugins/plugin_utils/cv_client => python-avd/pyavd/_cv}/workflows/deploy_tags_to_cv.py (100%) rename {ansible_collections/arista/avd/plugins/plugin_utils/cv_client => python-avd/pyavd/_cv}/workflows/deploy_to_cv.py (99%) rename {ansible_collections/arista/avd/plugins/plugin_utils/cv_client => python-avd/pyavd/_cv}/workflows/finalize_change_control_on_cv.py (100%) rename {ansible_collections/arista/avd/plugins/plugin_utils/cv_client => python-avd/pyavd/_cv}/workflows/finalize_workspace_on_cv.py (97%) rename {ansible_collections/arista/avd/plugins/plugin_utils/cv_client => python-avd/pyavd/_cv}/workflows/models.py (100%) rename 
{ansible_collections/arista/avd/plugins/plugin_utils/cv_client => python-avd/pyavd/_cv}/workflows/verify_devices_on_cv.py (100%) diff --git a/.flake8 b/.flake8 index fe436bdcb98..3d93a09de27 100644 --- a/.flake8 +++ b/.flake8 @@ -8,4 +8,4 @@ max-line-length = 160 exclude = # The cv_client api is generated from proto files, so it should not be linted. - ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api + python-avd/pyavd/_cv/api diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index db4582b7c8a..37c21f8aa69 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -98,8 +98,8 @@ repos: # ignoring errors and selecting line length as per # https://github.com/ansible/ansible/blob/devel/test/lib/ansible_test/_util/controller/sanity/pep8/current-ignore.txt types: [python] - # The cv_client api is generated from proto files, so it should not be linted. - exclude: ^ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/ + # The cv client api is generated from proto files, so it should not be linted. 
+ exclude: ^python-avd/pyavd/_cv/api - repo: https://github.com/pycqa/pylint rev: "v3.1.1" @@ -160,8 +160,8 @@ repos: ansible_collections/arista/avd/roles/.*/docs/tables/.*\.md| # Exclude all .md files in tables python-avd/pyavd/.*/schema/.*schema\.yml| # Exclude YAML schemas file python-avd/pyavd/.*/schema/.*jsonschema\.json| # Exclude JSON schemas file - ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/.*| # Exclude auto generated files for CV API - ansible_collections/arista/avd/plugins/plugin_utils/cv_client/extra_cv_protos/.*| # Exclude extra CV proto files + python-avd/pyavd/_cv/api/.*| # Exclude auto generated files for CV API + python-avd/pyavd/_cv/extra_cv_protos/.*| # Exclude extra CV proto files ansible-avd/ansible_collections/arista/avd/docs/plugins/.* | # Excluded auto generated doc for Ansible plugins )$ diff --git a/ansible_collections/arista/avd/plugins/action/cv_workflow.py b/ansible_collections/arista/avd/plugins/action/cv_workflow.py index 1690bd0f0f5..7dcd1ea286f 100644 --- a/ansible_collections/arista/avd/plugins/action/cv_workflow.py +++ b/ansible_collections/arista/avd/plugins/action/cv_workflow.py @@ -16,21 +16,29 @@ from ansible.plugins.action import ActionBase, display from yaml import load -from ansible_collections.arista.avd.plugins.plugin_utils.cv_client import deploy_to_cv -from ansible_collections.arista.avd.plugins.plugin_utils.cv_client.workflows.models import ( - CloudVision, - CVChangeControl, - CVDevice, - CVDeviceTag, - CVEosConfig, - CVInterfaceTag, - CVPathfinderMetadata, - CVTimeOuts, - CVWorkspace, -) from ansible_collections.arista.avd.plugins.plugin_utils.strip_empties import strip_empties_from_dict from ansible_collections.arista.avd.plugins.plugin_utils.utils import PythonToAnsibleHandler, YamlLoader, get +PLUGIN_NAME = "arista.avd.cv_workflow" + +try: + from pyavd._cv.workflows.deploy_to_cv import deploy_to_cv + from pyavd._cv.workflows.models import ( + CloudVision, + CVChangeControl, + CVDevice, + 
CVDeviceTag, + CVEosConfig, + CVInterfaceTag, + CVPathfinderMetadata, + CVTimeOuts, + CVWorkspace, + ) + + HAS_PYAVD = True +except ImportError: + HAS_PYAVD = False + LOGGER = logging.getLogger("ansible_collections.arista.avd") LOGGING_LEVELS = ["DEBUG", "INFO", "ERROR", "WARNING", "CRITICAL"] @@ -84,6 +92,9 @@ def run(self, tmp=None, task_vars=None): result = super().run(tmp, task_vars) del tmp # tmp no longer has any effect + if not HAS_PYAVD: + raise AnsibleActionFail("The Python library 'pyavd' was not found. Install using 'pip3 install'.") + # Setup module logging setup_module_logging(result) diff --git a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/__init__.py b/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/__init__.py deleted file mode 100644 index 9bcbfd1a427..00000000000 --- a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/__init__.py +++ /dev/null @@ -1,7 +0,0 @@ -# Copyright (c) 2023-2024 Arista Networks, Inc. -# Use of this source code is governed by the Apache License 2.0 -# that can be found in the LICENSE file. - -from .workflows.deploy_to_cv import deploy_to_cv - -__all__ = ["deploy_to_cv"] diff --git a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/inventory/v1/__init__.py b/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/inventory/v1/__init__.py deleted file mode 100644 index ff0d1a4a334..00000000000 --- a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/inventory/v1/__init__.py +++ /dev/null @@ -1,1985 +0,0 @@ -# Copyright (c) 2024 Arista Networks, Inc. -# Use of this source code is governed by the Apache License 2.0 -# that can be found in the LICENSE file. -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# sources: arista/inventory.v1/inventory.proto, arista/inventory.v1/services.gen.proto -# plugin: python-aristaproto -# This file has been @generated - -from dataclasses import dataclass -from datetime import datetime -from typing import ( - TYPE_CHECKING, - AsyncIterator, - Dict, - List, - Optional, -) - -try: - import aristaproto - import grpclib - from aristaproto.grpc.grpclib_server import ServiceBase -except ImportError: - HAS_ARISTAPROTO = False - from ansible_collections.arista.avd.plugins.plugin_utils.cv_client.mocked_classes import mocked_aristaproto as aristaproto - from ansible_collections.arista.avd.plugins.plugin_utils.cv_client.mocked_classes import mocked_grpclib as grpclib - ServiceBase = object -else: - HAS_ARISTAPROTO = True - -from .... import fmp as ___fmp__ -from ... import ( - subscriptions as __subscriptions__, - time as __time__, -) - - -if TYPE_CHECKING: - import grpclib.server - from aristaproto.grpc.grpclib_client import MetadataLike - from grpclib.metadata import Deadline - - -class StreamingStatus(aristaproto.Enum): - """ - StreamingStatus defines the status of telemetry streaming for a device. - """ - - UNSPECIFIED = 0 - INACTIVE = 1 - """ - STREAMING_STATUS_INACTIVE indicates the device is not streaming telemetry. - """ - - ACTIVE = 2 - """STREAMING_STATUS_ACTIVE indicates the device is streaming telemetry.""" - - -class OnboardingStatus(aristaproto.Enum): - """ - OnboardingStatus defines the set of possible states in the onboarding process - for a device. - """ - - UNSPECIFIED = 0 - IN_PROGRESS = 1 - """ONBOARDING_STATUS_IN_PROGRESS indicates onboarding is in progress.""" - - FAILURE = 2 - """ONBOARDING_STATUS_FAILURE indicates onboarding failed.""" - - SUCCESS = 3 - """ONBOARDING_STATUS_SUCCESS indicates onboarding succeeded.""" - - -class DecommissioningStatus(aristaproto.Enum): - """ - DecommissioningStatus defines the set of possible states in the decommissioning - process for a device. 
- """ - - UNSPECIFIED = 0 - IN_PROGRESS = 1 - """ - DECOMMISSIONING_STATUS_IN_PROGRESS indicates decommissioning is in progress. - """ - - FAILURE = 2 - """DECOMMISSIONING_STATUS_FAILURE indicates decommissioning failed.""" - - SUCCESS = 3 - """DECOMMISSIONING_STATUS_SUCCESS indicates decommissioning succeeded.""" - - -class ProvisioningStatus(aristaproto.Enum): - """ - ProvisioningStatus defines the set of possible states in the provisioning - process for a device. - """ - - UNSPECIFIED = 0 - IN_PROGRESS = 1 - """ - PROVISIONING_STATUS_IN_PROGRESS indicates provisioning is in progress. - """ - - FAILURE = 2 - """PROVISIONING_STATUS_FAILURE indicates provisioning failed.""" - - SUCCESS = 3 - """PROVISIONING_STATUS_SUCCESS indicates provisioning succeeded.""" - - -@dataclass(eq=False, repr=False) -class ExtendedAttributes(aristaproto.Message): - """ - ExtendedAttributes wraps any additional, potentially non-standard, features - or attributes the device reports. - """ - - feature_enabled: Dict[str, bool] = aristaproto.map_field( - 1, aristaproto.TYPE_STRING, aristaproto.TYPE_BOOL - ) - """ - feature_enabled is a map of feature name to enabled status. - If a feature is missing from this map it can be assumed off. - """ - - -@dataclass(eq=False, repr=False) -class DeviceKey(aristaproto.Message): - """DeviceKey uniquely identifies a single device.""" - - device_id: Optional[str] = aristaproto.message_field( - 1, wraps=aristaproto.TYPE_STRING - ) - """device_id is the unique identifier of the device.""" - - -@dataclass(eq=False, repr=False) -class DeviceConfiguration(aristaproto.Message): - """ - DeviceConfiguration holds the device-specific configuration for a third-party - device, as defined in https://github.com/aristanetworks/cloudvision-go. - """ - - options: Dict[str, str] = aristaproto.map_field( - 1, aristaproto.TYPE_STRING, aristaproto.TYPE_STRING - ) - """ - options is a map from device option to value. 
- - E.g., for an SNMP device, this could be the following: - - "address": "my_snmp_hostname", - "community": "public" - """ - - -@dataclass(eq=False, repr=False) -class UuidKey(aristaproto.Message): - """ - UUIDKey is a key that holds a UUID for an onboarding or decommissioning request. - """ - - request_id: Optional[str] = aristaproto.message_field( - 1, wraps=aristaproto.TYPE_STRING - ) - """request_id should be a UUID for the request.""" - - -@dataclass(eq=False, repr=False) -class DeviceOnboardingConfig(aristaproto.Message): - """ - DeviceOnboardingConfig describes a device onboarding request. "Onboarding" - refers to the process of initiating device streaming to CloudVision and - adding the streaming device to CloudVision's inventory. - - The request flow works as follows: - - 1. Set on DeviceOnboardingConfig sends an onboarding request with a UUID - that the user is responsible for generating. - 2. Once the server receives the request, it validates and records it. - 3. Then, the server processes it, initiating the onboarding procedure and - tracking the status of the onboarding attempt. - 4. The user may do a GetOne or Subscribe on DeviceOnboarding using the same - UUID to see the status of the request. - """ - - key: "UuidKey" = aristaproto.message_field(1) - """key identifies the request to onboard the device at hostname_or_ip.""" - - hostname_or_ip: Optional[str] = aristaproto.message_field( - 2, wraps=aristaproto.TYPE_STRING - ) - """ - hostname_or_ip is a hostname or an IP at which the device can be reached. - """ - - device_type: Optional[str] = aristaproto.message_field( - 3, wraps=aristaproto.TYPE_STRING - ) - """ - device_type describes the method by which to retrieve information for the - device. The value should be "eos" for eos devices. For third-party devices, - supported values are: "openconfig", "snmp", "cvp", "mwm", and "vCenter". 
- """ - - device_config: "DeviceConfiguration" = aristaproto.message_field(4) - """device_config is the configuration for a third-party device.""" - - -@dataclass(eq=False, repr=False) -class DeviceOnboarding(aristaproto.Message): - """DeviceOnboarding describes the status of an onboarding process.""" - - key: "UuidKey" = aristaproto.message_field(1) - """ - key identifies the request for which to retrieve an onboarding status. - """ - - device_id: Optional[str] = aristaproto.message_field( - 2, wraps=aristaproto.TYPE_STRING - ) - """device_id is the unique device ID that is discovered via onboarding.""" - - status: "OnboardingStatus" = aristaproto.enum_field(3) - """status describes the onboarding status of the device.""" - - error: Optional[str] = aristaproto.message_field(4, wraps=aristaproto.TYPE_STRING) - """ - error is the error that caused status to become ONBOARDING_STATUS_FAILURE. - """ - - status_message: Optional[str] = aristaproto.message_field( - 5, wraps=aristaproto.TYPE_STRING - ) - """ - status_message contains information on the status of the onboarding attempt, - if any. This is generally an unstructured log message that is for display - purposes only (its structure and contents may change). - """ - - -@dataclass(eq=False, repr=False) -class DeviceDecommissioningConfig(aristaproto.Message): - """ - DeviceDecommissioningConfig describes a device decommissioning request. - "Decommissioning" refers to the process of stopping device streaming to - CloudVision and removing it from CloudVision's inventory. - - The request flow works as follows: - - 1. Set on DeviceDecommissioningConfig sends a decommissioning request with - a UUID that the user is responsible for generating. - 2. Once the server receives the request, it validates and records it. - 3. Then, the server processes it, initiating the decommissioning procedure - and tracking the status of the decommissioning attempt. - 4. 
The user may do a GetOne or Subscribe on DeviceDecommissioning using the - same UUID to see the status of the request. - """ - - key: "UuidKey" = aristaproto.message_field(1) - """key identifies the request to decommission the device.""" - - device_id: Optional[str] = aristaproto.message_field( - 2, wraps=aristaproto.TYPE_STRING - ) - """ - device_id is the unique device ID that was discovered via onboarding. - """ - - force: Optional[bool] = aristaproto.message_field(3, wraps=aristaproto.TYPE_BOOL) - """ - force is a flag that indicates if the decommission is to be forced. - Normally, if there are pending or in-progress tasks associated with the device - the decommission would fail. In case of a forced decommission, such blocking - tasks would be ignored and decommissioning will be continued. - """ - - -@dataclass(eq=False, repr=False) -class DeviceDecommissioning(aristaproto.Message): - """DeviceOnboarding describes the status of a decommissioning process.""" - - key: "UuidKey" = aristaproto.message_field(1) - """ - key identifies the request for which to retrieve a decommissioning status. - """ - - status: "DecommissioningStatus" = aristaproto.enum_field(2) - """status describes the decommissioning status of the device.""" - - error: Optional[str] = aristaproto.message_field(3, wraps=aristaproto.TYPE_STRING) - """ - error is the error that caused status to become DECOMMISSIONING_STATUS_FAILURE. - """ - - status_message: Optional[str] = aristaproto.message_field( - 4, wraps=aristaproto.TYPE_STRING - ) - """ - status_message contains information on the status of the decommissioning attempt, - if any. This is generally an unstructured log message that is for display - purposes only (its structure and contents may change). 
- """ - - -@dataclass(eq=False, repr=False) -class Device(aristaproto.Message): - """Device describes an onboarded device.""" - - key: "DeviceKey" = aristaproto.message_field(1) - """key uniquely identifies the device.""" - - software_version: Optional[str] = aristaproto.message_field( - 2, wraps=aristaproto.TYPE_STRING - ) - """ - software_version gives the currently running device software version. - """ - - model_name: Optional[str] = aristaproto.message_field( - 3, wraps=aristaproto.TYPE_STRING - ) - """model_name describes the hardware model of this device.""" - - hardware_revision: Optional[str] = aristaproto.message_field( - 4, wraps=aristaproto.TYPE_STRING - ) - """hardware_revision describes any revisional data to the model name.""" - - fqdn: Optional[str] = aristaproto.message_field(10, wraps=aristaproto.TYPE_STRING) - """fqdn gives the device's fully qualified domain name.""" - - hostname: Optional[str] = aristaproto.message_field( - 11, wraps=aristaproto.TYPE_STRING - ) - """hostname is the hostname as reported on the device.""" - - domain_name: Optional[str] = aristaproto.message_field( - 12, wraps=aristaproto.TYPE_STRING - ) - """ - domain_name provides the domain name on which the device is registered. - """ - - system_mac_address: Optional[str] = aristaproto.message_field( - 13, wraps=aristaproto.TYPE_STRING - ) - """system_mac_address provides the MAC address of the management port.""" - - boot_time: datetime = aristaproto.message_field(20) - """boot_time indicates when the device was last booted.""" - - streaming_status: "StreamingStatus" = aristaproto.enum_field(30) - """ - streaming_status is the status of telemetry streaming for this device. - """ - - extended_attributes: "ExtendedAttributes" = aristaproto.message_field(31) - """ - extended_attributes wraps any additional, potentially non-standard, features - or attributes that the device reports. 
- """ - - -@dataclass(eq=False, repr=False) -class ProvisionedDevice(aristaproto.Message): - """ - ProvisionedDevice describes the provisioning status of an onboarded device - if the onboarded device is configured for provisioning. - """ - - key: "DeviceKey" = aristaproto.message_field(1) - """key uniquely identifies the device.""" - - status: "ProvisioningStatus" = aristaproto.enum_field(2) - """status describes the onboarded device's provisioning status.""" - - error: Optional[str] = aristaproto.message_field(3, wraps=aristaproto.TYPE_STRING) - """ - error is the error that caused status to become PROVISIONING_STATUS_FAILURE. - """ - - ztp_mode: Optional[bool] = aristaproto.message_field(4, wraps=aristaproto.TYPE_BOOL) - """ztp_mode indicates whether the device is in ZTP mode.""" - - ip_address: "___fmp__.IpAddress" = aristaproto.message_field(5) - """ - ip_address is the current (post-provisioning) IP address of the device. - """ - - provisioning_group_name: Optional[str] = aristaproto.message_field( - 6, wraps=aristaproto.TYPE_STRING - ) - """ - provisioning_group_name is the name of the group (also known as a container) - to which the device belongs. Any provisioning operation performed on this - group will also be performed on this device. If the device is not yet provisioned, - this will not be set. Once it is provisioned, this will be set to "undefined_container" - which indicates that the device does not yet belong to a group. At this point, - a user may set it to an existing group. - """ - - -@dataclass(eq=False, repr=False) -class DeviceRequest(aristaproto.Message): - key: "DeviceKey" = aristaproto.message_field(1) - """ - Key uniquely identifies a Device instance to retrieve. - This value must be populated. - """ - - time: datetime = aristaproto.message_field(2) - """ - Time indicates the time for which you are interested in the data. - If no time is given, the server will use the time at which it makes the request. 
- """ - - -@dataclass(eq=False, repr=False) -class DeviceResponse(aristaproto.Message): - value: "Device" = aristaproto.message_field(1) - """ - Value is the value requested. - This structure will be fully-populated as it exists in the datastore. If - optional fields were not given at creation, these fields will be empty or - set to default values. - """ - - time: datetime = aristaproto.message_field(2) - """ - Time carries the (UTC) timestamp of the last-modification of the - Device instance in this response. - """ - - -@dataclass(eq=False, repr=False) -class DeviceStreamRequest(aristaproto.Message): - partial_eq_filter: List["Device"] = aristaproto.message_field(1) - """ - PartialEqFilter provides a way to server-side filter a GetAll/Subscribe. - This requires all provided fields to be equal to the response. - - While transparent to users, this field also allows services to optimize internal - subscriptions if filter(s) are sufficiently specific. - """ - - time: "__time__.TimeBounds" = aristaproto.message_field(3) - """ - TimeRange allows limiting response data to within a specified time window. - If this field is populated, at least one of the two time fields are required. - - This field is not allowed in the Subscribe RPC. - """ - - -@dataclass(eq=False, repr=False) -class DeviceStreamResponse(aristaproto.Message): - value: "Device" = aristaproto.message_field(1) - """ - Value is a value deemed relevant to the initiating request. - This structure will always have its key-field populated. Which other fields are - populated, and why, depends on the value of Operation and what triggered this notification. - """ - - time: datetime = aristaproto.message_field(2) - """Time holds the timestamp of this Device's last modification.""" - - type: "__subscriptions__.Operation" = aristaproto.enum_field(3) - """ - Operation indicates how the Device value in this response should be considered. - Under non-subscribe requests, this value should always be INITIAL. 
In a subscription, - once all initial data is streamed and the client begins to receive modification updates, - you should not see INITIAL again. - """ - - -@dataclass(eq=False, repr=False) -class DeviceDecommissioningRequest(aristaproto.Message): - key: "UuidKey" = aristaproto.message_field(1) - """ - Key uniquely identifies a DeviceDecommissioning instance to retrieve. - This value must be populated. - """ - - time: datetime = aristaproto.message_field(2) - """ - Time indicates the time for which you are interested in the data. - If no time is given, the server will use the time at which it makes the request. - """ - - -@dataclass(eq=False, repr=False) -class DeviceDecommissioningResponse(aristaproto.Message): - value: "DeviceDecommissioning" = aristaproto.message_field(1) - """ - Value is the value requested. - This structure will be fully-populated as it exists in the datastore. If - optional fields were not given at creation, these fields will be empty or - set to default values. - """ - - time: datetime = aristaproto.message_field(2) - """ - Time carries the (UTC) timestamp of the last-modification of the - DeviceDecommissioning instance in this response. - """ - - -@dataclass(eq=False, repr=False) -class DeviceDecommissioningStreamRequest(aristaproto.Message): - partial_eq_filter: List["DeviceDecommissioning"] = aristaproto.message_field(1) - """ - PartialEqFilter provides a way to server-side filter a GetAll/Subscribe. - This requires all provided fields to be equal to the response. - - While transparent to users, this field also allows services to optimize internal - subscriptions if filter(s) are sufficiently specific. - """ - - time: "__time__.TimeBounds" = aristaproto.message_field(3) - """ - TimeRange allows limiting response data to within a specified time window. - If this field is populated, at least one of the two time fields are required. - - This field is not allowed in the Subscribe RPC. 
- """ - - -@dataclass(eq=False, repr=False) -class DeviceDecommissioningStreamResponse(aristaproto.Message): - value: "DeviceDecommissioning" = aristaproto.message_field(1) - """ - Value is a value deemed relevant to the initiating request. - This structure will always have its key-field populated. Which other fields are - populated, and why, depends on the value of Operation and what triggered this notification. - """ - - time: datetime = aristaproto.message_field(2) - """ - Time holds the timestamp of this DeviceDecommissioning's last modification. - """ - - type: "__subscriptions__.Operation" = aristaproto.enum_field(3) - """ - Operation indicates how the DeviceDecommissioning value in this response should be considered. - Under non-subscribe requests, this value should always be INITIAL. In a subscription, - once all initial data is streamed and the client begins to receive modification updates, - you should not see INITIAL again. - """ - - -@dataclass(eq=False, repr=False) -class DeviceDecommissioningConfigRequest(aristaproto.Message): - key: "UuidKey" = aristaproto.message_field(1) - """ - Key uniquely identifies a DeviceDecommissioningConfig instance to retrieve. - This value must be populated. - """ - - time: datetime = aristaproto.message_field(2) - """ - Time indicates the time for which you are interested in the data. - If no time is given, the server will use the time at which it makes the request. - """ - - -@dataclass(eq=False, repr=False) -class DeviceDecommissioningConfigResponse(aristaproto.Message): - value: "DeviceDecommissioningConfig" = aristaproto.message_field(1) - """ - Value is the value requested. - This structure will be fully-populated as it exists in the datastore. If - optional fields were not given at creation, these fields will be empty or - set to default values. 
- """ - - time: datetime = aristaproto.message_field(2) - """ - Time carries the (UTC) timestamp of the last-modification of the - DeviceDecommissioningConfig instance in this response. - """ - - -@dataclass(eq=False, repr=False) -class DeviceDecommissioningConfigStreamRequest(aristaproto.Message): - partial_eq_filter: List["DeviceDecommissioningConfig"] = aristaproto.message_field( - 1 - ) - """ - PartialEqFilter provides a way to server-side filter a GetAll/Subscribe. - This requires all provided fields to be equal to the response. - - While transparent to users, this field also allows services to optimize internal - subscriptions if filter(s) are sufficiently specific. - """ - - time: "__time__.TimeBounds" = aristaproto.message_field(3) - """ - TimeRange allows limiting response data to within a specified time window. - If this field is populated, at least one of the two time fields are required. - - This field is not allowed in the Subscribe RPC. - """ - - -@dataclass(eq=False, repr=False) -class DeviceDecommissioningConfigStreamResponse(aristaproto.Message): - value: "DeviceDecommissioningConfig" = aristaproto.message_field(1) - """ - Value is a value deemed relevant to the initiating request. - This structure will always have its key-field populated. Which other fields are - populated, and why, depends on the value of Operation and what triggered this notification. - """ - - time: datetime = aristaproto.message_field(2) - """ - Time holds the timestamp of this DeviceDecommissioningConfig's last modification. - """ - - type: "__subscriptions__.Operation" = aristaproto.enum_field(3) - """ - Operation indicates how the DeviceDecommissioningConfig value in this response should be considered. - Under non-subscribe requests, this value should always be INITIAL. In a subscription, - once all initial data is streamed and the client begins to receive modification updates, - you should not see INITIAL again. 
- """ - - -@dataclass(eq=False, repr=False) -class DeviceDecommissioningConfigSetRequest(aristaproto.Message): - value: "DeviceDecommissioningConfig" = aristaproto.message_field(1) - """ - DeviceDecommissioningConfig carries the value to set into the datastore. - See the documentation on the DeviceDecommissioningConfig struct for which fields are required. - """ - - -@dataclass(eq=False, repr=False) -class DeviceDecommissioningConfigSetResponse(aristaproto.Message): - value: "DeviceDecommissioningConfig" = aristaproto.message_field(1) - """ - Value carries all the values given in the DeviceDecommissioningConfigSetRequest as well - as any server-generated values. - """ - - time: datetime = aristaproto.message_field(2) - """ - Time indicates the (UTC) timestamp at which the system recognizes the - creation. The only guarantees made about this timestamp are: - - - it is after the time the request was received - - a time-ranged query with StartTime==CreatedAt will include this instance. - """ - - -@dataclass(eq=False, repr=False) -class DeviceDecommissioningConfigDeleteRequest(aristaproto.Message): - key: "UuidKey" = aristaproto.message_field(1) - """ - Key indicates which DeviceDecommissioningConfig instance to remove. - This field must always be set. - """ - - -@dataclass(eq=False, repr=False) -class DeviceDecommissioningConfigDeleteResponse(aristaproto.Message): - key: "UuidKey" = aristaproto.message_field(1) - """ - Key echoes back the key of the deleted DeviceDecommissioningConfig instance. - """ - - time: datetime = aristaproto.message_field(2) - """ - Time indicates the (UTC) timestamp at which the system recognizes the - deletion. The only guarantees made about this timestamp are: - - - it is after the time the request was received - - a time-ranged query with StartTime==DeletedAt will not include this instance. 
- """ - - -@dataclass(eq=False, repr=False) -class DeviceDecommissioningConfigDeleteAllRequest(aristaproto.Message): - pass - - -@dataclass(eq=False, repr=False) -class DeviceDecommissioningConfigDeleteAllResponse(aristaproto.Message): - type: "___fmp__.DeleteError" = aristaproto.enum_field(1) - """This describes the class of delete error.""" - - error: Optional[str] = aristaproto.message_field(2, wraps=aristaproto.TYPE_STRING) - """This indicates the error message from the delete failure.""" - - key: "UuidKey" = aristaproto.message_field(3) - """ - This is the key of the DeviceDecommissioningConfig instance that failed to be deleted. - """ - - time: datetime = aristaproto.message_field(4) - """Time indicates the (UTC) timestamp when the key was being deleted.""" - - -@dataclass(eq=False, repr=False) -class DeviceOnboardingRequest(aristaproto.Message): - key: "UuidKey" = aristaproto.message_field(1) - """ - Key uniquely identifies a DeviceOnboarding instance to retrieve. - This value must be populated. - """ - - time: datetime = aristaproto.message_field(2) - """ - Time indicates the time for which you are interested in the data. - If no time is given, the server will use the time at which it makes the request. - """ - - -@dataclass(eq=False, repr=False) -class DeviceOnboardingResponse(aristaproto.Message): - value: "DeviceOnboarding" = aristaproto.message_field(1) - """ - Value is the value requested. - This structure will be fully-populated as it exists in the datastore. If - optional fields were not given at creation, these fields will be empty or - set to default values. - """ - - time: datetime = aristaproto.message_field(2) - """ - Time carries the (UTC) timestamp of the last-modification of the - DeviceOnboarding instance in this response. 
- """ - - -@dataclass(eq=False, repr=False) -class DeviceOnboardingStreamRequest(aristaproto.Message): - partial_eq_filter: List["DeviceOnboarding"] = aristaproto.message_field(1) - """ - PartialEqFilter provides a way to server-side filter a GetAll/Subscribe. - This requires all provided fields to be equal to the response. - - While transparent to users, this field also allows services to optimize internal - subscriptions if filter(s) are sufficiently specific. - """ - - time: "__time__.TimeBounds" = aristaproto.message_field(3) - """ - TimeRange allows limiting response data to within a specified time window. - If this field is populated, at least one of the two time fields are required. - - This field is not allowed in the Subscribe RPC. - """ - - -@dataclass(eq=False, repr=False) -class DeviceOnboardingStreamResponse(aristaproto.Message): - value: "DeviceOnboarding" = aristaproto.message_field(1) - """ - Value is a value deemed relevant to the initiating request. - This structure will always have its key-field populated. Which other fields are - populated, and why, depends on the value of Operation and what triggered this notification. - """ - - time: datetime = aristaproto.message_field(2) - """ - Time holds the timestamp of this DeviceOnboarding's last modification. - """ - - type: "__subscriptions__.Operation" = aristaproto.enum_field(3) - """ - Operation indicates how the DeviceOnboarding value in this response should be considered. - Under non-subscribe requests, this value should always be INITIAL. In a subscription, - once all initial data is streamed and the client begins to receive modification updates, - you should not see INITIAL again. - """ - - -@dataclass(eq=False, repr=False) -class DeviceOnboardingConfigRequest(aristaproto.Message): - key: "UuidKey" = aristaproto.message_field(1) - """ - Key uniquely identifies a DeviceOnboardingConfig instance to retrieve. - This value must be populated. 
- """ - - time: datetime = aristaproto.message_field(2) - """ - Time indicates the time for which you are interested in the data. - If no time is given, the server will use the time at which it makes the request. - """ - - -@dataclass(eq=False, repr=False) -class DeviceOnboardingConfigResponse(aristaproto.Message): - value: "DeviceOnboardingConfig" = aristaproto.message_field(1) - """ - Value is the value requested. - This structure will be fully-populated as it exists in the datastore. If - optional fields were not given at creation, these fields will be empty or - set to default values. - """ - - time: datetime = aristaproto.message_field(2) - """ - Time carries the (UTC) timestamp of the last-modification of the - DeviceOnboardingConfig instance in this response. - """ - - -@dataclass(eq=False, repr=False) -class DeviceOnboardingConfigStreamRequest(aristaproto.Message): - partial_eq_filter: List["DeviceOnboardingConfig"] = aristaproto.message_field(1) - """ - PartialEqFilter provides a way to server-side filter a GetAll/Subscribe. - This requires all provided fields to be equal to the response. - - While transparent to users, this field also allows services to optimize internal - subscriptions if filter(s) are sufficiently specific. - """ - - time: "__time__.TimeBounds" = aristaproto.message_field(3) - """ - TimeRange allows limiting response data to within a specified time window. - If this field is populated, at least one of the two time fields are required. - - This field is not allowed in the Subscribe RPC. - """ - - -@dataclass(eq=False, repr=False) -class DeviceOnboardingConfigStreamResponse(aristaproto.Message): - value: "DeviceOnboardingConfig" = aristaproto.message_field(1) - """ - Value is a value deemed relevant to the initiating request. - This structure will always have its key-field populated. Which other fields are - populated, and why, depends on the value of Operation and what triggered this notification. 
- """ - - time: datetime = aristaproto.message_field(2) - """ - Time holds the timestamp of this DeviceOnboardingConfig's last modification. - """ - - type: "__subscriptions__.Operation" = aristaproto.enum_field(3) - """ - Operation indicates how the DeviceOnboardingConfig value in this response should be considered. - Under non-subscribe requests, this value should always be INITIAL. In a subscription, - once all initial data is streamed and the client begins to receive modification updates, - you should not see INITIAL again. - """ - - -@dataclass(eq=False, repr=False) -class DeviceOnboardingConfigSetRequest(aristaproto.Message): - value: "DeviceOnboardingConfig" = aristaproto.message_field(1) - """ - DeviceOnboardingConfig carries the value to set into the datastore. - See the documentation on the DeviceOnboardingConfig struct for which fields are required. - """ - - -@dataclass(eq=False, repr=False) -class DeviceOnboardingConfigSetResponse(aristaproto.Message): - value: "DeviceOnboardingConfig" = aristaproto.message_field(1) - """ - Value carries all the values given in the DeviceOnboardingConfigSetRequest as well - as any server-generated values. - """ - - time: datetime = aristaproto.message_field(2) - """ - Time indicates the (UTC) timestamp at which the system recognizes the - creation. The only guarantees made about this timestamp are: - - - it is after the time the request was received - - a time-ranged query with StartTime==CreatedAt will include this instance. - """ - - -@dataclass(eq=False, repr=False) -class DeviceOnboardingConfigDeleteRequest(aristaproto.Message): - key: "UuidKey" = aristaproto.message_field(1) - """ - Key indicates which DeviceOnboardingConfig instance to remove. - This field must always be set. - """ - - -@dataclass(eq=False, repr=False) -class DeviceOnboardingConfigDeleteResponse(aristaproto.Message): - key: "UuidKey" = aristaproto.message_field(1) - """ - Key echoes back the key of the deleted DeviceOnboardingConfig instance. 
- """ - - time: datetime = aristaproto.message_field(2) - """ - Time indicates the (UTC) timestamp at which the system recognizes the - deletion. The only guarantees made about this timestamp are: - - - it is after the time the request was received - - a time-ranged query with StartTime==DeletedAt will not include this instance. - """ - - -@dataclass(eq=False, repr=False) -class DeviceOnboardingConfigDeleteAllRequest(aristaproto.Message): - pass - - -@dataclass(eq=False, repr=False) -class DeviceOnboardingConfigDeleteAllResponse(aristaproto.Message): - type: "___fmp__.DeleteError" = aristaproto.enum_field(1) - """This describes the class of delete error.""" - - error: Optional[str] = aristaproto.message_field(2, wraps=aristaproto.TYPE_STRING) - """This indicates the error message from the delete failure.""" - - key: "UuidKey" = aristaproto.message_field(3) - """ - This is the key of the DeviceOnboardingConfig instance that failed to be deleted. - """ - - time: datetime = aristaproto.message_field(4) - """Time indicates the (UTC) timestamp when the key was being deleted.""" - - -@dataclass(eq=False, repr=False) -class ProvisionedDeviceRequest(aristaproto.Message): - key: "DeviceKey" = aristaproto.message_field(1) - """ - Key uniquely identifies a ProvisionedDevice instance to retrieve. - This value must be populated. - """ - - time: datetime = aristaproto.message_field(2) - """ - Time indicates the time for which you are interested in the data. - If no time is given, the server will use the time at which it makes the request. - """ - - -@dataclass(eq=False, repr=False) -class ProvisionedDeviceResponse(aristaproto.Message): - value: "ProvisionedDevice" = aristaproto.message_field(1) - """ - Value is the value requested. - This structure will be fully-populated as it exists in the datastore. If - optional fields were not given at creation, these fields will be empty or - set to default values. 
- """ - - time: datetime = aristaproto.message_field(2) - """ - Time carries the (UTC) timestamp of the last-modification of the - ProvisionedDevice instance in this response. - """ - - -@dataclass(eq=False, repr=False) -class ProvisionedDeviceStreamRequest(aristaproto.Message): - partial_eq_filter: List["ProvisionedDevice"] = aristaproto.message_field(1) - """ - PartialEqFilter provides a way to server-side filter a GetAll/Subscribe. - This requires all provided fields to be equal to the response. - - While transparent to users, this field also allows services to optimize internal - subscriptions if filter(s) are sufficiently specific. - """ - - time: "__time__.TimeBounds" = aristaproto.message_field(3) - """ - TimeRange allows limiting response data to within a specified time window. - If this field is populated, at least one of the two time fields are required. - - This field is not allowed in the Subscribe RPC. - """ - - -@dataclass(eq=False, repr=False) -class ProvisionedDeviceStreamResponse(aristaproto.Message): - value: "ProvisionedDevice" = aristaproto.message_field(1) - """ - Value is a value deemed relevant to the initiating request. - This structure will always have its key-field populated. Which other fields are - populated, and why, depends on the value of Operation and what triggered this notification. - """ - - time: datetime = aristaproto.message_field(2) - """ - Time holds the timestamp of this ProvisionedDevice's last modification. - """ - - type: "__subscriptions__.Operation" = aristaproto.enum_field(3) - """ - Operation indicates how the ProvisionedDevice value in this response should be considered. - Under non-subscribe requests, this value should always be INITIAL. In a subscription, - once all initial data is streamed and the client begins to receive modification updates, - you should not see INITIAL again. 
- """ - - -class DeviceServiceStub(aristaproto.ServiceStub): - async def get_one( - self, - device_request: "DeviceRequest", - *, - timeout: Optional[float] = None, - deadline: Optional["Deadline"] = None, - metadata: Optional["MetadataLike"] = None - ) -> "DeviceResponse": - return await self._unary_unary( - "/arista.inventory.v1.DeviceService/GetOne", - device_request, - DeviceResponse, - timeout=timeout, - deadline=deadline, - metadata=metadata, - ) - - async def get_all( - self, - device_stream_request: "DeviceStreamRequest", - *, - timeout: Optional[float] = None, - deadline: Optional["Deadline"] = None, - metadata: Optional["MetadataLike"] = None - ) -> AsyncIterator["DeviceStreamResponse"]: - async for response in self._unary_stream( - "/arista.inventory.v1.DeviceService/GetAll", - device_stream_request, - DeviceStreamResponse, - timeout=timeout, - deadline=deadline, - metadata=metadata, - ): - yield response - - async def subscribe( - self, - device_stream_request: "DeviceStreamRequest", - *, - timeout: Optional[float] = None, - deadline: Optional["Deadline"] = None, - metadata: Optional["MetadataLike"] = None - ) -> AsyncIterator["DeviceStreamResponse"]: - async for response in self._unary_stream( - "/arista.inventory.v1.DeviceService/Subscribe", - device_stream_request, - DeviceStreamResponse, - timeout=timeout, - deadline=deadline, - metadata=metadata, - ): - yield response - - -class DeviceDecommissioningServiceStub(aristaproto.ServiceStub): - async def get_one( - self, - device_decommissioning_request: "DeviceDecommissioningRequest", - *, - timeout: Optional[float] = None, - deadline: Optional["Deadline"] = None, - metadata: Optional["MetadataLike"] = None - ) -> "DeviceDecommissioningResponse": - return await self._unary_unary( - "/arista.inventory.v1.DeviceDecommissioningService/GetOne", - device_decommissioning_request, - DeviceDecommissioningResponse, - timeout=timeout, - deadline=deadline, - metadata=metadata, - ) - - async def get_all( - self, - 
device_decommissioning_stream_request: "DeviceDecommissioningStreamRequest", - *, - timeout: Optional[float] = None, - deadline: Optional["Deadline"] = None, - metadata: Optional["MetadataLike"] = None - ) -> AsyncIterator["DeviceDecommissioningStreamResponse"]: - async for response in self._unary_stream( - "/arista.inventory.v1.DeviceDecommissioningService/GetAll", - device_decommissioning_stream_request, - DeviceDecommissioningStreamResponse, - timeout=timeout, - deadline=deadline, - metadata=metadata, - ): - yield response - - async def subscribe( - self, - device_decommissioning_stream_request: "DeviceDecommissioningStreamRequest", - *, - timeout: Optional[float] = None, - deadline: Optional["Deadline"] = None, - metadata: Optional["MetadataLike"] = None - ) -> AsyncIterator["DeviceDecommissioningStreamResponse"]: - async for response in self._unary_stream( - "/arista.inventory.v1.DeviceDecommissioningService/Subscribe", - device_decommissioning_stream_request, - DeviceDecommissioningStreamResponse, - timeout=timeout, - deadline=deadline, - metadata=metadata, - ): - yield response - - -class DeviceDecommissioningConfigServiceStub(aristaproto.ServiceStub): - async def get_one( - self, - device_decommissioning_config_request: "DeviceDecommissioningConfigRequest", - *, - timeout: Optional[float] = None, - deadline: Optional["Deadline"] = None, - metadata: Optional["MetadataLike"] = None - ) -> "DeviceDecommissioningConfigResponse": - return await self._unary_unary( - "/arista.inventory.v1.DeviceDecommissioningConfigService/GetOne", - device_decommissioning_config_request, - DeviceDecommissioningConfigResponse, - timeout=timeout, - deadline=deadline, - metadata=metadata, - ) - - async def get_all( - self, - device_decommissioning_config_stream_request: "DeviceDecommissioningConfigStreamRequest", - *, - timeout: Optional[float] = None, - deadline: Optional["Deadline"] = None, - metadata: Optional["MetadataLike"] = None - ) -> 
AsyncIterator["DeviceDecommissioningConfigStreamResponse"]: - async for response in self._unary_stream( - "/arista.inventory.v1.DeviceDecommissioningConfigService/GetAll", - device_decommissioning_config_stream_request, - DeviceDecommissioningConfigStreamResponse, - timeout=timeout, - deadline=deadline, - metadata=metadata, - ): - yield response - - async def subscribe( - self, - device_decommissioning_config_stream_request: "DeviceDecommissioningConfigStreamRequest", - *, - timeout: Optional[float] = None, - deadline: Optional["Deadline"] = None, - metadata: Optional["MetadataLike"] = None - ) -> AsyncIterator["DeviceDecommissioningConfigStreamResponse"]: - async for response in self._unary_stream( - "/arista.inventory.v1.DeviceDecommissioningConfigService/Subscribe", - device_decommissioning_config_stream_request, - DeviceDecommissioningConfigStreamResponse, - timeout=timeout, - deadline=deadline, - metadata=metadata, - ): - yield response - - async def set( - self, - device_decommissioning_config_set_request: "DeviceDecommissioningConfigSetRequest", - *, - timeout: Optional[float] = None, - deadline: Optional["Deadline"] = None, - metadata: Optional["MetadataLike"] = None - ) -> "DeviceDecommissioningConfigSetResponse": - return await self._unary_unary( - "/arista.inventory.v1.DeviceDecommissioningConfigService/Set", - device_decommissioning_config_set_request, - DeviceDecommissioningConfigSetResponse, - timeout=timeout, - deadline=deadline, - metadata=metadata, - ) - - async def delete( - self, - device_decommissioning_config_delete_request: "DeviceDecommissioningConfigDeleteRequest", - *, - timeout: Optional[float] = None, - deadline: Optional["Deadline"] = None, - metadata: Optional["MetadataLike"] = None - ) -> "DeviceDecommissioningConfigDeleteResponse": - return await self._unary_unary( - "/arista.inventory.v1.DeviceDecommissioningConfigService/Delete", - device_decommissioning_config_delete_request, - DeviceDecommissioningConfigDeleteResponse, - 
timeout=timeout, - deadline=deadline, - metadata=metadata, - ) - - async def delete_all( - self, - device_decommissioning_config_delete_all_request: "DeviceDecommissioningConfigDeleteAllRequest", - *, - timeout: Optional[float] = None, - deadline: Optional["Deadline"] = None, - metadata: Optional["MetadataLike"] = None - ) -> AsyncIterator["DeviceDecommissioningConfigDeleteAllResponse"]: - async for response in self._unary_stream( - "/arista.inventory.v1.DeviceDecommissioningConfigService/DeleteAll", - device_decommissioning_config_delete_all_request, - DeviceDecommissioningConfigDeleteAllResponse, - timeout=timeout, - deadline=deadline, - metadata=metadata, - ): - yield response - - -class DeviceOnboardingServiceStub(aristaproto.ServiceStub): - async def get_one( - self, - device_onboarding_request: "DeviceOnboardingRequest", - *, - timeout: Optional[float] = None, - deadline: Optional["Deadline"] = None, - metadata: Optional["MetadataLike"] = None - ) -> "DeviceOnboardingResponse": - return await self._unary_unary( - "/arista.inventory.v1.DeviceOnboardingService/GetOne", - device_onboarding_request, - DeviceOnboardingResponse, - timeout=timeout, - deadline=deadline, - metadata=metadata, - ) - - async def get_all( - self, - device_onboarding_stream_request: "DeviceOnboardingStreamRequest", - *, - timeout: Optional[float] = None, - deadline: Optional["Deadline"] = None, - metadata: Optional["MetadataLike"] = None - ) -> AsyncIterator["DeviceOnboardingStreamResponse"]: - async for response in self._unary_stream( - "/arista.inventory.v1.DeviceOnboardingService/GetAll", - device_onboarding_stream_request, - DeviceOnboardingStreamResponse, - timeout=timeout, - deadline=deadline, - metadata=metadata, - ): - yield response - - async def subscribe( - self, - device_onboarding_stream_request: "DeviceOnboardingStreamRequest", - *, - timeout: Optional[float] = None, - deadline: Optional["Deadline"] = None, - metadata: Optional["MetadataLike"] = None - ) -> 
AsyncIterator["DeviceOnboardingStreamResponse"]: - async for response in self._unary_stream( - "/arista.inventory.v1.DeviceOnboardingService/Subscribe", - device_onboarding_stream_request, - DeviceOnboardingStreamResponse, - timeout=timeout, - deadline=deadline, - metadata=metadata, - ): - yield response - - -class DeviceOnboardingConfigServiceStub(aristaproto.ServiceStub): - async def get_one( - self, - device_onboarding_config_request: "DeviceOnboardingConfigRequest", - *, - timeout: Optional[float] = None, - deadline: Optional["Deadline"] = None, - metadata: Optional["MetadataLike"] = None - ) -> "DeviceOnboardingConfigResponse": - return await self._unary_unary( - "/arista.inventory.v1.DeviceOnboardingConfigService/GetOne", - device_onboarding_config_request, - DeviceOnboardingConfigResponse, - timeout=timeout, - deadline=deadline, - metadata=metadata, - ) - - async def get_all( - self, - device_onboarding_config_stream_request: "DeviceOnboardingConfigStreamRequest", - *, - timeout: Optional[float] = None, - deadline: Optional["Deadline"] = None, - metadata: Optional["MetadataLike"] = None - ) -> AsyncIterator["DeviceOnboardingConfigStreamResponse"]: - async for response in self._unary_stream( - "/arista.inventory.v1.DeviceOnboardingConfigService/GetAll", - device_onboarding_config_stream_request, - DeviceOnboardingConfigStreamResponse, - timeout=timeout, - deadline=deadline, - metadata=metadata, - ): - yield response - - async def subscribe( - self, - device_onboarding_config_stream_request: "DeviceOnboardingConfigStreamRequest", - *, - timeout: Optional[float] = None, - deadline: Optional["Deadline"] = None, - metadata: Optional["MetadataLike"] = None - ) -> AsyncIterator["DeviceOnboardingConfigStreamResponse"]: - async for response in self._unary_stream( - "/arista.inventory.v1.DeviceOnboardingConfigService/Subscribe", - device_onboarding_config_stream_request, - DeviceOnboardingConfigStreamResponse, - timeout=timeout, - deadline=deadline, - 
metadata=metadata, - ): - yield response - - async def set( - self, - device_onboarding_config_set_request: "DeviceOnboardingConfigSetRequest", - *, - timeout: Optional[float] = None, - deadline: Optional["Deadline"] = None, - metadata: Optional["MetadataLike"] = None - ) -> "DeviceOnboardingConfigSetResponse": - return await self._unary_unary( - "/arista.inventory.v1.DeviceOnboardingConfigService/Set", - device_onboarding_config_set_request, - DeviceOnboardingConfigSetResponse, - timeout=timeout, - deadline=deadline, - metadata=metadata, - ) - - async def delete( - self, - device_onboarding_config_delete_request: "DeviceOnboardingConfigDeleteRequest", - *, - timeout: Optional[float] = None, - deadline: Optional["Deadline"] = None, - metadata: Optional["MetadataLike"] = None - ) -> "DeviceOnboardingConfigDeleteResponse": - return await self._unary_unary( - "/arista.inventory.v1.DeviceOnboardingConfigService/Delete", - device_onboarding_config_delete_request, - DeviceOnboardingConfigDeleteResponse, - timeout=timeout, - deadline=deadline, - metadata=metadata, - ) - - async def delete_all( - self, - device_onboarding_config_delete_all_request: "DeviceOnboardingConfigDeleteAllRequest", - *, - timeout: Optional[float] = None, - deadline: Optional["Deadline"] = None, - metadata: Optional["MetadataLike"] = None - ) -> AsyncIterator["DeviceOnboardingConfigDeleteAllResponse"]: - async for response in self._unary_stream( - "/arista.inventory.v1.DeviceOnboardingConfigService/DeleteAll", - device_onboarding_config_delete_all_request, - DeviceOnboardingConfigDeleteAllResponse, - timeout=timeout, - deadline=deadline, - metadata=metadata, - ): - yield response - - -class ProvisionedDeviceServiceStub(aristaproto.ServiceStub): - async def get_one( - self, - provisioned_device_request: "ProvisionedDeviceRequest", - *, - timeout: Optional[float] = None, - deadline: Optional["Deadline"] = None, - metadata: Optional["MetadataLike"] = None - ) -> "ProvisionedDeviceResponse": - return 
await self._unary_unary( - "/arista.inventory.v1.ProvisionedDeviceService/GetOne", - provisioned_device_request, - ProvisionedDeviceResponse, - timeout=timeout, - deadline=deadline, - metadata=metadata, - ) - - async def get_all( - self, - provisioned_device_stream_request: "ProvisionedDeviceStreamRequest", - *, - timeout: Optional[float] = None, - deadline: Optional["Deadline"] = None, - metadata: Optional["MetadataLike"] = None - ) -> AsyncIterator["ProvisionedDeviceStreamResponse"]: - async for response in self._unary_stream( - "/arista.inventory.v1.ProvisionedDeviceService/GetAll", - provisioned_device_stream_request, - ProvisionedDeviceStreamResponse, - timeout=timeout, - deadline=deadline, - metadata=metadata, - ): - yield response - - async def subscribe( - self, - provisioned_device_stream_request: "ProvisionedDeviceStreamRequest", - *, - timeout: Optional[float] = None, - deadline: Optional["Deadline"] = None, - metadata: Optional["MetadataLike"] = None - ) -> AsyncIterator["ProvisionedDeviceStreamResponse"]: - async for response in self._unary_stream( - "/arista.inventory.v1.ProvisionedDeviceService/Subscribe", - provisioned_device_stream_request, - ProvisionedDeviceStreamResponse, - timeout=timeout, - deadline=deadline, - metadata=metadata, - ): - yield response - - -class DeviceServiceBase(ServiceBase): - - async def get_one(self, device_request: "DeviceRequest") -> "DeviceResponse": - raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) - - async def get_all( - self, device_stream_request: "DeviceStreamRequest" - ) -> AsyncIterator["DeviceStreamResponse"]: - raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) - - async def subscribe( - self, device_stream_request: "DeviceStreamRequest" - ) -> AsyncIterator["DeviceStreamResponse"]: - raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) - - async def __rpc_get_one( - self, stream: "grpclib.server.Stream[DeviceRequest, DeviceResponse]" - ) -> None: - request = await 
stream.recv_message() - response = await self.get_one(request) - await stream.send_message(response) - - async def __rpc_get_all( - self, stream: "grpclib.server.Stream[DeviceStreamRequest, DeviceStreamResponse]" - ) -> None: - request = await stream.recv_message() - await self._call_rpc_handler_server_stream( - self.get_all, - stream, - request, - ) - - async def __rpc_subscribe( - self, stream: "grpclib.server.Stream[DeviceStreamRequest, DeviceStreamResponse]" - ) -> None: - request = await stream.recv_message() - await self._call_rpc_handler_server_stream( - self.subscribe, - stream, - request, - ) - - def __mapping__(self) -> Dict[str, grpclib.const.Handler]: - return { - "/arista.inventory.v1.DeviceService/GetOne": grpclib.const.Handler( - self.__rpc_get_one, - grpclib.const.Cardinality.UNARY_UNARY, - DeviceRequest, - DeviceResponse, - ), - "/arista.inventory.v1.DeviceService/GetAll": grpclib.const.Handler( - self.__rpc_get_all, - grpclib.const.Cardinality.UNARY_STREAM, - DeviceStreamRequest, - DeviceStreamResponse, - ), - "/arista.inventory.v1.DeviceService/Subscribe": grpclib.const.Handler( - self.__rpc_subscribe, - grpclib.const.Cardinality.UNARY_STREAM, - DeviceStreamRequest, - DeviceStreamResponse, - ), - } - - -class DeviceDecommissioningServiceBase(ServiceBase): - - async def get_one( - self, device_decommissioning_request: "DeviceDecommissioningRequest" - ) -> "DeviceDecommissioningResponse": - raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) - - async def get_all( - self, - device_decommissioning_stream_request: "DeviceDecommissioningStreamRequest", - ) -> AsyncIterator["DeviceDecommissioningStreamResponse"]: - raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) - - async def subscribe( - self, - device_decommissioning_stream_request: "DeviceDecommissioningStreamRequest", - ) -> AsyncIterator["DeviceDecommissioningStreamResponse"]: - raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) - - async def __rpc_get_one( - self, - 
stream: "grpclib.server.Stream[DeviceDecommissioningRequest, DeviceDecommissioningResponse]", - ) -> None: - request = await stream.recv_message() - response = await self.get_one(request) - await stream.send_message(response) - - async def __rpc_get_all( - self, - stream: "grpclib.server.Stream[DeviceDecommissioningStreamRequest, DeviceDecommissioningStreamResponse]", - ) -> None: - request = await stream.recv_message() - await self._call_rpc_handler_server_stream( - self.get_all, - stream, - request, - ) - - async def __rpc_subscribe( - self, - stream: "grpclib.server.Stream[DeviceDecommissioningStreamRequest, DeviceDecommissioningStreamResponse]", - ) -> None: - request = await stream.recv_message() - await self._call_rpc_handler_server_stream( - self.subscribe, - stream, - request, - ) - - def __mapping__(self) -> Dict[str, grpclib.const.Handler]: - return { - "/arista.inventory.v1.DeviceDecommissioningService/GetOne": grpclib.const.Handler( - self.__rpc_get_one, - grpclib.const.Cardinality.UNARY_UNARY, - DeviceDecommissioningRequest, - DeviceDecommissioningResponse, - ), - "/arista.inventory.v1.DeviceDecommissioningService/GetAll": grpclib.const.Handler( - self.__rpc_get_all, - grpclib.const.Cardinality.UNARY_STREAM, - DeviceDecommissioningStreamRequest, - DeviceDecommissioningStreamResponse, - ), - "/arista.inventory.v1.DeviceDecommissioningService/Subscribe": grpclib.const.Handler( - self.__rpc_subscribe, - grpclib.const.Cardinality.UNARY_STREAM, - DeviceDecommissioningStreamRequest, - DeviceDecommissioningStreamResponse, - ), - } - - -class DeviceDecommissioningConfigServiceBase(ServiceBase): - - async def get_one( - self, - device_decommissioning_config_request: "DeviceDecommissioningConfigRequest", - ) -> "DeviceDecommissioningConfigResponse": - raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) - - async def get_all( - self, - device_decommissioning_config_stream_request: "DeviceDecommissioningConfigStreamRequest", - ) -> 
AsyncIterator["DeviceDecommissioningConfigStreamResponse"]: - raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) - - async def subscribe( - self, - device_decommissioning_config_stream_request: "DeviceDecommissioningConfigStreamRequest", - ) -> AsyncIterator["DeviceDecommissioningConfigStreamResponse"]: - raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) - - async def set( - self, - device_decommissioning_config_set_request: "DeviceDecommissioningConfigSetRequest", - ) -> "DeviceDecommissioningConfigSetResponse": - raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) - - async def delete( - self, - device_decommissioning_config_delete_request: "DeviceDecommissioningConfigDeleteRequest", - ) -> "DeviceDecommissioningConfigDeleteResponse": - raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) - - async def delete_all( - self, - device_decommissioning_config_delete_all_request: "DeviceDecommissioningConfigDeleteAllRequest", - ) -> AsyncIterator["DeviceDecommissioningConfigDeleteAllResponse"]: - raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) - - async def __rpc_get_one( - self, - stream: "grpclib.server.Stream[DeviceDecommissioningConfigRequest, DeviceDecommissioningConfigResponse]", - ) -> None: - request = await stream.recv_message() - response = await self.get_one(request) - await stream.send_message(response) - - async def __rpc_get_all( - self, - stream: "grpclib.server.Stream[DeviceDecommissioningConfigStreamRequest, DeviceDecommissioningConfigStreamResponse]", - ) -> None: - request = await stream.recv_message() - await self._call_rpc_handler_server_stream( - self.get_all, - stream, - request, - ) - - async def __rpc_subscribe( - self, - stream: "grpclib.server.Stream[DeviceDecommissioningConfigStreamRequest, DeviceDecommissioningConfigStreamResponse]", - ) -> None: - request = await stream.recv_message() - await self._call_rpc_handler_server_stream( - self.subscribe, - stream, - request, - ) - - async def 
__rpc_set( - self, - stream: "grpclib.server.Stream[DeviceDecommissioningConfigSetRequest, DeviceDecommissioningConfigSetResponse]", - ) -> None: - request = await stream.recv_message() - response = await self.set(request) - await stream.send_message(response) - - async def __rpc_delete( - self, - stream: "grpclib.server.Stream[DeviceDecommissioningConfigDeleteRequest, DeviceDecommissioningConfigDeleteResponse]", - ) -> None: - request = await stream.recv_message() - response = await self.delete(request) - await stream.send_message(response) - - async def __rpc_delete_all( - self, - stream: "grpclib.server.Stream[DeviceDecommissioningConfigDeleteAllRequest, DeviceDecommissioningConfigDeleteAllResponse]", - ) -> None: - request = await stream.recv_message() - await self._call_rpc_handler_server_stream( - self.delete_all, - stream, - request, - ) - - def __mapping__(self) -> Dict[str, grpclib.const.Handler]: - return { - "/arista.inventory.v1.DeviceDecommissioningConfigService/GetOne": grpclib.const.Handler( - self.__rpc_get_one, - grpclib.const.Cardinality.UNARY_UNARY, - DeviceDecommissioningConfigRequest, - DeviceDecommissioningConfigResponse, - ), - "/arista.inventory.v1.DeviceDecommissioningConfigService/GetAll": grpclib.const.Handler( - self.__rpc_get_all, - grpclib.const.Cardinality.UNARY_STREAM, - DeviceDecommissioningConfigStreamRequest, - DeviceDecommissioningConfigStreamResponse, - ), - "/arista.inventory.v1.DeviceDecommissioningConfigService/Subscribe": grpclib.const.Handler( - self.__rpc_subscribe, - grpclib.const.Cardinality.UNARY_STREAM, - DeviceDecommissioningConfigStreamRequest, - DeviceDecommissioningConfigStreamResponse, - ), - "/arista.inventory.v1.DeviceDecommissioningConfigService/Set": grpclib.const.Handler( - self.__rpc_set, - grpclib.const.Cardinality.UNARY_UNARY, - DeviceDecommissioningConfigSetRequest, - DeviceDecommissioningConfigSetResponse, - ), - "/arista.inventory.v1.DeviceDecommissioningConfigService/Delete": grpclib.const.Handler( - 
self.__rpc_delete, - grpclib.const.Cardinality.UNARY_UNARY, - DeviceDecommissioningConfigDeleteRequest, - DeviceDecommissioningConfigDeleteResponse, - ), - "/arista.inventory.v1.DeviceDecommissioningConfigService/DeleteAll": grpclib.const.Handler( - self.__rpc_delete_all, - grpclib.const.Cardinality.UNARY_STREAM, - DeviceDecommissioningConfigDeleteAllRequest, - DeviceDecommissioningConfigDeleteAllResponse, - ), - } - - -class DeviceOnboardingServiceBase(ServiceBase): - - async def get_one( - self, device_onboarding_request: "DeviceOnboardingRequest" - ) -> "DeviceOnboardingResponse": - raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) - - async def get_all( - self, device_onboarding_stream_request: "DeviceOnboardingStreamRequest" - ) -> AsyncIterator["DeviceOnboardingStreamResponse"]: - raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) - - async def subscribe( - self, device_onboarding_stream_request: "DeviceOnboardingStreamRequest" - ) -> AsyncIterator["DeviceOnboardingStreamResponse"]: - raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) - - async def __rpc_get_one( - self, - stream: "grpclib.server.Stream[DeviceOnboardingRequest, DeviceOnboardingResponse]", - ) -> None: - request = await stream.recv_message() - response = await self.get_one(request) - await stream.send_message(response) - - async def __rpc_get_all( - self, - stream: "grpclib.server.Stream[DeviceOnboardingStreamRequest, DeviceOnboardingStreamResponse]", - ) -> None: - request = await stream.recv_message() - await self._call_rpc_handler_server_stream( - self.get_all, - stream, - request, - ) - - async def __rpc_subscribe( - self, - stream: "grpclib.server.Stream[DeviceOnboardingStreamRequest, DeviceOnboardingStreamResponse]", - ) -> None: - request = await stream.recv_message() - await self._call_rpc_handler_server_stream( - self.subscribe, - stream, - request, - ) - - def __mapping__(self) -> Dict[str, grpclib.const.Handler]: - return { - 
"/arista.inventory.v1.DeviceOnboardingService/GetOne": grpclib.const.Handler( - self.__rpc_get_one, - grpclib.const.Cardinality.UNARY_UNARY, - DeviceOnboardingRequest, - DeviceOnboardingResponse, - ), - "/arista.inventory.v1.DeviceOnboardingService/GetAll": grpclib.const.Handler( - self.__rpc_get_all, - grpclib.const.Cardinality.UNARY_STREAM, - DeviceOnboardingStreamRequest, - DeviceOnboardingStreamResponse, - ), - "/arista.inventory.v1.DeviceOnboardingService/Subscribe": grpclib.const.Handler( - self.__rpc_subscribe, - grpclib.const.Cardinality.UNARY_STREAM, - DeviceOnboardingStreamRequest, - DeviceOnboardingStreamResponse, - ), - } - - -class DeviceOnboardingConfigServiceBase(ServiceBase): - - async def get_one( - self, device_onboarding_config_request: "DeviceOnboardingConfigRequest" - ) -> "DeviceOnboardingConfigResponse": - raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) - - async def get_all( - self, - device_onboarding_config_stream_request: "DeviceOnboardingConfigStreamRequest", - ) -> AsyncIterator["DeviceOnboardingConfigStreamResponse"]: - raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) - - async def subscribe( - self, - device_onboarding_config_stream_request: "DeviceOnboardingConfigStreamRequest", - ) -> AsyncIterator["DeviceOnboardingConfigStreamResponse"]: - raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) - - async def set( - self, device_onboarding_config_set_request: "DeviceOnboardingConfigSetRequest" - ) -> "DeviceOnboardingConfigSetResponse": - raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) - - async def delete( - self, - device_onboarding_config_delete_request: "DeviceOnboardingConfigDeleteRequest", - ) -> "DeviceOnboardingConfigDeleteResponse": - raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) - - async def delete_all( - self, - device_onboarding_config_delete_all_request: "DeviceOnboardingConfigDeleteAllRequest", - ) -> AsyncIterator["DeviceOnboardingConfigDeleteAllResponse"]: - raise 
grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) - - async def __rpc_get_one( - self, - stream: "grpclib.server.Stream[DeviceOnboardingConfigRequest, DeviceOnboardingConfigResponse]", - ) -> None: - request = await stream.recv_message() - response = await self.get_one(request) - await stream.send_message(response) - - async def __rpc_get_all( - self, - stream: "grpclib.server.Stream[DeviceOnboardingConfigStreamRequest, DeviceOnboardingConfigStreamResponse]", - ) -> None: - request = await stream.recv_message() - await self._call_rpc_handler_server_stream( - self.get_all, - stream, - request, - ) - - async def __rpc_subscribe( - self, - stream: "grpclib.server.Stream[DeviceOnboardingConfigStreamRequest, DeviceOnboardingConfigStreamResponse]", - ) -> None: - request = await stream.recv_message() - await self._call_rpc_handler_server_stream( - self.subscribe, - stream, - request, - ) - - async def __rpc_set( - self, - stream: "grpclib.server.Stream[DeviceOnboardingConfigSetRequest, DeviceOnboardingConfigSetResponse]", - ) -> None: - request = await stream.recv_message() - response = await self.set(request) - await stream.send_message(response) - - async def __rpc_delete( - self, - stream: "grpclib.server.Stream[DeviceOnboardingConfigDeleteRequest, DeviceOnboardingConfigDeleteResponse]", - ) -> None: - request = await stream.recv_message() - response = await self.delete(request) - await stream.send_message(response) - - async def __rpc_delete_all( - self, - stream: "grpclib.server.Stream[DeviceOnboardingConfigDeleteAllRequest, DeviceOnboardingConfigDeleteAllResponse]", - ) -> None: - request = await stream.recv_message() - await self._call_rpc_handler_server_stream( - self.delete_all, - stream, - request, - ) - - def __mapping__(self) -> Dict[str, grpclib.const.Handler]: - return { - "/arista.inventory.v1.DeviceOnboardingConfigService/GetOne": grpclib.const.Handler( - self.__rpc_get_one, - grpclib.const.Cardinality.UNARY_UNARY, - DeviceOnboardingConfigRequest, - 
DeviceOnboardingConfigResponse, - ), - "/arista.inventory.v1.DeviceOnboardingConfigService/GetAll": grpclib.const.Handler( - self.__rpc_get_all, - grpclib.const.Cardinality.UNARY_STREAM, - DeviceOnboardingConfigStreamRequest, - DeviceOnboardingConfigStreamResponse, - ), - "/arista.inventory.v1.DeviceOnboardingConfigService/Subscribe": grpclib.const.Handler( - self.__rpc_subscribe, - grpclib.const.Cardinality.UNARY_STREAM, - DeviceOnboardingConfigStreamRequest, - DeviceOnboardingConfigStreamResponse, - ), - "/arista.inventory.v1.DeviceOnboardingConfigService/Set": grpclib.const.Handler( - self.__rpc_set, - grpclib.const.Cardinality.UNARY_UNARY, - DeviceOnboardingConfigSetRequest, - DeviceOnboardingConfigSetResponse, - ), - "/arista.inventory.v1.DeviceOnboardingConfigService/Delete": grpclib.const.Handler( - self.__rpc_delete, - grpclib.const.Cardinality.UNARY_UNARY, - DeviceOnboardingConfigDeleteRequest, - DeviceOnboardingConfigDeleteResponse, - ), - "/arista.inventory.v1.DeviceOnboardingConfigService/DeleteAll": grpclib.const.Handler( - self.__rpc_delete_all, - grpclib.const.Cardinality.UNARY_STREAM, - DeviceOnboardingConfigDeleteAllRequest, - DeviceOnboardingConfigDeleteAllResponse, - ), - } - - -class ProvisionedDeviceServiceBase(ServiceBase): - - async def get_one( - self, provisioned_device_request: "ProvisionedDeviceRequest" - ) -> "ProvisionedDeviceResponse": - raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) - - async def get_all( - self, provisioned_device_stream_request: "ProvisionedDeviceStreamRequest" - ) -> AsyncIterator["ProvisionedDeviceStreamResponse"]: - raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) - - async def subscribe( - self, provisioned_device_stream_request: "ProvisionedDeviceStreamRequest" - ) -> AsyncIterator["ProvisionedDeviceStreamResponse"]: - raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) - - async def __rpc_get_one( - self, - stream: "grpclib.server.Stream[ProvisionedDeviceRequest, 
ProvisionedDeviceResponse]", - ) -> None: - request = await stream.recv_message() - response = await self.get_one(request) - await stream.send_message(response) - - async def __rpc_get_all( - self, - stream: "grpclib.server.Stream[ProvisionedDeviceStreamRequest, ProvisionedDeviceStreamResponse]", - ) -> None: - request = await stream.recv_message() - await self._call_rpc_handler_server_stream( - self.get_all, - stream, - request, - ) - - async def __rpc_subscribe( - self, - stream: "grpclib.server.Stream[ProvisionedDeviceStreamRequest, ProvisionedDeviceStreamResponse]", - ) -> None: - request = await stream.recv_message() - await self._call_rpc_handler_server_stream( - self.subscribe, - stream, - request, - ) - - def __mapping__(self) -> Dict[str, grpclib.const.Handler]: - return { - "/arista.inventory.v1.ProvisionedDeviceService/GetOne": grpclib.const.Handler( - self.__rpc_get_one, - grpclib.const.Cardinality.UNARY_UNARY, - ProvisionedDeviceRequest, - ProvisionedDeviceResponse, - ), - "/arista.inventory.v1.ProvisionedDeviceService/GetAll": grpclib.const.Handler( - self.__rpc_get_all, - grpclib.const.Cardinality.UNARY_STREAM, - ProvisionedDeviceStreamRequest, - ProvisionedDeviceStreamResponse, - ), - "/arista.inventory.v1.ProvisionedDeviceService/Subscribe": grpclib.const.Handler( - self.__rpc_subscribe, - grpclib.const.Cardinality.UNARY_STREAM, - ProvisionedDeviceStreamRequest, - ProvisionedDeviceStreamResponse, - ), - } diff --git a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/tag/v1/__init__.py b/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/tag/v1/__init__.py deleted file mode 100644 index 784a358e38c..00000000000 --- a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/tag/v1/__init__.py +++ /dev/null @@ -1,2130 +0,0 @@ -# Copyright (c) 2024 Arista Networks, Inc. -# Use of this source code is governed by the Apache License 2.0 -# that can be found in the LICENSE file. 
-# Generated by the protocol buffer compiler. DO NOT EDIT! -# sources: arista/tag.v1/services.gen.proto, arista/tag.v1/tag.proto -# plugin: python-aristaproto -# This file has been @generated - -from dataclasses import dataclass -from datetime import datetime -from typing import ( - TYPE_CHECKING, - AsyncIterator, - Dict, - List, - Optional, -) - -try: - import aristaproto - import grpclib - from aristaproto.grpc.grpclib_server import ServiceBase -except ImportError: - HAS_ARISTAPROTO = False - from ansible_collections.arista.avd.plugins.plugin_utils.cv_client.mocked_classes import mocked_aristaproto as aristaproto - from ansible_collections.arista.avd.plugins.plugin_utils.cv_client.mocked_classes import mocked_grpclib as grpclib - ServiceBase = object -else: - HAS_ARISTAPROTO = True - -from .... import fmp as ___fmp__ -from ... import ( - subscriptions as __subscriptions__, - time as __time__, -) - - -if TYPE_CHECKING: - import grpclib.server - from aristaproto.grpc.grpclib_client import MetadataLike - from grpclib.metadata import Deadline - - -class CreatorType(aristaproto.Enum): - """CreatorType specifies an entity that creates something.""" - - UNSPECIFIED = 0 - SYSTEM = 1 - """CREATOR_TYPE_SYSTEM is the type for something created by the system.""" - - USER = 2 - """CREATOR_TYPE_USER is the type for something created by a user.""" - - -@dataclass(eq=False, repr=False) -class TagKey(aristaproto.Message): - """TagKey uniquely identifies a tag for a network element.""" - - label: Optional[str] = aristaproto.message_field(1, wraps=aristaproto.TYPE_STRING) - """Label is the label of the tag.""" - - value: Optional[str] = aristaproto.message_field(2, wraps=aristaproto.TYPE_STRING) - """Value is the value of the tag.""" - - -@dataclass(eq=False, repr=False) -class InterfaceTagConfig(aristaproto.Message): - """ - InterfaceTagConfig is a label-value pair that may or may - not be assigned to an interface. 
- """ - - key: "TagKey" = aristaproto.message_field(1) - """Key uniquely identifies the interface tag.""" - - -@dataclass(eq=False, repr=False) -class InterfaceTag(aristaproto.Message): - """ - InterfaceTag is a label-value pair that may or may - not be assigned to an interface. - """ - - key: "TagKey" = aristaproto.message_field(1) - """Key uniquely identifies the interface tag.""" - - creator_type: "CreatorType" = aristaproto.enum_field(2) - """CreatorType is the creator type of the tag.""" - - -@dataclass(eq=False, repr=False) -class InterfaceTagAssignmentKey(aristaproto.Message): - """ - InterfaceTagAssignmentKey uniquely identifies an interface - tag assignment. - """ - - label: Optional[str] = aristaproto.message_field(1, wraps=aristaproto.TYPE_STRING) - """Label is the label of the tag.""" - - value: Optional[str] = aristaproto.message_field(2, wraps=aristaproto.TYPE_STRING) - """Value is the value of the tag.""" - - device_id: Optional[str] = aristaproto.message_field( - 3, wraps=aristaproto.TYPE_STRING - ) - """DeviceId is the ID of the interface's device.""" - - interface_id: Optional[str] = aristaproto.message_field( - 4, wraps=aristaproto.TYPE_STRING - ) - """InterfaceId is the ID of the interface.""" - - -@dataclass(eq=False, repr=False) -class InterfaceTagAssignmentConfig(aristaproto.Message): - """ - InterfaceTagAssignmentConfig is the assignment of an interface tag - to a specific interface. - """ - - key: "InterfaceTagAssignmentKey" = aristaproto.message_field(1) - """Key uniquely identifies the interface tag assignment.""" - - -@dataclass(eq=False, repr=False) -class DeviceTagConfig(aristaproto.Message): - """ - DeviceTagConfig is a label-value pair that may or may not - be assigned to a device. 
- """ - - key: "TagKey" = aristaproto.message_field(1) - """Key uniquely identifies the device tag.""" - - -@dataclass(eq=False, repr=False) -class DeviceTag(aristaproto.Message): - """ - DeviceTag is a label-value pair that may or may not - be assigned to a device. - """ - - key: "TagKey" = aristaproto.message_field(1) - """Key uniquely identifies the device tag.""" - - creator_type: "CreatorType" = aristaproto.enum_field(2) - """CreatorType is the creator type of the tag.""" - - -@dataclass(eq=False, repr=False) -class DeviceTagAssignmentKey(aristaproto.Message): - """ - DeviceTagAssignmentKey uniquely identifies a device tag - assignment. - """ - - label: Optional[str] = aristaproto.message_field(1, wraps=aristaproto.TYPE_STRING) - """Label is the label of the tag.""" - - value: Optional[str] = aristaproto.message_field(2, wraps=aristaproto.TYPE_STRING) - """Value is the value of the tag.""" - - device_id: Optional[str] = aristaproto.message_field( - 3, wraps=aristaproto.TYPE_STRING - ) - """DeviceId is the ID of the device.""" - - -@dataclass(eq=False, repr=False) -class DeviceTagAssignmentConfig(aristaproto.Message): - """ - DeviceTagAssignmentConfig is the assignment of a device tag to a - specific device. - """ - - key: "DeviceTagAssignmentKey" = aristaproto.message_field(1) - """Key uniquely identifies the device tag assignment.""" - - -@dataclass(eq=False, repr=False) -class DeviceTagRequest(aristaproto.Message): - key: "TagKey" = aristaproto.message_field(1) - """ - Key uniquely identifies a DeviceTag instance to retrieve. - This value must be populated. - """ - - time: datetime = aristaproto.message_field(2) - """ - Time indicates the time for which you are interested in the data. - If no time is given, the server will use the time at which it makes the request. - """ - - -@dataclass(eq=False, repr=False) -class DeviceTagResponse(aristaproto.Message): - value: "DeviceTag" = aristaproto.message_field(1) - """ - Value is the value requested. 
- This structure will be fully-populated as it exists in the datastore. If - optional fields were not given at creation, these fields will be empty or - set to default values. - """ - - time: datetime = aristaproto.message_field(2) - """ - Time carries the (UTC) timestamp of the last-modification of the - DeviceTag instance in this response. - """ - - -@dataclass(eq=False, repr=False) -class DeviceTagStreamRequest(aristaproto.Message): - partial_eq_filter: List["DeviceTag"] = aristaproto.message_field(1) - """ - PartialEqFilter provides a way to server-side filter a GetAll/Subscribe. - This requires all provided fields to be equal to the response. - - While transparent to users, this field also allows services to optimize internal - subscriptions if filter(s) are sufficiently specific. - """ - - time: "__time__.TimeBounds" = aristaproto.message_field(3) - """ - TimeRange allows limiting response data to within a specified time window. - If this field is populated, at least one of the two time fields are required. - - This field is not allowed in the Subscribe RPC. - """ - - -@dataclass(eq=False, repr=False) -class DeviceTagStreamResponse(aristaproto.Message): - value: "DeviceTag" = aristaproto.message_field(1) - """ - Value is a value deemed relevant to the initiating request. - This structure will always have its key-field populated. Which other fields are - populated, and why, depends on the value of Operation and what triggered this notification. - """ - - time: datetime = aristaproto.message_field(2) - """Time holds the timestamp of this DeviceTag's last modification.""" - - type: "__subscriptions__.Operation" = aristaproto.enum_field(3) - """ - Operation indicates how the DeviceTag value in this response should be considered. - Under non-subscribe requests, this value should always be INITIAL. In a subscription, - once all initial data is streamed and the client begins to receive modification updates, - you should not see INITIAL again. 
- """ - - -@dataclass(eq=False, repr=False) -class DeviceTagAssignmentConfigRequest(aristaproto.Message): - key: "DeviceTagAssignmentKey" = aristaproto.message_field(1) - """ - Key uniquely identifies a DeviceTagAssignmentConfig instance to retrieve. - This value must be populated. - """ - - time: datetime = aristaproto.message_field(2) - """ - Time indicates the time for which you are interested in the data. - If no time is given, the server will use the time at which it makes the request. - """ - - -@dataclass(eq=False, repr=False) -class DeviceTagAssignmentConfigResponse(aristaproto.Message): - value: "DeviceTagAssignmentConfig" = aristaproto.message_field(1) - """ - Value is the value requested. - This structure will be fully-populated as it exists in the datastore. If - optional fields were not given at creation, these fields will be empty or - set to default values. - """ - - time: datetime = aristaproto.message_field(2) - """ - Time carries the (UTC) timestamp of the last-modification of the - DeviceTagAssignmentConfig instance in this response. - """ - - -@dataclass(eq=False, repr=False) -class DeviceTagAssignmentConfigStreamRequest(aristaproto.Message): - partial_eq_filter: List["DeviceTagAssignmentConfig"] = aristaproto.message_field(1) - """ - PartialEqFilter provides a way to server-side filter a GetAll/Subscribe. - This requires all provided fields to be equal to the response. - - While transparent to users, this field also allows services to optimize internal - subscriptions if filter(s) are sufficiently specific. - """ - - time: "__time__.TimeBounds" = aristaproto.message_field(3) - """ - TimeRange allows limiting response data to within a specified time window. - If this field is populated, at least one of the two time fields are required. - - This field is not allowed in the Subscribe RPC. 
- """ - - -@dataclass(eq=False, repr=False) -class DeviceTagAssignmentConfigStreamResponse(aristaproto.Message): - value: "DeviceTagAssignmentConfig" = aristaproto.message_field(1) - """ - Value is a value deemed relevant to the initiating request. - This structure will always have its key-field populated. Which other fields are - populated, and why, depends on the value of Operation and what triggered this notification. - """ - - time: datetime = aristaproto.message_field(2) - """ - Time holds the timestamp of this DeviceTagAssignmentConfig's last modification. - """ - - type: "__subscriptions__.Operation" = aristaproto.enum_field(3) - """ - Operation indicates how the DeviceTagAssignmentConfig value in this response should be considered. - Under non-subscribe requests, this value should always be INITIAL. In a subscription, - once all initial data is streamed and the client begins to receive modification updates, - you should not see INITIAL again. - """ - - -@dataclass(eq=False, repr=False) -class DeviceTagAssignmentConfigSetRequest(aristaproto.Message): - value: "DeviceTagAssignmentConfig" = aristaproto.message_field(1) - """ - DeviceTagAssignmentConfig carries the value to set into the datastore. - See the documentation on the DeviceTagAssignmentConfig struct for which fields are required. - """ - - -@dataclass(eq=False, repr=False) -class DeviceTagAssignmentConfigSetResponse(aristaproto.Message): - value: "DeviceTagAssignmentConfig" = aristaproto.message_field(1) - """ - Value carries all the values given in the DeviceTagAssignmentConfigSetRequest as well - as any server-generated values. - """ - - time: datetime = aristaproto.message_field(2) - """ - Time indicates the (UTC) timestamp at which the system recognizes the - creation. The only guarantees made about this timestamp are: - - - it is after the time the request was received - - a time-ranged query with StartTime==CreatedAt will include this instance. 
- """ - - -@dataclass(eq=False, repr=False) -class DeviceTagAssignmentConfigDeleteRequest(aristaproto.Message): - key: "DeviceTagAssignmentKey" = aristaproto.message_field(1) - """ - Key indicates which DeviceTagAssignmentConfig instance to remove. - This field must always be set. - """ - - -@dataclass(eq=False, repr=False) -class DeviceTagAssignmentConfigDeleteResponse(aristaproto.Message): - key: "DeviceTagAssignmentKey" = aristaproto.message_field(1) - """ - Key echoes back the key of the deleted DeviceTagAssignmentConfig instance. - """ - - time: datetime = aristaproto.message_field(2) - """ - Time indicates the (UTC) timestamp at which the system recognizes the - deletion. The only guarantees made about this timestamp are: - - - it is after the time the request was received - - a time-ranged query with StartTime==DeletedAt will not include this instance. - """ - - -@dataclass(eq=False, repr=False) -class DeviceTagAssignmentConfigDeleteAllRequest(aristaproto.Message): - pass - - -@dataclass(eq=False, repr=False) -class DeviceTagAssignmentConfigDeleteAllResponse(aristaproto.Message): - type: "___fmp__.DeleteError" = aristaproto.enum_field(1) - """This describes the class of delete error.""" - - error: Optional[str] = aristaproto.message_field(2, wraps=aristaproto.TYPE_STRING) - """This indicates the error message from the delete failure.""" - - key: "DeviceTagAssignmentKey" = aristaproto.message_field(3) - """ - This is the key of the DeviceTagAssignmentConfig instance that failed to be deleted. - """ - - time: datetime = aristaproto.message_field(4) - """Time indicates the (UTC) timestamp when the key was being deleted.""" - - -@dataclass(eq=False, repr=False) -class DeviceTagConfigRequest(aristaproto.Message): - key: "TagKey" = aristaproto.message_field(1) - """ - Key uniquely identifies a DeviceTagConfig instance to retrieve. - This value must be populated. 
- """ - - time: datetime = aristaproto.message_field(2) - """ - Time indicates the time for which you are interested in the data. - If no time is given, the server will use the time at which it makes the request. - """ - - -@dataclass(eq=False, repr=False) -class DeviceTagConfigResponse(aristaproto.Message): - value: "DeviceTagConfig" = aristaproto.message_field(1) - """ - Value is the value requested. - This structure will be fully-populated as it exists in the datastore. If - optional fields were not given at creation, these fields will be empty or - set to default values. - """ - - time: datetime = aristaproto.message_field(2) - """ - Time carries the (UTC) timestamp of the last-modification of the - DeviceTagConfig instance in this response. - """ - - -@dataclass(eq=False, repr=False) -class DeviceTagConfigStreamRequest(aristaproto.Message): - partial_eq_filter: List["DeviceTagConfig"] = aristaproto.message_field(1) - """ - PartialEqFilter provides a way to server-side filter a GetAll/Subscribe. - This requires all provided fields to be equal to the response. - - While transparent to users, this field also allows services to optimize internal - subscriptions if filter(s) are sufficiently specific. - """ - - time: "__time__.TimeBounds" = aristaproto.message_field(3) - """ - TimeRange allows limiting response data to within a specified time window. - If this field is populated, at least one of the two time fields are required. - - This field is not allowed in the Subscribe RPC. - """ - - -@dataclass(eq=False, repr=False) -class DeviceTagConfigStreamResponse(aristaproto.Message): - value: "DeviceTagConfig" = aristaproto.message_field(1) - """ - Value is a value deemed relevant to the initiating request. - This structure will always have its key-field populated. Which other fields are - populated, and why, depends on the value of Operation and what triggered this notification. 
- """ - - time: datetime = aristaproto.message_field(2) - """ - Time holds the timestamp of this DeviceTagConfig's last modification. - """ - - type: "__subscriptions__.Operation" = aristaproto.enum_field(3) - """ - Operation indicates how the DeviceTagConfig value in this response should be considered. - Under non-subscribe requests, this value should always be INITIAL. In a subscription, - once all initial data is streamed and the client begins to receive modification updates, - you should not see INITIAL again. - """ - - -@dataclass(eq=False, repr=False) -class DeviceTagConfigSetRequest(aristaproto.Message): - value: "DeviceTagConfig" = aristaproto.message_field(1) - """ - DeviceTagConfig carries the value to set into the datastore. - See the documentation on the DeviceTagConfig struct for which fields are required. - """ - - -@dataclass(eq=False, repr=False) -class DeviceTagConfigSetResponse(aristaproto.Message): - value: "DeviceTagConfig" = aristaproto.message_field(1) - """ - Value carries all the values given in the DeviceTagConfigSetRequest as well - as any server-generated values. - """ - - time: datetime = aristaproto.message_field(2) - """ - Time indicates the (UTC) timestamp at which the system recognizes the - creation. The only guarantees made about this timestamp are: - - - it is after the time the request was received - - a time-ranged query with StartTime==CreatedAt will include this instance. - """ - - -@dataclass(eq=False, repr=False) -class DeviceTagConfigDeleteRequest(aristaproto.Message): - key: "TagKey" = aristaproto.message_field(1) - """ - Key indicates which DeviceTagConfig instance to remove. - This field must always be set. 
- """ - - -@dataclass(eq=False, repr=False) -class DeviceTagConfigDeleteResponse(aristaproto.Message): - key: "TagKey" = aristaproto.message_field(1) - """Key echoes back the key of the deleted DeviceTagConfig instance.""" - - time: datetime = aristaproto.message_field(2) - """ - Time indicates the (UTC) timestamp at which the system recognizes the - deletion. The only guarantees made about this timestamp are: - - - it is after the time the request was received - - a time-ranged query with StartTime==DeletedAt will not include this instance. - """ - - -@dataclass(eq=False, repr=False) -class DeviceTagConfigDeleteAllRequest(aristaproto.Message): - pass - - -@dataclass(eq=False, repr=False) -class DeviceTagConfigDeleteAllResponse(aristaproto.Message): - type: "___fmp__.DeleteError" = aristaproto.enum_field(1) - """This describes the class of delete error.""" - - error: Optional[str] = aristaproto.message_field(2, wraps=aristaproto.TYPE_STRING) - """This indicates the error message from the delete failure.""" - - key: "TagKey" = aristaproto.message_field(3) - """ - This is the key of the DeviceTagConfig instance that failed to be deleted. - """ - - time: datetime = aristaproto.message_field(4) - """Time indicates the (UTC) timestamp when the key was being deleted.""" - - -@dataclass(eq=False, repr=False) -class InterfaceTagRequest(aristaproto.Message): - key: "TagKey" = aristaproto.message_field(1) - """ - Key uniquely identifies a InterfaceTag instance to retrieve. - This value must be populated. - """ - - time: datetime = aristaproto.message_field(2) - """ - Time indicates the time for which you are interested in the data. - If no time is given, the server will use the time at which it makes the request. - """ - - -@dataclass(eq=False, repr=False) -class InterfaceTagResponse(aristaproto.Message): - value: "InterfaceTag" = aristaproto.message_field(1) - """ - Value is the value requested. - This structure will be fully-populated as it exists in the datastore. 
If - optional fields were not given at creation, these fields will be empty or - set to default values. - """ - - time: datetime = aristaproto.message_field(2) - """ - Time carries the (UTC) timestamp of the last-modification of the - InterfaceTag instance in this response. - """ - - -@dataclass(eq=False, repr=False) -class InterfaceTagStreamRequest(aristaproto.Message): - partial_eq_filter: List["InterfaceTag"] = aristaproto.message_field(1) - """ - PartialEqFilter provides a way to server-side filter a GetAll/Subscribe. - This requires all provided fields to be equal to the response. - - While transparent to users, this field also allows services to optimize internal - subscriptions if filter(s) are sufficiently specific. - """ - - time: "__time__.TimeBounds" = aristaproto.message_field(3) - """ - TimeRange allows limiting response data to within a specified time window. - If this field is populated, at least one of the two time fields are required. - - This field is not allowed in the Subscribe RPC. - """ - - -@dataclass(eq=False, repr=False) -class InterfaceTagStreamResponse(aristaproto.Message): - value: "InterfaceTag" = aristaproto.message_field(1) - """ - Value is a value deemed relevant to the initiating request. - This structure will always have its key-field populated. Which other fields are - populated, and why, depends on the value of Operation and what triggered this notification. - """ - - time: datetime = aristaproto.message_field(2) - """Time holds the timestamp of this InterfaceTag's last modification.""" - - type: "__subscriptions__.Operation" = aristaproto.enum_field(3) - """ - Operation indicates how the InterfaceTag value in this response should be considered. - Under non-subscribe requests, this value should always be INITIAL. In a subscription, - once all initial data is streamed and the client begins to receive modification updates, - you should not see INITIAL again. 
- """ - - -@dataclass(eq=False, repr=False) -class InterfaceTagAssignmentConfigRequest(aristaproto.Message): - key: "InterfaceTagAssignmentKey" = aristaproto.message_field(1) - """ - Key uniquely identifies a InterfaceTagAssignmentConfig instance to retrieve. - This value must be populated. - """ - - time: datetime = aristaproto.message_field(2) - """ - Time indicates the time for which you are interested in the data. - If no time is given, the server will use the time at which it makes the request. - """ - - -@dataclass(eq=False, repr=False) -class InterfaceTagAssignmentConfigResponse(aristaproto.Message): - value: "InterfaceTagAssignmentConfig" = aristaproto.message_field(1) - """ - Value is the value requested. - This structure will be fully-populated as it exists in the datastore. If - optional fields were not given at creation, these fields will be empty or - set to default values. - """ - - time: datetime = aristaproto.message_field(2) - """ - Time carries the (UTC) timestamp of the last-modification of the - InterfaceTagAssignmentConfig instance in this response. - """ - - -@dataclass(eq=False, repr=False) -class InterfaceTagAssignmentConfigStreamRequest(aristaproto.Message): - partial_eq_filter: List["InterfaceTagAssignmentConfig"] = aristaproto.message_field( - 1 - ) - """ - PartialEqFilter provides a way to server-side filter a GetAll/Subscribe. - This requires all provided fields to be equal to the response. - - While transparent to users, this field also allows services to optimize internal - subscriptions if filter(s) are sufficiently specific. - """ - - time: "__time__.TimeBounds" = aristaproto.message_field(3) - """ - TimeRange allows limiting response data to within a specified time window. - If this field is populated, at least one of the two time fields are required. - - This field is not allowed in the Subscribe RPC. 
- """ - - -@dataclass(eq=False, repr=False) -class InterfaceTagAssignmentConfigStreamResponse(aristaproto.Message): - value: "InterfaceTagAssignmentConfig" = aristaproto.message_field(1) - """ - Value is a value deemed relevant to the initiating request. - This structure will always have its key-field populated. Which other fields are - populated, and why, depends on the value of Operation and what triggered this notification. - """ - - time: datetime = aristaproto.message_field(2) - """ - Time holds the timestamp of this InterfaceTagAssignmentConfig's last modification. - """ - - type: "__subscriptions__.Operation" = aristaproto.enum_field(3) - """ - Operation indicates how the InterfaceTagAssignmentConfig value in this response should be considered. - Under non-subscribe requests, this value should always be INITIAL. In a subscription, - once all initial data is streamed and the client begins to receive modification updates, - you should not see INITIAL again. - """ - - -@dataclass(eq=False, repr=False) -class InterfaceTagAssignmentConfigSetRequest(aristaproto.Message): - value: "InterfaceTagAssignmentConfig" = aristaproto.message_field(1) - """ - InterfaceTagAssignmentConfig carries the value to set into the datastore. - See the documentation on the InterfaceTagAssignmentConfig struct for which fields are required. - """ - - -@dataclass(eq=False, repr=False) -class InterfaceTagAssignmentConfigSetResponse(aristaproto.Message): - value: "InterfaceTagAssignmentConfig" = aristaproto.message_field(1) - """ - Value carries all the values given in the InterfaceTagAssignmentConfigSetRequest as well - as any server-generated values. - """ - - time: datetime = aristaproto.message_field(2) - """ - Time indicates the (UTC) timestamp at which the system recognizes the - creation. The only guarantees made about this timestamp are: - - - it is after the time the request was received - - a time-ranged query with StartTime==CreatedAt will include this instance. 
- """ - - -@dataclass(eq=False, repr=False) -class InterfaceTagAssignmentConfigDeleteRequest(aristaproto.Message): - key: "InterfaceTagAssignmentKey" = aristaproto.message_field(1) - """ - Key indicates which InterfaceTagAssignmentConfig instance to remove. - This field must always be set. - """ - - -@dataclass(eq=False, repr=False) -class InterfaceTagAssignmentConfigDeleteResponse(aristaproto.Message): - key: "InterfaceTagAssignmentKey" = aristaproto.message_field(1) - """ - Key echoes back the key of the deleted InterfaceTagAssignmentConfig instance. - """ - - time: datetime = aristaproto.message_field(2) - """ - Time indicates the (UTC) timestamp at which the system recognizes the - deletion. The only guarantees made about this timestamp are: - - - it is after the time the request was received - - a time-ranged query with StartTime==DeletedAt will not include this instance. - """ - - -@dataclass(eq=False, repr=False) -class InterfaceTagAssignmentConfigDeleteAllRequest(aristaproto.Message): - pass - - -@dataclass(eq=False, repr=False) -class InterfaceTagAssignmentConfigDeleteAllResponse(aristaproto.Message): - type: "___fmp__.DeleteError" = aristaproto.enum_field(1) - """This describes the class of delete error.""" - - error: Optional[str] = aristaproto.message_field(2, wraps=aristaproto.TYPE_STRING) - """This indicates the error message from the delete failure.""" - - key: "InterfaceTagAssignmentKey" = aristaproto.message_field(3) - """ - This is the key of the InterfaceTagAssignmentConfig instance that failed to be deleted. - """ - - time: datetime = aristaproto.message_field(4) - """Time indicates the (UTC) timestamp when the key was being deleted.""" - - -@dataclass(eq=False, repr=False) -class InterfaceTagConfigRequest(aristaproto.Message): - key: "TagKey" = aristaproto.message_field(1) - """ - Key uniquely identifies a InterfaceTagConfig instance to retrieve. - This value must be populated. 
- """ - - time: datetime = aristaproto.message_field(2) - """ - Time indicates the time for which you are interested in the data. - If no time is given, the server will use the time at which it makes the request. - """ - - -@dataclass(eq=False, repr=False) -class InterfaceTagConfigResponse(aristaproto.Message): - value: "InterfaceTagConfig" = aristaproto.message_field(1) - """ - Value is the value requested. - This structure will be fully-populated as it exists in the datastore. If - optional fields were not given at creation, these fields will be empty or - set to default values. - """ - - time: datetime = aristaproto.message_field(2) - """ - Time carries the (UTC) timestamp of the last-modification of the - InterfaceTagConfig instance in this response. - """ - - -@dataclass(eq=False, repr=False) -class InterfaceTagConfigStreamRequest(aristaproto.Message): - partial_eq_filter: List["InterfaceTagConfig"] = aristaproto.message_field(1) - """ - PartialEqFilter provides a way to server-side filter a GetAll/Subscribe. - This requires all provided fields to be equal to the response. - - While transparent to users, this field also allows services to optimize internal - subscriptions if filter(s) are sufficiently specific. - """ - - time: "__time__.TimeBounds" = aristaproto.message_field(3) - """ - TimeRange allows limiting response data to within a specified time window. - If this field is populated, at least one of the two time fields are required. - - This field is not allowed in the Subscribe RPC. - """ - - -@dataclass(eq=False, repr=False) -class InterfaceTagConfigStreamResponse(aristaproto.Message): - value: "InterfaceTagConfig" = aristaproto.message_field(1) - """ - Value is a value deemed relevant to the initiating request. - This structure will always have its key-field populated. Which other fields are - populated, and why, depends on the value of Operation and what triggered this notification. 
- """ - - time: datetime = aristaproto.message_field(2) - """ - Time holds the timestamp of this InterfaceTagConfig's last modification. - """ - - type: "__subscriptions__.Operation" = aristaproto.enum_field(3) - """ - Operation indicates how the InterfaceTagConfig value in this response should be considered. - Under non-subscribe requests, this value should always be INITIAL. In a subscription, - once all initial data is streamed and the client begins to receive modification updates, - you should not see INITIAL again. - """ - - -@dataclass(eq=False, repr=False) -class InterfaceTagConfigSetRequest(aristaproto.Message): - value: "InterfaceTagConfig" = aristaproto.message_field(1) - """ - InterfaceTagConfig carries the value to set into the datastore. - See the documentation on the InterfaceTagConfig struct for which fields are required. - """ - - -@dataclass(eq=False, repr=False) -class InterfaceTagConfigSetResponse(aristaproto.Message): - value: "InterfaceTagConfig" = aristaproto.message_field(1) - """ - Value carries all the values given in the InterfaceTagConfigSetRequest as well - as any server-generated values. - """ - - time: datetime = aristaproto.message_field(2) - """ - Time indicates the (UTC) timestamp at which the system recognizes the - creation. The only guarantees made about this timestamp are: - - - it is after the time the request was received - - a time-ranged query with StartTime==CreatedAt will include this instance. - """ - - -@dataclass(eq=False, repr=False) -class InterfaceTagConfigDeleteRequest(aristaproto.Message): - key: "TagKey" = aristaproto.message_field(1) - """ - Key indicates which InterfaceTagConfig instance to remove. - This field must always be set. 
- """ - - -@dataclass(eq=False, repr=False) -class InterfaceTagConfigDeleteResponse(aristaproto.Message): - key: "TagKey" = aristaproto.message_field(1) - """Key echoes back the key of the deleted InterfaceTagConfig instance.""" - - time: datetime = aristaproto.message_field(2) - """ - Time indicates the (UTC) timestamp at which the system recognizes the - deletion. The only guarantees made about this timestamp are: - - - it is after the time the request was received - - a time-ranged query with StartTime==DeletedAt will not include this instance. - """ - - -@dataclass(eq=False, repr=False) -class InterfaceTagConfigDeleteAllRequest(aristaproto.Message): - pass - - -@dataclass(eq=False, repr=False) -class InterfaceTagConfigDeleteAllResponse(aristaproto.Message): - type: "___fmp__.DeleteError" = aristaproto.enum_field(1) - """This describes the class of delete error.""" - - error: Optional[str] = aristaproto.message_field(2, wraps=aristaproto.TYPE_STRING) - """This indicates the error message from the delete failure.""" - - key: "TagKey" = aristaproto.message_field(3) - """ - This is the key of the InterfaceTagConfig instance that failed to be deleted. 
- """ - - time: datetime = aristaproto.message_field(4) - """Time indicates the (UTC) timestamp when the key was being deleted.""" - - -class DeviceTagServiceStub(aristaproto.ServiceStub): - async def get_one( - self, - device_tag_request: "DeviceTagRequest", - *, - timeout: Optional[float] = None, - deadline: Optional["Deadline"] = None, - metadata: Optional["MetadataLike"] = None - ) -> "DeviceTagResponse": - return await self._unary_unary( - "/arista.tag.v1.DeviceTagService/GetOne", - device_tag_request, - DeviceTagResponse, - timeout=timeout, - deadline=deadline, - metadata=metadata, - ) - - async def get_all( - self, - device_tag_stream_request: "DeviceTagStreamRequest", - *, - timeout: Optional[float] = None, - deadline: Optional["Deadline"] = None, - metadata: Optional["MetadataLike"] = None - ) -> AsyncIterator["DeviceTagStreamResponse"]: - async for response in self._unary_stream( - "/arista.tag.v1.DeviceTagService/GetAll", - device_tag_stream_request, - DeviceTagStreamResponse, - timeout=timeout, - deadline=deadline, - metadata=metadata, - ): - yield response - - async def subscribe( - self, - device_tag_stream_request: "DeviceTagStreamRequest", - *, - timeout: Optional[float] = None, - deadline: Optional["Deadline"] = None, - metadata: Optional["MetadataLike"] = None - ) -> AsyncIterator["DeviceTagStreamResponse"]: - async for response in self._unary_stream( - "/arista.tag.v1.DeviceTagService/Subscribe", - device_tag_stream_request, - DeviceTagStreamResponse, - timeout=timeout, - deadline=deadline, - metadata=metadata, - ): - yield response - - -class DeviceTagAssignmentConfigServiceStub(aristaproto.ServiceStub): - async def get_one( - self, - device_tag_assignment_config_request: "DeviceTagAssignmentConfigRequest", - *, - timeout: Optional[float] = None, - deadline: Optional["Deadline"] = None, - metadata: Optional["MetadataLike"] = None - ) -> "DeviceTagAssignmentConfigResponse": - return await self._unary_unary( - 
"/arista.tag.v1.DeviceTagAssignmentConfigService/GetOne", - device_tag_assignment_config_request, - DeviceTagAssignmentConfigResponse, - timeout=timeout, - deadline=deadline, - metadata=metadata, - ) - - async def get_all( - self, - device_tag_assignment_config_stream_request: "DeviceTagAssignmentConfigStreamRequest", - *, - timeout: Optional[float] = None, - deadline: Optional["Deadline"] = None, - metadata: Optional["MetadataLike"] = None - ) -> AsyncIterator["DeviceTagAssignmentConfigStreamResponse"]: - async for response in self._unary_stream( - "/arista.tag.v1.DeviceTagAssignmentConfigService/GetAll", - device_tag_assignment_config_stream_request, - DeviceTagAssignmentConfigStreamResponse, - timeout=timeout, - deadline=deadline, - metadata=metadata, - ): - yield response - - async def subscribe( - self, - device_tag_assignment_config_stream_request: "DeviceTagAssignmentConfigStreamRequest", - *, - timeout: Optional[float] = None, - deadline: Optional["Deadline"] = None, - metadata: Optional["MetadataLike"] = None - ) -> AsyncIterator["DeviceTagAssignmentConfigStreamResponse"]: - async for response in self._unary_stream( - "/arista.tag.v1.DeviceTagAssignmentConfigService/Subscribe", - device_tag_assignment_config_stream_request, - DeviceTagAssignmentConfigStreamResponse, - timeout=timeout, - deadline=deadline, - metadata=metadata, - ): - yield response - - async def set( - self, - device_tag_assignment_config_set_request: "DeviceTagAssignmentConfigSetRequest", - *, - timeout: Optional[float] = None, - deadline: Optional["Deadline"] = None, - metadata: Optional["MetadataLike"] = None - ) -> "DeviceTagAssignmentConfigSetResponse": - return await self._unary_unary( - "/arista.tag.v1.DeviceTagAssignmentConfigService/Set", - device_tag_assignment_config_set_request, - DeviceTagAssignmentConfigSetResponse, - timeout=timeout, - deadline=deadline, - metadata=metadata, - ) - - async def delete( - self, - device_tag_assignment_config_delete_request: 
"DeviceTagAssignmentConfigDeleteRequest", - *, - timeout: Optional[float] = None, - deadline: Optional["Deadline"] = None, - metadata: Optional["MetadataLike"] = None - ) -> "DeviceTagAssignmentConfigDeleteResponse": - return await self._unary_unary( - "/arista.tag.v1.DeviceTagAssignmentConfigService/Delete", - device_tag_assignment_config_delete_request, - DeviceTagAssignmentConfigDeleteResponse, - timeout=timeout, - deadline=deadline, - metadata=metadata, - ) - - async def delete_all( - self, - device_tag_assignment_config_delete_all_request: "DeviceTagAssignmentConfigDeleteAllRequest", - *, - timeout: Optional[float] = None, - deadline: Optional["Deadline"] = None, - metadata: Optional["MetadataLike"] = None - ) -> AsyncIterator["DeviceTagAssignmentConfigDeleteAllResponse"]: - async for response in self._unary_stream( - "/arista.tag.v1.DeviceTagAssignmentConfigService/DeleteAll", - device_tag_assignment_config_delete_all_request, - DeviceTagAssignmentConfigDeleteAllResponse, - timeout=timeout, - deadline=deadline, - metadata=metadata, - ): - yield response - - -class DeviceTagConfigServiceStub(aristaproto.ServiceStub): - async def get_one( - self, - device_tag_config_request: "DeviceTagConfigRequest", - *, - timeout: Optional[float] = None, - deadline: Optional["Deadline"] = None, - metadata: Optional["MetadataLike"] = None - ) -> "DeviceTagConfigResponse": - return await self._unary_unary( - "/arista.tag.v1.DeviceTagConfigService/GetOne", - device_tag_config_request, - DeviceTagConfigResponse, - timeout=timeout, - deadline=deadline, - metadata=metadata, - ) - - async def get_all( - self, - device_tag_config_stream_request: "DeviceTagConfigStreamRequest", - *, - timeout: Optional[float] = None, - deadline: Optional["Deadline"] = None, - metadata: Optional["MetadataLike"] = None - ) -> AsyncIterator["DeviceTagConfigStreamResponse"]: - async for response in self._unary_stream( - "/arista.tag.v1.DeviceTagConfigService/GetAll", - device_tag_config_stream_request, - 
DeviceTagConfigStreamResponse, - timeout=timeout, - deadline=deadline, - metadata=metadata, - ): - yield response - - async def subscribe( - self, - device_tag_config_stream_request: "DeviceTagConfigStreamRequest", - *, - timeout: Optional[float] = None, - deadline: Optional["Deadline"] = None, - metadata: Optional["MetadataLike"] = None - ) -> AsyncIterator["DeviceTagConfigStreamResponse"]: - async for response in self._unary_stream( - "/arista.tag.v1.DeviceTagConfigService/Subscribe", - device_tag_config_stream_request, - DeviceTagConfigStreamResponse, - timeout=timeout, - deadline=deadline, - metadata=metadata, - ): - yield response - - async def set( - self, - device_tag_config_set_request: "DeviceTagConfigSetRequest", - *, - timeout: Optional[float] = None, - deadline: Optional["Deadline"] = None, - metadata: Optional["MetadataLike"] = None - ) -> "DeviceTagConfigSetResponse": - return await self._unary_unary( - "/arista.tag.v1.DeviceTagConfigService/Set", - device_tag_config_set_request, - DeviceTagConfigSetResponse, - timeout=timeout, - deadline=deadline, - metadata=metadata, - ) - - async def delete( - self, - device_tag_config_delete_request: "DeviceTagConfigDeleteRequest", - *, - timeout: Optional[float] = None, - deadline: Optional["Deadline"] = None, - metadata: Optional["MetadataLike"] = None - ) -> "DeviceTagConfigDeleteResponse": - return await self._unary_unary( - "/arista.tag.v1.DeviceTagConfigService/Delete", - device_tag_config_delete_request, - DeviceTagConfigDeleteResponse, - timeout=timeout, - deadline=deadline, - metadata=metadata, - ) - - async def delete_all( - self, - device_tag_config_delete_all_request: "DeviceTagConfigDeleteAllRequest", - *, - timeout: Optional[float] = None, - deadline: Optional["Deadline"] = None, - metadata: Optional["MetadataLike"] = None - ) -> AsyncIterator["DeviceTagConfigDeleteAllResponse"]: - async for response in self._unary_stream( - "/arista.tag.v1.DeviceTagConfigService/DeleteAll", - 
device_tag_config_delete_all_request, - DeviceTagConfigDeleteAllResponse, - timeout=timeout, - deadline=deadline, - metadata=metadata, - ): - yield response - - -class InterfaceTagServiceStub(aristaproto.ServiceStub): - async def get_one( - self, - interface_tag_request: "InterfaceTagRequest", - *, - timeout: Optional[float] = None, - deadline: Optional["Deadline"] = None, - metadata: Optional["MetadataLike"] = None - ) -> "InterfaceTagResponse": - return await self._unary_unary( - "/arista.tag.v1.InterfaceTagService/GetOne", - interface_tag_request, - InterfaceTagResponse, - timeout=timeout, - deadline=deadline, - metadata=metadata, - ) - - async def get_all( - self, - interface_tag_stream_request: "InterfaceTagStreamRequest", - *, - timeout: Optional[float] = None, - deadline: Optional["Deadline"] = None, - metadata: Optional["MetadataLike"] = None - ) -> AsyncIterator["InterfaceTagStreamResponse"]: - async for response in self._unary_stream( - "/arista.tag.v1.InterfaceTagService/GetAll", - interface_tag_stream_request, - InterfaceTagStreamResponse, - timeout=timeout, - deadline=deadline, - metadata=metadata, - ): - yield response - - async def subscribe( - self, - interface_tag_stream_request: "InterfaceTagStreamRequest", - *, - timeout: Optional[float] = None, - deadline: Optional["Deadline"] = None, - metadata: Optional["MetadataLike"] = None - ) -> AsyncIterator["InterfaceTagStreamResponse"]: - async for response in self._unary_stream( - "/arista.tag.v1.InterfaceTagService/Subscribe", - interface_tag_stream_request, - InterfaceTagStreamResponse, - timeout=timeout, - deadline=deadline, - metadata=metadata, - ): - yield response - - -class InterfaceTagAssignmentConfigServiceStub(aristaproto.ServiceStub): - async def get_one( - self, - interface_tag_assignment_config_request: "InterfaceTagAssignmentConfigRequest", - *, - timeout: Optional[float] = None, - deadline: Optional["Deadline"] = None, - metadata: Optional["MetadataLike"] = None - ) -> 
"InterfaceTagAssignmentConfigResponse": - return await self._unary_unary( - "/arista.tag.v1.InterfaceTagAssignmentConfigService/GetOne", - interface_tag_assignment_config_request, - InterfaceTagAssignmentConfigResponse, - timeout=timeout, - deadline=deadline, - metadata=metadata, - ) - - async def get_all( - self, - interface_tag_assignment_config_stream_request: "InterfaceTagAssignmentConfigStreamRequest", - *, - timeout: Optional[float] = None, - deadline: Optional["Deadline"] = None, - metadata: Optional["MetadataLike"] = None - ) -> AsyncIterator["InterfaceTagAssignmentConfigStreamResponse"]: - async for response in self._unary_stream( - "/arista.tag.v1.InterfaceTagAssignmentConfigService/GetAll", - interface_tag_assignment_config_stream_request, - InterfaceTagAssignmentConfigStreamResponse, - timeout=timeout, - deadline=deadline, - metadata=metadata, - ): - yield response - - async def subscribe( - self, - interface_tag_assignment_config_stream_request: "InterfaceTagAssignmentConfigStreamRequest", - *, - timeout: Optional[float] = None, - deadline: Optional["Deadline"] = None, - metadata: Optional["MetadataLike"] = None - ) -> AsyncIterator["InterfaceTagAssignmentConfigStreamResponse"]: - async for response in self._unary_stream( - "/arista.tag.v1.InterfaceTagAssignmentConfigService/Subscribe", - interface_tag_assignment_config_stream_request, - InterfaceTagAssignmentConfigStreamResponse, - timeout=timeout, - deadline=deadline, - metadata=metadata, - ): - yield response - - async def set( - self, - interface_tag_assignment_config_set_request: "InterfaceTagAssignmentConfigSetRequest", - *, - timeout: Optional[float] = None, - deadline: Optional["Deadline"] = None, - metadata: Optional["MetadataLike"] = None - ) -> "InterfaceTagAssignmentConfigSetResponse": - return await self._unary_unary( - "/arista.tag.v1.InterfaceTagAssignmentConfigService/Set", - interface_tag_assignment_config_set_request, - InterfaceTagAssignmentConfigSetResponse, - timeout=timeout, - 
deadline=deadline, - metadata=metadata, - ) - - async def delete( - self, - interface_tag_assignment_config_delete_request: "InterfaceTagAssignmentConfigDeleteRequest", - *, - timeout: Optional[float] = None, - deadline: Optional["Deadline"] = None, - metadata: Optional["MetadataLike"] = None - ) -> "InterfaceTagAssignmentConfigDeleteResponse": - return await self._unary_unary( - "/arista.tag.v1.InterfaceTagAssignmentConfigService/Delete", - interface_tag_assignment_config_delete_request, - InterfaceTagAssignmentConfigDeleteResponse, - timeout=timeout, - deadline=deadline, - metadata=metadata, - ) - - async def delete_all( - self, - interface_tag_assignment_config_delete_all_request: "InterfaceTagAssignmentConfigDeleteAllRequest", - *, - timeout: Optional[float] = None, - deadline: Optional["Deadline"] = None, - metadata: Optional["MetadataLike"] = None - ) -> AsyncIterator["InterfaceTagAssignmentConfigDeleteAllResponse"]: - async for response in self._unary_stream( - "/arista.tag.v1.InterfaceTagAssignmentConfigService/DeleteAll", - interface_tag_assignment_config_delete_all_request, - InterfaceTagAssignmentConfigDeleteAllResponse, - timeout=timeout, - deadline=deadline, - metadata=metadata, - ): - yield response - - -class InterfaceTagConfigServiceStub(aristaproto.ServiceStub): - async def get_one( - self, - interface_tag_config_request: "InterfaceTagConfigRequest", - *, - timeout: Optional[float] = None, - deadline: Optional["Deadline"] = None, - metadata: Optional["MetadataLike"] = None - ) -> "InterfaceTagConfigResponse": - return await self._unary_unary( - "/arista.tag.v1.InterfaceTagConfigService/GetOne", - interface_tag_config_request, - InterfaceTagConfigResponse, - timeout=timeout, - deadline=deadline, - metadata=metadata, - ) - - async def get_all( - self, - interface_tag_config_stream_request: "InterfaceTagConfigStreamRequest", - *, - timeout: Optional[float] = None, - deadline: Optional["Deadline"] = None, - metadata: Optional["MetadataLike"] = None - ) 
-> AsyncIterator["InterfaceTagConfigStreamResponse"]: - async for response in self._unary_stream( - "/arista.tag.v1.InterfaceTagConfigService/GetAll", - interface_tag_config_stream_request, - InterfaceTagConfigStreamResponse, - timeout=timeout, - deadline=deadline, - metadata=metadata, - ): - yield response - - async def subscribe( - self, - interface_tag_config_stream_request: "InterfaceTagConfigStreamRequest", - *, - timeout: Optional[float] = None, - deadline: Optional["Deadline"] = None, - metadata: Optional["MetadataLike"] = None - ) -> AsyncIterator["InterfaceTagConfigStreamResponse"]: - async for response in self._unary_stream( - "/arista.tag.v1.InterfaceTagConfigService/Subscribe", - interface_tag_config_stream_request, - InterfaceTagConfigStreamResponse, - timeout=timeout, - deadline=deadline, - metadata=metadata, - ): - yield response - - async def set( - self, - interface_tag_config_set_request: "InterfaceTagConfigSetRequest", - *, - timeout: Optional[float] = None, - deadline: Optional["Deadline"] = None, - metadata: Optional["MetadataLike"] = None - ) -> "InterfaceTagConfigSetResponse": - return await self._unary_unary( - "/arista.tag.v1.InterfaceTagConfigService/Set", - interface_tag_config_set_request, - InterfaceTagConfigSetResponse, - timeout=timeout, - deadline=deadline, - metadata=metadata, - ) - - async def delete( - self, - interface_tag_config_delete_request: "InterfaceTagConfigDeleteRequest", - *, - timeout: Optional[float] = None, - deadline: Optional["Deadline"] = None, - metadata: Optional["MetadataLike"] = None - ) -> "InterfaceTagConfigDeleteResponse": - return await self._unary_unary( - "/arista.tag.v1.InterfaceTagConfigService/Delete", - interface_tag_config_delete_request, - InterfaceTagConfigDeleteResponse, - timeout=timeout, - deadline=deadline, - metadata=metadata, - ) - - async def delete_all( - self, - interface_tag_config_delete_all_request: "InterfaceTagConfigDeleteAllRequest", - *, - timeout: Optional[float] = None, - 
deadline: Optional["Deadline"] = None, - metadata: Optional["MetadataLike"] = None - ) -> AsyncIterator["InterfaceTagConfigDeleteAllResponse"]: - async for response in self._unary_stream( - "/arista.tag.v1.InterfaceTagConfigService/DeleteAll", - interface_tag_config_delete_all_request, - InterfaceTagConfigDeleteAllResponse, - timeout=timeout, - deadline=deadline, - metadata=metadata, - ): - yield response - - -class DeviceTagServiceBase(ServiceBase): - - async def get_one( - self, device_tag_request: "DeviceTagRequest" - ) -> "DeviceTagResponse": - raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) - - async def get_all( - self, device_tag_stream_request: "DeviceTagStreamRequest" - ) -> AsyncIterator["DeviceTagStreamResponse"]: - raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) - - async def subscribe( - self, device_tag_stream_request: "DeviceTagStreamRequest" - ) -> AsyncIterator["DeviceTagStreamResponse"]: - raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) - - async def __rpc_get_one( - self, stream: "grpclib.server.Stream[DeviceTagRequest, DeviceTagResponse]" - ) -> None: - request = await stream.recv_message() - response = await self.get_one(request) - await stream.send_message(response) - - async def __rpc_get_all( - self, - stream: "grpclib.server.Stream[DeviceTagStreamRequest, DeviceTagStreamResponse]", - ) -> None: - request = await stream.recv_message() - await self._call_rpc_handler_server_stream( - self.get_all, - stream, - request, - ) - - async def __rpc_subscribe( - self, - stream: "grpclib.server.Stream[DeviceTagStreamRequest, DeviceTagStreamResponse]", - ) -> None: - request = await stream.recv_message() - await self._call_rpc_handler_server_stream( - self.subscribe, - stream, - request, - ) - - def __mapping__(self) -> Dict[str, grpclib.const.Handler]: - return { - "/arista.tag.v1.DeviceTagService/GetOne": grpclib.const.Handler( - self.__rpc_get_one, - grpclib.const.Cardinality.UNARY_UNARY, - DeviceTagRequest, - 
DeviceTagResponse, - ), - "/arista.tag.v1.DeviceTagService/GetAll": grpclib.const.Handler( - self.__rpc_get_all, - grpclib.const.Cardinality.UNARY_STREAM, - DeviceTagStreamRequest, - DeviceTagStreamResponse, - ), - "/arista.tag.v1.DeviceTagService/Subscribe": grpclib.const.Handler( - self.__rpc_subscribe, - grpclib.const.Cardinality.UNARY_STREAM, - DeviceTagStreamRequest, - DeviceTagStreamResponse, - ), - } - - -class DeviceTagAssignmentConfigServiceBase(ServiceBase): - - async def get_one( - self, device_tag_assignment_config_request: "DeviceTagAssignmentConfigRequest" - ) -> "DeviceTagAssignmentConfigResponse": - raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) - - async def get_all( - self, - device_tag_assignment_config_stream_request: "DeviceTagAssignmentConfigStreamRequest", - ) -> AsyncIterator["DeviceTagAssignmentConfigStreamResponse"]: - raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) - - async def subscribe( - self, - device_tag_assignment_config_stream_request: "DeviceTagAssignmentConfigStreamRequest", - ) -> AsyncIterator["DeviceTagAssignmentConfigStreamResponse"]: - raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) - - async def set( - self, - device_tag_assignment_config_set_request: "DeviceTagAssignmentConfigSetRequest", - ) -> "DeviceTagAssignmentConfigSetResponse": - raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) - - async def delete( - self, - device_tag_assignment_config_delete_request: "DeviceTagAssignmentConfigDeleteRequest", - ) -> "DeviceTagAssignmentConfigDeleteResponse": - raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) - - async def delete_all( - self, - device_tag_assignment_config_delete_all_request: "DeviceTagAssignmentConfigDeleteAllRequest", - ) -> AsyncIterator["DeviceTagAssignmentConfigDeleteAllResponse"]: - raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) - - async def __rpc_get_one( - self, - stream: "grpclib.server.Stream[DeviceTagAssignmentConfigRequest, 
DeviceTagAssignmentConfigResponse]", - ) -> None: - request = await stream.recv_message() - response = await self.get_one(request) - await stream.send_message(response) - - async def __rpc_get_all( - self, - stream: "grpclib.server.Stream[DeviceTagAssignmentConfigStreamRequest, DeviceTagAssignmentConfigStreamResponse]", - ) -> None: - request = await stream.recv_message() - await self._call_rpc_handler_server_stream( - self.get_all, - stream, - request, - ) - - async def __rpc_subscribe( - self, - stream: "grpclib.server.Stream[DeviceTagAssignmentConfigStreamRequest, DeviceTagAssignmentConfigStreamResponse]", - ) -> None: - request = await stream.recv_message() - await self._call_rpc_handler_server_stream( - self.subscribe, - stream, - request, - ) - - async def __rpc_set( - self, - stream: "grpclib.server.Stream[DeviceTagAssignmentConfigSetRequest, DeviceTagAssignmentConfigSetResponse]", - ) -> None: - request = await stream.recv_message() - response = await self.set(request) - await stream.send_message(response) - - async def __rpc_delete( - self, - stream: "grpclib.server.Stream[DeviceTagAssignmentConfigDeleteRequest, DeviceTagAssignmentConfigDeleteResponse]", - ) -> None: - request = await stream.recv_message() - response = await self.delete(request) - await stream.send_message(response) - - async def __rpc_delete_all( - self, - stream: "grpclib.server.Stream[DeviceTagAssignmentConfigDeleteAllRequest, DeviceTagAssignmentConfigDeleteAllResponse]", - ) -> None: - request = await stream.recv_message() - await self._call_rpc_handler_server_stream( - self.delete_all, - stream, - request, - ) - - def __mapping__(self) -> Dict[str, grpclib.const.Handler]: - return { - "/arista.tag.v1.DeviceTagAssignmentConfigService/GetOne": grpclib.const.Handler( - self.__rpc_get_one, - grpclib.const.Cardinality.UNARY_UNARY, - DeviceTagAssignmentConfigRequest, - DeviceTagAssignmentConfigResponse, - ), - "/arista.tag.v1.DeviceTagAssignmentConfigService/GetAll": grpclib.const.Handler( 
- self.__rpc_get_all, - grpclib.const.Cardinality.UNARY_STREAM, - DeviceTagAssignmentConfigStreamRequest, - DeviceTagAssignmentConfigStreamResponse, - ), - "/arista.tag.v1.DeviceTagAssignmentConfigService/Subscribe": grpclib.const.Handler( - self.__rpc_subscribe, - grpclib.const.Cardinality.UNARY_STREAM, - DeviceTagAssignmentConfigStreamRequest, - DeviceTagAssignmentConfigStreamResponse, - ), - "/arista.tag.v1.DeviceTagAssignmentConfigService/Set": grpclib.const.Handler( - self.__rpc_set, - grpclib.const.Cardinality.UNARY_UNARY, - DeviceTagAssignmentConfigSetRequest, - DeviceTagAssignmentConfigSetResponse, - ), - "/arista.tag.v1.DeviceTagAssignmentConfigService/Delete": grpclib.const.Handler( - self.__rpc_delete, - grpclib.const.Cardinality.UNARY_UNARY, - DeviceTagAssignmentConfigDeleteRequest, - DeviceTagAssignmentConfigDeleteResponse, - ), - "/arista.tag.v1.DeviceTagAssignmentConfigService/DeleteAll": grpclib.const.Handler( - self.__rpc_delete_all, - grpclib.const.Cardinality.UNARY_STREAM, - DeviceTagAssignmentConfigDeleteAllRequest, - DeviceTagAssignmentConfigDeleteAllResponse, - ), - } - - -class DeviceTagConfigServiceBase(ServiceBase): - - async def get_one( - self, device_tag_config_request: "DeviceTagConfigRequest" - ) -> "DeviceTagConfigResponse": - raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) - - async def get_all( - self, device_tag_config_stream_request: "DeviceTagConfigStreamRequest" - ) -> AsyncIterator["DeviceTagConfigStreamResponse"]: - raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) - - async def subscribe( - self, device_tag_config_stream_request: "DeviceTagConfigStreamRequest" - ) -> AsyncIterator["DeviceTagConfigStreamResponse"]: - raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) - - async def set( - self, device_tag_config_set_request: "DeviceTagConfigSetRequest" - ) -> "DeviceTagConfigSetResponse": - raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) - - async def delete( - self, 
device_tag_config_delete_request: "DeviceTagConfigDeleteRequest" - ) -> "DeviceTagConfigDeleteResponse": - raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) - - async def delete_all( - self, device_tag_config_delete_all_request: "DeviceTagConfigDeleteAllRequest" - ) -> AsyncIterator["DeviceTagConfigDeleteAllResponse"]: - raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) - - async def __rpc_get_one( - self, - stream: "grpclib.server.Stream[DeviceTagConfigRequest, DeviceTagConfigResponse]", - ) -> None: - request = await stream.recv_message() - response = await self.get_one(request) - await stream.send_message(response) - - async def __rpc_get_all( - self, - stream: "grpclib.server.Stream[DeviceTagConfigStreamRequest, DeviceTagConfigStreamResponse]", - ) -> None: - request = await stream.recv_message() - await self._call_rpc_handler_server_stream( - self.get_all, - stream, - request, - ) - - async def __rpc_subscribe( - self, - stream: "grpclib.server.Stream[DeviceTagConfigStreamRequest, DeviceTagConfigStreamResponse]", - ) -> None: - request = await stream.recv_message() - await self._call_rpc_handler_server_stream( - self.subscribe, - stream, - request, - ) - - async def __rpc_set( - self, - stream: "grpclib.server.Stream[DeviceTagConfigSetRequest, DeviceTagConfigSetResponse]", - ) -> None: - request = await stream.recv_message() - response = await self.set(request) - await stream.send_message(response) - - async def __rpc_delete( - self, - stream: "grpclib.server.Stream[DeviceTagConfigDeleteRequest, DeviceTagConfigDeleteResponse]", - ) -> None: - request = await stream.recv_message() - response = await self.delete(request) - await stream.send_message(response) - - async def __rpc_delete_all( - self, - stream: "grpclib.server.Stream[DeviceTagConfigDeleteAllRequest, DeviceTagConfigDeleteAllResponse]", - ) -> None: - request = await stream.recv_message() - await self._call_rpc_handler_server_stream( - self.delete_all, - stream, - request, - ) - - 
def __mapping__(self) -> Dict[str, grpclib.const.Handler]: - return { - "/arista.tag.v1.DeviceTagConfigService/GetOne": grpclib.const.Handler( - self.__rpc_get_one, - grpclib.const.Cardinality.UNARY_UNARY, - DeviceTagConfigRequest, - DeviceTagConfigResponse, - ), - "/arista.tag.v1.DeviceTagConfigService/GetAll": grpclib.const.Handler( - self.__rpc_get_all, - grpclib.const.Cardinality.UNARY_STREAM, - DeviceTagConfigStreamRequest, - DeviceTagConfigStreamResponse, - ), - "/arista.tag.v1.DeviceTagConfigService/Subscribe": grpclib.const.Handler( - self.__rpc_subscribe, - grpclib.const.Cardinality.UNARY_STREAM, - DeviceTagConfigStreamRequest, - DeviceTagConfigStreamResponse, - ), - "/arista.tag.v1.DeviceTagConfigService/Set": grpclib.const.Handler( - self.__rpc_set, - grpclib.const.Cardinality.UNARY_UNARY, - DeviceTagConfigSetRequest, - DeviceTagConfigSetResponse, - ), - "/arista.tag.v1.DeviceTagConfigService/Delete": grpclib.const.Handler( - self.__rpc_delete, - grpclib.const.Cardinality.UNARY_UNARY, - DeviceTagConfigDeleteRequest, - DeviceTagConfigDeleteResponse, - ), - "/arista.tag.v1.DeviceTagConfigService/DeleteAll": grpclib.const.Handler( - self.__rpc_delete_all, - grpclib.const.Cardinality.UNARY_STREAM, - DeviceTagConfigDeleteAllRequest, - DeviceTagConfigDeleteAllResponse, - ), - } - - -class InterfaceTagServiceBase(ServiceBase): - - async def get_one( - self, interface_tag_request: "InterfaceTagRequest" - ) -> "InterfaceTagResponse": - raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) - - async def get_all( - self, interface_tag_stream_request: "InterfaceTagStreamRequest" - ) -> AsyncIterator["InterfaceTagStreamResponse"]: - raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) - - async def subscribe( - self, interface_tag_stream_request: "InterfaceTagStreamRequest" - ) -> AsyncIterator["InterfaceTagStreamResponse"]: - raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) - - async def __rpc_get_one( - self, stream: 
"grpclib.server.Stream[InterfaceTagRequest, InterfaceTagResponse]" - ) -> None: - request = await stream.recv_message() - response = await self.get_one(request) - await stream.send_message(response) - - async def __rpc_get_all( - self, - stream: "grpclib.server.Stream[InterfaceTagStreamRequest, InterfaceTagStreamResponse]", - ) -> None: - request = await stream.recv_message() - await self._call_rpc_handler_server_stream( - self.get_all, - stream, - request, - ) - - async def __rpc_subscribe( - self, - stream: "grpclib.server.Stream[InterfaceTagStreamRequest, InterfaceTagStreamResponse]", - ) -> None: - request = await stream.recv_message() - await self._call_rpc_handler_server_stream( - self.subscribe, - stream, - request, - ) - - def __mapping__(self) -> Dict[str, grpclib.const.Handler]: - return { - "/arista.tag.v1.InterfaceTagService/GetOne": grpclib.const.Handler( - self.__rpc_get_one, - grpclib.const.Cardinality.UNARY_UNARY, - InterfaceTagRequest, - InterfaceTagResponse, - ), - "/arista.tag.v1.InterfaceTagService/GetAll": grpclib.const.Handler( - self.__rpc_get_all, - grpclib.const.Cardinality.UNARY_STREAM, - InterfaceTagStreamRequest, - InterfaceTagStreamResponse, - ), - "/arista.tag.v1.InterfaceTagService/Subscribe": grpclib.const.Handler( - self.__rpc_subscribe, - grpclib.const.Cardinality.UNARY_STREAM, - InterfaceTagStreamRequest, - InterfaceTagStreamResponse, - ), - } - - -class InterfaceTagAssignmentConfigServiceBase(ServiceBase): - - async def get_one( - self, - interface_tag_assignment_config_request: "InterfaceTagAssignmentConfigRequest", - ) -> "InterfaceTagAssignmentConfigResponse": - raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) - - async def get_all( - self, - interface_tag_assignment_config_stream_request: "InterfaceTagAssignmentConfigStreamRequest", - ) -> AsyncIterator["InterfaceTagAssignmentConfigStreamResponse"]: - raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) - - async def subscribe( - self, - 
interface_tag_assignment_config_stream_request: "InterfaceTagAssignmentConfigStreamRequest", - ) -> AsyncIterator["InterfaceTagAssignmentConfigStreamResponse"]: - raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) - - async def set( - self, - interface_tag_assignment_config_set_request: "InterfaceTagAssignmentConfigSetRequest", - ) -> "InterfaceTagAssignmentConfigSetResponse": - raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) - - async def delete( - self, - interface_tag_assignment_config_delete_request: "InterfaceTagAssignmentConfigDeleteRequest", - ) -> "InterfaceTagAssignmentConfigDeleteResponse": - raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) - - async def delete_all( - self, - interface_tag_assignment_config_delete_all_request: "InterfaceTagAssignmentConfigDeleteAllRequest", - ) -> AsyncIterator["InterfaceTagAssignmentConfigDeleteAllResponse"]: - raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) - - async def __rpc_get_one( - self, - stream: "grpclib.server.Stream[InterfaceTagAssignmentConfigRequest, InterfaceTagAssignmentConfigResponse]", - ) -> None: - request = await stream.recv_message() - response = await self.get_one(request) - await stream.send_message(response) - - async def __rpc_get_all( - self, - stream: "grpclib.server.Stream[InterfaceTagAssignmentConfigStreamRequest, InterfaceTagAssignmentConfigStreamResponse]", - ) -> None: - request = await stream.recv_message() - await self._call_rpc_handler_server_stream( - self.get_all, - stream, - request, - ) - - async def __rpc_subscribe( - self, - stream: "grpclib.server.Stream[InterfaceTagAssignmentConfigStreamRequest, InterfaceTagAssignmentConfigStreamResponse]", - ) -> None: - request = await stream.recv_message() - await self._call_rpc_handler_server_stream( - self.subscribe, - stream, - request, - ) - - async def __rpc_set( - self, - stream: "grpclib.server.Stream[InterfaceTagAssignmentConfigSetRequest, InterfaceTagAssignmentConfigSetResponse]", - ) -> 
None: - request = await stream.recv_message() - response = await self.set(request) - await stream.send_message(response) - - async def __rpc_delete( - self, - stream: "grpclib.server.Stream[InterfaceTagAssignmentConfigDeleteRequest, InterfaceTagAssignmentConfigDeleteResponse]", - ) -> None: - request = await stream.recv_message() - response = await self.delete(request) - await stream.send_message(response) - - async def __rpc_delete_all( - self, - stream: "grpclib.server.Stream[InterfaceTagAssignmentConfigDeleteAllRequest, InterfaceTagAssignmentConfigDeleteAllResponse]", - ) -> None: - request = await stream.recv_message() - await self._call_rpc_handler_server_stream( - self.delete_all, - stream, - request, - ) - - def __mapping__(self) -> Dict[str, grpclib.const.Handler]: - return { - "/arista.tag.v1.InterfaceTagAssignmentConfigService/GetOne": grpclib.const.Handler( - self.__rpc_get_one, - grpclib.const.Cardinality.UNARY_UNARY, - InterfaceTagAssignmentConfigRequest, - InterfaceTagAssignmentConfigResponse, - ), - "/arista.tag.v1.InterfaceTagAssignmentConfigService/GetAll": grpclib.const.Handler( - self.__rpc_get_all, - grpclib.const.Cardinality.UNARY_STREAM, - InterfaceTagAssignmentConfigStreamRequest, - InterfaceTagAssignmentConfigStreamResponse, - ), - "/arista.tag.v1.InterfaceTagAssignmentConfigService/Subscribe": grpclib.const.Handler( - self.__rpc_subscribe, - grpclib.const.Cardinality.UNARY_STREAM, - InterfaceTagAssignmentConfigStreamRequest, - InterfaceTagAssignmentConfigStreamResponse, - ), - "/arista.tag.v1.InterfaceTagAssignmentConfigService/Set": grpclib.const.Handler( - self.__rpc_set, - grpclib.const.Cardinality.UNARY_UNARY, - InterfaceTagAssignmentConfigSetRequest, - InterfaceTagAssignmentConfigSetResponse, - ), - "/arista.tag.v1.InterfaceTagAssignmentConfigService/Delete": grpclib.const.Handler( - self.__rpc_delete, - grpclib.const.Cardinality.UNARY_UNARY, - InterfaceTagAssignmentConfigDeleteRequest, - InterfaceTagAssignmentConfigDeleteResponse, - 
), - "/arista.tag.v1.InterfaceTagAssignmentConfigService/DeleteAll": grpclib.const.Handler( - self.__rpc_delete_all, - grpclib.const.Cardinality.UNARY_STREAM, - InterfaceTagAssignmentConfigDeleteAllRequest, - InterfaceTagAssignmentConfigDeleteAllResponse, - ), - } - - -class InterfaceTagConfigServiceBase(ServiceBase): - - async def get_one( - self, interface_tag_config_request: "InterfaceTagConfigRequest" - ) -> "InterfaceTagConfigResponse": - raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) - - async def get_all( - self, interface_tag_config_stream_request: "InterfaceTagConfigStreamRequest" - ) -> AsyncIterator["InterfaceTagConfigStreamResponse"]: - raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) - - async def subscribe( - self, interface_tag_config_stream_request: "InterfaceTagConfigStreamRequest" - ) -> AsyncIterator["InterfaceTagConfigStreamResponse"]: - raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) - - async def set( - self, interface_tag_config_set_request: "InterfaceTagConfigSetRequest" - ) -> "InterfaceTagConfigSetResponse": - raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) - - async def delete( - self, interface_tag_config_delete_request: "InterfaceTagConfigDeleteRequest" - ) -> "InterfaceTagConfigDeleteResponse": - raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) - - async def delete_all( - self, - interface_tag_config_delete_all_request: "InterfaceTagConfigDeleteAllRequest", - ) -> AsyncIterator["InterfaceTagConfigDeleteAllResponse"]: - raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) - - async def __rpc_get_one( - self, - stream: "grpclib.server.Stream[InterfaceTagConfigRequest, InterfaceTagConfigResponse]", - ) -> None: - request = await stream.recv_message() - response = await self.get_one(request) - await stream.send_message(response) - - async def __rpc_get_all( - self, - stream: "grpclib.server.Stream[InterfaceTagConfigStreamRequest, InterfaceTagConfigStreamResponse]", - ) -> 
None: - request = await stream.recv_message() - await self._call_rpc_handler_server_stream( - self.get_all, - stream, - request, - ) - - async def __rpc_subscribe( - self, - stream: "grpclib.server.Stream[InterfaceTagConfigStreamRequest, InterfaceTagConfigStreamResponse]", - ) -> None: - request = await stream.recv_message() - await self._call_rpc_handler_server_stream( - self.subscribe, - stream, - request, - ) - - async def __rpc_set( - self, - stream: "grpclib.server.Stream[InterfaceTagConfigSetRequest, InterfaceTagConfigSetResponse]", - ) -> None: - request = await stream.recv_message() - response = await self.set(request) - await stream.send_message(response) - - async def __rpc_delete( - self, - stream: "grpclib.server.Stream[InterfaceTagConfigDeleteRequest, InterfaceTagConfigDeleteResponse]", - ) -> None: - request = await stream.recv_message() - response = await self.delete(request) - await stream.send_message(response) - - async def __rpc_delete_all( - self, - stream: "grpclib.server.Stream[InterfaceTagConfigDeleteAllRequest, InterfaceTagConfigDeleteAllResponse]", - ) -> None: - request = await stream.recv_message() - await self._call_rpc_handler_server_stream( - self.delete_all, - stream, - request, - ) - - def __mapping__(self) -> Dict[str, grpclib.const.Handler]: - return { - "/arista.tag.v1.InterfaceTagConfigService/GetOne": grpclib.const.Handler( - self.__rpc_get_one, - grpclib.const.Cardinality.UNARY_UNARY, - InterfaceTagConfigRequest, - InterfaceTagConfigResponse, - ), - "/arista.tag.v1.InterfaceTagConfigService/GetAll": grpclib.const.Handler( - self.__rpc_get_all, - grpclib.const.Cardinality.UNARY_STREAM, - InterfaceTagConfigStreamRequest, - InterfaceTagConfigStreamResponse, - ), - "/arista.tag.v1.InterfaceTagConfigService/Subscribe": grpclib.const.Handler( - self.__rpc_subscribe, - grpclib.const.Cardinality.UNARY_STREAM, - InterfaceTagConfigStreamRequest, - InterfaceTagConfigStreamResponse, - ), - "/arista.tag.v1.InterfaceTagConfigService/Set": 
grpclib.const.Handler( - self.__rpc_set, - grpclib.const.Cardinality.UNARY_UNARY, - InterfaceTagConfigSetRequest, - InterfaceTagConfigSetResponse, - ), - "/arista.tag.v1.InterfaceTagConfigService/Delete": grpclib.const.Handler( - self.__rpc_delete, - grpclib.const.Cardinality.UNARY_UNARY, - InterfaceTagConfigDeleteRequest, - InterfaceTagConfigDeleteResponse, - ), - "/arista.tag.v1.InterfaceTagConfigService/DeleteAll": grpclib.const.Handler( - self.__rpc_delete_all, - grpclib.const.Cardinality.UNARY_STREAM, - InterfaceTagConfigDeleteAllRequest, - InterfaceTagConfigDeleteAllResponse, - ), - } diff --git a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/mocked_classes.py b/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/mocked_classes.py deleted file mode 100644 index bcff2c9b077..00000000000 --- a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/mocked_classes.py +++ /dev/null @@ -1,104 +0,0 @@ -# Copyright (c) 2023-2024 Arista Networks, Inc. -# Use of this source code is governed by the Apache License 2.0 -# that can be found in the LICENSE file. -""" -Dummy class for mock aristaproto. Required to pass ansible sanity tests. -Once things move away from the ansible collection this can be removed again. 
-""" - -ARISTAPROTO_ATTRIBUTES = [ - "Casing", - "DATETIME_ZERO", - "Enum", - "FIXED_TYPES", - "FieldMetadata", - "INFINITY", - "INT_64_TYPES", - "Message", - "NAN", - "NEG_INFINITY", - "PACKED_TYPES", - "PLACEHOLDER", - "ParsedField", - "ProtoClassMetadata", - "SIZE_DELIMITED", - "ServiceStub", - "TYPE_BOOL", - "TYPE_BYTES", - "TYPE_DOUBLE", - "TYPE_ENUM", - "TYPE_FIXED32", - "TYPE_FIXED64", - "TYPE_FLOAT", - "TYPE_INT32", - "TYPE_INT64", - "TYPE_MAP", - "TYPE_MESSAGE", - "TYPE_SFIXED32", - "TYPE_SFIXED64", - "TYPE_SINT32", - "TYPE_SINT64", - "TYPE_STRING", - "TYPE_UINT32", - "TYPE_UINT64", - "WIRE_FIXED_32", - "WIRE_FIXED_32_TYPES", - "WIRE_FIXED_64", - "WIRE_FIXED_64_TYPES", - "WIRE_LEN_DELIM", - "WIRE_LEN_DELIM_TYPES", - "WIRE_VARINT", - "WIRE_VARINT_TYPES", - "bool_field", - "bytes_field", - "casing", - "datetime_default_gen", - "decode_varint", - "double_field", - "dump_varint", - "encode_varint", - "enum_field", - "fixed32_field", - "fixed64_field", - "float_field", - "int32_field", - "int64_field", - "load_fields", - "load_varint", - "map_field", - "message_field", - "parse_fields", - "serialized_on_wire", - "sfixed32_field", - "sfixed64_field", - "sint32_field", - "sint64_field", - "size_varint", - "string_field", - "uint32_field", - "uint64_field", - "which_one_of", -] - - -class mocked_aristaproto: - pass - - -class mocked_grpclib: - class const: - class Handler: - pass - - -def dummy_callable(*args, **kwargs): - pass - - -for attribute in ARISTAPROTO_ATTRIBUTES: - if attribute.isupper(): - setattr(mocked_aristaproto, attribute, "dummyvalue") - elif attribute[:1].isupper(): - setattr(mocked_aristaproto, attribute, object) - else: - setattr(mocked_aristaproto, attribute, dummy_callable) diff --git a/ansible_collections/arista/avd/pylintrc b/ansible_collections/arista/avd/pylintrc index 488ddd038aa..90361c656f2 100644 --- a/ansible_collections/arista/avd/pylintrc +++ b/ansible_collections/arista/avd/pylintrc @@ -1,11 +1,6 @@ # Configuration applied to 
collection by ansible-test # https://github.com/ansible/ansible/blob/devel/test/lib/ansible_test/_data/sanity/pylint/config/collection.cfg -[MASTER] -ignore-paths= - # The cv_client api is generated from proto files, so it should not be linted. - plugins/plugin_utils/cv_client/api/ - [MESSAGES CONTROL] disable= abstract-method, diff --git a/pylintrc b/pylintrc index d53f99236b9..164564b0dc7 100644 --- a/pylintrc +++ b/pylintrc @@ -1,8 +1,9 @@ [MASTER] ignore-paths= # We will Pylint for ansible_collections separately since it uses it's own pylintrc in the collection root. - ansible_collections - + ansible_collections, + # The cv_client api is generated from proto files, so it should not be linted. + python-avd/pyavd/_cv/api [MESSAGES CONTROL] disable= diff --git a/pyproject.toml b/pyproject.toml index ba5da727276..eb07b591e09 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,12 +1,12 @@ [tool.black] line-length = 160 -force-exclude = '''ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/.*''' +force-exclude = '''python-avd/pyavd/_cv/api/.*''' [tool.isort] extend_skip_glob = [ - "ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/**/*" + "python-avd/pyavd/_cv/api/**/*" ] profile = "black" diff --git a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/Makefile b/python-avd/pyavd/_cv/Makefile similarity index 51% rename from ansible_collections/arista/avd/plugins/plugin_utils/cv_client/Makefile rename to python-avd/pyavd/_cv/Makefile index d726ace6e8a..c095b949d36 100644 --- a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/Makefile +++ b/python-avd/pyavd/_cv/Makefile @@ -16,11 +16,6 @@ PYTHON ?= python3 ALL_PROTOS=$(shell find $(VENDORPATH) -name "*.proto") -IMPORTS_TO_TRY_EXCEPT_1 = import aristaproto\nimport grpclib\nfrom aristaproto\.grpc\.grpclib_server import ServiceBase -TRY_EXCEPTED_IMPORTS_1 = try:\n import aristaproto\n import grpclib\n from aristaproto.grpc.grpclib_server import ServiceBase\nexcept 
ImportError:\n HAS_ARISTAPROTO = False\n from ansible_collections.arista.avd.plugins.plugin_utils.cv_client.mocked_classes import mocked_aristaproto as aristaproto\n from ansible_collections.arista.avd.plugins.plugin_utils.cv_client.mocked_classes import mocked_grpclib as grpclib\n ServiceBase = object\nelse:\n HAS_ARISTAPROTO = True -IMPORTS_TO_TRY_EXCEPT_2 = ^import aristaproto -TRY_EXCEPTED_IMPORTS_2 = try:\n import aristaproto\nexcept ImportError:\n HAS_ARISTAPROTO = False\n from ansible_collections.arista.avd.plugins.plugin_utils.cv_client.mocked_classes import mocked_aristaproto as aristaproto\nelse:\n HAS_ARISTAPROTO = True - default: api .PHONY: help @@ -30,17 +25,12 @@ help: ## Display help message api-dir: mkdir -p api rm -rf ./api/* + touch ./api/__init__.py api-models: api-dir $(PYTHON) -m grpc_tools.protoc -I $(VENDORPATH) $(EXTRA_DIRS) $(EXTRA_EXTRA_DIRS) --python_aristaproto_out=$(APIPATH) $(ALL_PROTOS) $(EXTRA_PROTOS) $(EXTRA_EXTRA_PROTOS) -api-ansible-sanity-import: api-models -# For most arista protos - find $(APIPATH) -name '*.py' -exec sed -z -i -e 's/$(IMPORTS_TO_TRY_EXCEPT_1)/$(TRY_EXCEPTED_IMPORTS_1)/g' {} + -# For fmp, subscriptions, time proto - find $(APIPATH) -name '*.py' -exec sed -i -e 's/$(IMPORTS_TO_TRY_EXCEPT_2)/$(TRY_EXCEPTED_IMPORTS_2)/g' {} + - -api: api-dir api-models api-ansible-sanity-import ## Build/refresh gRPC bindings in ./api +api: api-dir api-models ## Build/refresh gRPC bindings in ./api @rm -rf $(VENDORPATH) @pre-commit run insert-license --files $(shell find $(APIPATH)/ -name "*.py") || true @echo "--- done" diff --git a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/README.txt b/python-avd/pyavd/_cv/README.txt similarity index 64% rename from ansible_collections/arista/avd/plugins/plugin_utils/cv_client/README.txt rename to python-avd/pyavd/_cv/README.txt index c3de66707cb..1d25d15a23a 100644 --- a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/README.txt +++ 
b/python-avd/pyavd/_cv/README.txt @@ -4,7 +4,5 @@ For now we will maintain it here, as only AVD components rely on it. ./api/ gRPC client bindings for CloudVision Resource APIs using aristaproto (pip3 install aristaproto[compiler]). ./client/ Mid level abstraction of the CloudVision API, to hide some of the suboptimal APIs. ./extra_cv_protos/ Extra proto files inserted into the api during compilation. The APIs here are only supported for use by AVD. -./workflows/ High level workflow abstractions to be moved to pyavd once pyavd becomes a dependency of Ansible-AVD. +./workflows/ High level workflow abstractions. Makefile Makefile to rebuild the ./api/ -mocked_classes.py Mocked aristaproto and grpclib. Required to pass ansible sanity tests. - Once things move away from the ansible collection this can be removed again. diff --git a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/client/__init__.py b/python-avd/pyavd/_cv/__init__.py similarity index 73% rename from ansible_collections/arista/avd/plugins/plugin_utils/cv_client/client/__init__.py rename to python-avd/pyavd/_cv/__init__.py index 8ea7a0e59d0..e772bee41fe 100644 --- a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/client/__init__.py +++ b/python-avd/pyavd/_cv/__init__.py @@ -1,6 +1,3 @@ # Copyright (c) 2023-2024 Arista Networks, Inc. # Use of this source code is governed by the Apache License 2.0 # that can be found in the LICENSE file. 
-from .cv_client import CVClient - -__all__ = ["CVClient"] diff --git a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/__init__.py b/python-avd/pyavd/_cv/api/__init__.py similarity index 100% rename from ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/__init__.py rename to python-avd/pyavd/_cv/api/__init__.py diff --git a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/alert/__init__.py b/python-avd/pyavd/_cv/api/arista/__init__.py similarity index 100% rename from ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/alert/__init__.py rename to python-avd/pyavd/_cv/api/arista/__init__.py diff --git a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/bugexposure/__init__.py b/python-avd/pyavd/_cv/api/arista/alert/__init__.py similarity index 100% rename from ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/bugexposure/__init__.py rename to python-avd/pyavd/_cv/api/arista/alert/__init__.py diff --git a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/alert/v1/__init__.py b/python-avd/pyavd/_cv/api/arista/alert/v1/__init__.py similarity index 85% rename from ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/alert/v1/__init__.py rename to python-avd/pyavd/_cv/api/arista/alert/v1/__init__.py index bec4be2b85a..155ddae5b4f 100644 --- a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/alert/v1/__init__.py +++ b/python-avd/pyavd/_cv/api/arista/alert/v1/__init__.py @@ -7,7 +7,10 @@ # This file has been @generated from dataclasses import dataclass -from datetime import datetime +from datetime import ( + datetime, + timedelta, +) from typing import ( TYPE_CHECKING, AsyncIterator, @@ -16,17 +19,9 @@ Optional, ) -try: - import aristaproto - import grpclib - from aristaproto.grpc.grpclib_server import ServiceBase -except ImportError: - HAS_ARISTAPROTO = False - from 
ansible_collections.arista.avd.plugins.plugin_utils.cv_client.mocked_classes import mocked_aristaproto as aristaproto - from ansible_collections.arista.avd.plugins.plugin_utils.cv_client.mocked_classes import mocked_grpclib as grpclib - ServiceBase = object -else: - HAS_ARISTAPROTO = True +import aristaproto +import grpclib +from aristaproto.grpc.grpclib_server import ServiceBase from .... import fmp as ___fmp__ from ... import ( @@ -1138,6 +1133,11 @@ class Rule(aristaproto.Message): comment: Optional[str] = aristaproto.message_field(4, wraps=aristaproto.TYPE_STRING) """comment is a comment to include that will be displayed in the alert""" + suppress_for: timedelta = aristaproto.message_field(5) + """ + suppress_for is a time duration that a rule will be debounced for after being called + """ + @dataclass(eq=False, repr=False) class Matches(aristaproto.Message): @@ -1954,6 +1954,48 @@ class AlertStreamResponse(aristaproto.Message): """ +@dataclass(eq=False, repr=False) +class AlertBatchedStreamRequest(aristaproto.Message): + time: "__time__.TimeBounds" = aristaproto.message_field(3) + """ + TimeRange allows limiting response data to within a specified time window. + If this field is populated, at least one of the two time fields are required. + + For GetAll, the fields start and end can be used as follows: + + * end: Returns the state of each Alert at end. + * Each Alert response is fully-specified (all fields set). + * start: Returns the state of each Alert at start, followed by updates until now. + * Each Alert response at start is fully-specified, but updates may be partial. + * start and end: Returns the state of each Alert at start, followed by updates + until end. + * Each Alert response at start is fully-specified, but updates until end may + be partial. + + This field is not allowed in the Subscribe RPC. 
+ """ + + max_messages: Optional[int] = aristaproto.message_field( + 4, wraps=aristaproto.TYPE_UINT32 + ) + """ + MaxMessages limits the maximum number of messages that can be contained in one batch. + MaxMessages is required to be at least 1. + The maximum number of messages in a batch is min(max_messages, INTERNAL_BATCH_LIMIT) + INTERNAL_BATCH_LIMIT is set based on the maximum message size. + """ + + +@dataclass(eq=False, repr=False) +class AlertBatchedStreamResponse(aristaproto.Message): + responses: List["AlertStreamResponse"] = aristaproto.message_field(1) + """ + Values are the values deemed relevant to the initiating request. + The length of this structure is guaranteed to be between (inclusive) 1 and + min(req.max_messages, INTERNAL_BATCH_LIMIT). + """ + + @dataclass(eq=False, repr=False) class AlertConfigRequest(aristaproto.Message): time: datetime = aristaproto.message_field(2) @@ -2023,6 +2065,48 @@ class AlertConfigStreamResponse(aristaproto.Message): """ +@dataclass(eq=False, repr=False) +class AlertConfigBatchedStreamRequest(aristaproto.Message): + time: "__time__.TimeBounds" = aristaproto.message_field(3) + """ + TimeRange allows limiting response data to within a specified time window. + If this field is populated, at least one of the two time fields are required. + + For GetAll, the fields start and end can be used as follows: + + * end: Returns the state of each AlertConfig at end. + * Each AlertConfig response is fully-specified (all fields set). + * start: Returns the state of each AlertConfig at start, followed by updates until now. + * Each AlertConfig response at start is fully-specified, but updates may be partial. + * start and end: Returns the state of each AlertConfig at start, followed by updates + until end. + * Each AlertConfig response at start is fully-specified, but updates until end may + be partial. + + This field is not allowed in the Subscribe RPC. 
+ """ + + max_messages: Optional[int] = aristaproto.message_field( + 4, wraps=aristaproto.TYPE_UINT32 + ) + """ + MaxMessages limits the maximum number of messages that can be contained in one batch. + MaxMessages is required to be at least 1. + The maximum number of messages in a batch is min(max_messages, INTERNAL_BATCH_LIMIT) + INTERNAL_BATCH_LIMIT is set based on the maximum message size. + """ + + +@dataclass(eq=False, repr=False) +class AlertConfigBatchedStreamResponse(aristaproto.Message): + responses: List["AlertConfigStreamResponse"] = aristaproto.message_field(1) + """ + Values are the values deemed relevant to the initiating request. + The length of this structure is guaranteed to be between (inclusive) 1 and + min(req.max_messages, INTERNAL_BATCH_LIMIT). + """ + + @dataclass(eq=False, repr=False) class AlertConfigSetRequest(aristaproto.Message): value: "AlertConfig" = aristaproto.message_field(1) @@ -2156,6 +2240,48 @@ class DefaultTemplateStreamResponse(aristaproto.Message): """ +@dataclass(eq=False, repr=False) +class DefaultTemplateBatchedStreamRequest(aristaproto.Message): + time: "__time__.TimeBounds" = aristaproto.message_field(3) + """ + TimeRange allows limiting response data to within a specified time window. + If this field is populated, at least one of the two time fields are required. + + For GetAll, the fields start and end can be used as follows: + + * end: Returns the state of each DefaultTemplate at end. + * Each DefaultTemplate response is fully-specified (all fields set). + * start: Returns the state of each DefaultTemplate at start, followed by updates until now. + * Each DefaultTemplate response at start is fully-specified, but updates may be partial. + * start and end: Returns the state of each DefaultTemplate at start, followed by updates + until end. + * Each DefaultTemplate response at start is fully-specified, but updates until end may + be partial. + + This field is not allowed in the Subscribe RPC. 
+ """ + + max_messages: Optional[int] = aristaproto.message_field( + 4, wraps=aristaproto.TYPE_UINT32 + ) + """ + MaxMessages limits the maximum number of messages that can be contained in one batch. + MaxMessages is required to be at least 1. + The maximum number of messages in a batch is min(max_messages, INTERNAL_BATCH_LIMIT) + INTERNAL_BATCH_LIMIT is set based on the maximum message size. + """ + + +@dataclass(eq=False, repr=False) +class DefaultTemplateBatchedStreamResponse(aristaproto.Message): + responses: List["DefaultTemplateStreamResponse"] = aristaproto.message_field(1) + """ + Values are the values deemed relevant to the initiating request. + The length of this structure is guaranteed to be between (inclusive) 1 and + min(req.max_messages, INTERNAL_BATCH_LIMIT). + """ + + @dataclass(eq=False, repr=False) class TemplateConfigRequest(aristaproto.Message): key: "TemplateKey" = aristaproto.message_field(1) @@ -2260,6 +2386,48 @@ class TemplateConfigStreamResponse(aristaproto.Message): """ +@dataclass(eq=False, repr=False) +class TemplateConfigBatchedStreamRequest(aristaproto.Message): + time: "__time__.TimeBounds" = aristaproto.message_field(3) + """ + TimeRange allows limiting response data to within a specified time window. + If this field is populated, at least one of the two time fields are required. + + For GetAll, the fields start and end can be used as follows: + + * end: Returns the state of each TemplateConfig at end. + * Each TemplateConfig response is fully-specified (all fields set). + * start: Returns the state of each TemplateConfig at start, followed by updates until now. + * Each TemplateConfig response at start is fully-specified, but updates may be partial. + * start and end: Returns the state of each TemplateConfig at start, followed by updates + until end. + * Each TemplateConfig response at start is fully-specified, but updates until end may + be partial. + + This field is not allowed in the Subscribe RPC. 
+ """ + + max_messages: Optional[int] = aristaproto.message_field( + 4, wraps=aristaproto.TYPE_UINT32 + ) + """ + MaxMessages limits the maximum number of messages that can be contained in one batch. + MaxMessages is required to be at least 1. + The maximum number of messages in a batch is min(max_messages, INTERNAL_BATCH_LIMIT) + INTERNAL_BATCH_LIMIT is set based on the maximum message size. + """ + + +@dataclass(eq=False, repr=False) +class TemplateConfigBatchedStreamResponse(aristaproto.Message): + responses: List["TemplateConfigStreamResponse"] = aristaproto.message_field(1) + """ + Values are the values deemed relevant to the initiating request. + The length of this structure is guaranteed to be between (inclusive) 1 and + min(req.max_messages, INTERNAL_BATCH_LIMIT). + """ + + @dataclass(eq=False, repr=False) class TemplateConfigSetRequest(aristaproto.Message): value: "TemplateConfig" = aristaproto.message_field(1) @@ -2354,7 +2522,10 @@ class TemplateConfigDeleteAllRequest(aristaproto.Message): @dataclass(eq=False, repr=False) class TemplateConfigDeleteAllResponse(aristaproto.Message): type: "___fmp__.DeleteError" = aristaproto.enum_field(1) - """This describes the class of delete error.""" + """ + This describes the class of delete error. + A DeleteAllResponse is only sent when there is an error. 
+ """ error: Optional[str] = aristaproto.message_field(2, wraps=aristaproto.TYPE_STRING) """This indicates the error message from the delete failure.""" @@ -2440,6 +2611,42 @@ async def subscribe_meta( ): yield response + async def get_all_batched( + self, + alert_batched_stream_request: "AlertBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["AlertBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.alert.v1.AlertService/GetAllBatched", + alert_batched_stream_request, + AlertBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def subscribe_batched( + self, + alert_batched_stream_request: "AlertBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["AlertBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.alert.v1.AlertService/SubscribeBatched", + alert_batched_stream_request, + AlertBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + class AlertConfigServiceStub(aristaproto.ServiceStub): async def get_one( @@ -2530,6 +2737,42 @@ async def set( metadata=metadata, ) + async def get_all_batched( + self, + alert_config_batched_stream_request: "AlertConfigBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["AlertConfigBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.alert.v1.AlertConfigService/GetAllBatched", + alert_config_batched_stream_request, + AlertConfigBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def subscribe_batched( + self, + 
alert_config_batched_stream_request: "AlertConfigBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["AlertConfigBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.alert.v1.AlertConfigService/SubscribeBatched", + alert_config_batched_stream_request, + AlertConfigBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + class DefaultTemplateServiceStub(aristaproto.ServiceStub): async def get_one( @@ -2638,6 +2881,42 @@ async def subscribe_meta( ): yield response + async def get_all_batched( + self, + default_template_batched_stream_request: "DefaultTemplateBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["DefaultTemplateBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.alert.v1.DefaultTemplateService/GetAllBatched", + default_template_batched_stream_request, + DefaultTemplateBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def subscribe_batched( + self, + default_template_batched_stream_request: "DefaultTemplateBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["DefaultTemplateBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.alert.v1.DefaultTemplateService/SubscribeBatched", + default_template_batched_stream_request, + DefaultTemplateBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + class TemplateConfigServiceStub(aristaproto.ServiceStub): async def get_one( @@ -2834,6 +3113,42 @@ async def delete_all( ): yield response + async def get_all_batched( + self, + 
template_config_batched_stream_request: "TemplateConfigBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["TemplateConfigBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.alert.v1.TemplateConfigService/GetAllBatched", + template_config_batched_stream_request, + TemplateConfigBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def subscribe_batched( + self, + template_config_batched_stream_request: "TemplateConfigBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["TemplateConfigBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.alert.v1.TemplateConfigService/SubscribeBatched", + template_config_batched_stream_request, + TemplateConfigBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + class AlertServiceBase(ServiceBase): @@ -2855,6 +3170,16 @@ async def subscribe_meta( ) -> AsyncIterator["MetaResponse"]: raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def get_all_batched( + self, alert_batched_stream_request: "AlertBatchedStreamRequest" + ) -> AsyncIterator["AlertBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def subscribe_batched( + self, alert_batched_stream_request: "AlertBatchedStreamRequest" + ) -> AsyncIterator["AlertBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def __rpc_get_one( self, stream: "grpclib.server.Stream[AlertRequest, AlertResponse]" ) -> None: @@ -2892,6 +3217,28 @@ async def __rpc_subscribe_meta( request, ) + async def __rpc_get_all_batched( + self, + stream: "grpclib.server.Stream[AlertBatchedStreamRequest, 
AlertBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.get_all_batched, + stream, + request, + ) + + async def __rpc_subscribe_batched( + self, + stream: "grpclib.server.Stream[AlertBatchedStreamRequest, AlertBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.subscribe_batched, + stream, + request, + ) + def __mapping__(self) -> Dict[str, grpclib.const.Handler]: return { "/arista.alert.v1.AlertService/GetOne": grpclib.const.Handler( @@ -2918,6 +3265,18 @@ def __mapping__(self) -> Dict[str, grpclib.const.Handler]: AlertStreamRequest, MetaResponse, ), + "/arista.alert.v1.AlertService/GetAllBatched": grpclib.const.Handler( + self.__rpc_get_all_batched, + grpclib.const.Cardinality.UNARY_STREAM, + AlertBatchedStreamRequest, + AlertBatchedStreamResponse, + ), + "/arista.alert.v1.AlertService/SubscribeBatched": grpclib.const.Handler( + self.__rpc_subscribe_batched, + grpclib.const.Cardinality.UNARY_STREAM, + AlertBatchedStreamRequest, + AlertBatchedStreamResponse, + ), } @@ -2948,6 +3307,16 @@ async def set( ) -> "AlertConfigSetResponse": raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def get_all_batched( + self, alert_config_batched_stream_request: "AlertConfigBatchedStreamRequest" + ) -> AsyncIterator["AlertConfigBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def subscribe_batched( + self, alert_config_batched_stream_request: "AlertConfigBatchedStreamRequest" + ) -> AsyncIterator["AlertConfigBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def __rpc_get_one( self, stream: "grpclib.server.Stream[AlertConfigRequest, AlertConfigResponse]" ) -> None: @@ -2995,6 +3364,28 @@ async def __rpc_set( response = await self.set(request) await stream.send_message(response) + async def __rpc_get_all_batched( + 
self, + stream: "grpclib.server.Stream[AlertConfigBatchedStreamRequest, AlertConfigBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.get_all_batched, + stream, + request, + ) + + async def __rpc_subscribe_batched( + self, + stream: "grpclib.server.Stream[AlertConfigBatchedStreamRequest, AlertConfigBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.subscribe_batched, + stream, + request, + ) + def __mapping__(self) -> Dict[str, grpclib.const.Handler]: return { "/arista.alert.v1.AlertConfigService/GetOne": grpclib.const.Handler( @@ -3027,6 +3418,18 @@ def __mapping__(self) -> Dict[str, grpclib.const.Handler]: AlertConfigSetRequest, AlertConfigSetResponse, ), + "/arista.alert.v1.AlertConfigService/GetAllBatched": grpclib.const.Handler( + self.__rpc_get_all_batched, + grpclib.const.Cardinality.UNARY_STREAM, + AlertConfigBatchedStreamRequest, + AlertConfigBatchedStreamResponse, + ), + "/arista.alert.v1.AlertConfigService/SubscribeBatched": grpclib.const.Handler( + self.__rpc_subscribe_batched, + grpclib.const.Cardinality.UNARY_STREAM, + AlertConfigBatchedStreamRequest, + AlertConfigBatchedStreamResponse, + ), } @@ -3062,6 +3465,18 @@ async def subscribe_meta( ) -> AsyncIterator["MetaResponse"]: raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def get_all_batched( + self, + default_template_batched_stream_request: "DefaultTemplateBatchedStreamRequest", + ) -> AsyncIterator["DefaultTemplateBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def subscribe_batched( + self, + default_template_batched_stream_request: "DefaultTemplateBatchedStreamRequest", + ) -> AsyncIterator["DefaultTemplateBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def __rpc_get_one( self, stream: 
"grpclib.server.Stream[DefaultTemplateRequest, DefaultTemplateResponse]", @@ -3122,6 +3537,28 @@ async def __rpc_subscribe_meta( request, ) + async def __rpc_get_all_batched( + self, + stream: "grpclib.server.Stream[DefaultTemplateBatchedStreamRequest, DefaultTemplateBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.get_all_batched, + stream, + request, + ) + + async def __rpc_subscribe_batched( + self, + stream: "grpclib.server.Stream[DefaultTemplateBatchedStreamRequest, DefaultTemplateBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.subscribe_batched, + stream, + request, + ) + def __mapping__(self) -> Dict[str, grpclib.const.Handler]: return { "/arista.alert.v1.DefaultTemplateService/GetOne": grpclib.const.Handler( @@ -3160,6 +3597,18 @@ def __mapping__(self) -> Dict[str, grpclib.const.Handler]: DefaultTemplateStreamRequest, MetaResponse, ), + "/arista.alert.v1.DefaultTemplateService/GetAllBatched": grpclib.const.Handler( + self.__rpc_get_all_batched, + grpclib.const.Cardinality.UNARY_STREAM, + DefaultTemplateBatchedStreamRequest, + DefaultTemplateBatchedStreamResponse, + ), + "/arista.alert.v1.DefaultTemplateService/SubscribeBatched": grpclib.const.Handler( + self.__rpc_subscribe_batched, + grpclib.const.Cardinality.UNARY_STREAM, + DefaultTemplateBatchedStreamRequest, + DefaultTemplateBatchedStreamResponse, + ), } @@ -3220,6 +3669,18 @@ async def delete_all( ) -> AsyncIterator["TemplateConfigDeleteAllResponse"]: raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def get_all_batched( + self, + template_config_batched_stream_request: "TemplateConfigBatchedStreamRequest", + ) -> AsyncIterator["TemplateConfigBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def subscribe_batched( + self, + template_config_batched_stream_request: 
"TemplateConfigBatchedStreamRequest", + ) -> AsyncIterator["TemplateConfigBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def __rpc_get_one( self, stream: "grpclib.server.Stream[TemplateConfigRequest, TemplateConfigResponse]", @@ -3327,6 +3788,28 @@ async def __rpc_delete_all( request, ) + async def __rpc_get_all_batched( + self, + stream: "grpclib.server.Stream[TemplateConfigBatchedStreamRequest, TemplateConfigBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.get_all_batched, + stream, + request, + ) + + async def __rpc_subscribe_batched( + self, + stream: "grpclib.server.Stream[TemplateConfigBatchedStreamRequest, TemplateConfigBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.subscribe_batched, + stream, + request, + ) + def __mapping__(self) -> Dict[str, grpclib.const.Handler]: return { "/arista.alert.v1.TemplateConfigService/GetOne": grpclib.const.Handler( @@ -3395,4 +3878,16 @@ def __mapping__(self) -> Dict[str, grpclib.const.Handler]: TemplateConfigDeleteAllRequest, TemplateConfigDeleteAllResponse, ), + "/arista.alert.v1.TemplateConfigService/GetAllBatched": grpclib.const.Handler( + self.__rpc_get_all_batched, + grpclib.const.Cardinality.UNARY_STREAM, + TemplateConfigBatchedStreamRequest, + TemplateConfigBatchedStreamResponse, + ), + "/arista.alert.v1.TemplateConfigService/SubscribeBatched": grpclib.const.Handler( + self.__rpc_subscribe_batched, + grpclib.const.Cardinality.UNARY_STREAM, + TemplateConfigBatchedStreamRequest, + TemplateConfigBatchedStreamResponse, + ), } diff --git a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/changecontrol/__init__.py b/python-avd/pyavd/_cv/api/arista/bugexposure/__init__.py similarity index 100% rename from 
ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/changecontrol/__init__.py rename to python-avd/pyavd/_cv/api/arista/bugexposure/__init__.py diff --git a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/bugexposure/v1/__init__.py b/python-avd/pyavd/_cv/api/arista/bugexposure/v1/__init__.py similarity index 96% rename from ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/bugexposure/v1/__init__.py rename to python-avd/pyavd/_cv/api/arista/bugexposure/v1/__init__.py index 52c4d6c5cd3..9954e1e4e1b 100644 --- a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/bugexposure/v1/__init__.py +++ b/python-avd/pyavd/_cv/api/arista/bugexposure/v1/__init__.py @@ -16,17 +16,9 @@ Optional, ) -try: - import aristaproto - import grpclib - from aristaproto.grpc.grpclib_server import ServiceBase -except ImportError: - HAS_ARISTAPROTO = False - from ansible_collections.arista.avd.plugins.plugin_utils.cv_client.mocked_classes import mocked_aristaproto as aristaproto - from ansible_collections.arista.avd.plugins.plugin_utils.cv_client.mocked_classes import mocked_grpclib as grpclib - ServiceBase = object -else: - HAS_ARISTAPROTO = True +import aristaproto +import grpclib +from aristaproto.grpc.grpclib_server import ServiceBase from .... import fmp as ___fmp__ from ... 
import ( diff --git a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/configlet/__init__.py b/python-avd/pyavd/_cv/api/arista/changecontrol/__init__.py similarity index 100% rename from ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/configlet/__init__.py rename to python-avd/pyavd/_cv/api/arista/changecontrol/__init__.py diff --git a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/changecontrol/v1/__init__.py b/python-avd/pyavd/_cv/api/arista/changecontrol/v1/__init__.py similarity index 82% rename from ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/changecontrol/v1/__init__.py rename to python-avd/pyavd/_cv/api/arista/changecontrol/v1/__init__.py index 7f4663de08a..7896f101710 100644 --- a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/changecontrol/v1/__init__.py +++ b/python-avd/pyavd/_cv/api/arista/changecontrol/v1/__init__.py @@ -16,17 +16,9 @@ Optional, ) -try: - import aristaproto - import grpclib - from aristaproto.grpc.grpclib_server import ServiceBase -except ImportError: - HAS_ARISTAPROTO = False - from ansible_collections.arista.avd.plugins.plugin_utils.cv_client.mocked_classes import mocked_aristaproto as aristaproto - from ansible_collections.arista.avd.plugins.plugin_utils.cv_client.mocked_classes import mocked_grpclib as grpclib - ServiceBase = object -else: - HAS_ARISTAPROTO = True +import aristaproto +import grpclib +from aristaproto.grpc.grpclib_server import ServiceBase from .... import fmp as ___fmp__ from ... import ( @@ -631,6 +623,57 @@ class ApproveConfigStreamResponse(aristaproto.Message): """ +@dataclass(eq=False, repr=False) +class ApproveConfigBatchedStreamRequest(aristaproto.Message): + partial_eq_filter: List["ApproveConfig"] = aristaproto.message_field(1) + """ + PartialEqFilter provides a way to server-side filter a GetAll/Subscribe. + This requires all provided fields to be equal to the response. 
+ + While transparent to users, this field also allows services to optimize internal + subscriptions if filter(s) are sufficiently specific. + """ + + time: "__time__.TimeBounds" = aristaproto.message_field(3) + """ + TimeRange allows limiting response data to within a specified time window. + If this field is populated, at least one of the two time fields are required. + + For GetAll, the fields start and end can be used as follows: + + * end: Returns the state of each ApproveConfig at end. + * Each ApproveConfig response is fully-specified (all fields set). + * start: Returns the state of each ApproveConfig at start, followed by updates until now. + * Each ApproveConfig response at start is fully-specified, but updates may be partial. + * start and end: Returns the state of each ApproveConfig at start, followed by updates + until end. + * Each ApproveConfig response at start is fully-specified, but updates until end may + be partial. + + This field is not allowed in the Subscribe RPC. + """ + + max_messages: Optional[int] = aristaproto.message_field( + 4, wraps=aristaproto.TYPE_UINT32 + ) + """ + MaxMessages limits the maximum number of messages that can be contained in one batch. + MaxMessages is required to be at least 1. + The maximum number of messages in a batch is min(max_messages, INTERNAL_BATCH_LIMIT) + INTERNAL_BATCH_LIMIT is set based on the maximum message size. + """ + + +@dataclass(eq=False, repr=False) +class ApproveConfigBatchedStreamResponse(aristaproto.Message): + responses: List["ApproveConfigStreamResponse"] = aristaproto.message_field(1) + """ + Values are the values deemed relevant to the initiating request. + The length of this structure is guaranteed to be between (inclusive) 1 and + min(req.max_messages, INTERNAL_BATCH_LIMIT). 
+ """ + + @dataclass(eq=False, repr=False) class ApproveConfigSetRequest(aristaproto.Message): value: "ApproveConfig" = aristaproto.message_field(1) @@ -865,6 +908,64 @@ class ChangeControlStreamResponse(aristaproto.Message): """ +@dataclass(eq=False, repr=False) +class ChangeControlBatchedStreamRequest(aristaproto.Message): + partial_eq_filter: List["ChangeControl"] = aristaproto.message_field(1) + """ + PartialEqFilter provides a way to server-side filter a GetAll/Subscribe. + This requires all provided fields to be equal to the response. + + While transparent to users, this field also allows services to optimize internal + subscriptions if filter(s) are sufficiently specific. + """ + + filter: "Filter" = aristaproto.message_field(2) + """ + For each ChangeControl in the list, all populated fields are considered ANDed together + as a filtering operation. Similarly, the list itself is ORed such that any individual + filter that matches a given ChangeControl is streamed to the user. + """ + + time: "__time__.TimeBounds" = aristaproto.message_field(3) + """ + TimeRange allows limiting response data to within a specified time window. + If this field is populated, at least one of the two time fields are required. + + For GetAll, the fields start and end can be used as follows: + + * end: Returns the state of each ChangeControl at end. + * Each ChangeControl response is fully-specified (all fields set). + * start: Returns the state of each ChangeControl at start, followed by updates until now. + * Each ChangeControl response at start is fully-specified, but updates may be partial. + * start and end: Returns the state of each ChangeControl at start, followed by updates + until end. + * Each ChangeControl response at start is fully-specified, but updates until end may + be partial. + + This field is not allowed in the Subscribe RPC. 
+ """ + + max_messages: Optional[int] = aristaproto.message_field( + 4, wraps=aristaproto.TYPE_UINT32 + ) + """ + MaxMessages limits the maximum number of messages that can be contained in one batch. + MaxMessages is required to be at least 1. + The maximum number of messages in a batch is min(max_messages, INTERNAL_BATCH_LIMIT) + INTERNAL_BATCH_LIMIT is set based on the maximum message size. + """ + + +@dataclass(eq=False, repr=False) +class ChangeControlBatchedStreamResponse(aristaproto.Message): + responses: List["ChangeControlStreamResponse"] = aristaproto.message_field(1) + """ + Values are the values deemed relevant to the initiating request. + The length of this structure is guaranteed to be between (inclusive) 1 and + min(req.max_messages, INTERNAL_BATCH_LIMIT). + """ + + @dataclass(eq=False, repr=False) class ChangeControlConfigRequest(aristaproto.Message): key: "ChangeControlKey" = aristaproto.message_field(1) @@ -980,6 +1081,57 @@ class ChangeControlConfigStreamResponse(aristaproto.Message): """ +@dataclass(eq=False, repr=False) +class ChangeControlConfigBatchedStreamRequest(aristaproto.Message): + partial_eq_filter: List["ChangeControlConfig"] = aristaproto.message_field(1) + """ + PartialEqFilter provides a way to server-side filter a GetAll/Subscribe. + This requires all provided fields to be equal to the response. + + While transparent to users, this field also allows services to optimize internal + subscriptions if filter(s) are sufficiently specific. + """ + + time: "__time__.TimeBounds" = aristaproto.message_field(3) + """ + TimeRange allows limiting response data to within a specified time window. + If this field is populated, at least one of the two time fields are required. + + For GetAll, the fields start and end can be used as follows: + + * end: Returns the state of each ChangeControlConfig at end. + * Each ChangeControlConfig response is fully-specified (all fields set). 
+ * start: Returns the state of each ChangeControlConfig at start, followed by updates until now. + * Each ChangeControlConfig response at start is fully-specified, but updates may be partial. + * start and end: Returns the state of each ChangeControlConfig at start, followed by updates + until end. + * Each ChangeControlConfig response at start is fully-specified, but updates until end may + be partial. + + This field is not allowed in the Subscribe RPC. + """ + + max_messages: Optional[int] = aristaproto.message_field( + 4, wraps=aristaproto.TYPE_UINT32 + ) + """ + MaxMessages limits the maximum number of messages that can be contained in one batch. + MaxMessages is required to be at least 1. + The maximum number of messages in a batch is min(max_messages, INTERNAL_BATCH_LIMIT) + INTERNAL_BATCH_LIMIT is set based on the maximum message size. + """ + + +@dataclass(eq=False, repr=False) +class ChangeControlConfigBatchedStreamResponse(aristaproto.Message): + responses: List["ChangeControlConfigStreamResponse"] = aristaproto.message_field(1) + """ + Values are the values deemed relevant to the initiating request. + The length of this structure is guaranteed to be between (inclusive) 1 and + min(req.max_messages, INTERNAL_BATCH_LIMIT). 
+ """ + + @dataclass(eq=False, repr=False) class ChangeControlConfigSetRequest(aristaproto.Message): value: "ChangeControlConfig" = aristaproto.message_field(1) @@ -1291,6 +1443,42 @@ async def delete_all( ): yield response + async def get_all_batched( + self, + approve_config_batched_stream_request: "ApproveConfigBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["ApproveConfigBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.changecontrol.v1.ApproveConfigService/GetAllBatched", + approve_config_batched_stream_request, + ApproveConfigBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def subscribe_batched( + self, + approve_config_batched_stream_request: "ApproveConfigBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["ApproveConfigBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.changecontrol.v1.ApproveConfigService/SubscribeBatched", + approve_config_batched_stream_request, + ApproveConfigBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + class ChangeControlServiceStub(aristaproto.ServiceStub): async def get_one( @@ -1399,6 +1587,42 @@ async def subscribe_meta( ): yield response + async def get_all_batched( + self, + change_control_batched_stream_request: "ChangeControlBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["ChangeControlBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.changecontrol.v1.ChangeControlService/GetAllBatched", + change_control_batched_stream_request, + 
ChangeControlBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def subscribe_batched( + self, + change_control_batched_stream_request: "ChangeControlBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["ChangeControlBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.changecontrol.v1.ChangeControlService/SubscribeBatched", + change_control_batched_stream_request, + ChangeControlBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + class ChangeControlConfigServiceStub(aristaproto.ServiceStub): async def get_one( @@ -1595,6 +1819,42 @@ async def delete_all( ): yield response + async def get_all_batched( + self, + change_control_config_batched_stream_request: "ChangeControlConfigBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["ChangeControlConfigBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.changecontrol.v1.ChangeControlConfigService/GetAllBatched", + change_control_config_batched_stream_request, + ChangeControlConfigBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def subscribe_batched( + self, + change_control_config_batched_stream_request: "ChangeControlConfigBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["ChangeControlConfigBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.changecontrol.v1.ChangeControlConfigService/SubscribeBatched", + change_control_config_batched_stream_request, + ChangeControlConfigBatchedStreamResponse, + 
timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + class ApproveConfigServiceBase(ServiceBase): @@ -1653,6 +1913,16 @@ async def delete_all( ) -> AsyncIterator["ApproveConfigDeleteAllResponse"]: raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def get_all_batched( + self, approve_config_batched_stream_request: "ApproveConfigBatchedStreamRequest" + ) -> AsyncIterator["ApproveConfigBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def subscribe_batched( + self, approve_config_batched_stream_request: "ApproveConfigBatchedStreamRequest" + ) -> AsyncIterator["ApproveConfigBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def __rpc_get_one( self, stream: "grpclib.server.Stream[ApproveConfigRequest, ApproveConfigResponse]", @@ -1760,6 +2030,28 @@ async def __rpc_delete_all( request, ) + async def __rpc_get_all_batched( + self, + stream: "grpclib.server.Stream[ApproveConfigBatchedStreamRequest, ApproveConfigBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.get_all_batched, + stream, + request, + ) + + async def __rpc_subscribe_batched( + self, + stream: "grpclib.server.Stream[ApproveConfigBatchedStreamRequest, ApproveConfigBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.subscribe_batched, + stream, + request, + ) + def __mapping__(self) -> Dict[str, grpclib.const.Handler]: return { "/arista.changecontrol.v1.ApproveConfigService/GetOne": grpclib.const.Handler( @@ -1828,6 +2120,18 @@ def __mapping__(self) -> Dict[str, grpclib.const.Handler]: ApproveConfigDeleteAllRequest, ApproveConfigDeleteAllResponse, ), + "/arista.changecontrol.v1.ApproveConfigService/GetAllBatched": grpclib.const.Handler( + self.__rpc_get_all_batched, + grpclib.const.Cardinality.UNARY_STREAM, + 
ApproveConfigBatchedStreamRequest, + ApproveConfigBatchedStreamResponse, + ), + "/arista.changecontrol.v1.ApproveConfigService/SubscribeBatched": grpclib.const.Handler( + self.__rpc_subscribe_batched, + grpclib.const.Cardinality.UNARY_STREAM, + ApproveConfigBatchedStreamRequest, + ApproveConfigBatchedStreamResponse, + ), } @@ -1863,6 +2167,16 @@ async def subscribe_meta( ) -> AsyncIterator["MetaResponse"]: raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def get_all_batched( + self, change_control_batched_stream_request: "ChangeControlBatchedStreamRequest" + ) -> AsyncIterator["ChangeControlBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def subscribe_batched( + self, change_control_batched_stream_request: "ChangeControlBatchedStreamRequest" + ) -> AsyncIterator["ChangeControlBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def __rpc_get_one( self, stream: "grpclib.server.Stream[ChangeControlRequest, ChangeControlResponse]", @@ -1921,6 +2235,28 @@ async def __rpc_subscribe_meta( request, ) + async def __rpc_get_all_batched( + self, + stream: "grpclib.server.Stream[ChangeControlBatchedStreamRequest, ChangeControlBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.get_all_batched, + stream, + request, + ) + + async def __rpc_subscribe_batched( + self, + stream: "grpclib.server.Stream[ChangeControlBatchedStreamRequest, ChangeControlBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.subscribe_batched, + stream, + request, + ) + def __mapping__(self) -> Dict[str, grpclib.const.Handler]: return { "/arista.changecontrol.v1.ChangeControlService/GetOne": grpclib.const.Handler( @@ -1959,6 +2295,18 @@ def __mapping__(self) -> Dict[str, grpclib.const.Handler]: ChangeControlStreamRequest, MetaResponse, ), + 
"/arista.changecontrol.v1.ChangeControlService/GetAllBatched": grpclib.const.Handler( + self.__rpc_get_all_batched, + grpclib.const.Cardinality.UNARY_STREAM, + ChangeControlBatchedStreamRequest, + ChangeControlBatchedStreamResponse, + ), + "/arista.changecontrol.v1.ChangeControlService/SubscribeBatched": grpclib.const.Handler( + self.__rpc_subscribe_batched, + grpclib.const.Cardinality.UNARY_STREAM, + ChangeControlBatchedStreamRequest, + ChangeControlBatchedStreamResponse, + ), } @@ -2022,6 +2370,18 @@ async def delete_all( ) -> AsyncIterator["ChangeControlConfigDeleteAllResponse"]: raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def get_all_batched( + self, + change_control_config_batched_stream_request: "ChangeControlConfigBatchedStreamRequest", + ) -> AsyncIterator["ChangeControlConfigBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def subscribe_batched( + self, + change_control_config_batched_stream_request: "ChangeControlConfigBatchedStreamRequest", + ) -> AsyncIterator["ChangeControlConfigBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def __rpc_get_one( self, stream: "grpclib.server.Stream[ChangeControlConfigRequest, ChangeControlConfigResponse]", @@ -2131,6 +2491,28 @@ async def __rpc_delete_all( request, ) + async def __rpc_get_all_batched( + self, + stream: "grpclib.server.Stream[ChangeControlConfigBatchedStreamRequest, ChangeControlConfigBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.get_all_batched, + stream, + request, + ) + + async def __rpc_subscribe_batched( + self, + stream: "grpclib.server.Stream[ChangeControlConfigBatchedStreamRequest, ChangeControlConfigBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.subscribe_batched, + stream, + request, + ) + def __mapping__(self) 
-> Dict[str, grpclib.const.Handler]: return { "/arista.changecontrol.v1.ChangeControlConfigService/GetOne": grpclib.const.Handler( @@ -2199,4 +2581,16 @@ def __mapping__(self) -> Dict[str, grpclib.const.Handler]: ChangeControlConfigDeleteAllRequest, ChangeControlConfigDeleteAllResponse, ), + "/arista.changecontrol.v1.ChangeControlConfigService/GetAllBatched": grpclib.const.Handler( + self.__rpc_get_all_batched, + grpclib.const.Cardinality.UNARY_STREAM, + ChangeControlConfigBatchedStreamRequest, + ChangeControlConfigBatchedStreamResponse, + ), + "/arista.changecontrol.v1.ChangeControlConfigService/SubscribeBatched": grpclib.const.Handler( + self.__rpc_subscribe_batched, + grpclib.const.Cardinality.UNARY_STREAM, + ChangeControlConfigBatchedStreamRequest, + ChangeControlConfigBatchedStreamResponse, + ), } diff --git a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/configstatus/__init__.py b/python-avd/pyavd/_cv/api/arista/configlet/__init__.py similarity index 100% rename from ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/configstatus/__init__.py rename to python-avd/pyavd/_cv/api/arista/configlet/__init__.py diff --git a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/configlet/v1/__init__.py b/python-avd/pyavd/_cv/api/arista/configlet/v1/__init__.py similarity index 78% rename from ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/configlet/v1/__init__.py rename to python-avd/pyavd/_cv/api/arista/configlet/v1/__init__.py index e1ecf1305bf..5023b4e4425 100644 --- a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/configlet/v1/__init__.py +++ b/python-avd/pyavd/_cv/api/arista/configlet/v1/__init__.py @@ -16,17 +16,9 @@ Optional, ) -try: - import aristaproto - import grpclib - from aristaproto.grpc.grpclib_server import ServiceBase -except ImportError: - HAS_ARISTAPROTO = False - from 
ansible_collections.arista.avd.plugins.plugin_utils.cv_client.mocked_classes import mocked_aristaproto as aristaproto - from ansible_collections.arista.avd.plugins.plugin_utils.cv_client.mocked_classes import mocked_grpclib as grpclib - ServiceBase = object -else: - HAS_ARISTAPROTO = True +import aristaproto +import grpclib +from aristaproto.grpc.grpclib_server import ServiceBase from .... import fmp as ___fmp__ from ... import ( @@ -133,6 +125,14 @@ class Configlet(aristaproto.Message): ) """last_modified_by is the user who last modified the Configlet.""" + digest: Optional[str] = aristaproto.message_field(10, wraps=aristaproto.TYPE_STRING) + """ + digest is the sha256 hash of the configlet body encoded in hexadecimal. + """ + + size: Optional[int] = aristaproto.message_field(11, wraps=aristaproto.TYPE_INT64) + """size of configlet body in bytes.""" + @dataclass(eq=False, repr=False) class ConfigletConfig(aristaproto.Message): @@ -416,6 +416,57 @@ class ConfigletStreamResponse(aristaproto.Message): """ +@dataclass(eq=False, repr=False) +class ConfigletBatchedStreamRequest(aristaproto.Message): + partial_eq_filter: List["Configlet"] = aristaproto.message_field(1) + """ + PartialEqFilter provides a way to server-side filter a GetAll/Subscribe. + This requires all provided fields to be equal to the response. + + While transparent to users, this field also allows services to optimize internal + subscriptions if filter(s) are sufficiently specific. + """ + + time: "__time__.TimeBounds" = aristaproto.message_field(3) + """ + TimeRange allows limiting response data to within a specified time window. + If this field is populated, at least one of the two time fields are required. + + For GetAll, the fields start and end can be used as follows: + + * end: Returns the state of each Configlet at end. + * Each Configlet response is fully-specified (all fields set). + * start: Returns the state of each Configlet at start, followed by updates until now. 
+ * Each Configlet response at start is fully-specified, but updates may be partial. + * start and end: Returns the state of each Configlet at start, followed by updates + until end. + * Each Configlet response at start is fully-specified, but updates until end may + be partial. + + This field is not allowed in the Subscribe RPC. + """ + + max_messages: Optional[int] = aristaproto.message_field( + 4, wraps=aristaproto.TYPE_UINT32 + ) + """ + MaxMessages limits the maximum number of messages that can be contained in one batch. + MaxMessages is required to be at least 1. + The maximum number of messages in a batch is min(max_messages, INTERNAL_BATCH_LIMIT) + INTERNAL_BATCH_LIMIT is set based on the maximum message size. + """ + + +@dataclass(eq=False, repr=False) +class ConfigletBatchedStreamResponse(aristaproto.Message): + responses: List["ConfigletStreamResponse"] = aristaproto.message_field(1) + """ + Values are the values deemed relevant to the initiating request. + The length of this structure is guaranteed to be between (inclusive) 1 and + min(req.max_messages, INTERNAL_BATCH_LIMIT). + """ + + @dataclass(eq=False, repr=False) class ConfigletAssignmentRequest(aristaproto.Message): key: "ConfigletAssignmentKey" = aristaproto.message_field(1) @@ -531,6 +582,57 @@ class ConfigletAssignmentStreamResponse(aristaproto.Message): """ +@dataclass(eq=False, repr=False) +class ConfigletAssignmentBatchedStreamRequest(aristaproto.Message): + partial_eq_filter: List["ConfigletAssignment"] = aristaproto.message_field(1) + """ + PartialEqFilter provides a way to server-side filter a GetAll/Subscribe. + This requires all provided fields to be equal to the response. + + While transparent to users, this field also allows services to optimize internal + subscriptions if filter(s) are sufficiently specific. + """ + + time: "__time__.TimeBounds" = aristaproto.message_field(3) + """ + TimeRange allows limiting response data to within a specified time window. 
+ If this field is populated, at least one of the two time fields are required. + + For GetAll, the fields start and end can be used as follows: + + * end: Returns the state of each ConfigletAssignment at end. + * Each ConfigletAssignment response is fully-specified (all fields set). + * start: Returns the state of each ConfigletAssignment at start, followed by updates until now. + * Each ConfigletAssignment response at start is fully-specified, but updates may be partial. + * start and end: Returns the state of each ConfigletAssignment at start, followed by updates + until end. + * Each ConfigletAssignment response at start is fully-specified, but updates until end may + be partial. + + This field is not allowed in the Subscribe RPC. + """ + + max_messages: Optional[int] = aristaproto.message_field( + 4, wraps=aristaproto.TYPE_UINT32 + ) + """ + MaxMessages limits the maximum number of messages that can be contained in one batch. + MaxMessages is required to be at least 1. + The maximum number of messages in a batch is min(max_messages, INTERNAL_BATCH_LIMIT) + INTERNAL_BATCH_LIMIT is set based on the maximum message size. + """ + + +@dataclass(eq=False, repr=False) +class ConfigletAssignmentBatchedStreamResponse(aristaproto.Message): + responses: List["ConfigletAssignmentStreamResponse"] = aristaproto.message_field(1) + """ + Values are the values deemed relevant to the initiating request. + The length of this structure is guaranteed to be between (inclusive) 1 and + min(req.max_messages, INTERNAL_BATCH_LIMIT). 
+ """ + + @dataclass(eq=False, repr=False) class ConfigletAssignmentConfigRequest(aristaproto.Message): key: "ConfigletAssignmentKey" = aristaproto.message_field(1) @@ -646,6 +748,59 @@ class ConfigletAssignmentConfigStreamResponse(aristaproto.Message): """ +@dataclass(eq=False, repr=False) +class ConfigletAssignmentConfigBatchedStreamRequest(aristaproto.Message): + partial_eq_filter: List["ConfigletAssignmentConfig"] = aristaproto.message_field(1) + """ + PartialEqFilter provides a way to server-side filter a GetAll/Subscribe. + This requires all provided fields to be equal to the response. + + While transparent to users, this field also allows services to optimize internal + subscriptions if filter(s) are sufficiently specific. + """ + + time: "__time__.TimeBounds" = aristaproto.message_field(3) + """ + TimeRange allows limiting response data to within a specified time window. + If this field is populated, at least one of the two time fields are required. + + For GetAll, the fields start and end can be used as follows: + + * end: Returns the state of each ConfigletAssignmentConfig at end. + * Each ConfigletAssignmentConfig response is fully-specified (all fields set). + * start: Returns the state of each ConfigletAssignmentConfig at start, followed by updates until now. + * Each ConfigletAssignmentConfig response at start is fully-specified, but updates may be partial. + * start and end: Returns the state of each ConfigletAssignmentConfig at start, followed by updates + until end. + * Each ConfigletAssignmentConfig response at start is fully-specified, but updates until end may + be partial. + + This field is not allowed in the Subscribe RPC. + """ + + max_messages: Optional[int] = aristaproto.message_field( + 4, wraps=aristaproto.TYPE_UINT32 + ) + """ + MaxMessages limits the maximum number of messages that can be contained in one batch. + MaxMessages is required to be at least 1. 
+ The maximum number of messages in a batch is min(max_messages, INTERNAL_BATCH_LIMIT) + INTERNAL_BATCH_LIMIT is set based on the maximum message size. + """ + + +@dataclass(eq=False, repr=False) +class ConfigletAssignmentConfigBatchedStreamResponse(aristaproto.Message): + responses: List["ConfigletAssignmentConfigStreamResponse"] = ( + aristaproto.message_field(1) + ) + """ + Values are the values deemed relevant to the initiating request. + The length of this structure is guaranteed to be between (inclusive) 1 and + min(req.max_messages, INTERNAL_BATCH_LIMIT). + """ + + @dataclass(eq=False, repr=False) class ConfigletAssignmentConfigSetRequest(aristaproto.Message): value: "ConfigletAssignmentConfig" = aristaproto.message_field(1) @@ -736,13 +891,21 @@ class ConfigletAssignmentConfigDeleteSomeResponse(aristaproto.Message): @dataclass(eq=False, repr=False) class ConfigletAssignmentConfigDeleteAllRequest(aristaproto.Message): - pass + partial_eq_filter: List["ConfigletAssignmentConfig"] = aristaproto.message_field(1) + """ + PartialEqFilter provides a way to server-side filter a DeleteAll. + This requires all provided fields to be equal to the response. + A filtered DeleteAll will use GetAll with filter to find things to delete. + """ @dataclass(eq=False, repr=False) class ConfigletAssignmentConfigDeleteAllResponse(aristaproto.Message): type: "___fmp__.DeleteError" = aristaproto.enum_field(1) - """This describes the class of delete error.""" + """ + This describes the class of delete error. + A DeleteAllResponse is only sent when there is an error. 
+ """ error: Optional[str] = aristaproto.message_field(2, wraps=aristaproto.TYPE_STRING) """This indicates the error message from the delete failure.""" @@ -871,6 +1034,57 @@ class ConfigletConfigStreamResponse(aristaproto.Message): """ +@dataclass(eq=False, repr=False) +class ConfigletConfigBatchedStreamRequest(aristaproto.Message): + partial_eq_filter: List["ConfigletConfig"] = aristaproto.message_field(1) + """ + PartialEqFilter provides a way to server-side filter a GetAll/Subscribe. + This requires all provided fields to be equal to the response. + + While transparent to users, this field also allows services to optimize internal + subscriptions if filter(s) are sufficiently specific. + """ + + time: "__time__.TimeBounds" = aristaproto.message_field(3) + """ + TimeRange allows limiting response data to within a specified time window. + If this field is populated, at least one of the two time fields are required. + + For GetAll, the fields start and end can be used as follows: + + * end: Returns the state of each ConfigletConfig at end. + * Each ConfigletConfig response is fully-specified (all fields set). + * start: Returns the state of each ConfigletConfig at start, followed by updates until now. + * Each ConfigletConfig response at start is fully-specified, but updates may be partial. + * start and end: Returns the state of each ConfigletConfig at start, followed by updates + until end. + * Each ConfigletConfig response at start is fully-specified, but updates until end may + be partial. + + This field is not allowed in the Subscribe RPC. + """ + + max_messages: Optional[int] = aristaproto.message_field( + 4, wraps=aristaproto.TYPE_UINT32 + ) + """ + MaxMessages limits the maximum number of messages that can be contained in one batch. + MaxMessages is required to be at least 1. + The maximum number of messages in a batch is min(max_messages, INTERNAL_BATCH_LIMIT) + INTERNAL_BATCH_LIMIT is set based on the maximum message size. 
+ """ + + +@dataclass(eq=False, repr=False) +class ConfigletConfigBatchedStreamResponse(aristaproto.Message): + responses: List["ConfigletConfigStreamResponse"] = aristaproto.message_field(1) + """ + Values are the values deemed relevant to the initiating request. + The length of this structure is guaranteed to be between (inclusive) 1 and + min(req.max_messages, INTERNAL_BATCH_LIMIT). + """ + + @dataclass(eq=False, repr=False) class ConfigletConfigSetRequest(aristaproto.Message): value: "ConfigletConfig" = aristaproto.message_field(1) @@ -959,13 +1173,21 @@ class ConfigletConfigDeleteSomeResponse(aristaproto.Message): @dataclass(eq=False, repr=False) class ConfigletConfigDeleteAllRequest(aristaproto.Message): - pass + partial_eq_filter: List["ConfigletConfig"] = aristaproto.message_field(1) + """ + PartialEqFilter provides a way to server-side filter a DeleteAll. + This requires all provided fields to be equal to the response. + A filtered DeleteAll will use GetAll with filter to find things to delete. + """ @dataclass(eq=False, repr=False) class ConfigletConfigDeleteAllResponse(aristaproto.Message): type: "___fmp__.DeleteError" = aristaproto.enum_field(1) - """This describes the class of delete error.""" + """ + This describes the class of delete error. + A DeleteAllResponse is only sent when there is an error. 
+ """ error: Optional[str] = aristaproto.message_field(2, wraps=aristaproto.TYPE_STRING) """This indicates the error message from the delete failure.""" @@ -1086,6 +1308,42 @@ async def subscribe_meta( ): yield response + async def get_all_batched( + self, + configlet_batched_stream_request: "ConfigletBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["ConfigletBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.configlet.v1.ConfigletService/GetAllBatched", + configlet_batched_stream_request, + ConfigletBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def subscribe_batched( + self, + configlet_batched_stream_request: "ConfigletBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["ConfigletBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.configlet.v1.ConfigletService/SubscribeBatched", + configlet_batched_stream_request, + ConfigletBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + class ConfigletAssignmentServiceStub(aristaproto.ServiceStub): async def get_one( @@ -1194,6 +1452,42 @@ async def subscribe_meta( ): yield response + async def get_all_batched( + self, + configlet_assignment_batched_stream_request: "ConfigletAssignmentBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["ConfigletAssignmentBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.configlet.v1.ConfigletAssignmentService/GetAllBatched", + configlet_assignment_batched_stream_request, + ConfigletAssignmentBatchedStreamResponse, + timeout=timeout, 
+ deadline=deadline, + metadata=metadata, + ): + yield response + + async def subscribe_batched( + self, + configlet_assignment_batched_stream_request: "ConfigletAssignmentBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["ConfigletAssignmentBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.configlet.v1.ConfigletAssignmentService/SubscribeBatched", + configlet_assignment_batched_stream_request, + ConfigletAssignmentBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + class ConfigletAssignmentConfigServiceStub(aristaproto.ServiceStub): async def get_one( @@ -1390,6 +1684,42 @@ async def delete_all( ): yield response + async def get_all_batched( + self, + configlet_assignment_config_batched_stream_request: "ConfigletAssignmentConfigBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["ConfigletAssignmentConfigBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.configlet.v1.ConfigletAssignmentConfigService/GetAllBatched", + configlet_assignment_config_batched_stream_request, + ConfigletAssignmentConfigBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def subscribe_batched( + self, + configlet_assignment_config_batched_stream_request: "ConfigletAssignmentConfigBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["ConfigletAssignmentConfigBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.configlet.v1.ConfigletAssignmentConfigService/SubscribeBatched", + configlet_assignment_config_batched_stream_request, + 
ConfigletAssignmentConfigBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + class ConfigletConfigServiceStub(aristaproto.ServiceStub): async def get_one( @@ -1586,6 +1916,42 @@ async def delete_all( ): yield response + async def get_all_batched( + self, + configlet_config_batched_stream_request: "ConfigletConfigBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["ConfigletConfigBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.configlet.v1.ConfigletConfigService/GetAllBatched", + configlet_config_batched_stream_request, + ConfigletConfigBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def subscribe_batched( + self, + configlet_config_batched_stream_request: "ConfigletConfigBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["ConfigletConfigBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.configlet.v1.ConfigletConfigService/SubscribeBatched", + configlet_config_batched_stream_request, + ConfigletConfigBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + class ConfigletServiceBase(ServiceBase): @@ -1619,6 +1985,16 @@ async def subscribe_meta( ) -> AsyncIterator["MetaResponse"]: raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def get_all_batched( + self, configlet_batched_stream_request: "ConfigletBatchedStreamRequest" + ) -> AsyncIterator["ConfigletBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def subscribe_batched( + self, configlet_batched_stream_request: "ConfigletBatchedStreamRequest" + ) -> 
AsyncIterator["ConfigletBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def __rpc_get_one( self, stream: "grpclib.server.Stream[ConfigletRequest, ConfigletResponse]" ) -> None: @@ -1676,6 +2052,28 @@ async def __rpc_subscribe_meta( request, ) + async def __rpc_get_all_batched( + self, + stream: "grpclib.server.Stream[ConfigletBatchedStreamRequest, ConfigletBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.get_all_batched, + stream, + request, + ) + + async def __rpc_subscribe_batched( + self, + stream: "grpclib.server.Stream[ConfigletBatchedStreamRequest, ConfigletBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.subscribe_batched, + stream, + request, + ) + def __mapping__(self) -> Dict[str, grpclib.const.Handler]: return { "/arista.configlet.v1.ConfigletService/GetOne": grpclib.const.Handler( @@ -1714,6 +2112,18 @@ def __mapping__(self) -> Dict[str, grpclib.const.Handler]: ConfigletStreamRequest, MetaResponse, ), + "/arista.configlet.v1.ConfigletService/GetAllBatched": grpclib.const.Handler( + self.__rpc_get_all_batched, + grpclib.const.Cardinality.UNARY_STREAM, + ConfigletBatchedStreamRequest, + ConfigletBatchedStreamResponse, + ), + "/arista.configlet.v1.ConfigletService/SubscribeBatched": grpclib.const.Handler( + self.__rpc_subscribe_batched, + grpclib.const.Cardinality.UNARY_STREAM, + ConfigletBatchedStreamRequest, + ConfigletBatchedStreamResponse, + ), } @@ -1749,6 +2159,18 @@ async def subscribe_meta( ) -> AsyncIterator["MetaResponse"]: raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def get_all_batched( + self, + configlet_assignment_batched_stream_request: "ConfigletAssignmentBatchedStreamRequest", + ) -> AsyncIterator["ConfigletAssignmentBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + 
async def subscribe_batched( + self, + configlet_assignment_batched_stream_request: "ConfigletAssignmentBatchedStreamRequest", + ) -> AsyncIterator["ConfigletAssignmentBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def __rpc_get_one( self, stream: "grpclib.server.Stream[ConfigletAssignmentRequest, ConfigletAssignmentResponse]", @@ -1809,6 +2231,28 @@ async def __rpc_subscribe_meta( request, ) + async def __rpc_get_all_batched( + self, + stream: "grpclib.server.Stream[ConfigletAssignmentBatchedStreamRequest, ConfigletAssignmentBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.get_all_batched, + stream, + request, + ) + + async def __rpc_subscribe_batched( + self, + stream: "grpclib.server.Stream[ConfigletAssignmentBatchedStreamRequest, ConfigletAssignmentBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.subscribe_batched, + stream, + request, + ) + def __mapping__(self) -> Dict[str, grpclib.const.Handler]: return { "/arista.configlet.v1.ConfigletAssignmentService/GetOne": grpclib.const.Handler( @@ -1847,6 +2291,18 @@ def __mapping__(self) -> Dict[str, grpclib.const.Handler]: ConfigletAssignmentStreamRequest, MetaResponse, ), + "/arista.configlet.v1.ConfigletAssignmentService/GetAllBatched": grpclib.const.Handler( + self.__rpc_get_all_batched, + grpclib.const.Cardinality.UNARY_STREAM, + ConfigletAssignmentBatchedStreamRequest, + ConfigletAssignmentBatchedStreamResponse, + ), + "/arista.configlet.v1.ConfigletAssignmentService/SubscribeBatched": grpclib.const.Handler( + self.__rpc_subscribe_batched, + grpclib.const.Cardinality.UNARY_STREAM, + ConfigletAssignmentBatchedStreamRequest, + ConfigletAssignmentBatchedStreamResponse, + ), } @@ -1917,6 +2373,18 @@ async def delete_all( ) -> AsyncIterator["ConfigletAssignmentConfigDeleteAllResponse"]: raise 
grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def get_all_batched( + self, + configlet_assignment_config_batched_stream_request: "ConfigletAssignmentConfigBatchedStreamRequest", + ) -> AsyncIterator["ConfigletAssignmentConfigBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def subscribe_batched( + self, + configlet_assignment_config_batched_stream_request: "ConfigletAssignmentConfigBatchedStreamRequest", + ) -> AsyncIterator["ConfigletAssignmentConfigBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def __rpc_get_one( self, stream: "grpclib.server.Stream[ConfigletAssignmentConfigRequest, ConfigletAssignmentConfigResponse]", @@ -2026,6 +2494,28 @@ async def __rpc_delete_all( request, ) + async def __rpc_get_all_batched( + self, + stream: "grpclib.server.Stream[ConfigletAssignmentConfigBatchedStreamRequest, ConfigletAssignmentConfigBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.get_all_batched, + stream, + request, + ) + + async def __rpc_subscribe_batched( + self, + stream: "grpclib.server.Stream[ConfigletAssignmentConfigBatchedStreamRequest, ConfigletAssignmentConfigBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.subscribe_batched, + stream, + request, + ) + def __mapping__(self) -> Dict[str, grpclib.const.Handler]: return { "/arista.configlet.v1.ConfigletAssignmentConfigService/GetOne": grpclib.const.Handler( @@ -2094,6 +2584,18 @@ def __mapping__(self) -> Dict[str, grpclib.const.Handler]: ConfigletAssignmentConfigDeleteAllRequest, ConfigletAssignmentConfigDeleteAllResponse, ), + "/arista.configlet.v1.ConfigletAssignmentConfigService/GetAllBatched": grpclib.const.Handler( + self.__rpc_get_all_batched, + grpclib.const.Cardinality.UNARY_STREAM, + ConfigletAssignmentConfigBatchedStreamRequest, + 
ConfigletAssignmentConfigBatchedStreamResponse, + ), + "/arista.configlet.v1.ConfigletAssignmentConfigService/SubscribeBatched": grpclib.const.Handler( + self.__rpc_subscribe_batched, + grpclib.const.Cardinality.UNARY_STREAM, + ConfigletAssignmentConfigBatchedStreamRequest, + ConfigletAssignmentConfigBatchedStreamResponse, + ), } @@ -2154,6 +2656,18 @@ async def delete_all( ) -> AsyncIterator["ConfigletConfigDeleteAllResponse"]: raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def get_all_batched( + self, + configlet_config_batched_stream_request: "ConfigletConfigBatchedStreamRequest", + ) -> AsyncIterator["ConfigletConfigBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def subscribe_batched( + self, + configlet_config_batched_stream_request: "ConfigletConfigBatchedStreamRequest", + ) -> AsyncIterator["ConfigletConfigBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def __rpc_get_one( self, stream: "grpclib.server.Stream[ConfigletConfigRequest, ConfigletConfigResponse]", @@ -2263,6 +2777,28 @@ async def __rpc_delete_all( request, ) + async def __rpc_get_all_batched( + self, + stream: "grpclib.server.Stream[ConfigletConfigBatchedStreamRequest, ConfigletConfigBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.get_all_batched, + stream, + request, + ) + + async def __rpc_subscribe_batched( + self, + stream: "grpclib.server.Stream[ConfigletConfigBatchedStreamRequest, ConfigletConfigBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.subscribe_batched, + stream, + request, + ) + def __mapping__(self) -> Dict[str, grpclib.const.Handler]: return { "/arista.configlet.v1.ConfigletConfigService/GetOne": grpclib.const.Handler( @@ -2331,4 +2867,16 @@ def __mapping__(self) -> Dict[str, grpclib.const.Handler]: 
ConfigletConfigDeleteAllRequest, ConfigletConfigDeleteAllResponse, ), + "/arista.configlet.v1.ConfigletConfigService/GetAllBatched": grpclib.const.Handler( + self.__rpc_get_all_batched, + grpclib.const.Cardinality.UNARY_STREAM, + ConfigletConfigBatchedStreamRequest, + ConfigletConfigBatchedStreamResponse, + ), + "/arista.configlet.v1.ConfigletConfigService/SubscribeBatched": grpclib.const.Handler( + self.__rpc_subscribe_batched, + grpclib.const.Cardinality.UNARY_STREAM, + ConfigletConfigBatchedStreamRequest, + ConfigletConfigBatchedStreamResponse, + ), } diff --git a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/connectivitymonitor/__init__.py b/python-avd/pyavd/_cv/api/arista/configstatus/__init__.py similarity index 100% rename from ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/connectivitymonitor/__init__.py rename to python-avd/pyavd/_cv/api/arista/configstatus/__init__.py diff --git a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/configstatus/v1/__init__.py b/python-avd/pyavd/_cv/api/arista/configstatus/v1/__init__.py similarity index 73% rename from ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/configstatus/v1/__init__.py rename to python-avd/pyavd/_cv/api/arista/configstatus/v1/__init__.py index 4de1d2355d4..cfa3b305e6a 100644 --- a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/configstatus/v1/__init__.py +++ b/python-avd/pyavd/_cv/api/arista/configstatus/v1/__init__.py @@ -16,17 +16,9 @@ Optional, ) -try: - import aristaproto - import grpclib - from aristaproto.grpc.grpclib_server import ServiceBase -except ImportError: - HAS_ARISTAPROTO = False - from ansible_collections.arista.avd.plugins.plugin_utils.cv_client.mocked_classes import mocked_aristaproto as aristaproto - from ansible_collections.arista.avd.plugins.plugin_utils.cv_client.mocked_classes import mocked_grpclib as grpclib - ServiceBase = object -else: - 
HAS_ARISTAPROTO = True +import aristaproto +import grpclib +from aristaproto.grpc.grpclib_server import ServiceBase from ... import ( subscriptions as __subscriptions__, @@ -624,6 +616,57 @@ class ConfigDiffStreamResponse(aristaproto.Message): """ +@dataclass(eq=False, repr=False) +class ConfigDiffBatchedStreamRequest(aristaproto.Message): + partial_eq_filter: List["ConfigDiff"] = aristaproto.message_field(1) + """ + PartialEqFilter provides a way to server-side filter a GetAll/Subscribe. + This requires all provided fields to be equal to the response. + + While transparent to users, this field also allows services to optimize internal + subscriptions if filter(s) are sufficiently specific. + """ + + time: "__time__.TimeBounds" = aristaproto.message_field(3) + """ + TimeRange allows limiting response data to within a specified time window. + If this field is populated, at least one of the two time fields are required. + + For GetAll, the fields start and end can be used as follows: + + * end: Returns the state of each ConfigDiff at end. + * Each ConfigDiff response is fully-specified (all fields set). + * start: Returns the state of each ConfigDiff at start, followed by updates until now. + * Each ConfigDiff response at start is fully-specified, but updates may be partial. + * start and end: Returns the state of each ConfigDiff at start, followed by updates + until end. + * Each ConfigDiff response at start is fully-specified, but updates until end may + be partial. + + This field is not allowed in the Subscribe RPC. + """ + + max_messages: Optional[int] = aristaproto.message_field( + 4, wraps=aristaproto.TYPE_UINT32 + ) + """ + MaxMessages limits the maximum number of messages that can be contained in one batch. + MaxMessages is required to be at least 1. + The maximum number of messages in a batch is min(max_messages, INTERNAL_BATCH_LIMIT) + INTERNAL_BATCH_LIMIT is set based on the maximum message size. 
+ """ + + +@dataclass(eq=False, repr=False) +class ConfigDiffBatchedStreamResponse(aristaproto.Message): + responses: List["ConfigDiffStreamResponse"] = aristaproto.message_field(1) + """ + Values are the values deemed relevant to the initiating request. + The length of this structure is guaranteed to be between (inclusive) 1 and + min(req.max_messages, INTERNAL_BATCH_LIMIT). + """ + + @dataclass(eq=False, repr=False) class ConfigurationRequest(aristaproto.Message): key: "ConfigKey" = aristaproto.message_field(1) @@ -737,6 +780,57 @@ class ConfigurationStreamResponse(aristaproto.Message): """ +@dataclass(eq=False, repr=False) +class ConfigurationBatchedStreamRequest(aristaproto.Message): + partial_eq_filter: List["Configuration"] = aristaproto.message_field(1) + """ + PartialEqFilter provides a way to server-side filter a GetAll/Subscribe. + This requires all provided fields to be equal to the response. + + While transparent to users, this field also allows services to optimize internal + subscriptions if filter(s) are sufficiently specific. + """ + + time: "__time__.TimeBounds" = aristaproto.message_field(3) + """ + TimeRange allows limiting response data to within a specified time window. + If this field is populated, at least one of the two time fields are required. + + For GetAll, the fields start and end can be used as follows: + + * end: Returns the state of each Configuration at end. + * Each Configuration response is fully-specified (all fields set). + * start: Returns the state of each Configuration at start, followed by updates until now. + * Each Configuration response at start is fully-specified, but updates may be partial. + * start and end: Returns the state of each Configuration at start, followed by updates + until end. + * Each Configuration response at start is fully-specified, but updates until end may + be partial. + + This field is not allowed in the Subscribe RPC. 
+ """ + + max_messages: Optional[int] = aristaproto.message_field( + 4, wraps=aristaproto.TYPE_UINT32 + ) + """ + MaxMessages limits the maximum number of messages that can be contained in one batch. + MaxMessages is required to be at least 1. + The maximum number of messages in a batch is min(max_messages, INTERNAL_BATCH_LIMIT) + INTERNAL_BATCH_LIMIT is set based on the maximum message size. + """ + + +@dataclass(eq=False, repr=False) +class ConfigurationBatchedStreamResponse(aristaproto.Message): + responses: List["ConfigurationStreamResponse"] = aristaproto.message_field(1) + """ + Values are the values deemed relevant to the initiating request. + The length of this structure is guaranteed to be between (inclusive) 1 and + min(req.max_messages, INTERNAL_BATCH_LIMIT). + """ + + @dataclass(eq=False, repr=False) class SecurityProfileRequest(aristaproto.Message): key: "ConfigKey" = aristaproto.message_field(1) @@ -852,6 +946,57 @@ class SecurityProfileStreamResponse(aristaproto.Message): """ +@dataclass(eq=False, repr=False) +class SecurityProfileBatchedStreamRequest(aristaproto.Message): + partial_eq_filter: List["SecurityProfile"] = aristaproto.message_field(1) + """ + PartialEqFilter provides a way to server-side filter a GetAll/Subscribe. + This requires all provided fields to be equal to the response. + + While transparent to users, this field also allows services to optimize internal + subscriptions if filter(s) are sufficiently specific. + """ + + time: "__time__.TimeBounds" = aristaproto.message_field(3) + """ + TimeRange allows limiting response data to within a specified time window. + If this field is populated, at least one of the two time fields are required. + + For GetAll, the fields start and end can be used as follows: + + * end: Returns the state of each SecurityProfile at end. + * Each SecurityProfile response is fully-specified (all fields set). + * start: Returns the state of each SecurityProfile at start, followed by updates until now. 
+ * Each SecurityProfile response at start is fully-specified, but updates may be partial. + * start and end: Returns the state of each SecurityProfile at start, followed by updates + until end. + * Each SecurityProfile response at start is fully-specified, but updates until end may + be partial. + + This field is not allowed in the Subscribe RPC. + """ + + max_messages: Optional[int] = aristaproto.message_field( + 4, wraps=aristaproto.TYPE_UINT32 + ) + """ + MaxMessages limits the maximum number of messages that can be contained in one batch. + MaxMessages is required to be at least 1. + The maximum number of messages in a batch is min(max_messages, INTERNAL_BATCH_LIMIT) + INTERNAL_BATCH_LIMIT is set based on the maximum message size. + """ + + +@dataclass(eq=False, repr=False) +class SecurityProfileBatchedStreamResponse(aristaproto.Message): + responses: List["SecurityProfileStreamResponse"] = aristaproto.message_field(1) + """ + Values are the values deemed relevant to the initiating request. + The length of this structure is guaranteed to be between (inclusive) 1 and + min(req.max_messages, INTERNAL_BATCH_LIMIT). + """ + + @dataclass(eq=False, repr=False) class SecurityProfileDiffRequest(aristaproto.Message): key: "ConfigDiffKey" = aristaproto.message_field(1) @@ -967,6 +1112,57 @@ class SecurityProfileDiffStreamResponse(aristaproto.Message): """ +@dataclass(eq=False, repr=False) +class SecurityProfileDiffBatchedStreamRequest(aristaproto.Message): + partial_eq_filter: List["SecurityProfileDiff"] = aristaproto.message_field(1) + """ + PartialEqFilter provides a way to server-side filter a GetAll/Subscribe. + This requires all provided fields to be equal to the response. + + While transparent to users, this field also allows services to optimize internal + subscriptions if filter(s) are sufficiently specific. + """ + + time: "__time__.TimeBounds" = aristaproto.message_field(3) + """ + TimeRange allows limiting response data to within a specified time window. 
+ If this field is populated, at least one of the two time fields are required. + + For GetAll, the fields start and end can be used as follows: + + * end: Returns the state of each SecurityProfileDiff at end. + * Each SecurityProfileDiff response is fully-specified (all fields set). + * start: Returns the state of each SecurityProfileDiff at start, followed by updates until now. + * Each SecurityProfileDiff response at start is fully-specified, but updates may be partial. + * start and end: Returns the state of each SecurityProfileDiff at start, followed by updates + until end. + * Each SecurityProfileDiff response at start is fully-specified, but updates until end may + be partial. + + This field is not allowed in the Subscribe RPC. + """ + + max_messages: Optional[int] = aristaproto.message_field( + 4, wraps=aristaproto.TYPE_UINT32 + ) + """ + MaxMessages limits the maximum number of messages that can be contained in one batch. + MaxMessages is required to be at least 1. + The maximum number of messages in a batch is min(max_messages, INTERNAL_BATCH_LIMIT) + INTERNAL_BATCH_LIMIT is set based on the maximum message size. + """ + + +@dataclass(eq=False, repr=False) +class SecurityProfileDiffBatchedStreamResponse(aristaproto.Message): + responses: List["SecurityProfileDiffStreamResponse"] = aristaproto.message_field(1) + """ + Values are the values deemed relevant to the initiating request. + The length of this structure is guaranteed to be between (inclusive) 1 and + min(req.max_messages, INTERNAL_BATCH_LIMIT). 
+ """ + + @dataclass(eq=False, repr=False) class SecurityProfileDiffSummaryRequest(aristaproto.Message): key: "SummaryKey" = aristaproto.message_field(1) @@ -1082,6 +1278,59 @@ class SecurityProfileDiffSummaryStreamResponse(aristaproto.Message): """ +@dataclass(eq=False, repr=False) +class SecurityProfileDiffSummaryBatchedStreamRequest(aristaproto.Message): + partial_eq_filter: List["SecurityProfileDiffSummary"] = aristaproto.message_field(1) + """ + PartialEqFilter provides a way to server-side filter a GetAll/Subscribe. + This requires all provided fields to be equal to the response. + + While transparent to users, this field also allows services to optimize internal + subscriptions if filter(s) are sufficiently specific. + """ + + time: "__time__.TimeBounds" = aristaproto.message_field(3) + """ + TimeRange allows limiting response data to within a specified time window. + If this field is populated, at least one of the two time fields are required. + + For GetAll, the fields start and end can be used as follows: + + * end: Returns the state of each SecurityProfileDiffSummary at end. + * Each SecurityProfileDiffSummary response is fully-specified (all fields set). + * start: Returns the state of each SecurityProfileDiffSummary at start, followed by updates until now. + * Each SecurityProfileDiffSummary response at start is fully-specified, but updates may be partial. + * start and end: Returns the state of each SecurityProfileDiffSummary at start, followed by updates + until end. + * Each SecurityProfileDiffSummary response at start is fully-specified, but updates until end may + be partial. + + This field is not allowed in the Subscribe RPC. + """ + + max_messages: Optional[int] = aristaproto.message_field( + 4, wraps=aristaproto.TYPE_UINT32 + ) + """ + MaxMessages limits the maximum number of messages that can be contained in one batch. + MaxMessages is required to be at least 1. 
+ The maximum number of messages in a batch is min(max_messages, INTERNAL_BATCH_LIMIT) + INTERNAL_BATCH_LIMIT is set based on the maximum message size. + """ + + +@dataclass(eq=False, repr=False) +class SecurityProfileDiffSummaryBatchedStreamResponse(aristaproto.Message): + responses: List["SecurityProfileDiffSummaryStreamResponse"] = ( + aristaproto.message_field(1) + ) + """ + Values are the values deemed relevant to the initiating request. + The length of this structure is guaranteed to be between (inclusive) 1 and + min(req.max_messages, INTERNAL_BATCH_LIMIT). + """ + + @dataclass(eq=False, repr=False) class SummaryRequest(aristaproto.Message): key: "SummaryKey" = aristaproto.message_field(1) @@ -1195,6 +1444,57 @@ class SummaryStreamResponse(aristaproto.Message): """ +@dataclass(eq=False, repr=False) +class SummaryBatchedStreamRequest(aristaproto.Message): + partial_eq_filter: List["Summary"] = aristaproto.message_field(1) + """ + PartialEqFilter provides a way to server-side filter a GetAll/Subscribe. + This requires all provided fields to be equal to the response. + + While transparent to users, this field also allows services to optimize internal + subscriptions if filter(s) are sufficiently specific. + """ + + time: "__time__.TimeBounds" = aristaproto.message_field(3) + """ + TimeRange allows limiting response data to within a specified time window. + If this field is populated, at least one of the two time fields are required. + + For GetAll, the fields start and end can be used as follows: + + * end: Returns the state of each Summary at end. + * Each Summary response is fully-specified (all fields set). + * start: Returns the state of each Summary at start, followed by updates until now. + * Each Summary response at start is fully-specified, but updates may be partial. + * start and end: Returns the state of each Summary at start, followed by updates + until end. 
+ * Each Summary response at start is fully-specified, but updates until end may + be partial. + + This field is not allowed in the Subscribe RPC. + """ + + max_messages: Optional[int] = aristaproto.message_field( + 4, wraps=aristaproto.TYPE_UINT32 + ) + """ + MaxMessages limits the maximum number of messages that can be contained in one batch. + MaxMessages is required to be at least 1. + The maximum number of messages in a batch is min(max_messages, INTERNAL_BATCH_LIMIT) + INTERNAL_BATCH_LIMIT is set based on the maximum message size. + """ + + +@dataclass(eq=False, repr=False) +class SummaryBatchedStreamResponse(aristaproto.Message): + responses: List["SummaryStreamResponse"] = aristaproto.message_field(1) + """ + Values are the values deemed relevant to the initiating request. + The length of this structure is guaranteed to be between (inclusive) 1 and + min(req.max_messages, INTERNAL_BATCH_LIMIT). + """ + + class ConfigDiffServiceStub(aristaproto.ServiceStub): async def get_one( self, @@ -1302,6 +1602,42 @@ async def subscribe_meta( ): yield response + async def get_all_batched( + self, + config_diff_batched_stream_request: "ConfigDiffBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["ConfigDiffBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.configstatus.v1.ConfigDiffService/GetAllBatched", + config_diff_batched_stream_request, + ConfigDiffBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def subscribe_batched( + self, + config_diff_batched_stream_request: "ConfigDiffBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["ConfigDiffBatchedStreamResponse"]: + async for response in self._unary_stream( + 
"/arista.configstatus.v1.ConfigDiffService/SubscribeBatched", + config_diff_batched_stream_request, + ConfigDiffBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + class ConfigurationServiceStub(aristaproto.ServiceStub): async def get_one( @@ -1390,20 +1726,56 @@ async def get_meta( timeout=timeout, deadline=deadline, metadata=metadata, - ) + ) + + async def subscribe_meta( + self, + configuration_stream_request: "ConfigurationStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["MetaResponse"]: + async for response in self._unary_stream( + "/arista.configstatus.v1.ConfigurationService/SubscribeMeta", + configuration_stream_request, + MetaResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def get_all_batched( + self, + configuration_batched_stream_request: "ConfigurationBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["ConfigurationBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.configstatus.v1.ConfigurationService/GetAllBatched", + configuration_batched_stream_request, + ConfigurationBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response - async def subscribe_meta( + async def subscribe_batched( self, - configuration_stream_request: "ConfigurationStreamRequest", + configuration_batched_stream_request: "ConfigurationBatchedStreamRequest", *, timeout: Optional[float] = None, deadline: Optional["Deadline"] = None, metadata: Optional["MetadataLike"] = None - ) -> AsyncIterator["MetaResponse"]: + ) -> AsyncIterator["ConfigurationBatchedStreamResponse"]: async for response in self._unary_stream( - "/arista.configstatus.v1.ConfigurationService/SubscribeMeta", 
- configuration_stream_request, - MetaResponse, + "/arista.configstatus.v1.ConfigurationService/SubscribeBatched", + configuration_batched_stream_request, + ConfigurationBatchedStreamResponse, timeout=timeout, deadline=deadline, metadata=metadata, @@ -1518,6 +1890,42 @@ async def subscribe_meta( ): yield response + async def get_all_batched( + self, + security_profile_batched_stream_request: "SecurityProfileBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["SecurityProfileBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.configstatus.v1.SecurityProfileService/GetAllBatched", + security_profile_batched_stream_request, + SecurityProfileBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def subscribe_batched( + self, + security_profile_batched_stream_request: "SecurityProfileBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["SecurityProfileBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.configstatus.v1.SecurityProfileService/SubscribeBatched", + security_profile_batched_stream_request, + SecurityProfileBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + class SecurityProfileDiffServiceStub(aristaproto.ServiceStub): async def get_one( @@ -1626,6 +2034,42 @@ async def subscribe_meta( ): yield response + async def get_all_batched( + self, + security_profile_diff_batched_stream_request: "SecurityProfileDiffBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["SecurityProfileDiffBatchedStreamResponse"]: + async for response in self._unary_stream( 
+ "/arista.configstatus.v1.SecurityProfileDiffService/GetAllBatched", + security_profile_diff_batched_stream_request, + SecurityProfileDiffBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def subscribe_batched( + self, + security_profile_diff_batched_stream_request: "SecurityProfileDiffBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["SecurityProfileDiffBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.configstatus.v1.SecurityProfileDiffService/SubscribeBatched", + security_profile_diff_batched_stream_request, + SecurityProfileDiffBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + class SecurityProfileDiffSummaryServiceStub(aristaproto.ServiceStub): async def get_one( @@ -1734,6 +2178,42 @@ async def subscribe_meta( ): yield response + async def get_all_batched( + self, + security_profile_diff_summary_batched_stream_request: "SecurityProfileDiffSummaryBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["SecurityProfileDiffSummaryBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.configstatus.v1.SecurityProfileDiffSummaryService/GetAllBatched", + security_profile_diff_summary_batched_stream_request, + SecurityProfileDiffSummaryBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def subscribe_batched( + self, + security_profile_diff_summary_batched_stream_request: "SecurityProfileDiffSummaryBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> 
AsyncIterator["SecurityProfileDiffSummaryBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.configstatus.v1.SecurityProfileDiffSummaryService/SubscribeBatched", + security_profile_diff_summary_batched_stream_request, + SecurityProfileDiffSummaryBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + class SummaryServiceStub(aristaproto.ServiceStub): async def get_one( @@ -1842,6 +2322,42 @@ async def subscribe_meta( ): yield response + async def get_all_batched( + self, + summary_batched_stream_request: "SummaryBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["SummaryBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.configstatus.v1.SummaryService/GetAllBatched", + summary_batched_stream_request, + SummaryBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def subscribe_batched( + self, + summary_batched_stream_request: "SummaryBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["SummaryBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.configstatus.v1.SummaryService/SubscribeBatched", + summary_batched_stream_request, + SummaryBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + class ConfigDiffServiceBase(ServiceBase): @@ -1875,6 +2391,16 @@ async def subscribe_meta( ) -> AsyncIterator["MetaResponse"]: raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def get_all_batched( + self, config_diff_batched_stream_request: "ConfigDiffBatchedStreamRequest" + ) -> AsyncIterator["ConfigDiffBatchedStreamResponse"]: + raise 
grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def subscribe_batched( + self, config_diff_batched_stream_request: "ConfigDiffBatchedStreamRequest" + ) -> AsyncIterator["ConfigDiffBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def __rpc_get_one( self, stream: "grpclib.server.Stream[ConfigDiffRequest, ConfigDiffResponse]" ) -> None: @@ -1932,6 +2458,28 @@ async def __rpc_subscribe_meta( request, ) + async def __rpc_get_all_batched( + self, + stream: "grpclib.server.Stream[ConfigDiffBatchedStreamRequest, ConfigDiffBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.get_all_batched, + stream, + request, + ) + + async def __rpc_subscribe_batched( + self, + stream: "grpclib.server.Stream[ConfigDiffBatchedStreamRequest, ConfigDiffBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.subscribe_batched, + stream, + request, + ) + def __mapping__(self) -> Dict[str, grpclib.const.Handler]: return { "/arista.configstatus.v1.ConfigDiffService/GetOne": grpclib.const.Handler( @@ -1970,6 +2518,18 @@ def __mapping__(self) -> Dict[str, grpclib.const.Handler]: ConfigDiffStreamRequest, MetaResponse, ), + "/arista.configstatus.v1.ConfigDiffService/GetAllBatched": grpclib.const.Handler( + self.__rpc_get_all_batched, + grpclib.const.Cardinality.UNARY_STREAM, + ConfigDiffBatchedStreamRequest, + ConfigDiffBatchedStreamResponse, + ), + "/arista.configstatus.v1.ConfigDiffService/SubscribeBatched": grpclib.const.Handler( + self.__rpc_subscribe_batched, + grpclib.const.Cardinality.UNARY_STREAM, + ConfigDiffBatchedStreamRequest, + ConfigDiffBatchedStreamResponse, + ), } @@ -2005,6 +2565,16 @@ async def subscribe_meta( ) -> AsyncIterator["MetaResponse"]: raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def get_all_batched( + self, 
configuration_batched_stream_request: "ConfigurationBatchedStreamRequest" + ) -> AsyncIterator["ConfigurationBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def subscribe_batched( + self, configuration_batched_stream_request: "ConfigurationBatchedStreamRequest" + ) -> AsyncIterator["ConfigurationBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def __rpc_get_one( self, stream: "grpclib.server.Stream[ConfigurationRequest, ConfigurationResponse]", @@ -2063,6 +2633,28 @@ async def __rpc_subscribe_meta( request, ) + async def __rpc_get_all_batched( + self, + stream: "grpclib.server.Stream[ConfigurationBatchedStreamRequest, ConfigurationBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.get_all_batched, + stream, + request, + ) + + async def __rpc_subscribe_batched( + self, + stream: "grpclib.server.Stream[ConfigurationBatchedStreamRequest, ConfigurationBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.subscribe_batched, + stream, + request, + ) + def __mapping__(self) -> Dict[str, grpclib.const.Handler]: return { "/arista.configstatus.v1.ConfigurationService/GetOne": grpclib.const.Handler( @@ -2101,6 +2693,18 @@ def __mapping__(self) -> Dict[str, grpclib.const.Handler]: ConfigurationStreamRequest, MetaResponse, ), + "/arista.configstatus.v1.ConfigurationService/GetAllBatched": grpclib.const.Handler( + self.__rpc_get_all_batched, + grpclib.const.Cardinality.UNARY_STREAM, + ConfigurationBatchedStreamRequest, + ConfigurationBatchedStreamResponse, + ), + "/arista.configstatus.v1.ConfigurationService/SubscribeBatched": grpclib.const.Handler( + self.__rpc_subscribe_batched, + grpclib.const.Cardinality.UNARY_STREAM, + ConfigurationBatchedStreamRequest, + ConfigurationBatchedStreamResponse, + ), } @@ -2136,6 +2740,18 @@ async 
def subscribe_meta( ) -> AsyncIterator["MetaResponse"]: raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def get_all_batched( + self, + security_profile_batched_stream_request: "SecurityProfileBatchedStreamRequest", + ) -> AsyncIterator["SecurityProfileBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def subscribe_batched( + self, + security_profile_batched_stream_request: "SecurityProfileBatchedStreamRequest", + ) -> AsyncIterator["SecurityProfileBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def __rpc_get_one( self, stream: "grpclib.server.Stream[SecurityProfileRequest, SecurityProfileResponse]", @@ -2196,6 +2812,28 @@ async def __rpc_subscribe_meta( request, ) + async def __rpc_get_all_batched( + self, + stream: "grpclib.server.Stream[SecurityProfileBatchedStreamRequest, SecurityProfileBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.get_all_batched, + stream, + request, + ) + + async def __rpc_subscribe_batched( + self, + stream: "grpclib.server.Stream[SecurityProfileBatchedStreamRequest, SecurityProfileBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.subscribe_batched, + stream, + request, + ) + def __mapping__(self) -> Dict[str, grpclib.const.Handler]: return { "/arista.configstatus.v1.SecurityProfileService/GetOne": grpclib.const.Handler( @@ -2234,6 +2872,18 @@ def __mapping__(self) -> Dict[str, grpclib.const.Handler]: SecurityProfileStreamRequest, MetaResponse, ), + "/arista.configstatus.v1.SecurityProfileService/GetAllBatched": grpclib.const.Handler( + self.__rpc_get_all_batched, + grpclib.const.Cardinality.UNARY_STREAM, + SecurityProfileBatchedStreamRequest, + SecurityProfileBatchedStreamResponse, + ), + "/arista.configstatus.v1.SecurityProfileService/SubscribeBatched": 
grpclib.const.Handler( + self.__rpc_subscribe_batched, + grpclib.const.Cardinality.UNARY_STREAM, + SecurityProfileBatchedStreamRequest, + SecurityProfileBatchedStreamResponse, + ), } @@ -2269,6 +2919,18 @@ async def subscribe_meta( ) -> AsyncIterator["MetaResponse"]: raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def get_all_batched( + self, + security_profile_diff_batched_stream_request: "SecurityProfileDiffBatchedStreamRequest", + ) -> AsyncIterator["SecurityProfileDiffBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def subscribe_batched( + self, + security_profile_diff_batched_stream_request: "SecurityProfileDiffBatchedStreamRequest", + ) -> AsyncIterator["SecurityProfileDiffBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def __rpc_get_one( self, stream: "grpclib.server.Stream[SecurityProfileDiffRequest, SecurityProfileDiffResponse]", @@ -2329,6 +2991,28 @@ async def __rpc_subscribe_meta( request, ) + async def __rpc_get_all_batched( + self, + stream: "grpclib.server.Stream[SecurityProfileDiffBatchedStreamRequest, SecurityProfileDiffBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.get_all_batched, + stream, + request, + ) + + async def __rpc_subscribe_batched( + self, + stream: "grpclib.server.Stream[SecurityProfileDiffBatchedStreamRequest, SecurityProfileDiffBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.subscribe_batched, + stream, + request, + ) + def __mapping__(self) -> Dict[str, grpclib.const.Handler]: return { "/arista.configstatus.v1.SecurityProfileDiffService/GetOne": grpclib.const.Handler( @@ -2367,6 +3051,18 @@ def __mapping__(self) -> Dict[str, grpclib.const.Handler]: SecurityProfileDiffStreamRequest, MetaResponse, ), + 
"/arista.configstatus.v1.SecurityProfileDiffService/GetAllBatched": grpclib.const.Handler( + self.__rpc_get_all_batched, + grpclib.const.Cardinality.UNARY_STREAM, + SecurityProfileDiffBatchedStreamRequest, + SecurityProfileDiffBatchedStreamResponse, + ), + "/arista.configstatus.v1.SecurityProfileDiffService/SubscribeBatched": grpclib.const.Handler( + self.__rpc_subscribe_batched, + grpclib.const.Cardinality.UNARY_STREAM, + SecurityProfileDiffBatchedStreamRequest, + SecurityProfileDiffBatchedStreamResponse, + ), } @@ -2407,6 +3103,18 @@ async def subscribe_meta( ) -> AsyncIterator["MetaResponse"]: raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def get_all_batched( + self, + security_profile_diff_summary_batched_stream_request: "SecurityProfileDiffSummaryBatchedStreamRequest", + ) -> AsyncIterator["SecurityProfileDiffSummaryBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def subscribe_batched( + self, + security_profile_diff_summary_batched_stream_request: "SecurityProfileDiffSummaryBatchedStreamRequest", + ) -> AsyncIterator["SecurityProfileDiffSummaryBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def __rpc_get_one( self, stream: "grpclib.server.Stream[SecurityProfileDiffSummaryRequest, SecurityProfileDiffSummaryResponse]", @@ -2467,6 +3175,28 @@ async def __rpc_subscribe_meta( request, ) + async def __rpc_get_all_batched( + self, + stream: "grpclib.server.Stream[SecurityProfileDiffSummaryBatchedStreamRequest, SecurityProfileDiffSummaryBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.get_all_batched, + stream, + request, + ) + + async def __rpc_subscribe_batched( + self, + stream: "grpclib.server.Stream[SecurityProfileDiffSummaryBatchedStreamRequest, SecurityProfileDiffSummaryBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await 
self._call_rpc_handler_server_stream( + self.subscribe_batched, + stream, + request, + ) + def __mapping__(self) -> Dict[str, grpclib.const.Handler]: return { "/arista.configstatus.v1.SecurityProfileDiffSummaryService/GetOne": grpclib.const.Handler( @@ -2505,6 +3235,18 @@ def __mapping__(self) -> Dict[str, grpclib.const.Handler]: SecurityProfileDiffSummaryStreamRequest, MetaResponse, ), + "/arista.configstatus.v1.SecurityProfileDiffSummaryService/GetAllBatched": grpclib.const.Handler( + self.__rpc_get_all_batched, + grpclib.const.Cardinality.UNARY_STREAM, + SecurityProfileDiffSummaryBatchedStreamRequest, + SecurityProfileDiffSummaryBatchedStreamResponse, + ), + "/arista.configstatus.v1.SecurityProfileDiffSummaryService/SubscribeBatched": grpclib.const.Handler( + self.__rpc_subscribe_batched, + grpclib.const.Cardinality.UNARY_STREAM, + SecurityProfileDiffSummaryBatchedStreamRequest, + SecurityProfileDiffSummaryBatchedStreamResponse, + ), } @@ -2538,6 +3280,16 @@ async def subscribe_meta( ) -> AsyncIterator["MetaResponse"]: raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def get_all_batched( + self, summary_batched_stream_request: "SummaryBatchedStreamRequest" + ) -> AsyncIterator["SummaryBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def subscribe_batched( + self, summary_batched_stream_request: "SummaryBatchedStreamRequest" + ) -> AsyncIterator["SummaryBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def __rpc_get_one( self, stream: "grpclib.server.Stream[SummaryRequest, SummaryResponse]" ) -> None: @@ -2594,6 +3346,28 @@ async def __rpc_subscribe_meta( request, ) + async def __rpc_get_all_batched( + self, + stream: "grpclib.server.Stream[SummaryBatchedStreamRequest, SummaryBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.get_all_batched, + stream, + request, + ) + + 
async def __rpc_subscribe_batched( + self, + stream: "grpclib.server.Stream[SummaryBatchedStreamRequest, SummaryBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.subscribe_batched, + stream, + request, + ) + def __mapping__(self) -> Dict[str, grpclib.const.Handler]: return { "/arista.configstatus.v1.SummaryService/GetOne": grpclib.const.Handler( @@ -2632,4 +3406,16 @@ def __mapping__(self) -> Dict[str, grpclib.const.Handler]: SummaryStreamRequest, MetaResponse, ), + "/arista.configstatus.v1.SummaryService/GetAllBatched": grpclib.const.Handler( + self.__rpc_get_all_batched, + grpclib.const.Cardinality.UNARY_STREAM, + SummaryBatchedStreamRequest, + SummaryBatchedStreamResponse, + ), + "/arista.configstatus.v1.SummaryService/SubscribeBatched": grpclib.const.Handler( + self.__rpc_subscribe_batched, + grpclib.const.Cardinality.UNARY_STREAM, + SummaryBatchedStreamRequest, + SummaryBatchedStreamResponse, + ), } diff --git a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/dashboard/__init__.py b/python-avd/pyavd/_cv/api/arista/connectivitymonitor/__init__.py similarity index 100% rename from ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/dashboard/__init__.py rename to python-avd/pyavd/_cv/api/arista/connectivitymonitor/__init__.py diff --git a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/connectivitymonitor/v1/__init__.py b/python-avd/pyavd/_cv/api/arista/connectivitymonitor/v1/__init__.py similarity index 74% rename from ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/connectivitymonitor/v1/__init__.py rename to python-avd/pyavd/_cv/api/arista/connectivitymonitor/v1/__init__.py index 540a5feabf1..cc7ca2ba651 100644 --- a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/connectivitymonitor/v1/__init__.py +++ 
b/python-avd/pyavd/_cv/api/arista/connectivitymonitor/v1/__init__.py @@ -16,17 +16,9 @@ Optional, ) -try: - import aristaproto - import grpclib - from aristaproto.grpc.grpclib_server import ServiceBase -except ImportError: - HAS_ARISTAPROTO = False - from ansible_collections.arista.avd.plugins.plugin_utils.cv_client.mocked_classes import mocked_aristaproto as aristaproto - from ansible_collections.arista.avd.plugins.plugin_utils.cv_client.mocked_classes import mocked_grpclib as grpclib - ServiceBase = object -else: - HAS_ARISTAPROTO = True +import aristaproto +import grpclib +from aristaproto.grpc.grpclib_server import ServiceBase from ... import ( subscriptions as __subscriptions__, @@ -287,6 +279,57 @@ class ProbeStreamResponse(aristaproto.Message): """ +@dataclass(eq=False, repr=False) +class ProbeBatchedStreamRequest(aristaproto.Message): + partial_eq_filter: List["Probe"] = aristaproto.message_field(1) + """ + PartialEqFilter provides a way to server-side filter a GetAll/Subscribe. + This requires all provided fields to be equal to the response. + + While transparent to users, this field also allows services to optimize internal + subscriptions if filter(s) are sufficiently specific. + """ + + time: "__time__.TimeBounds" = aristaproto.message_field(3) + """ + TimeRange allows limiting response data to within a specified time window. + If this field is populated, at least one of the two time fields are required. + + For GetAll, the fields start and end can be used as follows: + + * end: Returns the state of each Probe at end. + * Each Probe response is fully-specified (all fields set). + * start: Returns the state of each Probe at start, followed by updates until now. + * Each Probe response at start is fully-specified, but updates may be partial. + * start and end: Returns the state of each Probe at start, followed by updates + until end. + * Each Probe response at start is fully-specified, but updates until end may + be partial. 
+ + This field is not allowed in the Subscribe RPC. + """ + + max_messages: Optional[int] = aristaproto.message_field( + 4, wraps=aristaproto.TYPE_UINT32 + ) + """ + MaxMessages limits the maximum number of messages that can be contained in one batch. + MaxMessages is required to be at least 1. + The maximum number of messages in a batch is min(max_messages, INTERNAL_BATCH_LIMIT) + INTERNAL_BATCH_LIMIT is set based on the maximum message size. + """ + + +@dataclass(eq=False, repr=False) +class ProbeBatchedStreamResponse(aristaproto.Message): + responses: List["ProbeStreamResponse"] = aristaproto.message_field(1) + """ + Values are the values deemed relevant to the initiating request. + The length of this structure is guaranteed to be between (inclusive) 1 and + min(req.max_messages, INTERNAL_BATCH_LIMIT). + """ + + @dataclass(eq=False, repr=False) class ProbeStatsRequest(aristaproto.Message): key: "ProbeStatsKey" = aristaproto.message_field(1) @@ -400,6 +443,57 @@ class ProbeStatsStreamResponse(aristaproto.Message): """ +@dataclass(eq=False, repr=False) +class ProbeStatsBatchedStreamRequest(aristaproto.Message): + partial_eq_filter: List["ProbeStats"] = aristaproto.message_field(1) + """ + PartialEqFilter provides a way to server-side filter a GetAll/Subscribe. + This requires all provided fields to be equal to the response. + + While transparent to users, this field also allows services to optimize internal + subscriptions if filter(s) are sufficiently specific. + """ + + time: "__time__.TimeBounds" = aristaproto.message_field(3) + """ + TimeRange allows limiting response data to within a specified time window. + If this field is populated, at least one of the two time fields are required. + + For GetAll, the fields start and end can be used as follows: + + * end: Returns the state of each ProbeStats at end. + * Each ProbeStats response is fully-specified (all fields set). + * start: Returns the state of each ProbeStats at start, followed by updates until now. 
+ * Each ProbeStats response at start is fully-specified, but updates may be partial. + * start and end: Returns the state of each ProbeStats at start, followed by updates + until end. + * Each ProbeStats response at start is fully-specified, but updates until end may + be partial. + + This field is not allowed in the Subscribe RPC. + """ + + max_messages: Optional[int] = aristaproto.message_field( + 4, wraps=aristaproto.TYPE_UINT32 + ) + """ + MaxMessages limits the maximum number of messages that can be contained in one batch. + MaxMessages is required to be at least 1. + The maximum number of messages in a batch is min(max_messages, INTERNAL_BATCH_LIMIT) + INTERNAL_BATCH_LIMIT is set based on the maximum message size. + """ + + +@dataclass(eq=False, repr=False) +class ProbeStatsBatchedStreamResponse(aristaproto.Message): + responses: List["ProbeStatsStreamResponse"] = aristaproto.message_field(1) + """ + Values are the values deemed relevant to the initiating request. + The length of this structure is guaranteed to be between (inclusive) 1 and + min(req.max_messages, INTERNAL_BATCH_LIMIT). 
+ """ + + class ProbeServiceStub(aristaproto.ServiceStub): async def get_one( self, @@ -507,6 +601,42 @@ async def subscribe_meta( ): yield response + async def get_all_batched( + self, + probe_batched_stream_request: "ProbeBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["ProbeBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.connectivitymonitor.v1.ProbeService/GetAllBatched", + probe_batched_stream_request, + ProbeBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def subscribe_batched( + self, + probe_batched_stream_request: "ProbeBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["ProbeBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.connectivitymonitor.v1.ProbeService/SubscribeBatched", + probe_batched_stream_request, + ProbeBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + class ProbeStatsServiceStub(aristaproto.ServiceStub): async def get_one( @@ -615,6 +745,42 @@ async def subscribe_meta( ): yield response + async def get_all_batched( + self, + probe_stats_batched_stream_request: "ProbeStatsBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["ProbeStatsBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.connectivitymonitor.v1.ProbeStatsService/GetAllBatched", + probe_stats_batched_stream_request, + ProbeStatsBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def subscribe_batched( + self, + probe_stats_batched_stream_request: 
"ProbeStatsBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["ProbeStatsBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.connectivitymonitor.v1.ProbeStatsService/SubscribeBatched", + probe_stats_batched_stream_request, + ProbeStatsBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + class ProbeServiceBase(ServiceBase): @@ -646,6 +812,16 @@ async def subscribe_meta( ) -> AsyncIterator["MetaResponse"]: raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def get_all_batched( + self, probe_batched_stream_request: "ProbeBatchedStreamRequest" + ) -> AsyncIterator["ProbeBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def subscribe_batched( + self, probe_batched_stream_request: "ProbeBatchedStreamRequest" + ) -> AsyncIterator["ProbeBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def __rpc_get_one( self, stream: "grpclib.server.Stream[ProbeRequest, ProbeResponse]" ) -> None: @@ -700,6 +876,28 @@ async def __rpc_subscribe_meta( request, ) + async def __rpc_get_all_batched( + self, + stream: "grpclib.server.Stream[ProbeBatchedStreamRequest, ProbeBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.get_all_batched, + stream, + request, + ) + + async def __rpc_subscribe_batched( + self, + stream: "grpclib.server.Stream[ProbeBatchedStreamRequest, ProbeBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.subscribe_batched, + stream, + request, + ) + def __mapping__(self) -> Dict[str, grpclib.const.Handler]: return { "/arista.connectivitymonitor.v1.ProbeService/GetOne": grpclib.const.Handler( @@ -738,6 +936,18 @@ def 
__mapping__(self) -> Dict[str, grpclib.const.Handler]: ProbeStreamRequest, MetaResponse, ), + "/arista.connectivitymonitor.v1.ProbeService/GetAllBatched": grpclib.const.Handler( + self.__rpc_get_all_batched, + grpclib.const.Cardinality.UNARY_STREAM, + ProbeBatchedStreamRequest, + ProbeBatchedStreamResponse, + ), + "/arista.connectivitymonitor.v1.ProbeService/SubscribeBatched": grpclib.const.Handler( + self.__rpc_subscribe_batched, + grpclib.const.Cardinality.UNARY_STREAM, + ProbeBatchedStreamRequest, + ProbeBatchedStreamResponse, + ), } @@ -773,6 +983,16 @@ async def subscribe_meta( ) -> AsyncIterator["MetaResponse"]: raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def get_all_batched( + self, probe_stats_batched_stream_request: "ProbeStatsBatchedStreamRequest" + ) -> AsyncIterator["ProbeStatsBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def subscribe_batched( + self, probe_stats_batched_stream_request: "ProbeStatsBatchedStreamRequest" + ) -> AsyncIterator["ProbeStatsBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def __rpc_get_one( self, stream: "grpclib.server.Stream[ProbeStatsRequest, ProbeStatsResponse]" ) -> None: @@ -830,6 +1050,28 @@ async def __rpc_subscribe_meta( request, ) + async def __rpc_get_all_batched( + self, + stream: "grpclib.server.Stream[ProbeStatsBatchedStreamRequest, ProbeStatsBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.get_all_batched, + stream, + request, + ) + + async def __rpc_subscribe_batched( + self, + stream: "grpclib.server.Stream[ProbeStatsBatchedStreamRequest, ProbeStatsBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.subscribe_batched, + stream, + request, + ) + def __mapping__(self) -> Dict[str, grpclib.const.Handler]: return { 
"/arista.connectivitymonitor.v1.ProbeStatsService/GetOne": grpclib.const.Handler( @@ -868,4 +1110,16 @@ def __mapping__(self) -> Dict[str, grpclib.const.Handler]: ProbeStatsStreamRequest, MetaResponse, ), + "/arista.connectivitymonitor.v1.ProbeStatsService/GetAllBatched": grpclib.const.Handler( + self.__rpc_get_all_batched, + grpclib.const.Cardinality.UNARY_STREAM, + ProbeStatsBatchedStreamRequest, + ProbeStatsBatchedStreamResponse, + ), + "/arista.connectivitymonitor.v1.ProbeStatsService/SubscribeBatched": grpclib.const.Handler( + self.__rpc_subscribe_batched, + grpclib.const.Cardinality.UNARY_STREAM, + ProbeStatsBatchedStreamRequest, + ProbeStatsBatchedStreamResponse, + ), } diff --git a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/endpointlocation/__init__.py b/python-avd/pyavd/_cv/api/arista/dashboard/__init__.py similarity index 100% rename from ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/endpointlocation/__init__.py rename to python-avd/pyavd/_cv/api/arista/dashboard/__init__.py diff --git a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/dashboard/v1/__init__.py b/python-avd/pyavd/_cv/api/arista/dashboard/v1/__init__.py similarity index 77% rename from ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/dashboard/v1/__init__.py rename to python-avd/pyavd/_cv/api/arista/dashboard/v1/__init__.py index 15a7d3d5393..cbeb4b5df7c 100644 --- a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/dashboard/v1/__init__.py +++ b/python-avd/pyavd/_cv/api/arista/dashboard/v1/__init__.py @@ -16,17 +16,9 @@ Optional, ) -try: - import aristaproto - import grpclib - from aristaproto.grpc.grpclib_server import ServiceBase -except ImportError: - HAS_ARISTAPROTO = False - from ansible_collections.arista.avd.plugins.plugin_utils.cv_client.mocked_classes import mocked_aristaproto as aristaproto - from 
ansible_collections.arista.avd.plugins.plugin_utils.cv_client.mocked_classes import mocked_grpclib as grpclib - ServiceBase = object -else: - HAS_ARISTAPROTO = True +import aristaproto +import grpclib +from aristaproto.grpc.grpclib_server import ServiceBase from .... import fmp as ___fmp__ from ... import ( @@ -438,6 +430,64 @@ class DashboardStreamResponse(aristaproto.Message): """ +@dataclass(eq=False, repr=False) +class DashboardBatchedStreamRequest(aristaproto.Message): + partial_eq_filter: List["Dashboard"] = aristaproto.message_field(1) + """ + PartialEqFilter provides a way to server-side filter a GetAll/Subscribe. + This requires all provided fields to be equal to the response. + + While transparent to users, this field also allows services to optimize internal + subscriptions if filter(s) are sufficiently specific. + """ + + filter: List["Filter"] = aristaproto.message_field(2) + """ + For each Dashboard in the list, all populated fields are considered ANDed together + as a filtering operation. Similarly, the list itself is ORed such that any individual + filter that matches a given Dashboard is streamed to the user. + """ + + time: "__time__.TimeBounds" = aristaproto.message_field(3) + """ + TimeRange allows limiting response data to within a specified time window. + If this field is populated, at least one of the two time fields are required. + + For GetAll, the fields start and end can be used as follows: + + * end: Returns the state of each Dashboard at end. + * Each Dashboard response is fully-specified (all fields set). + * start: Returns the state of each Dashboard at start, followed by updates until now. + * Each Dashboard response at start is fully-specified, but updates may be partial. + * start and end: Returns the state of each Dashboard at start, followed by updates + until end. + * Each Dashboard response at start is fully-specified, but updates until end may + be partial. + + This field is not allowed in the Subscribe RPC. 
+ """ + + max_messages: Optional[int] = aristaproto.message_field( + 4, wraps=aristaproto.TYPE_UINT32 + ) + """ + MaxMessages limits the maximum number of messages that can be contained in one batch. + MaxMessages is required to be at least 1. + The maximum number of messages in a batch is min(max_messages, INTERNAL_BATCH_LIMIT) + INTERNAL_BATCH_LIMIT is set based on the maximum message size. + """ + + +@dataclass(eq=False, repr=False) +class DashboardBatchedStreamResponse(aristaproto.Message): + responses: List["DashboardStreamResponse"] = aristaproto.message_field(1) + """ + Values are the values deemed relevant to the initiating request. + The length of this structure is guaranteed to be between (inclusive) 1 and + min(req.max_messages, INTERNAL_BATCH_LIMIT). + """ + + @dataclass(eq=False, repr=False) class DashboardConfigRequest(aristaproto.Message): key: "DashboardKey" = aristaproto.message_field(1) @@ -553,6 +603,57 @@ class DashboardConfigStreamResponse(aristaproto.Message): """ +@dataclass(eq=False, repr=False) +class DashboardConfigBatchedStreamRequest(aristaproto.Message): + partial_eq_filter: List["DashboardConfig"] = aristaproto.message_field(1) + """ + PartialEqFilter provides a way to server-side filter a GetAll/Subscribe. + This requires all provided fields to be equal to the response. + + While transparent to users, this field also allows services to optimize internal + subscriptions if filter(s) are sufficiently specific. + """ + + time: "__time__.TimeBounds" = aristaproto.message_field(3) + """ + TimeRange allows limiting response data to within a specified time window. + If this field is populated, at least one of the two time fields are required. + + For GetAll, the fields start and end can be used as follows: + + * end: Returns the state of each DashboardConfig at end. + * Each DashboardConfig response is fully-specified (all fields set). + * start: Returns the state of each DashboardConfig at start, followed by updates until now. 
+ * Each DashboardConfig response at start is fully-specified, but updates may be partial. + * start and end: Returns the state of each DashboardConfig at start, followed by updates + until end. + * Each DashboardConfig response at start is fully-specified, but updates until end may + be partial. + + This field is not allowed in the Subscribe RPC. + """ + + max_messages: Optional[int] = aristaproto.message_field( + 4, wraps=aristaproto.TYPE_UINT32 + ) + """ + MaxMessages limits the maximum number of messages that can be contained in one batch. + MaxMessages is required to be at least 1. + The maximum number of messages in a batch is min(max_messages, INTERNAL_BATCH_LIMIT) + INTERNAL_BATCH_LIMIT is set based on the maximum message size. + """ + + +@dataclass(eq=False, repr=False) +class DashboardConfigBatchedStreamResponse(aristaproto.Message): + responses: List["DashboardConfigStreamResponse"] = aristaproto.message_field(1) + """ + Values are the values deemed relevant to the initiating request. + The length of this structure is guaranteed to be between (inclusive) 1 and + min(req.max_messages, INTERNAL_BATCH_LIMIT). + """ + + @dataclass(eq=False, repr=False) class DashboardConfigSetRequest(aristaproto.Message): value: "DashboardConfig" = aristaproto.message_field(1) @@ -749,6 +850,59 @@ class GlobalDashboardConfigStreamResponse(aristaproto.Message): """ +@dataclass(eq=False, repr=False) +class GlobalDashboardConfigBatchedStreamRequest(aristaproto.Message): + partial_eq_filter: List["GlobalDashboardConfig"] = aristaproto.message_field(1) + """ + PartialEqFilter provides a way to server-side filter a GetAll/Subscribe. + This requires all provided fields to be equal to the response. + + While transparent to users, this field also allows services to optimize internal + subscriptions if filter(s) are sufficiently specific. 
+ """ + + time: "__time__.TimeBounds" = aristaproto.message_field(3) + """ + TimeRange allows limiting response data to within a specified time window. + If this field is populated, at least one of the two time fields are required. + + For GetAll, the fields start and end can be used as follows: + + * end: Returns the state of each GlobalDashboardConfig at end. + * Each GlobalDashboardConfig response is fully-specified (all fields set). + * start: Returns the state of each GlobalDashboardConfig at start, followed by updates until now. + * Each GlobalDashboardConfig response at start is fully-specified, but updates may be partial. + * start and end: Returns the state of each GlobalDashboardConfig at start, followed by updates + until end. + * Each GlobalDashboardConfig response at start is fully-specified, but updates until end may + be partial. + + This field is not allowed in the Subscribe RPC. + """ + + max_messages: Optional[int] = aristaproto.message_field( + 4, wraps=aristaproto.TYPE_UINT32 + ) + """ + MaxMessages limits the maximum number of messages that can be contained in one batch. + MaxMessages is required to be at least 1. + The maximum number of messages in a batch is min(max_messages, INTERNAL_BATCH_LIMIT) + INTERNAL_BATCH_LIMIT is set based on the maximum message size. + """ + + +@dataclass(eq=False, repr=False) +class GlobalDashboardConfigBatchedStreamResponse(aristaproto.Message): + responses: List["GlobalDashboardConfigStreamResponse"] = aristaproto.message_field( + 1 + ) + """ + Values are the values deemed relevant to the initiating request. + The length of this structure is guaranteed to be between (inclusive) 1 and + min(req.max_messages, INTERNAL_BATCH_LIMIT). 
+ """ + + @dataclass(eq=False, repr=False) class GlobalDashboardConfigSetRequest(aristaproto.Message): value: "GlobalDashboardConfig" = aristaproto.message_field(1) @@ -883,6 +1037,42 @@ async def subscribe_meta( ): yield response + async def get_all_batched( + self, + dashboard_batched_stream_request: "DashboardBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["DashboardBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.dashboard.v1.DashboardService/GetAllBatched", + dashboard_batched_stream_request, + DashboardBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def subscribe_batched( + self, + dashboard_batched_stream_request: "DashboardBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["DashboardBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.dashboard.v1.DashboardService/SubscribeBatched", + dashboard_batched_stream_request, + DashboardBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + class DashboardConfigServiceStub(aristaproto.ServiceStub): async def get_one( @@ -1079,6 +1269,42 @@ async def delete_all( ): yield response + async def get_all_batched( + self, + dashboard_config_batched_stream_request: "DashboardConfigBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["DashboardConfigBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.dashboard.v1.DashboardConfigService/GetAllBatched", + dashboard_config_batched_stream_request, + DashboardConfigBatchedStreamResponse, + timeout=timeout, + deadline=deadline, 
+ metadata=metadata, + ): + yield response + + async def subscribe_batched( + self, + dashboard_config_batched_stream_request: "DashboardConfigBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["DashboardConfigBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.dashboard.v1.DashboardConfigService/SubscribeBatched", + dashboard_config_batched_stream_request, + DashboardConfigBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + class GlobalDashboardConfigServiceStub(aristaproto.ServiceStub): async def get_one( @@ -1169,6 +1395,42 @@ async def set( metadata=metadata, ) + async def get_all_batched( + self, + global_dashboard_config_batched_stream_request: "GlobalDashboardConfigBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["GlobalDashboardConfigBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.dashboard.v1.GlobalDashboardConfigService/GetAllBatched", + global_dashboard_config_batched_stream_request, + GlobalDashboardConfigBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def subscribe_batched( + self, + global_dashboard_config_batched_stream_request: "GlobalDashboardConfigBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["GlobalDashboardConfigBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.dashboard.v1.GlobalDashboardConfigService/SubscribeBatched", + global_dashboard_config_batched_stream_request, + GlobalDashboardConfigBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + 
yield response + class DashboardServiceBase(ServiceBase): @@ -1202,6 +1464,16 @@ async def subscribe_meta( ) -> AsyncIterator["MetaResponse"]: raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def get_all_batched( + self, dashboard_batched_stream_request: "DashboardBatchedStreamRequest" + ) -> AsyncIterator["DashboardBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def subscribe_batched( + self, dashboard_batched_stream_request: "DashboardBatchedStreamRequest" + ) -> AsyncIterator["DashboardBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def __rpc_get_one( self, stream: "grpclib.server.Stream[DashboardRequest, DashboardResponse]" ) -> None: @@ -1259,6 +1531,28 @@ async def __rpc_subscribe_meta( request, ) + async def __rpc_get_all_batched( + self, + stream: "grpclib.server.Stream[DashboardBatchedStreamRequest, DashboardBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.get_all_batched, + stream, + request, + ) + + async def __rpc_subscribe_batched( + self, + stream: "grpclib.server.Stream[DashboardBatchedStreamRequest, DashboardBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.subscribe_batched, + stream, + request, + ) + def __mapping__(self) -> Dict[str, grpclib.const.Handler]: return { "/arista.dashboard.v1.DashboardService/GetOne": grpclib.const.Handler( @@ -1297,6 +1591,18 @@ def __mapping__(self) -> Dict[str, grpclib.const.Handler]: DashboardStreamRequest, MetaResponse, ), + "/arista.dashboard.v1.DashboardService/GetAllBatched": grpclib.const.Handler( + self.__rpc_get_all_batched, + grpclib.const.Cardinality.UNARY_STREAM, + DashboardBatchedStreamRequest, + DashboardBatchedStreamResponse, + ), + "/arista.dashboard.v1.DashboardService/SubscribeBatched": grpclib.const.Handler( + 
self.__rpc_subscribe_batched, + grpclib.const.Cardinality.UNARY_STREAM, + DashboardBatchedStreamRequest, + DashboardBatchedStreamResponse, + ), } @@ -1357,6 +1663,18 @@ async def delete_all( ) -> AsyncIterator["DashboardConfigDeleteAllResponse"]: raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def get_all_batched( + self, + dashboard_config_batched_stream_request: "DashboardConfigBatchedStreamRequest", + ) -> AsyncIterator["DashboardConfigBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def subscribe_batched( + self, + dashboard_config_batched_stream_request: "DashboardConfigBatchedStreamRequest", + ) -> AsyncIterator["DashboardConfigBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def __rpc_get_one( self, stream: "grpclib.server.Stream[DashboardConfigRequest, DashboardConfigResponse]", @@ -1466,6 +1784,28 @@ async def __rpc_delete_all( request, ) + async def __rpc_get_all_batched( + self, + stream: "grpclib.server.Stream[DashboardConfigBatchedStreamRequest, DashboardConfigBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.get_all_batched, + stream, + request, + ) + + async def __rpc_subscribe_batched( + self, + stream: "grpclib.server.Stream[DashboardConfigBatchedStreamRequest, DashboardConfigBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.subscribe_batched, + stream, + request, + ) + def __mapping__(self) -> Dict[str, grpclib.const.Handler]: return { "/arista.dashboard.v1.DashboardConfigService/GetOne": grpclib.const.Handler( @@ -1534,6 +1874,18 @@ def __mapping__(self) -> Dict[str, grpclib.const.Handler]: DashboardConfigDeleteAllRequest, DashboardConfigDeleteAllResponse, ), + "/arista.dashboard.v1.DashboardConfigService/GetAllBatched": grpclib.const.Handler( + self.__rpc_get_all_batched, + 
grpclib.const.Cardinality.UNARY_STREAM, + DashboardConfigBatchedStreamRequest, + DashboardConfigBatchedStreamResponse, + ), + "/arista.dashboard.v1.DashboardConfigService/SubscribeBatched": grpclib.const.Handler( + self.__rpc_subscribe_batched, + grpclib.const.Cardinality.UNARY_STREAM, + DashboardConfigBatchedStreamRequest, + DashboardConfigBatchedStreamResponse, + ), } @@ -1567,6 +1919,18 @@ async def set( ) -> "GlobalDashboardConfigSetResponse": raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def get_all_batched( + self, + global_dashboard_config_batched_stream_request: "GlobalDashboardConfigBatchedStreamRequest", + ) -> AsyncIterator["GlobalDashboardConfigBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def subscribe_batched( + self, + global_dashboard_config_batched_stream_request: "GlobalDashboardConfigBatchedStreamRequest", + ) -> AsyncIterator["GlobalDashboardConfigBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def __rpc_get_one( self, stream: "grpclib.server.Stream[GlobalDashboardConfigRequest, GlobalDashboardConfigResponse]", @@ -1616,6 +1980,28 @@ async def __rpc_set( response = await self.set(request) await stream.send_message(response) + async def __rpc_get_all_batched( + self, + stream: "grpclib.server.Stream[GlobalDashboardConfigBatchedStreamRequest, GlobalDashboardConfigBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.get_all_batched, + stream, + request, + ) + + async def __rpc_subscribe_batched( + self, + stream: "grpclib.server.Stream[GlobalDashboardConfigBatchedStreamRequest, GlobalDashboardConfigBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.subscribe_batched, + stream, + request, + ) + def __mapping__(self) -> Dict[str, grpclib.const.Handler]: return { 
"/arista.dashboard.v1.GlobalDashboardConfigService/GetOne": grpclib.const.Handler( @@ -1648,4 +2034,16 @@ def __mapping__(self) -> Dict[str, grpclib.const.Handler]: GlobalDashboardConfigSetRequest, GlobalDashboardConfigSetResponse, ), + "/arista.dashboard.v1.GlobalDashboardConfigService/GetAllBatched": grpclib.const.Handler( + self.__rpc_get_all_batched, + grpclib.const.Cardinality.UNARY_STREAM, + GlobalDashboardConfigBatchedStreamRequest, + GlobalDashboardConfigBatchedStreamResponse, + ), + "/arista.dashboard.v1.GlobalDashboardConfigService/SubscribeBatched": grpclib.const.Handler( + self.__rpc_subscribe_batched, + grpclib.const.Cardinality.UNARY_STREAM, + GlobalDashboardConfigBatchedStreamRequest, + GlobalDashboardConfigBatchedStreamResponse, + ), } diff --git a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/event/__init__.py b/python-avd/pyavd/_cv/api/arista/endpointlocation/__init__.py similarity index 100% rename from ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/event/__init__.py rename to python-avd/pyavd/_cv/api/arista/endpointlocation/__init__.py diff --git a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/endpointlocation/v1/__init__.py b/python-avd/pyavd/_cv/api/arista/endpointlocation/v1/__init__.py similarity index 84% rename from ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/endpointlocation/v1/__init__.py rename to python-avd/pyavd/_cv/api/arista/endpointlocation/v1/__init__.py index 596134cfd1b..599b8d6614a 100644 --- a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/endpointlocation/v1/__init__.py +++ b/python-avd/pyavd/_cv/api/arista/endpointlocation/v1/__init__.py @@ -16,17 +16,9 @@ Optional, ) -try: - import aristaproto - import grpclib - from aristaproto.grpc.grpclib_server import ServiceBase -except ImportError: - HAS_ARISTAPROTO = False - from 
ansible_collections.arista.avd.plugins.plugin_utils.cv_client.mocked_classes import mocked_aristaproto as aristaproto - from ansible_collections.arista.avd.plugins.plugin_utils.cv_client.mocked_classes import mocked_grpclib as grpclib - ServiceBase = object -else: - HAS_ARISTAPROTO = True +import aristaproto +import grpclib +from aristaproto.grpc.grpclib_server import ServiceBase from .... import fmp as ___fmp__ from ... import ( @@ -735,6 +727,57 @@ class EndpointLocationStreamResponse(aristaproto.Message): """ +@dataclass(eq=False, repr=False) +class EndpointLocationBatchedStreamRequest(aristaproto.Message): + partial_eq_filter: List["EndpointLocation"] = aristaproto.message_field(1) + """ + PartialEqFilter provides a way to server-side filter a GetAll/Subscribe. + This requires all provided fields to be equal to the response. + + While transparent to users, this field also allows services to optimize internal + subscriptions if filter(s) are sufficiently specific. + """ + + time: "__time__.TimeBounds" = aristaproto.message_field(3) + """ + TimeRange allows limiting response data to within a specified time window. + If this field is populated, at least one of the two time fields are required. + + For GetAll, the fields start and end can be used as follows: + + * end: Returns the state of each EndpointLocation at end. + * Each EndpointLocation response is fully-specified (all fields set). + * start: Returns the state of each EndpointLocation at start, followed by updates until now. + * Each EndpointLocation response at start is fully-specified, but updates may be partial. + * start and end: Returns the state of each EndpointLocation at start, followed by updates + until end. + * Each EndpointLocation response at start is fully-specified, but updates until end may + be partial. + + This field is not allowed in the Subscribe RPC. 
+ """ + + max_messages: Optional[int] = aristaproto.message_field( + 4, wraps=aristaproto.TYPE_UINT32 + ) + """ + MaxMessages limits the maximum number of messages that can be contained in one batch. + MaxMessages is required to be at least 1. + The maximum number of messages in a batch is min(max_messages, INTERNAL_BATCH_LIMIT) + INTERNAL_BATCH_LIMIT is set based on the maximum message size. + """ + + +@dataclass(eq=False, repr=False) +class EndpointLocationBatchedStreamResponse(aristaproto.Message): + responses: List["EndpointLocationStreamResponse"] = aristaproto.message_field(1) + """ + Values are the values deemed relevant to the initiating request. + The length of this structure is guaranteed to be between (inclusive) 1 and + min(req.max_messages, INTERNAL_BATCH_LIMIT). + """ + + class EndpointLocationServiceStub(aristaproto.ServiceStub): async def get_one( self, @@ -842,6 +885,42 @@ async def subscribe_meta( ): yield response + async def get_all_batched( + self, + endpoint_location_batched_stream_request: "EndpointLocationBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["EndpointLocationBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.endpointlocation.v1.EndpointLocationService/GetAllBatched", + endpoint_location_batched_stream_request, + EndpointLocationBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def subscribe_batched( + self, + endpoint_location_batched_stream_request: "EndpointLocationBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["EndpointLocationBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.endpointlocation.v1.EndpointLocationService/SubscribeBatched", + 
endpoint_location_batched_stream_request, + EndpointLocationBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + class EndpointLocationServiceBase(ServiceBase): @@ -875,6 +954,18 @@ async def subscribe_meta( ) -> AsyncIterator["MetaResponse"]: raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def get_all_batched( + self, + endpoint_location_batched_stream_request: "EndpointLocationBatchedStreamRequest", + ) -> AsyncIterator["EndpointLocationBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def subscribe_batched( + self, + endpoint_location_batched_stream_request: "EndpointLocationBatchedStreamRequest", + ) -> AsyncIterator["EndpointLocationBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def __rpc_get_one( self, stream: "grpclib.server.Stream[EndpointLocationRequest, EndpointLocationResponse]", @@ -935,6 +1026,28 @@ async def __rpc_subscribe_meta( request, ) + async def __rpc_get_all_batched( + self, + stream: "grpclib.server.Stream[EndpointLocationBatchedStreamRequest, EndpointLocationBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.get_all_batched, + stream, + request, + ) + + async def __rpc_subscribe_batched( + self, + stream: "grpclib.server.Stream[EndpointLocationBatchedStreamRequest, EndpointLocationBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.subscribe_batched, + stream, + request, + ) + def __mapping__(self) -> Dict[str, grpclib.const.Handler]: return { "/arista.endpointlocation.v1.EndpointLocationService/GetOne": grpclib.const.Handler( @@ -973,4 +1086,16 @@ def __mapping__(self) -> Dict[str, grpclib.const.Handler]: EndpointLocationStreamRequest, MetaResponse, ), + 
"/arista.endpointlocation.v1.EndpointLocationService/GetAllBatched": grpclib.const.Handler( + self.__rpc_get_all_batched, + grpclib.const.Cardinality.UNARY_STREAM, + EndpointLocationBatchedStreamRequest, + EndpointLocationBatchedStreamResponse, + ), + "/arista.endpointlocation.v1.EndpointLocationService/SubscribeBatched": grpclib.const.Handler( + self.__rpc_subscribe_batched, + grpclib.const.Cardinality.UNARY_STREAM, + EndpointLocationBatchedStreamRequest, + EndpointLocationBatchedStreamResponse, + ), } diff --git a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/identityprovider/__init__.py b/python-avd/pyavd/_cv/api/arista/event/__init__.py similarity index 100% rename from ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/identityprovider/__init__.py rename to python-avd/pyavd/_cv/api/arista/event/__init__.py diff --git a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/event/v1/__init__.py b/python-avd/pyavd/_cv/api/arista/event/v1/__init__.py similarity index 99% rename from ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/event/v1/__init__.py rename to python-avd/pyavd/_cv/api/arista/event/v1/__init__.py index 452945baecc..c7036d0a3b5 100644 --- a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/event/v1/__init__.py +++ b/python-avd/pyavd/_cv/api/arista/event/v1/__init__.py @@ -16,17 +16,9 @@ Optional, ) -try: - import aristaproto - import grpclib - from aristaproto.grpc.grpclib_server import ServiceBase -except ImportError: - HAS_ARISTAPROTO = False - from ansible_collections.arista.avd.plugins.plugin_utils.cv_client.mocked_classes import mocked_aristaproto as aristaproto - from ansible_collections.arista.avd.plugins.plugin_utils.cv_client.mocked_classes import mocked_grpclib as grpclib - ServiceBase = object -else: - HAS_ARISTAPROTO = True +import aristaproto +import grpclib +from aristaproto.grpc.grpclib_server import ServiceBase from 
.... import fmp as ___fmp__ from ... import ( diff --git a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/imagestatus/__init__.py b/python-avd/pyavd/_cv/api/arista/identityprovider/__init__.py similarity index 100% rename from ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/imagestatus/__init__.py rename to python-avd/pyavd/_cv/api/arista/identityprovider/__init__.py diff --git a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/identityprovider/v1/__init__.py b/python-avd/pyavd/_cv/api/arista/identityprovider/v1/__init__.py similarity index 84% rename from ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/identityprovider/v1/__init__.py rename to python-avd/pyavd/_cv/api/arista/identityprovider/v1/__init__.py index 8e8b00668e6..d7814b1f6a0 100644 --- a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/identityprovider/v1/__init__.py +++ b/python-avd/pyavd/_cv/api/arista/identityprovider/v1/__init__.py @@ -16,17 +16,9 @@ Optional, ) -try: - import aristaproto - import grpclib - from aristaproto.grpc.grpclib_server import ServiceBase -except ImportError: - HAS_ARISTAPROTO = False - from ansible_collections.arista.avd.plugins.plugin_utils.cv_client.mocked_classes import mocked_aristaproto as aristaproto - from ansible_collections.arista.avd.plugins.plugin_utils.cv_client.mocked_classes import mocked_grpclib as grpclib - ServiceBase = object -else: - HAS_ARISTAPROTO = True +import aristaproto +import grpclib +from aristaproto.grpc.grpclib_server import ServiceBase from .... import fmp as ___fmp__ from ... import ( @@ -408,6 +400,57 @@ class OAuthConfigStreamResponse(aristaproto.Message): """ +@dataclass(eq=False, repr=False) +class OAuthConfigBatchedStreamRequest(aristaproto.Message): + partial_eq_filter: List["OAuthConfig"] = aristaproto.message_field(1) + """ + PartialEqFilter provides a way to server-side filter a GetAll/Subscribe. 
+ This requires all provided fields to be equal to the response. + + While transparent to users, this field also allows services to optimize internal + subscriptions if filter(s) are sufficiently specific. + """ + + time: "__time__.TimeBounds" = aristaproto.message_field(3) + """ + TimeRange allows limiting response data to within a specified time window. + If this field is populated, at least one of the two time fields are required. + + For GetAll, the fields start and end can be used as follows: + + * end: Returns the state of each OAuthConfig at end. + * Each OAuthConfig response is fully-specified (all fields set). + * start: Returns the state of each OAuthConfig at start, followed by updates until now. + * Each OAuthConfig response at start is fully-specified, but updates may be partial. + * start and end: Returns the state of each OAuthConfig at start, followed by updates + until end. + * Each OAuthConfig response at start is fully-specified, but updates until end may + be partial. + + This field is not allowed in the Subscribe RPC. + """ + + max_messages: Optional[int] = aristaproto.message_field( + 4, wraps=aristaproto.TYPE_UINT32 + ) + """ + MaxMessages limits the maximum number of messages that can be contained in one batch. + MaxMessages is required to be at least 1. + The maximum number of messages in a batch is min(max_messages, INTERNAL_BATCH_LIMIT) + INTERNAL_BATCH_LIMIT is set based on the maximum message size. + """ + + +@dataclass(eq=False, repr=False) +class OAuthConfigBatchedStreamResponse(aristaproto.Message): + responses: List["OAuthConfigStreamResponse"] = aristaproto.message_field(1) + """ + Values are the values deemed relevant to the initiating request. + The length of this structure is guaranteed to be between (inclusive) 1 and + min(req.max_messages, INTERNAL_BATCH_LIMIT). 
+ """ + + @dataclass(eq=False, repr=False) class OAuthConfigSetRequest(aristaproto.Message): value: "OAuthConfig" = aristaproto.message_field(1) @@ -635,6 +678,57 @@ class SamlConfigStreamResponse(aristaproto.Message): """ +@dataclass(eq=False, repr=False) +class SamlConfigBatchedStreamRequest(aristaproto.Message): + partial_eq_filter: List["SamlConfig"] = aristaproto.message_field(1) + """ + PartialEqFilter provides a way to server-side filter a GetAll/Subscribe. + This requires all provided fields to be equal to the response. + + While transparent to users, this field also allows services to optimize internal + subscriptions if filter(s) are sufficiently specific. + """ + + time: "__time__.TimeBounds" = aristaproto.message_field(3) + """ + TimeRange allows limiting response data to within a specified time window. + If this field is populated, at least one of the two time fields are required. + + For GetAll, the fields start and end can be used as follows: + + * end: Returns the state of each SAMLConfig at end. + * Each SAMLConfig response is fully-specified (all fields set). + * start: Returns the state of each SAMLConfig at start, followed by updates until now. + * Each SAMLConfig response at start is fully-specified, but updates may be partial. + * start and end: Returns the state of each SAMLConfig at start, followed by updates + until end. + * Each SAMLConfig response at start is fully-specified, but updates until end may + be partial. + + This field is not allowed in the Subscribe RPC. + """ + + max_messages: Optional[int] = aristaproto.message_field( + 4, wraps=aristaproto.TYPE_UINT32 + ) + """ + MaxMessages limits the maximum number of messages that can be contained in one batch. + MaxMessages is required to be at least 1. + The maximum number of messages in a batch is min(max_messages, INTERNAL_BATCH_LIMIT) + INTERNAL_BATCH_LIMIT is set based on the maximum message size. 
+ """ + + +@dataclass(eq=False, repr=False) +class SamlConfigBatchedStreamResponse(aristaproto.Message): + responses: List["SamlConfigStreamResponse"] = aristaproto.message_field(1) + """ + Values are the values deemed relevant to the initiating request. + The length of this structure is guaranteed to be between (inclusive) 1 and + min(req.max_messages, INTERNAL_BATCH_LIMIT). + """ + + @dataclass(eq=False, repr=False) class SamlConfigSetRequest(aristaproto.Message): value: "SamlConfig" = aristaproto.message_field(1) @@ -944,6 +1038,42 @@ async def delete_all( ): yield response + async def get_all_batched( + self, + o_auth_config_batched_stream_request: "OAuthConfigBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["OAuthConfigBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.identityprovider.v1.OAuthConfigService/GetAllBatched", + o_auth_config_batched_stream_request, + OAuthConfigBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def subscribe_batched( + self, + o_auth_config_batched_stream_request: "OAuthConfigBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["OAuthConfigBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.identityprovider.v1.OAuthConfigService/SubscribeBatched", + o_auth_config_batched_stream_request, + OAuthConfigBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + class SamlConfigServiceStub(aristaproto.ServiceStub): async def get_one( @@ -1140,6 +1270,42 @@ async def delete_all( ): yield response + async def get_all_batched( + self, + saml_config_batched_stream_request: "SamlConfigBatchedStreamRequest", + *, + timeout: Optional[float] = 
None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["SamlConfigBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.identityprovider.v1.SAMLConfigService/GetAllBatched", + saml_config_batched_stream_request, + SamlConfigBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def subscribe_batched( + self, + saml_config_batched_stream_request: "SamlConfigBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["SamlConfigBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.identityprovider.v1.SAMLConfigService/SubscribeBatched", + saml_config_batched_stream_request, + SamlConfigBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + class OAuthConfigServiceBase(ServiceBase): @@ -1198,6 +1364,16 @@ async def delete_all( ) -> AsyncIterator["OAuthConfigDeleteAllResponse"]: raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def get_all_batched( + self, o_auth_config_batched_stream_request: "OAuthConfigBatchedStreamRequest" + ) -> AsyncIterator["OAuthConfigBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def subscribe_batched( + self, o_auth_config_batched_stream_request: "OAuthConfigBatchedStreamRequest" + ) -> AsyncIterator["OAuthConfigBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def __rpc_get_one( self, stream: "grpclib.server.Stream[OAuthConfigRequest, OAuthConfigResponse]" ) -> None: @@ -1304,6 +1480,28 @@ async def __rpc_delete_all( request, ) + async def __rpc_get_all_batched( + self, + stream: "grpclib.server.Stream[OAuthConfigBatchedStreamRequest, OAuthConfigBatchedStreamResponse]", + ) -> None: + request = await 
stream.recv_message() + await self._call_rpc_handler_server_stream( + self.get_all_batched, + stream, + request, + ) + + async def __rpc_subscribe_batched( + self, + stream: "grpclib.server.Stream[OAuthConfigBatchedStreamRequest, OAuthConfigBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.subscribe_batched, + stream, + request, + ) + def __mapping__(self) -> Dict[str, grpclib.const.Handler]: return { "/arista.identityprovider.v1.OAuthConfigService/GetOne": grpclib.const.Handler( @@ -1372,6 +1570,18 @@ def __mapping__(self) -> Dict[str, grpclib.const.Handler]: OAuthConfigDeleteAllRequest, OAuthConfigDeleteAllResponse, ), + "/arista.identityprovider.v1.OAuthConfigService/GetAllBatched": grpclib.const.Handler( + self.__rpc_get_all_batched, + grpclib.const.Cardinality.UNARY_STREAM, + OAuthConfigBatchedStreamRequest, + OAuthConfigBatchedStreamResponse, + ), + "/arista.identityprovider.v1.OAuthConfigService/SubscribeBatched": grpclib.const.Handler( + self.__rpc_subscribe_batched, + grpclib.const.Cardinality.UNARY_STREAM, + OAuthConfigBatchedStreamRequest, + OAuthConfigBatchedStreamResponse, + ), } @@ -1432,6 +1642,16 @@ async def delete_all( ) -> AsyncIterator["SamlConfigDeleteAllResponse"]: raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def get_all_batched( + self, saml_config_batched_stream_request: "SamlConfigBatchedStreamRequest" + ) -> AsyncIterator["SamlConfigBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def subscribe_batched( + self, saml_config_batched_stream_request: "SamlConfigBatchedStreamRequest" + ) -> AsyncIterator["SamlConfigBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def __rpc_get_one( self, stream: "grpclib.server.Stream[SamlConfigRequest, SamlConfigResponse]" ) -> None: @@ -1538,6 +1758,28 @@ async def __rpc_delete_all( request, ) + async def 
__rpc_get_all_batched( + self, + stream: "grpclib.server.Stream[SamlConfigBatchedStreamRequest, SamlConfigBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.get_all_batched, + stream, + request, + ) + + async def __rpc_subscribe_batched( + self, + stream: "grpclib.server.Stream[SamlConfigBatchedStreamRequest, SamlConfigBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.subscribe_batched, + stream, + request, + ) + def __mapping__(self) -> Dict[str, grpclib.const.Handler]: return { "/arista.identityprovider.v1.SAMLConfigService/GetOne": grpclib.const.Handler( @@ -1606,4 +1848,16 @@ def __mapping__(self) -> Dict[str, grpclib.const.Handler]: SamlConfigDeleteAllRequest, SamlConfigDeleteAllResponse, ), + "/arista.identityprovider.v1.SAMLConfigService/GetAllBatched": grpclib.const.Handler( + self.__rpc_get_all_batched, + grpclib.const.Cardinality.UNARY_STREAM, + SamlConfigBatchedStreamRequest, + SamlConfigBatchedStreamResponse, + ), + "/arista.identityprovider.v1.SAMLConfigService/SubscribeBatched": grpclib.const.Handler( + self.__rpc_subscribe_batched, + grpclib.const.Cardinality.UNARY_STREAM, + SamlConfigBatchedStreamRequest, + SamlConfigBatchedStreamResponse, + ), } diff --git a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/inventory/__init__.py b/python-avd/pyavd/_cv/api/arista/imagestatus/__init__.py similarity index 100% rename from ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/inventory/__init__.py rename to python-avd/pyavd/_cv/api/arista/imagestatus/__init__.py diff --git a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/imagestatus/v1/__init__.py b/python-avd/pyavd/_cv/api/arista/imagestatus/v1/__init__.py similarity index 71% rename from 
ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/imagestatus/v1/__init__.py rename to python-avd/pyavd/_cv/api/arista/imagestatus/v1/__init__.py index 500e1ac087d..9c0ba368577 100644 --- a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/imagestatus/v1/__init__.py +++ b/python-avd/pyavd/_cv/api/arista/imagestatus/v1/__init__.py @@ -16,17 +16,9 @@ Optional, ) -try: - import aristaproto - import grpclib - from aristaproto.grpc.grpclib_server import ServiceBase -except ImportError: - HAS_ARISTAPROTO = False - from ansible_collections.arista.avd.plugins.plugin_utils.cv_client.mocked_classes import mocked_aristaproto as aristaproto - from ansible_collections.arista.avd.plugins.plugin_utils.cv_client.mocked_classes import mocked_grpclib as grpclib - ServiceBase = object -else: - HAS_ARISTAPROTO = True +import aristaproto +import grpclib +from aristaproto.grpc.grpclib_server import ServiceBase from ... import ( subscriptions as __subscriptions__, @@ -203,6 +195,12 @@ class ErrorCode(aristaproto.Enum): version is lower than the embedded TerminAttr version in SWI. """ + DEVICE_EOS_2GB_INCOMPATIBLE = 15 + """ + ERROR_CODE_DEVICE_EOS_2GB_INCOMPATIBLE represents the case where a 2GB-device is incompatible + with a non-2GB EOS or a non-2GB device is incompatible with a 2GB-EOS. + """ + class WarningCode(aristaproto.Enum): """WarningCode indicates warnings produced during image validations.""" @@ -294,6 +292,18 @@ class WarningCode(aristaproto.Enum): TerminAttr version is below CloudVision's minimum supported version. """ + TA_STUDIO_INCOMPATIBLE = 15 + """ + WARNING_CODE_TA_STUDIO_INCOMPATIBLE represents cases where the TerminAttr is incompatible + with Software Management Studio. + """ + + BUGALERTS_DATA_MISSING = 16 + """ + WARNING_CODE_BUGALERTS_DATA_MISSING represents cases where some of the BugAlerts data + under Aeris analytics dataset is missing. 
+ """ + @dataclass(eq=False, repr=False) class SoftwareImage(aristaproto.Message): @@ -693,6 +703,27 @@ class ImageWarnings(aristaproto.Message): """values is a list of image warnings.""" +@dataclass(eq=False, repr=False) +class MetaResponse(aristaproto.Message): + time: datetime = aristaproto.message_field(1) + """ + Time holds the timestamp of the last item included in the metadata calculation. + """ + + type: "__subscriptions__.Operation" = aristaproto.enum_field(2) + """ + Operation indicates how the value in this response should be considered. + Under non-subscribe requests, this value should always be INITIAL. In a subscription, + once all initial data is streamed and the client begins to receive modification updates, + you should not see INITIAL again. + """ + + count: Optional[int] = aristaproto.message_field(3, wraps=aristaproto.TYPE_UINT32) + """ + Count is the number of items present under the conditions of the request. + """ + + @dataclass(eq=False, repr=False) class SummaryRequest(aristaproto.Message): key: "SummaryKey" = aristaproto.message_field(1) @@ -725,6 +756,35 @@ class SummaryResponse(aristaproto.Message): """ +@dataclass(eq=False, repr=False) +class SummarySomeRequest(aristaproto.Message): + keys: List["SummaryKey"] = aristaproto.message_field(1) + time: datetime = aristaproto.message_field(2) + """ + Time indicates the time for which you are interested in the data. + If no time is given, the server will use the time at which it makes the request. + """ + + +@dataclass(eq=False, repr=False) +class SummarySomeResponse(aristaproto.Message): + value: "Summary" = aristaproto.message_field(1) + """ + Value is the value requested. + This structure will be fully-populated as it exists in the datastore. If + optional fields were not given at creation, these fields will be empty or + set to default values. + """ + + error: Optional[str] = aristaproto.message_field(2, wraps=aristaproto.TYPE_STRING) + """ + Error is an optional field. 
+ It should be filled when there is an error in the GetSome process. + """ + + time: datetime = aristaproto.message_field(3) + + @dataclass(eq=False, repr=False) class SummaryStreamRequest(aristaproto.Message): partial_eq_filter: List["Summary"] = aristaproto.message_field(1) @@ -777,6 +837,57 @@ class SummaryStreamResponse(aristaproto.Message): """ +@dataclass(eq=False, repr=False) +class SummaryBatchedStreamRequest(aristaproto.Message): + partial_eq_filter: List["Summary"] = aristaproto.message_field(1) + """ + PartialEqFilter provides a way to server-side filter a GetAll/Subscribe. + This requires all provided fields to be equal to the response. + + While transparent to users, this field also allows services to optimize internal + subscriptions if filter(s) are sufficiently specific. + """ + + time: "__time__.TimeBounds" = aristaproto.message_field(3) + """ + TimeRange allows limiting response data to within a specified time window. + If this field is populated, at least one of the two time fields are required. + + For GetAll, the fields start and end can be used as follows: + + * end: Returns the state of each Summary at end. + * Each Summary response is fully-specified (all fields set). + * start: Returns the state of each Summary at start, followed by updates until now. + * Each Summary response at start is fully-specified, but updates may be partial. + * start and end: Returns the state of each Summary at start, followed by updates + until end. + * Each Summary response at start is fully-specified, but updates until end may + be partial. + + This field is not allowed in the Subscribe RPC. + """ + + max_messages: Optional[int] = aristaproto.message_field( + 4, wraps=aristaproto.TYPE_UINT32 + ) + """ + MaxMessages limits the maximum number of messages that can be contained in one batch. + MaxMessages is required to be at least 1. 
+ The maximum number of messages in a batch is min(max_messages, INTERNAL_BATCH_LIMIT) + INTERNAL_BATCH_LIMIT is set based on the maximum message size. + """ + + +@dataclass(eq=False, repr=False) +class SummaryBatchedStreamResponse(aristaproto.Message): + responses: List["SummaryStreamResponse"] = aristaproto.message_field(1) + """ + Values are the values deemed relevant to the initiating request. + The length of this structure is guaranteed to be between (inclusive) 1 and + min(req.max_messages, INTERNAL_BATCH_LIMIT). + """ + + class SummaryServiceStub(aristaproto.ServiceStub): async def get_one( self, @@ -795,6 +906,24 @@ async def get_one( metadata=metadata, ) + async def get_some( + self, + summary_some_request: "SummarySomeRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["SummarySomeResponse"]: + async for response in self._unary_stream( + "/arista.imagestatus.v1.SummaryService/GetSome", + summary_some_request, + SummarySomeResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + async def get_all( self, summary_stream_request: "SummaryStreamRequest", @@ -831,12 +960,88 @@ async def subscribe( ): yield response + async def get_meta( + self, + summary_stream_request: "SummaryStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> "MetaResponse": + return await self._unary_unary( + "/arista.imagestatus.v1.SummaryService/GetMeta", + summary_stream_request, + MetaResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ) + + async def subscribe_meta( + self, + summary_stream_request: "SummaryStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["MetaResponse"]: + async for response in self._unary_stream( + 
"/arista.imagestatus.v1.SummaryService/SubscribeMeta", + summary_stream_request, + MetaResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def get_all_batched( + self, + summary_batched_stream_request: "SummaryBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["SummaryBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.imagestatus.v1.SummaryService/GetAllBatched", + summary_batched_stream_request, + SummaryBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def subscribe_batched( + self, + summary_batched_stream_request: "SummaryBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["SummaryBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.imagestatus.v1.SummaryService/SubscribeBatched", + summary_batched_stream_request, + SummaryBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + class SummaryServiceBase(ServiceBase): async def get_one(self, summary_request: "SummaryRequest") -> "SummaryResponse": raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def get_some( + self, summary_some_request: "SummarySomeRequest" + ) -> AsyncIterator["SummarySomeResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def get_all( self, summary_stream_request: "SummaryStreamRequest" ) -> AsyncIterator["SummaryStreamResponse"]: @@ -847,6 +1052,26 @@ async def subscribe( ) -> AsyncIterator["SummaryStreamResponse"]: raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def get_meta( + self, summary_stream_request: "SummaryStreamRequest" + ) -> "MetaResponse": + raise 
grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def subscribe_meta( + self, summary_stream_request: "SummaryStreamRequest" + ) -> AsyncIterator["MetaResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def get_all_batched( + self, summary_batched_stream_request: "SummaryBatchedStreamRequest" + ) -> AsyncIterator["SummaryBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def subscribe_batched( + self, summary_batched_stream_request: "SummaryBatchedStreamRequest" + ) -> AsyncIterator["SummaryBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def __rpc_get_one( self, stream: "grpclib.server.Stream[SummaryRequest, SummaryResponse]" ) -> None: @@ -854,6 +1079,16 @@ async def __rpc_get_one( response = await self.get_one(request) await stream.send_message(response) + async def __rpc_get_some( + self, stream: "grpclib.server.Stream[SummarySomeRequest, SummarySomeResponse]" + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.get_some, + stream, + request, + ) + async def __rpc_get_all( self, stream: "grpclib.server.Stream[SummaryStreamRequest, SummaryStreamResponse]", @@ -876,6 +1111,45 @@ async def __rpc_subscribe( request, ) + async def __rpc_get_meta( + self, stream: "grpclib.server.Stream[SummaryStreamRequest, MetaResponse]" + ) -> None: + request = await stream.recv_message() + response = await self.get_meta(request) + await stream.send_message(response) + + async def __rpc_subscribe_meta( + self, stream: "grpclib.server.Stream[SummaryStreamRequest, MetaResponse]" + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.subscribe_meta, + stream, + request, + ) + + async def __rpc_get_all_batched( + self, + stream: "grpclib.server.Stream[SummaryBatchedStreamRequest, SummaryBatchedStreamResponse]", + ) -> None: + request = await 
stream.recv_message() + await self._call_rpc_handler_server_stream( + self.get_all_batched, + stream, + request, + ) + + async def __rpc_subscribe_batched( + self, + stream: "grpclib.server.Stream[SummaryBatchedStreamRequest, SummaryBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.subscribe_batched, + stream, + request, + ) + def __mapping__(self) -> Dict[str, grpclib.const.Handler]: return { "/arista.imagestatus.v1.SummaryService/GetOne": grpclib.const.Handler( @@ -884,6 +1158,12 @@ def __mapping__(self) -> Dict[str, grpclib.const.Handler]: SummaryRequest, SummaryResponse, ), + "/arista.imagestatus.v1.SummaryService/GetSome": grpclib.const.Handler( + self.__rpc_get_some, + grpclib.const.Cardinality.UNARY_STREAM, + SummarySomeRequest, + SummarySomeResponse, + ), "/arista.imagestatus.v1.SummaryService/GetAll": grpclib.const.Handler( self.__rpc_get_all, grpclib.const.Cardinality.UNARY_STREAM, @@ -896,4 +1176,28 @@ def __mapping__(self) -> Dict[str, grpclib.const.Handler]: SummaryStreamRequest, SummaryStreamResponse, ), + "/arista.imagestatus.v1.SummaryService/GetMeta": grpclib.const.Handler( + self.__rpc_get_meta, + grpclib.const.Cardinality.UNARY_UNARY, + SummaryStreamRequest, + MetaResponse, + ), + "/arista.imagestatus.v1.SummaryService/SubscribeMeta": grpclib.const.Handler( + self.__rpc_subscribe_meta, + grpclib.const.Cardinality.UNARY_STREAM, + SummaryStreamRequest, + MetaResponse, + ), + "/arista.imagestatus.v1.SummaryService/GetAllBatched": grpclib.const.Handler( + self.__rpc_get_all_batched, + grpclib.const.Cardinality.UNARY_STREAM, + SummaryBatchedStreamRequest, + SummaryBatchedStreamResponse, + ), + "/arista.imagestatus.v1.SummaryService/SubscribeBatched": grpclib.const.Handler( + self.__rpc_subscribe_batched, + grpclib.const.Cardinality.UNARY_STREAM, + SummaryBatchedStreamRequest, + SummaryBatchedStreamResponse, + ), } diff --git 
a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/lifecycle/__init__.py b/python-avd/pyavd/_cv/api/arista/inventory/__init__.py similarity index 100% rename from ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/lifecycle/__init__.py rename to python-avd/pyavd/_cv/api/arista/inventory/__init__.py diff --git a/python-avd/pyavd/_cv/api/arista/inventory/v1/__init__.py b/python-avd/pyavd/_cv/api/arista/inventory/v1/__init__.py new file mode 100644 index 00000000000..6d792dc8925 --- /dev/null +++ b/python-avd/pyavd/_cv/api/arista/inventory/v1/__init__.py @@ -0,0 +1,3994 @@ +# Copyright (c) 2024 Arista Networks, Inc. +# Use of this source code is governed by the Apache License 2.0 +# that can be found in the LICENSE file. +# Generated by the protocol buffer compiler. DO NOT EDIT! +# sources: arista/inventory.v1/inventory.proto, arista/inventory.v1/services.gen.proto +# plugin: python-aristaproto +# This file has been @generated + +from dataclasses import dataclass +from datetime import datetime +from typing import ( + TYPE_CHECKING, + AsyncIterator, + Dict, + List, + Optional, +) + +import aristaproto +import grpclib +from aristaproto.grpc.grpclib_server import ServiceBase + +from .... import fmp as ___fmp__ +from ... import ( + subscriptions as __subscriptions__, + time as __time__, +) + + +if TYPE_CHECKING: + import grpclib.server + from aristaproto.grpc.grpclib_client import MetadataLike + from grpclib.metadata import Deadline + + +class StreamingStatus(aristaproto.Enum): + """ + StreamingStatus defines the status of telemetry streaming for a device. + """ + + UNSPECIFIED = 0 + INACTIVE = 1 + """ + STREAMING_STATUS_INACTIVE indicates the device is not streaming telemetry. + """ + + ACTIVE = 2 + """STREAMING_STATUS_ACTIVE indicates the device is streaming telemetry.""" + + +class OnboardingStatus(aristaproto.Enum): + """ + OnboardingStatus defines the set of possible states in the onboarding process + for a device. 
+ """ + + UNSPECIFIED = 0 + IN_PROGRESS = 1 + """ONBOARDING_STATUS_IN_PROGRESS indicates onboarding is in progress.""" + + FAILURE = 2 + """ONBOARDING_STATUS_FAILURE indicates onboarding failed.""" + + SUCCESS = 3 + """ONBOARDING_STATUS_SUCCESS indicates onboarding succeeded.""" + + +class DecommissioningStatus(aristaproto.Enum): + """ + DecommissioningStatus defines the set of possible states in the decommissioning + process for a device. + """ + + UNSPECIFIED = 0 + IN_PROGRESS = 1 + """ + DECOMMISSIONING_STATUS_IN_PROGRESS indicates decommissioning is in progress. + """ + + FAILURE = 2 + """DECOMMISSIONING_STATUS_FAILURE indicates decommissioning failed.""" + + SUCCESS = 3 + """DECOMMISSIONING_STATUS_SUCCESS indicates decommissioning succeeded.""" + + +class ProvisioningStatus(aristaproto.Enum): + """ + ProvisioningStatus defines the set of possible states in the provisioning + process for a device. + """ + + UNSPECIFIED = 0 + IN_PROGRESS = 1 + """ + PROVISIONING_STATUS_IN_PROGRESS indicates provisioning is in progress. + """ + + FAILURE = 2 + """PROVISIONING_STATUS_FAILURE indicates provisioning failed.""" + + SUCCESS = 3 + """PROVISIONING_STATUS_SUCCESS indicates provisioning succeeded.""" + + +@dataclass(eq=False, repr=False) +class ExtendedAttributes(aristaproto.Message): + """ + ExtendedAttributes wraps any additional, potentially non-standard, features + or attributes the device reports. + """ + + feature_enabled: Dict[str, bool] = aristaproto.map_field( + 1, aristaproto.TYPE_STRING, aristaproto.TYPE_BOOL + ) + """ + feature_enabled is a map of feature name to enabled status. + If a feature is missing from this map it can be assumed off. 
+ """ + + +@dataclass(eq=False, repr=False) +class DeviceKey(aristaproto.Message): + """DeviceKey uniquely identifies a single device.""" + + device_id: Optional[str] = aristaproto.message_field( + 1, wraps=aristaproto.TYPE_STRING + ) + """device_id is the unique identifier of the device.""" + + +@dataclass(eq=False, repr=False) +class DeviceConfiguration(aristaproto.Message): + """ + DeviceConfiguration holds the device-specific configuration for a third-party + device, as defined in https://github.com/aristanetworks/cloudvision-go. + """ + + options: Dict[str, str] = aristaproto.map_field( + 1, aristaproto.TYPE_STRING, aristaproto.TYPE_STRING + ) + """ + options is a map from device option to value. + + E.g., for an SNMP device, this could be the following: + + "address": "my_snmp_hostname", + "community": "public" + """ + + +@dataclass(eq=False, repr=False) +class UuidKey(aristaproto.Message): + """ + UUIDKey is a key that holds a UUID for an onboarding or decommissioning request. + """ + + request_id: Optional[str] = aristaproto.message_field( + 1, wraps=aristaproto.TYPE_STRING + ) + """request_id should be a UUID for the request.""" + + +@dataclass(eq=False, repr=False) +class DeviceOnboardingConfig(aristaproto.Message): + """ + DeviceOnboardingConfig describes a device onboarding request. "Onboarding" + refers to the process of initiating device streaming to CloudVision and + adding the streaming device to CloudVision's inventory. + + The request flow works as follows: + + 1. Set on DeviceOnboardingConfig sends an onboarding request with a UUID + that the user is responsible for generating. + 2. Once the server receives the request, it validates and records it. + 3. Then, the server processes it, initiating the onboarding procedure and + tracking the status of the onboarding attempt. + 4. The user may do a GetOne or Subscribe on DeviceOnboarding using the same + UUID to see the status of the request. 
+ """ + + key: "UuidKey" = aristaproto.message_field(1) + """key identifies the request to onboard the device at hostname_or_ip.""" + + hostname_or_ip: Optional[str] = aristaproto.message_field( + 2, wraps=aristaproto.TYPE_STRING + ) + """ + hostname_or_ip is a hostname or an IP at which the device can be reached. + """ + + device_type: Optional[str] = aristaproto.message_field( + 3, wraps=aristaproto.TYPE_STRING + ) + """ + device_type describes the method by which to retrieve information for the + device. The value should be "eos" for eos devices. For third-party devices, + supported values are: "openconfig", "snmp", "cvp", "mwm", and "vCenter". + """ + + device_config: "DeviceConfiguration" = aristaproto.message_field(4) + """device_config is the configuration for a third-party device.""" + + +@dataclass(eq=False, repr=False) +class DeviceOnboarding(aristaproto.Message): + """DeviceOnboarding describes the status of an onboarding process.""" + + key: "UuidKey" = aristaproto.message_field(1) + """ + key identifies the request for which to retrieve an onboarding status. + """ + + device_id: Optional[str] = aristaproto.message_field( + 2, wraps=aristaproto.TYPE_STRING + ) + """device_id is the unique device ID that is discovered via onboarding.""" + + status: "OnboardingStatus" = aristaproto.enum_field(3) + """status describes the onboarding status of the device.""" + + error: Optional[str] = aristaproto.message_field(4, wraps=aristaproto.TYPE_STRING) + """ + error is the error that caused status to become ONBOARDING_STATUS_FAILURE. + """ + + status_message: Optional[str] = aristaproto.message_field( + 5, wraps=aristaproto.TYPE_STRING + ) + """ + status_message contains information on the status of the onboarding attempt, + if any. This is generally an unstructured log message that is for display + purposes only (its structure and contents may change). 
+ """ + + +@dataclass(eq=False, repr=False) +class DeviceDecommissioningConfig(aristaproto.Message): + """ + DeviceDecommissioningConfig describes a device decommissioning request. + "Decommissioning" refers to the process of stopping device streaming to + CloudVision and removing it from CloudVision's inventory. + + The request flow works as follows: + + 1. Set on DeviceDecommissioningConfig sends a decommissioning request with + a UUID that the user is responsible for generating. + 2. Once the server receives the request, it validates and records it. + 3. Then, the server processes it, initiating the decommissioning procedure + and tracking the status of the decommissioning attempt. + 4. The user may do a GetOne or Subscribe on DeviceDecommissioning using the + same UUID to see the status of the request. + """ + + key: "UuidKey" = aristaproto.message_field(1) + """key identifies the request to decommission the device.""" + + device_id: Optional[str] = aristaproto.message_field( + 2, wraps=aristaproto.TYPE_STRING + ) + """ + device_id is the unique device ID that was discovered via onboarding. + """ + + force: Optional[bool] = aristaproto.message_field(3, wraps=aristaproto.TYPE_BOOL) + """ + force is a flag that indicates if the decommission is to be forced. + Normally, if there are pending or in-progress tasks associated with the device + the decommission would fail. In case of a forced decommission, such blocking + tasks would be ignored and decommissioning will be continued. + """ + + +@dataclass(eq=False, repr=False) +class DeviceDecommissioning(aristaproto.Message): + """DeviceDecommissioning describes the status of a decommissioning process.""" + + key: "UuidKey" = aristaproto.message_field(1) + """ + key identifies the request for which to retrieve a decommissioning status.
+ """ + + status: "DecommissioningStatus" = aristaproto.enum_field(2) + """status describes the decommissioning status of the device.""" + + error: Optional[str] = aristaproto.message_field(3, wraps=aristaproto.TYPE_STRING) + """ + error is the error that caused status to become DECOMMISSIONING_STATUS_FAILURE. + """ + + status_message: Optional[str] = aristaproto.message_field( + 4, wraps=aristaproto.TYPE_STRING + ) + """ + status_message contains information on the status of the decommissioning attempt, + if any. This is generally an unstructured log message that is for display + purposes only (its structure and contents may change). + """ + + +@dataclass(eq=False, repr=False) +class Device(aristaproto.Message): + """Device describes an onboarded device.""" + + key: "DeviceKey" = aristaproto.message_field(1) + """key uniquely identifies the device.""" + + software_version: Optional[str] = aristaproto.message_field( + 2, wraps=aristaproto.TYPE_STRING + ) + """ + software_version gives the currently running device software version. + """ + + model_name: Optional[str] = aristaproto.message_field( + 3, wraps=aristaproto.TYPE_STRING + ) + """model_name describes the hardware model of this device.""" + + hardware_revision: Optional[str] = aristaproto.message_field( + 4, wraps=aristaproto.TYPE_STRING + ) + """hardware_revision describes any revisional data to the model name.""" + + fqdn: Optional[str] = aristaproto.message_field(10, wraps=aristaproto.TYPE_STRING) + """fqdn gives the device's fully qualified domain name.""" + + hostname: Optional[str] = aristaproto.message_field( + 11, wraps=aristaproto.TYPE_STRING + ) + """hostname is the hostname as reported on the device.""" + + domain_name: Optional[str] = aristaproto.message_field( + 12, wraps=aristaproto.TYPE_STRING + ) + """ + domain_name provides the domain name on which the device is registered. 
+ """ + + system_mac_address: Optional[str] = aristaproto.message_field( + 13, wraps=aristaproto.TYPE_STRING + ) + """system_mac_address provides the MAC address of the management port.""" + + boot_time: datetime = aristaproto.message_field(20) + """boot_time indicates when the device was last booted.""" + + streaming_status: "StreamingStatus" = aristaproto.enum_field(30) + """ + streaming_status is the status of telemetry streaming for this device. + """ + + extended_attributes: "ExtendedAttributes" = aristaproto.message_field(31) + """ + extended_attributes wraps any additional, potentially non-standard, features + or attributes that the device reports. + """ + + +@dataclass(eq=False, repr=False) +class ProvisionedDevice(aristaproto.Message): + """ + ProvisionedDevice describes the provisioning status of an onboarded device + if the onboarded device is configured for provisioning. + """ + + key: "DeviceKey" = aristaproto.message_field(1) + """key uniquely identifies the device.""" + + status: "ProvisioningStatus" = aristaproto.enum_field(2) + """status describes the onboarded device's provisioning status.""" + + error: Optional[str] = aristaproto.message_field(3, wraps=aristaproto.TYPE_STRING) + """ + error is the error that caused status to become PROVISIONING_STATUS_FAILURE. + """ + + ztp_mode: Optional[bool] = aristaproto.message_field(4, wraps=aristaproto.TYPE_BOOL) + """ztp_mode indicates whether the device is in ZTP mode.""" + + ip_address: "___fmp__.IpAddress" = aristaproto.message_field(5) + """ + ip_address is the current (post-provisioning) IP address of the device. + """ + + provisioning_group_name: Optional[str] = aristaproto.message_field( + 6, wraps=aristaproto.TYPE_STRING + ) + """ + provisioning_group_name is the name of the group (also known as a container) + to which the device belongs. Any provisioning operation performed on this + group will also be performed on this device. If the device is not yet provisioned, + this will not be set. 
Once it is provisioned, this will be set to "undefined_container" + which indicates that the device does not yet belong to a group. At this point, + a user may set it to an existing group. + """ + + +@dataclass(eq=False, repr=False) +class MetaResponse(aristaproto.Message): + time: datetime = aristaproto.message_field(1) + """ + Time holds the timestamp of the last item included in the metadata calculation. + """ + + type: "__subscriptions__.Operation" = aristaproto.enum_field(2) + """ + Operation indicates how the value in this response should be considered. + Under non-subscribe requests, this value should always be INITIAL. In a subscription, + once all initial data is streamed and the client begins to receive modification updates, + you should not see INITIAL again. + """ + + count: Optional[int] = aristaproto.message_field(3, wraps=aristaproto.TYPE_UINT32) + """ + Count is the number of items present under the conditions of the request. + """ + + +@dataclass(eq=False, repr=False) +class DeviceRequest(aristaproto.Message): + key: "DeviceKey" = aristaproto.message_field(1) + """ + Key uniquely identifies a Device instance to retrieve. + This value must be populated. + """ + + time: datetime = aristaproto.message_field(2) + """ + Time indicates the time for which you are interested in the data. + If no time is given, the server will use the time at which it makes the request. + """ + + +@dataclass(eq=False, repr=False) +class DeviceResponse(aristaproto.Message): + value: "Device" = aristaproto.message_field(1) + """ + Value is the value requested. + This structure will be fully-populated as it exists in the datastore. If + optional fields were not given at creation, these fields will be empty or + set to default values. + """ + + time: datetime = aristaproto.message_field(2) + """ + Time carries the (UTC) timestamp of the last-modification of the + Device instance in this response. 
+ """ + + +@dataclass(eq=False, repr=False) +class DeviceSomeRequest(aristaproto.Message): + keys: List["DeviceKey"] = aristaproto.message_field(1) + time: datetime = aristaproto.message_field(2) + """ + Time indicates the time for which you are interested in the data. + If no time is given, the server will use the time at which it makes the request. + """ + + +@dataclass(eq=False, repr=False) +class DeviceSomeResponse(aristaproto.Message): + value: "Device" = aristaproto.message_field(1) + """ + Value is the value requested. + This structure will be fully-populated as it exists in the datastore. If + optional fields were not given at creation, these fields will be empty or + set to default values. + """ + + error: Optional[str] = aristaproto.message_field(2, wraps=aristaproto.TYPE_STRING) + """ + Error is an optional field. + It should be filled when there is an error in the GetSome process. + """ + + time: datetime = aristaproto.message_field(3) + + +@dataclass(eq=False, repr=False) +class DeviceStreamRequest(aristaproto.Message): + partial_eq_filter: List["Device"] = aristaproto.message_field(1) + """ + PartialEqFilter provides a way to server-side filter a GetAll/Subscribe. + This requires all provided fields to be equal to the response. + + While transparent to users, this field also allows services to optimize internal + subscriptions if filter(s) are sufficiently specific. + """ + + time: "__time__.TimeBounds" = aristaproto.message_field(3) + """ + TimeRange allows limiting response data to within a specified time window. + If this field is populated, at least one of the two time fields are required. + + For GetAll, the fields start and end can be used as follows: + + * end: Returns the state of each Device at end. + * Each Device response is fully-specified (all fields set). + * start: Returns the state of each Device at start, followed by updates until now. + * Each Device response at start is fully-specified, but updates may be partial. 
+ * start and end: Returns the state of each Device at start, followed by updates + until end. + * Each Device response at start is fully-specified, but updates until end may + be partial. + + This field is not allowed in the Subscribe RPC. + """ + + +@dataclass(eq=False, repr=False) +class DeviceStreamResponse(aristaproto.Message): + value: "Device" = aristaproto.message_field(1) + """ + Value is a value deemed relevant to the initiating request. + This structure will always have its key-field populated. Which other fields are + populated, and why, depends on the value of Operation and what triggered this notification. + """ + + time: datetime = aristaproto.message_field(2) + """Time holds the timestamp of this Device's last modification.""" + + type: "__subscriptions__.Operation" = aristaproto.enum_field(3) + """ + Operation indicates how the Device value in this response should be considered. + Under non-subscribe requests, this value should always be INITIAL. In a subscription, + once all initial data is streamed and the client begins to receive modification updates, + you should not see INITIAL again. + """ + + +@dataclass(eq=False, repr=False) +class DeviceBatchedStreamRequest(aristaproto.Message): + partial_eq_filter: List["Device"] = aristaproto.message_field(1) + """ + PartialEqFilter provides a way to server-side filter a GetAll/Subscribe. + This requires all provided fields to be equal to the response. + + While transparent to users, this field also allows services to optimize internal + subscriptions if filter(s) are sufficiently specific. + """ + + time: "__time__.TimeBounds" = aristaproto.message_field(3) + """ + TimeRange allows limiting response data to within a specified time window. + If this field is populated, at least one of the two time fields are required. + + For GetAll, the fields start and end can be used as follows: + + * end: Returns the state of each Device at end. + * Each Device response is fully-specified (all fields set). 
+ * start: Returns the state of each Device at start, followed by updates until now. + * Each Device response at start is fully-specified, but updates may be partial. + * start and end: Returns the state of each Device at start, followed by updates + until end. + * Each Device response at start is fully-specified, but updates until end may + be partial. + + This field is not allowed in the Subscribe RPC. + """ + + max_messages: Optional[int] = aristaproto.message_field( + 4, wraps=aristaproto.TYPE_UINT32 + ) + """ + MaxMessages limits the maximum number of messages that can be contained in one batch. + MaxMessages is required to be at least 1. + The maximum number of messages in a batch is min(max_messages, INTERNAL_BATCH_LIMIT) + INTERNAL_BATCH_LIMIT is set based on the maximum message size. + """ + + +@dataclass(eq=False, repr=False) +class DeviceBatchedStreamResponse(aristaproto.Message): + responses: List["DeviceStreamResponse"] = aristaproto.message_field(1) + """ + Values are the values deemed relevant to the initiating request. + The length of this structure is guaranteed to be between (inclusive) 1 and + min(req.max_messages, INTERNAL_BATCH_LIMIT). + """ + + +@dataclass(eq=False, repr=False) +class DeviceDecommissioningRequest(aristaproto.Message): + key: "UuidKey" = aristaproto.message_field(1) + """ + Key uniquely identifies a DeviceDecommissioning instance to retrieve. + This value must be populated. + """ + + time: datetime = aristaproto.message_field(2) + """ + Time indicates the time for which you are interested in the data. + If no time is given, the server will use the time at which it makes the request. + """ + + +@dataclass(eq=False, repr=False) +class DeviceDecommissioningResponse(aristaproto.Message): + value: "DeviceDecommissioning" = aristaproto.message_field(1) + """ + Value is the value requested. + This structure will be fully-populated as it exists in the datastore. 
If + optional fields were not given at creation, these fields will be empty or + set to default values. + """ + + time: datetime = aristaproto.message_field(2) + """ + Time carries the (UTC) timestamp of the last-modification of the + DeviceDecommissioning instance in this response. + """ + + +@dataclass(eq=False, repr=False) +class DeviceDecommissioningSomeRequest(aristaproto.Message): + keys: List["UuidKey"] = aristaproto.message_field(1) + time: datetime = aristaproto.message_field(2) + """ + Time indicates the time for which you are interested in the data. + If no time is given, the server will use the time at which it makes the request. + """ + + +@dataclass(eq=False, repr=False) +class DeviceDecommissioningSomeResponse(aristaproto.Message): + value: "DeviceDecommissioning" = aristaproto.message_field(1) + """ + Value is the value requested. + This structure will be fully-populated as it exists in the datastore. If + optional fields were not given at creation, these fields will be empty or + set to default values. + """ + + error: Optional[str] = aristaproto.message_field(2, wraps=aristaproto.TYPE_STRING) + """ + Error is an optional field. + It should be filled when there is an error in the GetSome process. + """ + + time: datetime = aristaproto.message_field(3) + + +@dataclass(eq=False, repr=False) +class DeviceDecommissioningStreamRequest(aristaproto.Message): + partial_eq_filter: List["DeviceDecommissioning"] = aristaproto.message_field(1) + """ + PartialEqFilter provides a way to server-side filter a GetAll/Subscribe. + This requires all provided fields to be equal to the response. + + While transparent to users, this field also allows services to optimize internal + subscriptions if filter(s) are sufficiently specific. + """ + + time: "__time__.TimeBounds" = aristaproto.message_field(3) + """ + TimeRange allows limiting response data to within a specified time window. + If this field is populated, at least one of the two time fields are required. 
+ + For GetAll, the fields start and end can be used as follows: + + * end: Returns the state of each DeviceDecommissioning at end. + * Each DeviceDecommissioning response is fully-specified (all fields set). + * start: Returns the state of each DeviceDecommissioning at start, followed by updates until now. + * Each DeviceDecommissioning response at start is fully-specified, but updates may be partial. + * start and end: Returns the state of each DeviceDecommissioning at start, followed by updates + until end. + * Each DeviceDecommissioning response at start is fully-specified, but updates until end may + be partial. + + This field is not allowed in the Subscribe RPC. + """ + + +@dataclass(eq=False, repr=False) +class DeviceDecommissioningStreamResponse(aristaproto.Message): + value: "DeviceDecommissioning" = aristaproto.message_field(1) + """ + Value is a value deemed relevant to the initiating request. + This structure will always have its key-field populated. Which other fields are + populated, and why, depends on the value of Operation and what triggered this notification. + """ + + time: datetime = aristaproto.message_field(2) + """ + Time holds the timestamp of this DeviceDecommissioning's last modification. + """ + + type: "__subscriptions__.Operation" = aristaproto.enum_field(3) + """ + Operation indicates how the DeviceDecommissioning value in this response should be considered. + Under non-subscribe requests, this value should always be INITIAL. In a subscription, + once all initial data is streamed and the client begins to receive modification updates, + you should not see INITIAL again. + """ + + +@dataclass(eq=False, repr=False) +class DeviceDecommissioningBatchedStreamRequest(aristaproto.Message): + partial_eq_filter: List["DeviceDecommissioning"] = aristaproto.message_field(1) + """ + PartialEqFilter provides a way to server-side filter a GetAll/Subscribe. + This requires all provided fields to be equal to the response. 
+ + While transparent to users, this field also allows services to optimize internal + subscriptions if filter(s) are sufficiently specific. + """ + + time: "__time__.TimeBounds" = aristaproto.message_field(3) + """ + TimeRange allows limiting response data to within a specified time window. + If this field is populated, at least one of the two time fields are required. + + For GetAll, the fields start and end can be used as follows: + + * end: Returns the state of each DeviceDecommissioning at end. + * Each DeviceDecommissioning response is fully-specified (all fields set). + * start: Returns the state of each DeviceDecommissioning at start, followed by updates until now. + * Each DeviceDecommissioning response at start is fully-specified, but updates may be partial. + * start and end: Returns the state of each DeviceDecommissioning at start, followed by updates + until end. + * Each DeviceDecommissioning response at start is fully-specified, but updates until end may + be partial. + + This field is not allowed in the Subscribe RPC. + """ + + max_messages: Optional[int] = aristaproto.message_field( + 4, wraps=aristaproto.TYPE_UINT32 + ) + """ + MaxMessages limits the maximum number of messages that can be contained in one batch. + MaxMessages is required to be at least 1. + The maximum number of messages in a batch is min(max_messages, INTERNAL_BATCH_LIMIT) + INTERNAL_BATCH_LIMIT is set based on the maximum message size. + """ + + +@dataclass(eq=False, repr=False) +class DeviceDecommissioningBatchedStreamResponse(aristaproto.Message): + responses: List["DeviceDecommissioningStreamResponse"] = aristaproto.message_field( + 1 + ) + """ + Values are the values deemed relevant to the initiating request. + The length of this structure is guaranteed to be between (inclusive) 1 and + min(req.max_messages, INTERNAL_BATCH_LIMIT). 
+ """ + + +@dataclass(eq=False, repr=False) +class DeviceDecommissioningConfigRequest(aristaproto.Message): + key: "UuidKey" = aristaproto.message_field(1) + """ + Key uniquely identifies a DeviceDecommissioningConfig instance to retrieve. + This value must be populated. + """ + + time: datetime = aristaproto.message_field(2) + """ + Time indicates the time for which you are interested in the data. + If no time is given, the server will use the time at which it makes the request. + """ + + +@dataclass(eq=False, repr=False) +class DeviceDecommissioningConfigResponse(aristaproto.Message): + value: "DeviceDecommissioningConfig" = aristaproto.message_field(1) + """ + Value is the value requested. + This structure will be fully-populated as it exists in the datastore. If + optional fields were not given at creation, these fields will be empty or + set to default values. + """ + + time: datetime = aristaproto.message_field(2) + """ + Time carries the (UTC) timestamp of the last-modification of the + DeviceDecommissioningConfig instance in this response. + """ + + +@dataclass(eq=False, repr=False) +class DeviceDecommissioningConfigSomeRequest(aristaproto.Message): + keys: List["UuidKey"] = aristaproto.message_field(1) + time: datetime = aristaproto.message_field(2) + """ + Time indicates the time for which you are interested in the data. + If no time is given, the server will use the time at which it makes the request. + """ + + +@dataclass(eq=False, repr=False) +class DeviceDecommissioningConfigSomeResponse(aristaproto.Message): + value: "DeviceDecommissioningConfig" = aristaproto.message_field(1) + """ + Value is the value requested. + This structure will be fully-populated as it exists in the datastore. If + optional fields were not given at creation, these fields will be empty or + set to default values. + """ + + error: Optional[str] = aristaproto.message_field(2, wraps=aristaproto.TYPE_STRING) + """ + Error is an optional field. 
+ It should be filled when there is an error in the GetSome process. + """ + + time: datetime = aristaproto.message_field(3) + + +@dataclass(eq=False, repr=False) +class DeviceDecommissioningConfigStreamRequest(aristaproto.Message): + partial_eq_filter: List["DeviceDecommissioningConfig"] = aristaproto.message_field( + 1 + ) + """ + PartialEqFilter provides a way to server-side filter a GetAll/Subscribe. + This requires all provided fields to be equal to the response. + + While transparent to users, this field also allows services to optimize internal + subscriptions if filter(s) are sufficiently specific. + """ + + time: "__time__.TimeBounds" = aristaproto.message_field(3) + """ + TimeRange allows limiting response data to within a specified time window. + If this field is populated, at least one of the two time fields are required. + + For GetAll, the fields start and end can be used as follows: + + * end: Returns the state of each DeviceDecommissioningConfig at end. + * Each DeviceDecommissioningConfig response is fully-specified (all fields set). + * start: Returns the state of each DeviceDecommissioningConfig at start, followed by updates until now. + * Each DeviceDecommissioningConfig response at start is fully-specified, but updates may be partial. + * start and end: Returns the state of each DeviceDecommissioningConfig at start, followed by updates + until end. + * Each DeviceDecommissioningConfig response at start is fully-specified, but updates until end may + be partial. + + This field is not allowed in the Subscribe RPC. + """ + + +@dataclass(eq=False, repr=False) +class DeviceDecommissioningConfigStreamResponse(aristaproto.Message): + value: "DeviceDecommissioningConfig" = aristaproto.message_field(1) + """ + Value is a value deemed relevant to the initiating request. + This structure will always have its key-field populated. Which other fields are + populated, and why, depends on the value of Operation and what triggered this notification. 
+ """ + + time: datetime = aristaproto.message_field(2) + """ + Time holds the timestamp of this DeviceDecommissioningConfig's last modification. + """ + + type: "__subscriptions__.Operation" = aristaproto.enum_field(3) + """ + Operation indicates how the DeviceDecommissioningConfig value in this response should be considered. + Under non-subscribe requests, this value should always be INITIAL. In a subscription, + once all initial data is streamed and the client begins to receive modification updates, + you should not see INITIAL again. + """ + + +@dataclass(eq=False, repr=False) +class DeviceDecommissioningConfigBatchedStreamRequest(aristaproto.Message): + partial_eq_filter: List["DeviceDecommissioningConfig"] = aristaproto.message_field( + 1 + ) + """ + PartialEqFilter provides a way to server-side filter a GetAll/Subscribe. + This requires all provided fields to be equal to the response. + + While transparent to users, this field also allows services to optimize internal + subscriptions if filter(s) are sufficiently specific. + """ + + time: "__time__.TimeBounds" = aristaproto.message_field(3) + """ + TimeRange allows limiting response data to within a specified time window. + If this field is populated, at least one of the two time fields are required. + + For GetAll, the fields start and end can be used as follows: + + * end: Returns the state of each DeviceDecommissioningConfig at end. + * Each DeviceDecommissioningConfig response is fully-specified (all fields set). + * start: Returns the state of each DeviceDecommissioningConfig at start, followed by updates until now. + * Each DeviceDecommissioningConfig response at start is fully-specified, but updates may be partial. + * start and end: Returns the state of each DeviceDecommissioningConfig at start, followed by updates + until end. + * Each DeviceDecommissioningConfig response at start is fully-specified, but updates until end may + be partial. + + This field is not allowed in the Subscribe RPC. 
+ """ + + max_messages: Optional[int] = aristaproto.message_field( + 4, wraps=aristaproto.TYPE_UINT32 + ) + """ + MaxMessages limits the maximum number of messages that can be contained in one batch. + MaxMessages is required to be at least 1. + The maximum number of messages in a batch is min(max_messages, INTERNAL_BATCH_LIMIT) + INTERNAL_BATCH_LIMIT is set based on the maximum message size. + """ + + +@dataclass(eq=False, repr=False) +class DeviceDecommissioningConfigBatchedStreamResponse(aristaproto.Message): + responses: List["DeviceDecommissioningConfigStreamResponse"] = ( + aristaproto.message_field(1) + ) + """ + Values are the values deemed relevant to the initiating request. + The length of this structure is guaranteed to be between (inclusive) 1 and + min(req.max_messages, INTERNAL_BATCH_LIMIT). + """ + + +@dataclass(eq=False, repr=False) +class DeviceDecommissioningConfigSetRequest(aristaproto.Message): + value: "DeviceDecommissioningConfig" = aristaproto.message_field(1) + """ + DeviceDecommissioningConfig carries the value to set into the datastore. + See the documentation on the DeviceDecommissioningConfig struct for which fields are required. + """ + + +@dataclass(eq=False, repr=False) +class DeviceDecommissioningConfigSetResponse(aristaproto.Message): + value: "DeviceDecommissioningConfig" = aristaproto.message_field(1) + """ + Value carries all the values given in the DeviceDecommissioningConfigSetRequest as well + as any server-generated values. + """ + + time: datetime = aristaproto.message_field(2) + """ + Time indicates the (UTC) timestamp at which the system recognizes the + creation. The only guarantees made about this timestamp are: + + - it is after the time the request was received + - a time-ranged query with StartTime==CreatedAt will include this instance. 
+ """ + + +@dataclass(eq=False, repr=False) +class DeviceDecommissioningConfigSetSomeRequest(aristaproto.Message): + values: List["DeviceDecommissioningConfig"] = aristaproto.message_field(1) + """ + value contains a list of DeviceDecommissioningConfig values to write. + It is possible to provide more values than can fit within either: + - the maxiumum send size of the client + - the maximum receive size of the server + If this error occurs you must reduce the number of values sent. + See gRPC "maximum message size" documentation for more information. + """ + + +@dataclass(eq=False, repr=False) +class DeviceDecommissioningConfigSetSomeResponse(aristaproto.Message): + key: "UuidKey" = aristaproto.message_field(1) + error: str = aristaproto.string_field(2) + + +@dataclass(eq=False, repr=False) +class DeviceDecommissioningConfigDeleteRequest(aristaproto.Message): + key: "UuidKey" = aristaproto.message_field(1) + """ + Key indicates which DeviceDecommissioningConfig instance to remove. + This field must always be set. + """ + + +@dataclass(eq=False, repr=False) +class DeviceDecommissioningConfigDeleteResponse(aristaproto.Message): + key: "UuidKey" = aristaproto.message_field(1) + """ + Key echoes back the key of the deleted DeviceDecommissioningConfig instance. + """ + + time: datetime = aristaproto.message_field(2) + """ + Time indicates the (UTC) timestamp at which the system recognizes the + deletion. The only guarantees made about this timestamp are: + + - it is after the time the request was received + - a time-ranged query with StartTime==DeletedAt will not include this instance. 
+ """ + + +@dataclass(eq=False, repr=False) +class DeviceDecommissioningConfigDeleteSomeRequest(aristaproto.Message): + keys: List["UuidKey"] = aristaproto.message_field(1) + """key contains a list of DeviceDecommissioningConfig keys to delete""" + + +@dataclass(eq=False, repr=False) +class DeviceDecommissioningConfigDeleteSomeResponse(aristaproto.Message): + """ + DeviceDecommissioningConfigDeleteSomeResponse is only sent when there is an error. + """ + + key: "UuidKey" = aristaproto.message_field(1) + error: str = aristaproto.string_field(2) + + +@dataclass(eq=False, repr=False) +class DeviceDecommissioningConfigDeleteAllRequest(aristaproto.Message): + partial_eq_filter: List["DeviceDecommissioningConfig"] = aristaproto.message_field( + 1 + ) + """ + PartialEqFilter provides a way to server-side filter a DeleteAll. + This requires all provided fields to be equal to the response. + A filtered DeleteAll will use GetAll with filter to find things to delete. + """ + + +@dataclass(eq=False, repr=False) +class DeviceDecommissioningConfigDeleteAllResponse(aristaproto.Message): + type: "___fmp__.DeleteError" = aristaproto.enum_field(1) + """ + This describes the class of delete error. + A DeleteAllResponse is only sent when there is an error. + """ + + error: Optional[str] = aristaproto.message_field(2, wraps=aristaproto.TYPE_STRING) + """This indicates the error message from the delete failure.""" + + key: "UuidKey" = aristaproto.message_field(3) + """ + This is the key of the DeviceDecommissioningConfig instance that failed to be deleted. + """ + + time: datetime = aristaproto.message_field(4) + """Time indicates the (UTC) timestamp when the key was being deleted.""" + + +@dataclass(eq=False, repr=False) +class DeviceOnboardingRequest(aristaproto.Message): + key: "UuidKey" = aristaproto.message_field(1) + """ + Key uniquely identifies a DeviceOnboarding instance to retrieve. + This value must be populated. 
+ """ + + time: datetime = aristaproto.message_field(2) + """ + Time indicates the time for which you are interested in the data. + If no time is given, the server will use the time at which it makes the request. + """ + + +@dataclass(eq=False, repr=False) +class DeviceOnboardingResponse(aristaproto.Message): + value: "DeviceOnboarding" = aristaproto.message_field(1) + """ + Value is the value requested. + This structure will be fully-populated as it exists in the datastore. If + optional fields were not given at creation, these fields will be empty or + set to default values. + """ + + time: datetime = aristaproto.message_field(2) + """ + Time carries the (UTC) timestamp of the last-modification of the + DeviceOnboarding instance in this response. + """ + + +@dataclass(eq=False, repr=False) +class DeviceOnboardingSomeRequest(aristaproto.Message): + keys: List["UuidKey"] = aristaproto.message_field(1) + time: datetime = aristaproto.message_field(2) + """ + Time indicates the time for which you are interested in the data. + If no time is given, the server will use the time at which it makes the request. + """ + + +@dataclass(eq=False, repr=False) +class DeviceOnboardingSomeResponse(aristaproto.Message): + value: "DeviceOnboarding" = aristaproto.message_field(1) + """ + Value is the value requested. + This structure will be fully-populated as it exists in the datastore. If + optional fields were not given at creation, these fields will be empty or + set to default values. + """ + + error: Optional[str] = aristaproto.message_field(2, wraps=aristaproto.TYPE_STRING) + """ + Error is an optional field. + It should be filled when there is an error in the GetSome process. 
+ """ + + time: datetime = aristaproto.message_field(3) + + +@dataclass(eq=False, repr=False) +class DeviceOnboardingStreamRequest(aristaproto.Message): + partial_eq_filter: List["DeviceOnboarding"] = aristaproto.message_field(1) + """ + PartialEqFilter provides a way to server-side filter a GetAll/Subscribe. + This requires all provided fields to be equal to the response. + + While transparent to users, this field also allows services to optimize internal + subscriptions if filter(s) are sufficiently specific. + """ + + time: "__time__.TimeBounds" = aristaproto.message_field(3) + """ + TimeRange allows limiting response data to within a specified time window. + If this field is populated, at least one of the two time fields are required. + + For GetAll, the fields start and end can be used as follows: + + * end: Returns the state of each DeviceOnboarding at end. + * Each DeviceOnboarding response is fully-specified (all fields set). + * start: Returns the state of each DeviceOnboarding at start, followed by updates until now. + * Each DeviceOnboarding response at start is fully-specified, but updates may be partial. + * start and end: Returns the state of each DeviceOnboarding at start, followed by updates + until end. + * Each DeviceOnboarding response at start is fully-specified, but updates until end may + be partial. + + This field is not allowed in the Subscribe RPC. + """ + + +@dataclass(eq=False, repr=False) +class DeviceOnboardingStreamResponse(aristaproto.Message): + value: "DeviceOnboarding" = aristaproto.message_field(1) + """ + Value is a value deemed relevant to the initiating request. + This structure will always have its key-field populated. Which other fields are + populated, and why, depends on the value of Operation and what triggered this notification. + """ + + time: datetime = aristaproto.message_field(2) + """ + Time holds the timestamp of this DeviceOnboarding's last modification. 
+ """ + + type: "__subscriptions__.Operation" = aristaproto.enum_field(3) + """ + Operation indicates how the DeviceOnboarding value in this response should be considered. + Under non-subscribe requests, this value should always be INITIAL. In a subscription, + once all initial data is streamed and the client begins to receive modification updates, + you should not see INITIAL again. + """ + + +@dataclass(eq=False, repr=False) +class DeviceOnboardingBatchedStreamRequest(aristaproto.Message): + partial_eq_filter: List["DeviceOnboarding"] = aristaproto.message_field(1) + """ + PartialEqFilter provides a way to server-side filter a GetAll/Subscribe. + This requires all provided fields to be equal to the response. + + While transparent to users, this field also allows services to optimize internal + subscriptions if filter(s) are sufficiently specific. + """ + + time: "__time__.TimeBounds" = aristaproto.message_field(3) + """ + TimeRange allows limiting response data to within a specified time window. + If this field is populated, at least one of the two time fields are required. + + For GetAll, the fields start and end can be used as follows: + + * end: Returns the state of each DeviceOnboarding at end. + * Each DeviceOnboarding response is fully-specified (all fields set). + * start: Returns the state of each DeviceOnboarding at start, followed by updates until now. + * Each DeviceOnboarding response at start is fully-specified, but updates may be partial. + * start and end: Returns the state of each DeviceOnboarding at start, followed by updates + until end. + * Each DeviceOnboarding response at start is fully-specified, but updates until end may + be partial. + + This field is not allowed in the Subscribe RPC. + """ + + max_messages: Optional[int] = aristaproto.message_field( + 4, wraps=aristaproto.TYPE_UINT32 + ) + """ + MaxMessages limits the maximum number of messages that can be contained in one batch. + MaxMessages is required to be at least 1. 
+ The maximum number of messages in a batch is min(max_messages, INTERNAL_BATCH_LIMIT) + INTERNAL_BATCH_LIMIT is set based on the maximum message size. + """ + + +@dataclass(eq=False, repr=False) +class DeviceOnboardingBatchedStreamResponse(aristaproto.Message): + responses: List["DeviceOnboardingStreamResponse"] = aristaproto.message_field(1) + """ + Values are the values deemed relevant to the initiating request. + The length of this structure is guaranteed to be between (inclusive) 1 and + min(req.max_messages, INTERNAL_BATCH_LIMIT). + """ + + +@dataclass(eq=False, repr=False) +class DeviceOnboardingConfigRequest(aristaproto.Message): + key: "UuidKey" = aristaproto.message_field(1) + """ + Key uniquely identifies a DeviceOnboardingConfig instance to retrieve. + This value must be populated. + """ + + time: datetime = aristaproto.message_field(2) + """ + Time indicates the time for which you are interested in the data. + If no time is given, the server will use the time at which it makes the request. + """ + + +@dataclass(eq=False, repr=False) +class DeviceOnboardingConfigResponse(aristaproto.Message): + value: "DeviceOnboardingConfig" = aristaproto.message_field(1) + """ + Value is the value requested. + This structure will be fully-populated as it exists in the datastore. If + optional fields were not given at creation, these fields will be empty or + set to default values. + """ + + time: datetime = aristaproto.message_field(2) + """ + Time carries the (UTC) timestamp of the last-modification of the + DeviceOnboardingConfig instance in this response. + """ + + +@dataclass(eq=False, repr=False) +class DeviceOnboardingConfigSomeRequest(aristaproto.Message): + keys: List["UuidKey"] = aristaproto.message_field(1) + time: datetime = aristaproto.message_field(2) + """ + Time indicates the time for which you are interested in the data. + If no time is given, the server will use the time at which it makes the request. 
+ """ + + +@dataclass(eq=False, repr=False) +class DeviceOnboardingConfigSomeResponse(aristaproto.Message): + value: "DeviceOnboardingConfig" = aristaproto.message_field(1) + """ + Value is the value requested. + This structure will be fully-populated as it exists in the datastore. If + optional fields were not given at creation, these fields will be empty or + set to default values. + """ + + error: Optional[str] = aristaproto.message_field(2, wraps=aristaproto.TYPE_STRING) + """ + Error is an optional field. + It should be filled when there is an error in the GetSome process. + """ + + time: datetime = aristaproto.message_field(3) + + +@dataclass(eq=False, repr=False) +class DeviceOnboardingConfigStreamRequest(aristaproto.Message): + partial_eq_filter: List["DeviceOnboardingConfig"] = aristaproto.message_field(1) + """ + PartialEqFilter provides a way to server-side filter a GetAll/Subscribe. + This requires all provided fields to be equal to the response. + + While transparent to users, this field also allows services to optimize internal + subscriptions if filter(s) are sufficiently specific. + """ + + time: "__time__.TimeBounds" = aristaproto.message_field(3) + """ + TimeRange allows limiting response data to within a specified time window. + If this field is populated, at least one of the two time fields are required. + + For GetAll, the fields start and end can be used as follows: + + * end: Returns the state of each DeviceOnboardingConfig at end. + * Each DeviceOnboardingConfig response is fully-specified (all fields set). + * start: Returns the state of each DeviceOnboardingConfig at start, followed by updates until now. + * Each DeviceOnboardingConfig response at start is fully-specified, but updates may be partial. + * start and end: Returns the state of each DeviceOnboardingConfig at start, followed by updates + until end. + * Each DeviceOnboardingConfig response at start is fully-specified, but updates until end may + be partial. 
+ + This field is not allowed in the Subscribe RPC. + """ + + +@dataclass(eq=False, repr=False) +class DeviceOnboardingConfigStreamResponse(aristaproto.Message): + value: "DeviceOnboardingConfig" = aristaproto.message_field(1) + """ + Value is a value deemed relevant to the initiating request. + This structure will always have its key-field populated. Which other fields are + populated, and why, depends on the value of Operation and what triggered this notification. + """ + + time: datetime = aristaproto.message_field(2) + """ + Time holds the timestamp of this DeviceOnboardingConfig's last modification. + """ + + type: "__subscriptions__.Operation" = aristaproto.enum_field(3) + """ + Operation indicates how the DeviceOnboardingConfig value in this response should be considered. + Under non-subscribe requests, this value should always be INITIAL. In a subscription, + once all initial data is streamed and the client begins to receive modification updates, + you should not see INITIAL again. + """ + + +@dataclass(eq=False, repr=False) +class DeviceOnboardingConfigBatchedStreamRequest(aristaproto.Message): + partial_eq_filter: List["DeviceOnboardingConfig"] = aristaproto.message_field(1) + """ + PartialEqFilter provides a way to server-side filter a GetAll/Subscribe. + This requires all provided fields to be equal to the response. + + While transparent to users, this field also allows services to optimize internal + subscriptions if filter(s) are sufficiently specific. + """ + + time: "__time__.TimeBounds" = aristaproto.message_field(3) + """ + TimeRange allows limiting response data to within a specified time window. + If this field is populated, at least one of the two time fields are required. + + For GetAll, the fields start and end can be used as follows: + + * end: Returns the state of each DeviceOnboardingConfig at end. + * Each DeviceOnboardingConfig response is fully-specified (all fields set). 
+ * start: Returns the state of each DeviceOnboardingConfig at start, followed by updates until now. + * Each DeviceOnboardingConfig response at start is fully-specified, but updates may be partial. + * start and end: Returns the state of each DeviceOnboardingConfig at start, followed by updates + until end. + * Each DeviceOnboardingConfig response at start is fully-specified, but updates until end may + be partial. + + This field is not allowed in the Subscribe RPC. + """ + + max_messages: Optional[int] = aristaproto.message_field( + 4, wraps=aristaproto.TYPE_UINT32 + ) + """ + MaxMessages limits the maximum number of messages that can be contained in one batch. + MaxMessages is required to be at least 1. + The maximum number of messages in a batch is min(max_messages, INTERNAL_BATCH_LIMIT) + INTERNAL_BATCH_LIMIT is set based on the maximum message size. + """ + + +@dataclass(eq=False, repr=False) +class DeviceOnboardingConfigBatchedStreamResponse(aristaproto.Message): + responses: List["DeviceOnboardingConfigStreamResponse"] = aristaproto.message_field( + 1 + ) + """ + Values are the values deemed relevant to the initiating request. + The length of this structure is guaranteed to be between (inclusive) 1 and + min(req.max_messages, INTERNAL_BATCH_LIMIT). + """ + + +@dataclass(eq=False, repr=False) +class DeviceOnboardingConfigSetRequest(aristaproto.Message): + value: "DeviceOnboardingConfig" = aristaproto.message_field(1) + """ + DeviceOnboardingConfig carries the value to set into the datastore. + See the documentation on the DeviceOnboardingConfig struct for which fields are required. + """ + + +@dataclass(eq=False, repr=False) +class DeviceOnboardingConfigSetResponse(aristaproto.Message): + value: "DeviceOnboardingConfig" = aristaproto.message_field(1) + """ + Value carries all the values given in the DeviceOnboardingConfigSetRequest as well + as any server-generated values. 
+ """ + + time: datetime = aristaproto.message_field(2) + """ + Time indicates the (UTC) timestamp at which the system recognizes the + creation. The only guarantees made about this timestamp are: + + - it is after the time the request was received + - a time-ranged query with StartTime==CreatedAt will include this instance. + """ + + +@dataclass(eq=False, repr=False) +class DeviceOnboardingConfigSetSomeRequest(aristaproto.Message): + values: List["DeviceOnboardingConfig"] = aristaproto.message_field(1) + """ + value contains a list of DeviceOnboardingConfig values to write. + It is possible to provide more values than can fit within either: + - the maxiumum send size of the client + - the maximum receive size of the server + If this error occurs you must reduce the number of values sent. + See gRPC "maximum message size" documentation for more information. + """ + + +@dataclass(eq=False, repr=False) +class DeviceOnboardingConfigSetSomeResponse(aristaproto.Message): + key: "UuidKey" = aristaproto.message_field(1) + error: str = aristaproto.string_field(2) + + +@dataclass(eq=False, repr=False) +class DeviceOnboardingConfigDeleteRequest(aristaproto.Message): + key: "UuidKey" = aristaproto.message_field(1) + """ + Key indicates which DeviceOnboardingConfig instance to remove. + This field must always be set. + """ + + +@dataclass(eq=False, repr=False) +class DeviceOnboardingConfigDeleteResponse(aristaproto.Message): + key: "UuidKey" = aristaproto.message_field(1) + """ + Key echoes back the key of the deleted DeviceOnboardingConfig instance. + """ + + time: datetime = aristaproto.message_field(2) + """ + Time indicates the (UTC) timestamp at which the system recognizes the + deletion. The only guarantees made about this timestamp are: + + - it is after the time the request was received + - a time-ranged query with StartTime==DeletedAt will not include this instance. 
+ """ + + +@dataclass(eq=False, repr=False) +class DeviceOnboardingConfigDeleteSomeRequest(aristaproto.Message): + keys: List["UuidKey"] = aristaproto.message_field(1) + """key contains a list of DeviceOnboardingConfig keys to delete""" + + +@dataclass(eq=False, repr=False) +class DeviceOnboardingConfigDeleteSomeResponse(aristaproto.Message): + """ + DeviceOnboardingConfigDeleteSomeResponse is only sent when there is an error. + """ + + key: "UuidKey" = aristaproto.message_field(1) + error: str = aristaproto.string_field(2) + + +@dataclass(eq=False, repr=False) +class DeviceOnboardingConfigDeleteAllRequest(aristaproto.Message): + partial_eq_filter: List["DeviceOnboardingConfig"] = aristaproto.message_field(1) + """ + PartialEqFilter provides a way to server-side filter a DeleteAll. + This requires all provided fields to be equal to the response. + A filtered DeleteAll will use GetAll with filter to find things to delete. + """ + + +@dataclass(eq=False, repr=False) +class DeviceOnboardingConfigDeleteAllResponse(aristaproto.Message): + type: "___fmp__.DeleteError" = aristaproto.enum_field(1) + """ + This describes the class of delete error. + A DeleteAllResponse is only sent when there is an error. + """ + + error: Optional[str] = aristaproto.message_field(2, wraps=aristaproto.TYPE_STRING) + """This indicates the error message from the delete failure.""" + + key: "UuidKey" = aristaproto.message_field(3) + """ + This is the key of the DeviceOnboardingConfig instance that failed to be deleted. + """ + + time: datetime = aristaproto.message_field(4) + """Time indicates the (UTC) timestamp when the key was being deleted.""" + + +@dataclass(eq=False, repr=False) +class ProvisionedDeviceRequest(aristaproto.Message): + key: "DeviceKey" = aristaproto.message_field(1) + """ + Key uniquely identifies a ProvisionedDevice instance to retrieve. + This value must be populated. 
+ """ + + time: datetime = aristaproto.message_field(2) + """ + Time indicates the time for which you are interested in the data. + If no time is given, the server will use the time at which it makes the request. + """ + + +@dataclass(eq=False, repr=False) +class ProvisionedDeviceResponse(aristaproto.Message): + value: "ProvisionedDevice" = aristaproto.message_field(1) + """ + Value is the value requested. + This structure will be fully-populated as it exists in the datastore. If + optional fields were not given at creation, these fields will be empty or + set to default values. + """ + + time: datetime = aristaproto.message_field(2) + """ + Time carries the (UTC) timestamp of the last-modification of the + ProvisionedDevice instance in this response. + """ + + +@dataclass(eq=False, repr=False) +class ProvisionedDeviceSomeRequest(aristaproto.Message): + keys: List["DeviceKey"] = aristaproto.message_field(1) + time: datetime = aristaproto.message_field(2) + """ + Time indicates the time for which you are interested in the data. + If no time is given, the server will use the time at which it makes the request. + """ + + +@dataclass(eq=False, repr=False) +class ProvisionedDeviceSomeResponse(aristaproto.Message): + value: "ProvisionedDevice" = aristaproto.message_field(1) + """ + Value is the value requested. + This structure will be fully-populated as it exists in the datastore. If + optional fields were not given at creation, these fields will be empty or + set to default values. + """ + + error: Optional[str] = aristaproto.message_field(2, wraps=aristaproto.TYPE_STRING) + """ + Error is an optional field. + It should be filled when there is an error in the GetSome process. 
+ """ + + time: datetime = aristaproto.message_field(3) + + +@dataclass(eq=False, repr=False) +class ProvisionedDeviceStreamRequest(aristaproto.Message): + partial_eq_filter: List["ProvisionedDevice"] = aristaproto.message_field(1) + """ + PartialEqFilter provides a way to server-side filter a GetAll/Subscribe. + This requires all provided fields to be equal to the response. + + While transparent to users, this field also allows services to optimize internal + subscriptions if filter(s) are sufficiently specific. + """ + + time: "__time__.TimeBounds" = aristaproto.message_field(3) + """ + TimeRange allows limiting response data to within a specified time window. + If this field is populated, at least one of the two time fields are required. + + For GetAll, the fields start and end can be used as follows: + + * end: Returns the state of each ProvisionedDevice at end. + * Each ProvisionedDevice response is fully-specified (all fields set). + * start: Returns the state of each ProvisionedDevice at start, followed by updates until now. + * Each ProvisionedDevice response at start is fully-specified, but updates may be partial. + * start and end: Returns the state of each ProvisionedDevice at start, followed by updates + until end. + * Each ProvisionedDevice response at start is fully-specified, but updates until end may + be partial. + + This field is not allowed in the Subscribe RPC. + """ + + +@dataclass(eq=False, repr=False) +class ProvisionedDeviceStreamResponse(aristaproto.Message): + value: "ProvisionedDevice" = aristaproto.message_field(1) + """ + Value is a value deemed relevant to the initiating request. + This structure will always have its key-field populated. Which other fields are + populated, and why, depends on the value of Operation and what triggered this notification. + """ + + time: datetime = aristaproto.message_field(2) + """ + Time holds the timestamp of this ProvisionedDevice's last modification. 
+ """ + + type: "__subscriptions__.Operation" = aristaproto.enum_field(3) + """ + Operation indicates how the ProvisionedDevice value in this response should be considered. + Under non-subscribe requests, this value should always be INITIAL. In a subscription, + once all initial data is streamed and the client begins to receive modification updates, + you should not see INITIAL again. + """ + + +@dataclass(eq=False, repr=False) +class ProvisionedDeviceBatchedStreamRequest(aristaproto.Message): + partial_eq_filter: List["ProvisionedDevice"] = aristaproto.message_field(1) + """ + PartialEqFilter provides a way to server-side filter a GetAll/Subscribe. + This requires all provided fields to be equal to the response. + + While transparent to users, this field also allows services to optimize internal + subscriptions if filter(s) are sufficiently specific. + """ + + time: "__time__.TimeBounds" = aristaproto.message_field(3) + """ + TimeRange allows limiting response data to within a specified time window. + If this field is populated, at least one of the two time fields are required. + + For GetAll, the fields start and end can be used as follows: + + * end: Returns the state of each ProvisionedDevice at end. + * Each ProvisionedDevice response is fully-specified (all fields set). + * start: Returns the state of each ProvisionedDevice at start, followed by updates until now. + * Each ProvisionedDevice response at start is fully-specified, but updates may be partial. + * start and end: Returns the state of each ProvisionedDevice at start, followed by updates + until end. + * Each ProvisionedDevice response at start is fully-specified, but updates until end may + be partial. + + This field is not allowed in the Subscribe RPC. + """ + + max_messages: Optional[int] = aristaproto.message_field( + 4, wraps=aristaproto.TYPE_UINT32 + ) + """ + MaxMessages limits the maximum number of messages that can be contained in one batch. + MaxMessages is required to be at least 1. 
+ The maximum number of messages in a batch is min(max_messages, INTERNAL_BATCH_LIMIT) + INTERNAL_BATCH_LIMIT is set based on the maximum message size. + """ + + +@dataclass(eq=False, repr=False) +class ProvisionedDeviceBatchedStreamResponse(aristaproto.Message): + responses: List["ProvisionedDeviceStreamResponse"] = aristaproto.message_field(1) + """ + Values are the values deemed relevant to the initiating request. + The length of this structure is guaranteed to be between (inclusive) 1 and + min(req.max_messages, INTERNAL_BATCH_LIMIT). + """ + + +class DeviceServiceStub(aristaproto.ServiceStub): + async def get_one( + self, + device_request: "DeviceRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> "DeviceResponse": + return await self._unary_unary( + "/arista.inventory.v1.DeviceService/GetOne", + device_request, + DeviceResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ) + + async def get_some( + self, + device_some_request: "DeviceSomeRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["DeviceSomeResponse"]: + async for response in self._unary_stream( + "/arista.inventory.v1.DeviceService/GetSome", + device_some_request, + DeviceSomeResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def get_all( + self, + device_stream_request: "DeviceStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["DeviceStreamResponse"]: + async for response in self._unary_stream( + "/arista.inventory.v1.DeviceService/GetAll", + device_stream_request, + DeviceStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def subscribe( + self, + device_stream_request: 
"DeviceStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["DeviceStreamResponse"]: + async for response in self._unary_stream( + "/arista.inventory.v1.DeviceService/Subscribe", + device_stream_request, + DeviceStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def get_meta( + self, + device_stream_request: "DeviceStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> "MetaResponse": + return await self._unary_unary( + "/arista.inventory.v1.DeviceService/GetMeta", + device_stream_request, + MetaResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ) + + async def subscribe_meta( + self, + device_stream_request: "DeviceStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["MetaResponse"]: + async for response in self._unary_stream( + "/arista.inventory.v1.DeviceService/SubscribeMeta", + device_stream_request, + MetaResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def get_all_batched( + self, + device_batched_stream_request: "DeviceBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["DeviceBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.inventory.v1.DeviceService/GetAllBatched", + device_batched_stream_request, + DeviceBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def subscribe_batched( + self, + device_batched_stream_request: "DeviceBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: 
Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["DeviceBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.inventory.v1.DeviceService/SubscribeBatched", + device_batched_stream_request, + DeviceBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + +class DeviceDecommissioningServiceStub(aristaproto.ServiceStub): + async def get_one( + self, + device_decommissioning_request: "DeviceDecommissioningRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> "DeviceDecommissioningResponse": + return await self._unary_unary( + "/arista.inventory.v1.DeviceDecommissioningService/GetOne", + device_decommissioning_request, + DeviceDecommissioningResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ) + + async def get_some( + self, + device_decommissioning_some_request: "DeviceDecommissioningSomeRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["DeviceDecommissioningSomeResponse"]: + async for response in self._unary_stream( + "/arista.inventory.v1.DeviceDecommissioningService/GetSome", + device_decommissioning_some_request, + DeviceDecommissioningSomeResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def get_all( + self, + device_decommissioning_stream_request: "DeviceDecommissioningStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["DeviceDecommissioningStreamResponse"]: + async for response in self._unary_stream( + "/arista.inventory.v1.DeviceDecommissioningService/GetAll", + device_decommissioning_stream_request, + DeviceDecommissioningStreamResponse, + timeout=timeout, + 
deadline=deadline, + metadata=metadata, + ): + yield response + + async def subscribe( + self, + device_decommissioning_stream_request: "DeviceDecommissioningStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["DeviceDecommissioningStreamResponse"]: + async for response in self._unary_stream( + "/arista.inventory.v1.DeviceDecommissioningService/Subscribe", + device_decommissioning_stream_request, + DeviceDecommissioningStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def get_meta( + self, + device_decommissioning_stream_request: "DeviceDecommissioningStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> "MetaResponse": + return await self._unary_unary( + "/arista.inventory.v1.DeviceDecommissioningService/GetMeta", + device_decommissioning_stream_request, + MetaResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ) + + async def subscribe_meta( + self, + device_decommissioning_stream_request: "DeviceDecommissioningStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["MetaResponse"]: + async for response in self._unary_stream( + "/arista.inventory.v1.DeviceDecommissioningService/SubscribeMeta", + device_decommissioning_stream_request, + MetaResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def get_all_batched( + self, + device_decommissioning_batched_stream_request: "DeviceDecommissioningBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["DeviceDecommissioningBatchedStreamResponse"]: + async for response in 
self._unary_stream( + "/arista.inventory.v1.DeviceDecommissioningService/GetAllBatched", + device_decommissioning_batched_stream_request, + DeviceDecommissioningBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def subscribe_batched( + self, + device_decommissioning_batched_stream_request: "DeviceDecommissioningBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["DeviceDecommissioningBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.inventory.v1.DeviceDecommissioningService/SubscribeBatched", + device_decommissioning_batched_stream_request, + DeviceDecommissioningBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + +class DeviceDecommissioningConfigServiceStub(aristaproto.ServiceStub): + async def get_one( + self, + device_decommissioning_config_request: "DeviceDecommissioningConfigRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> "DeviceDecommissioningConfigResponse": + return await self._unary_unary( + "/arista.inventory.v1.DeviceDecommissioningConfigService/GetOne", + device_decommissioning_config_request, + DeviceDecommissioningConfigResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ) + + async def get_some( + self, + device_decommissioning_config_some_request: "DeviceDecommissioningConfigSomeRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["DeviceDecommissioningConfigSomeResponse"]: + async for response in self._unary_stream( + "/arista.inventory.v1.DeviceDecommissioningConfigService/GetSome", + device_decommissioning_config_some_request, + 
DeviceDecommissioningConfigSomeResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def get_all( + self, + device_decommissioning_config_stream_request: "DeviceDecommissioningConfigStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["DeviceDecommissioningConfigStreamResponse"]: + async for response in self._unary_stream( + "/arista.inventory.v1.DeviceDecommissioningConfigService/GetAll", + device_decommissioning_config_stream_request, + DeviceDecommissioningConfigStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def subscribe( + self, + device_decommissioning_config_stream_request: "DeviceDecommissioningConfigStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["DeviceDecommissioningConfigStreamResponse"]: + async for response in self._unary_stream( + "/arista.inventory.v1.DeviceDecommissioningConfigService/Subscribe", + device_decommissioning_config_stream_request, + DeviceDecommissioningConfigStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def get_meta( + self, + device_decommissioning_config_stream_request: "DeviceDecommissioningConfigStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> "MetaResponse": + return await self._unary_unary( + "/arista.inventory.v1.DeviceDecommissioningConfigService/GetMeta", + device_decommissioning_config_stream_request, + MetaResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ) + + async def subscribe_meta( + self, + device_decommissioning_config_stream_request: "DeviceDecommissioningConfigStreamRequest", + *, + timeout: 
Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["MetaResponse"]: + async for response in self._unary_stream( + "/arista.inventory.v1.DeviceDecommissioningConfigService/SubscribeMeta", + device_decommissioning_config_stream_request, + MetaResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def set( + self, + device_decommissioning_config_set_request: "DeviceDecommissioningConfigSetRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> "DeviceDecommissioningConfigSetResponse": + return await self._unary_unary( + "/arista.inventory.v1.DeviceDecommissioningConfigService/Set", + device_decommissioning_config_set_request, + DeviceDecommissioningConfigSetResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ) + + async def set_some( + self, + device_decommissioning_config_set_some_request: "DeviceDecommissioningConfigSetSomeRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["DeviceDecommissioningConfigSetSomeResponse"]: + async for response in self._unary_stream( + "/arista.inventory.v1.DeviceDecommissioningConfigService/SetSome", + device_decommissioning_config_set_some_request, + DeviceDecommissioningConfigSetSomeResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def delete( + self, + device_decommissioning_config_delete_request: "DeviceDecommissioningConfigDeleteRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> "DeviceDecommissioningConfigDeleteResponse": + return await self._unary_unary( + "/arista.inventory.v1.DeviceDecommissioningConfigService/Delete", + 
device_decommissioning_config_delete_request, + DeviceDecommissioningConfigDeleteResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ) + + async def delete_some( + self, + device_decommissioning_config_delete_some_request: "DeviceDecommissioningConfigDeleteSomeRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["DeviceDecommissioningConfigDeleteSomeResponse"]: + async for response in self._unary_stream( + "/arista.inventory.v1.DeviceDecommissioningConfigService/DeleteSome", + device_decommissioning_config_delete_some_request, + DeviceDecommissioningConfigDeleteSomeResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def delete_all( + self, + device_decommissioning_config_delete_all_request: "DeviceDecommissioningConfigDeleteAllRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["DeviceDecommissioningConfigDeleteAllResponse"]: + async for response in self._unary_stream( + "/arista.inventory.v1.DeviceDecommissioningConfigService/DeleteAll", + device_decommissioning_config_delete_all_request, + DeviceDecommissioningConfigDeleteAllResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def get_all_batched( + self, + device_decommissioning_config_batched_stream_request: "DeviceDecommissioningConfigBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["DeviceDecommissioningConfigBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.inventory.v1.DeviceDecommissioningConfigService/GetAllBatched", + device_decommissioning_config_batched_stream_request, + DeviceDecommissioningConfigBatchedStreamResponse, + 
timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def subscribe_batched( + self, + device_decommissioning_config_batched_stream_request: "DeviceDecommissioningConfigBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["DeviceDecommissioningConfigBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.inventory.v1.DeviceDecommissioningConfigService/SubscribeBatched", + device_decommissioning_config_batched_stream_request, + DeviceDecommissioningConfigBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + +class DeviceOnboardingServiceStub(aristaproto.ServiceStub): + async def get_one( + self, + device_onboarding_request: "DeviceOnboardingRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> "DeviceOnboardingResponse": + return await self._unary_unary( + "/arista.inventory.v1.DeviceOnboardingService/GetOne", + device_onboarding_request, + DeviceOnboardingResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ) + + async def get_some( + self, + device_onboarding_some_request: "DeviceOnboardingSomeRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["DeviceOnboardingSomeResponse"]: + async for response in self._unary_stream( + "/arista.inventory.v1.DeviceOnboardingService/GetSome", + device_onboarding_some_request, + DeviceOnboardingSomeResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def get_all( + self, + device_onboarding_stream_request: "DeviceOnboardingStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: 
Optional["MetadataLike"] = None + ) -> AsyncIterator["DeviceOnboardingStreamResponse"]: + async for response in self._unary_stream( + "/arista.inventory.v1.DeviceOnboardingService/GetAll", + device_onboarding_stream_request, + DeviceOnboardingStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def subscribe( + self, + device_onboarding_stream_request: "DeviceOnboardingStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["DeviceOnboardingStreamResponse"]: + async for response in self._unary_stream( + "/arista.inventory.v1.DeviceOnboardingService/Subscribe", + device_onboarding_stream_request, + DeviceOnboardingStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def get_meta( + self, + device_onboarding_stream_request: "DeviceOnboardingStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> "MetaResponse": + return await self._unary_unary( + "/arista.inventory.v1.DeviceOnboardingService/GetMeta", + device_onboarding_stream_request, + MetaResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ) + + async def subscribe_meta( + self, + device_onboarding_stream_request: "DeviceOnboardingStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["MetaResponse"]: + async for response in self._unary_stream( + "/arista.inventory.v1.DeviceOnboardingService/SubscribeMeta", + device_onboarding_stream_request, + MetaResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def get_all_batched( + self, + device_onboarding_batched_stream_request: "DeviceOnboardingBatchedStreamRequest", + *, + timeout: 
Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["DeviceOnboardingBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.inventory.v1.DeviceOnboardingService/GetAllBatched", + device_onboarding_batched_stream_request, + DeviceOnboardingBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def subscribe_batched( + self, + device_onboarding_batched_stream_request: "DeviceOnboardingBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["DeviceOnboardingBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.inventory.v1.DeviceOnboardingService/SubscribeBatched", + device_onboarding_batched_stream_request, + DeviceOnboardingBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + +class DeviceOnboardingConfigServiceStub(aristaproto.ServiceStub): + async def get_one( + self, + device_onboarding_config_request: "DeviceOnboardingConfigRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> "DeviceOnboardingConfigResponse": + return await self._unary_unary( + "/arista.inventory.v1.DeviceOnboardingConfigService/GetOne", + device_onboarding_config_request, + DeviceOnboardingConfigResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ) + + async def get_some( + self, + device_onboarding_config_some_request: "DeviceOnboardingConfigSomeRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["DeviceOnboardingConfigSomeResponse"]: + async for response in self._unary_stream( + 
"/arista.inventory.v1.DeviceOnboardingConfigService/GetSome", + device_onboarding_config_some_request, + DeviceOnboardingConfigSomeResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def get_all( + self, + device_onboarding_config_stream_request: "DeviceOnboardingConfigStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["DeviceOnboardingConfigStreamResponse"]: + async for response in self._unary_stream( + "/arista.inventory.v1.DeviceOnboardingConfigService/GetAll", + device_onboarding_config_stream_request, + DeviceOnboardingConfigStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def subscribe( + self, + device_onboarding_config_stream_request: "DeviceOnboardingConfigStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["DeviceOnboardingConfigStreamResponse"]: + async for response in self._unary_stream( + "/arista.inventory.v1.DeviceOnboardingConfigService/Subscribe", + device_onboarding_config_stream_request, + DeviceOnboardingConfigStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def get_meta( + self, + device_onboarding_config_stream_request: "DeviceOnboardingConfigStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> "MetaResponse": + return await self._unary_unary( + "/arista.inventory.v1.DeviceOnboardingConfigService/GetMeta", + device_onboarding_config_stream_request, + MetaResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ) + + async def subscribe_meta( + self, + device_onboarding_config_stream_request: "DeviceOnboardingConfigStreamRequest", + *, + timeout: 
Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["MetaResponse"]: + async for response in self._unary_stream( + "/arista.inventory.v1.DeviceOnboardingConfigService/SubscribeMeta", + device_onboarding_config_stream_request, + MetaResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def set( + self, + device_onboarding_config_set_request: "DeviceOnboardingConfigSetRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> "DeviceOnboardingConfigSetResponse": + return await self._unary_unary( + "/arista.inventory.v1.DeviceOnboardingConfigService/Set", + device_onboarding_config_set_request, + DeviceOnboardingConfigSetResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ) + + async def set_some( + self, + device_onboarding_config_set_some_request: "DeviceOnboardingConfigSetSomeRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["DeviceOnboardingConfigSetSomeResponse"]: + async for response in self._unary_stream( + "/arista.inventory.v1.DeviceOnboardingConfigService/SetSome", + device_onboarding_config_set_some_request, + DeviceOnboardingConfigSetSomeResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def delete( + self, + device_onboarding_config_delete_request: "DeviceOnboardingConfigDeleteRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> "DeviceOnboardingConfigDeleteResponse": + return await self._unary_unary( + "/arista.inventory.v1.DeviceOnboardingConfigService/Delete", + device_onboarding_config_delete_request, + DeviceOnboardingConfigDeleteResponse, + timeout=timeout, + deadline=deadline, + 
metadata=metadata, + ) + + async def delete_some( + self, + device_onboarding_config_delete_some_request: "DeviceOnboardingConfigDeleteSomeRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["DeviceOnboardingConfigDeleteSomeResponse"]: + async for response in self._unary_stream( + "/arista.inventory.v1.DeviceOnboardingConfigService/DeleteSome", + device_onboarding_config_delete_some_request, + DeviceOnboardingConfigDeleteSomeResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def delete_all( + self, + device_onboarding_config_delete_all_request: "DeviceOnboardingConfigDeleteAllRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["DeviceOnboardingConfigDeleteAllResponse"]: + async for response in self._unary_stream( + "/arista.inventory.v1.DeviceOnboardingConfigService/DeleteAll", + device_onboarding_config_delete_all_request, + DeviceOnboardingConfigDeleteAllResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def get_all_batched( + self, + device_onboarding_config_batched_stream_request: "DeviceOnboardingConfigBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["DeviceOnboardingConfigBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.inventory.v1.DeviceOnboardingConfigService/GetAllBatched", + device_onboarding_config_batched_stream_request, + DeviceOnboardingConfigBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def subscribe_batched( + self, + device_onboarding_config_batched_stream_request: "DeviceOnboardingConfigBatchedStreamRequest", + *, + 
timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["DeviceOnboardingConfigBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.inventory.v1.DeviceOnboardingConfigService/SubscribeBatched", + device_onboarding_config_batched_stream_request, + DeviceOnboardingConfigBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + +class ProvisionedDeviceServiceStub(aristaproto.ServiceStub): + async def get_one( + self, + provisioned_device_request: "ProvisionedDeviceRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> "ProvisionedDeviceResponse": + return await self._unary_unary( + "/arista.inventory.v1.ProvisionedDeviceService/GetOne", + provisioned_device_request, + ProvisionedDeviceResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ) + + async def get_some( + self, + provisioned_device_some_request: "ProvisionedDeviceSomeRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["ProvisionedDeviceSomeResponse"]: + async for response in self._unary_stream( + "/arista.inventory.v1.ProvisionedDeviceService/GetSome", + provisioned_device_some_request, + ProvisionedDeviceSomeResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def get_all( + self, + provisioned_device_stream_request: "ProvisionedDeviceStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["ProvisionedDeviceStreamResponse"]: + async for response in self._unary_stream( + "/arista.inventory.v1.ProvisionedDeviceService/GetAll", + provisioned_device_stream_request, + 
ProvisionedDeviceStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def subscribe( + self, + provisioned_device_stream_request: "ProvisionedDeviceStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["ProvisionedDeviceStreamResponse"]: + async for response in self._unary_stream( + "/arista.inventory.v1.ProvisionedDeviceService/Subscribe", + provisioned_device_stream_request, + ProvisionedDeviceStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def get_meta( + self, + provisioned_device_stream_request: "ProvisionedDeviceStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> "MetaResponse": + return await self._unary_unary( + "/arista.inventory.v1.ProvisionedDeviceService/GetMeta", + provisioned_device_stream_request, + MetaResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ) + + async def subscribe_meta( + self, + provisioned_device_stream_request: "ProvisionedDeviceStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["MetaResponse"]: + async for response in self._unary_stream( + "/arista.inventory.v1.ProvisionedDeviceService/SubscribeMeta", + provisioned_device_stream_request, + MetaResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def get_all_batched( + self, + provisioned_device_batched_stream_request: "ProvisionedDeviceBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["ProvisionedDeviceBatchedStreamResponse"]: + async for response in self._unary_stream( 
+ "/arista.inventory.v1.ProvisionedDeviceService/GetAllBatched", + provisioned_device_batched_stream_request, + ProvisionedDeviceBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def subscribe_batched( + self, + provisioned_device_batched_stream_request: "ProvisionedDeviceBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["ProvisionedDeviceBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.inventory.v1.ProvisionedDeviceService/SubscribeBatched", + provisioned_device_batched_stream_request, + ProvisionedDeviceBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + +class DeviceServiceBase(ServiceBase): + + async def get_one(self, device_request: "DeviceRequest") -> "DeviceResponse": + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def get_some( + self, device_some_request: "DeviceSomeRequest" + ) -> AsyncIterator["DeviceSomeResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def get_all( + self, device_stream_request: "DeviceStreamRequest" + ) -> AsyncIterator["DeviceStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def subscribe( + self, device_stream_request: "DeviceStreamRequest" + ) -> AsyncIterator["DeviceStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def get_meta( + self, device_stream_request: "DeviceStreamRequest" + ) -> "MetaResponse": + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def subscribe_meta( + self, device_stream_request: "DeviceStreamRequest" + ) -> AsyncIterator["MetaResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def get_all_batched( + self, device_batched_stream_request: 
"DeviceBatchedStreamRequest" + ) -> AsyncIterator["DeviceBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def subscribe_batched( + self, device_batched_stream_request: "DeviceBatchedStreamRequest" + ) -> AsyncIterator["DeviceBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def __rpc_get_one( + self, stream: "grpclib.server.Stream[DeviceRequest, DeviceResponse]" + ) -> None: + request = await stream.recv_message() + response = await self.get_one(request) + await stream.send_message(response) + + async def __rpc_get_some( + self, stream: "grpclib.server.Stream[DeviceSomeRequest, DeviceSomeResponse]" + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.get_some, + stream, + request, + ) + + async def __rpc_get_all( + self, stream: "grpclib.server.Stream[DeviceStreamRequest, DeviceStreamResponse]" + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.get_all, + stream, + request, + ) + + async def __rpc_subscribe( + self, stream: "grpclib.server.Stream[DeviceStreamRequest, DeviceStreamResponse]" + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.subscribe, + stream, + request, + ) + + async def __rpc_get_meta( + self, stream: "grpclib.server.Stream[DeviceStreamRequest, MetaResponse]" + ) -> None: + request = await stream.recv_message() + response = await self.get_meta(request) + await stream.send_message(response) + + async def __rpc_subscribe_meta( + self, stream: "grpclib.server.Stream[DeviceStreamRequest, MetaResponse]" + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.subscribe_meta, + stream, + request, + ) + + async def __rpc_get_all_batched( + self, + stream: "grpclib.server.Stream[DeviceBatchedStreamRequest, DeviceBatchedStreamResponse]", + ) -> None: + 
request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.get_all_batched, + stream, + request, + ) + + async def __rpc_subscribe_batched( + self, + stream: "grpclib.server.Stream[DeviceBatchedStreamRequest, DeviceBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.subscribe_batched, + stream, + request, + ) + + def __mapping__(self) -> Dict[str, grpclib.const.Handler]: + return { + "/arista.inventory.v1.DeviceService/GetOne": grpclib.const.Handler( + self.__rpc_get_one, + grpclib.const.Cardinality.UNARY_UNARY, + DeviceRequest, + DeviceResponse, + ), + "/arista.inventory.v1.DeviceService/GetSome": grpclib.const.Handler( + self.__rpc_get_some, + grpclib.const.Cardinality.UNARY_STREAM, + DeviceSomeRequest, + DeviceSomeResponse, + ), + "/arista.inventory.v1.DeviceService/GetAll": grpclib.const.Handler( + self.__rpc_get_all, + grpclib.const.Cardinality.UNARY_STREAM, + DeviceStreamRequest, + DeviceStreamResponse, + ), + "/arista.inventory.v1.DeviceService/Subscribe": grpclib.const.Handler( + self.__rpc_subscribe, + grpclib.const.Cardinality.UNARY_STREAM, + DeviceStreamRequest, + DeviceStreamResponse, + ), + "/arista.inventory.v1.DeviceService/GetMeta": grpclib.const.Handler( + self.__rpc_get_meta, + grpclib.const.Cardinality.UNARY_UNARY, + DeviceStreamRequest, + MetaResponse, + ), + "/arista.inventory.v1.DeviceService/SubscribeMeta": grpclib.const.Handler( + self.__rpc_subscribe_meta, + grpclib.const.Cardinality.UNARY_STREAM, + DeviceStreamRequest, + MetaResponse, + ), + "/arista.inventory.v1.DeviceService/GetAllBatched": grpclib.const.Handler( + self.__rpc_get_all_batched, + grpclib.const.Cardinality.UNARY_STREAM, + DeviceBatchedStreamRequest, + DeviceBatchedStreamResponse, + ), + "/arista.inventory.v1.DeviceService/SubscribeBatched": grpclib.const.Handler( + self.__rpc_subscribe_batched, + grpclib.const.Cardinality.UNARY_STREAM, + DeviceBatchedStreamRequest, 
+ DeviceBatchedStreamResponse, + ), + } + + +class DeviceDecommissioningServiceBase(ServiceBase): + + async def get_one( + self, device_decommissioning_request: "DeviceDecommissioningRequest" + ) -> "DeviceDecommissioningResponse": + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def get_some( + self, device_decommissioning_some_request: "DeviceDecommissioningSomeRequest" + ) -> AsyncIterator["DeviceDecommissioningSomeResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def get_all( + self, + device_decommissioning_stream_request: "DeviceDecommissioningStreamRequest", + ) -> AsyncIterator["DeviceDecommissioningStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def subscribe( + self, + device_decommissioning_stream_request: "DeviceDecommissioningStreamRequest", + ) -> AsyncIterator["DeviceDecommissioningStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def get_meta( + self, + device_decommissioning_stream_request: "DeviceDecommissioningStreamRequest", + ) -> "MetaResponse": + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def subscribe_meta( + self, + device_decommissioning_stream_request: "DeviceDecommissioningStreamRequest", + ) -> AsyncIterator["MetaResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def get_all_batched( + self, + device_decommissioning_batched_stream_request: "DeviceDecommissioningBatchedStreamRequest", + ) -> AsyncIterator["DeviceDecommissioningBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def subscribe_batched( + self, + device_decommissioning_batched_stream_request: "DeviceDecommissioningBatchedStreamRequest", + ) -> AsyncIterator["DeviceDecommissioningBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def __rpc_get_one( + self, + stream: 
"grpclib.server.Stream[DeviceDecommissioningRequest, DeviceDecommissioningResponse]", + ) -> None: + request = await stream.recv_message() + response = await self.get_one(request) + await stream.send_message(response) + + async def __rpc_get_some( + self, + stream: "grpclib.server.Stream[DeviceDecommissioningSomeRequest, DeviceDecommissioningSomeResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.get_some, + stream, + request, + ) + + async def __rpc_get_all( + self, + stream: "grpclib.server.Stream[DeviceDecommissioningStreamRequest, DeviceDecommissioningStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.get_all, + stream, + request, + ) + + async def __rpc_subscribe( + self, + stream: "grpclib.server.Stream[DeviceDecommissioningStreamRequest, DeviceDecommissioningStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.subscribe, + stream, + request, + ) + + async def __rpc_get_meta( + self, + stream: "grpclib.server.Stream[DeviceDecommissioningStreamRequest, MetaResponse]", + ) -> None: + request = await stream.recv_message() + response = await self.get_meta(request) + await stream.send_message(response) + + async def __rpc_subscribe_meta( + self, + stream: "grpclib.server.Stream[DeviceDecommissioningStreamRequest, MetaResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.subscribe_meta, + stream, + request, + ) + + async def __rpc_get_all_batched( + self, + stream: "grpclib.server.Stream[DeviceDecommissioningBatchedStreamRequest, DeviceDecommissioningBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.get_all_batched, + stream, + request, + ) + + async def __rpc_subscribe_batched( + self, + stream: 
"grpclib.server.Stream[DeviceDecommissioningBatchedStreamRequest, DeviceDecommissioningBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.subscribe_batched, + stream, + request, + ) + + def __mapping__(self) -> Dict[str, grpclib.const.Handler]: + return { + "/arista.inventory.v1.DeviceDecommissioningService/GetOne": grpclib.const.Handler( + self.__rpc_get_one, + grpclib.const.Cardinality.UNARY_UNARY, + DeviceDecommissioningRequest, + DeviceDecommissioningResponse, + ), + "/arista.inventory.v1.DeviceDecommissioningService/GetSome": grpclib.const.Handler( + self.__rpc_get_some, + grpclib.const.Cardinality.UNARY_STREAM, + DeviceDecommissioningSomeRequest, + DeviceDecommissioningSomeResponse, + ), + "/arista.inventory.v1.DeviceDecommissioningService/GetAll": grpclib.const.Handler( + self.__rpc_get_all, + grpclib.const.Cardinality.UNARY_STREAM, + DeviceDecommissioningStreamRequest, + DeviceDecommissioningStreamResponse, + ), + "/arista.inventory.v1.DeviceDecommissioningService/Subscribe": grpclib.const.Handler( + self.__rpc_subscribe, + grpclib.const.Cardinality.UNARY_STREAM, + DeviceDecommissioningStreamRequest, + DeviceDecommissioningStreamResponse, + ), + "/arista.inventory.v1.DeviceDecommissioningService/GetMeta": grpclib.const.Handler( + self.__rpc_get_meta, + grpclib.const.Cardinality.UNARY_UNARY, + DeviceDecommissioningStreamRequest, + MetaResponse, + ), + "/arista.inventory.v1.DeviceDecommissioningService/SubscribeMeta": grpclib.const.Handler( + self.__rpc_subscribe_meta, + grpclib.const.Cardinality.UNARY_STREAM, + DeviceDecommissioningStreamRequest, + MetaResponse, + ), + "/arista.inventory.v1.DeviceDecommissioningService/GetAllBatched": grpclib.const.Handler( + self.__rpc_get_all_batched, + grpclib.const.Cardinality.UNARY_STREAM, + DeviceDecommissioningBatchedStreamRequest, + DeviceDecommissioningBatchedStreamResponse, + ), + 
"/arista.inventory.v1.DeviceDecommissioningService/SubscribeBatched": grpclib.const.Handler( + self.__rpc_subscribe_batched, + grpclib.const.Cardinality.UNARY_STREAM, + DeviceDecommissioningBatchedStreamRequest, + DeviceDecommissioningBatchedStreamResponse, + ), + } + + +class DeviceDecommissioningConfigServiceBase(ServiceBase): + + async def get_one( + self, + device_decommissioning_config_request: "DeviceDecommissioningConfigRequest", + ) -> "DeviceDecommissioningConfigResponse": + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def get_some( + self, + device_decommissioning_config_some_request: "DeviceDecommissioningConfigSomeRequest", + ) -> AsyncIterator["DeviceDecommissioningConfigSomeResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def get_all( + self, + device_decommissioning_config_stream_request: "DeviceDecommissioningConfigStreamRequest", + ) -> AsyncIterator["DeviceDecommissioningConfigStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def subscribe( + self, + device_decommissioning_config_stream_request: "DeviceDecommissioningConfigStreamRequest", + ) -> AsyncIterator["DeviceDecommissioningConfigStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def get_meta( + self, + device_decommissioning_config_stream_request: "DeviceDecommissioningConfigStreamRequest", + ) -> "MetaResponse": + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def subscribe_meta( + self, + device_decommissioning_config_stream_request: "DeviceDecommissioningConfigStreamRequest", + ) -> AsyncIterator["MetaResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def set( + self, + device_decommissioning_config_set_request: "DeviceDecommissioningConfigSetRequest", + ) -> "DeviceDecommissioningConfigSetResponse": + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def set_some( + self, + 
device_decommissioning_config_set_some_request: "DeviceDecommissioningConfigSetSomeRequest", + ) -> AsyncIterator["DeviceDecommissioningConfigSetSomeResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def delete( + self, + device_decommissioning_config_delete_request: "DeviceDecommissioningConfigDeleteRequest", + ) -> "DeviceDecommissioningConfigDeleteResponse": + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def delete_some( + self, + device_decommissioning_config_delete_some_request: "DeviceDecommissioningConfigDeleteSomeRequest", + ) -> AsyncIterator["DeviceDecommissioningConfigDeleteSomeResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def delete_all( + self, + device_decommissioning_config_delete_all_request: "DeviceDecommissioningConfigDeleteAllRequest", + ) -> AsyncIterator["DeviceDecommissioningConfigDeleteAllResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def get_all_batched( + self, + device_decommissioning_config_batched_stream_request: "DeviceDecommissioningConfigBatchedStreamRequest", + ) -> AsyncIterator["DeviceDecommissioningConfigBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def subscribe_batched( + self, + device_decommissioning_config_batched_stream_request: "DeviceDecommissioningConfigBatchedStreamRequest", + ) -> AsyncIterator["DeviceDecommissioningConfigBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def __rpc_get_one( + self, + stream: "grpclib.server.Stream[DeviceDecommissioningConfigRequest, DeviceDecommissioningConfigResponse]", + ) -> None: + request = await stream.recv_message() + response = await self.get_one(request) + await stream.send_message(response) + + async def __rpc_get_some( + self, + stream: "grpclib.server.Stream[DeviceDecommissioningConfigSomeRequest, DeviceDecommissioningConfigSomeResponse]", + ) -> 
None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.get_some, + stream, + request, + ) + + async def __rpc_get_all( + self, + stream: "grpclib.server.Stream[DeviceDecommissioningConfigStreamRequest, DeviceDecommissioningConfigStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.get_all, + stream, + request, + ) + + async def __rpc_subscribe( + self, + stream: "grpclib.server.Stream[DeviceDecommissioningConfigStreamRequest, DeviceDecommissioningConfigStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.subscribe, + stream, + request, + ) + + async def __rpc_get_meta( + self, + stream: "grpclib.server.Stream[DeviceDecommissioningConfigStreamRequest, MetaResponse]", + ) -> None: + request = await stream.recv_message() + response = await self.get_meta(request) + await stream.send_message(response) + + async def __rpc_subscribe_meta( + self, + stream: "grpclib.server.Stream[DeviceDecommissioningConfigStreamRequest, MetaResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.subscribe_meta, + stream, + request, + ) + + async def __rpc_set( + self, + stream: "grpclib.server.Stream[DeviceDecommissioningConfigSetRequest, DeviceDecommissioningConfigSetResponse]", + ) -> None: + request = await stream.recv_message() + response = await self.set(request) + await stream.send_message(response) + + async def __rpc_set_some( + self, + stream: "grpclib.server.Stream[DeviceDecommissioningConfigSetSomeRequest, DeviceDecommissioningConfigSetSomeResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.set_some, + stream, + request, + ) + + async def __rpc_delete( + self, + stream: "grpclib.server.Stream[DeviceDecommissioningConfigDeleteRequest, 
DeviceDecommissioningConfigDeleteResponse]", + ) -> None: + request = await stream.recv_message() + response = await self.delete(request) + await stream.send_message(response) + + async def __rpc_delete_some( + self, + stream: "grpclib.server.Stream[DeviceDecommissioningConfigDeleteSomeRequest, DeviceDecommissioningConfigDeleteSomeResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.delete_some, + stream, + request, + ) + + async def __rpc_delete_all( + self, + stream: "grpclib.server.Stream[DeviceDecommissioningConfigDeleteAllRequest, DeviceDecommissioningConfigDeleteAllResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.delete_all, + stream, + request, + ) + + async def __rpc_get_all_batched( + self, + stream: "grpclib.server.Stream[DeviceDecommissioningConfigBatchedStreamRequest, DeviceDecommissioningConfigBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.get_all_batched, + stream, + request, + ) + + async def __rpc_subscribe_batched( + self, + stream: "grpclib.server.Stream[DeviceDecommissioningConfigBatchedStreamRequest, DeviceDecommissioningConfigBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.subscribe_batched, + stream, + request, + ) + + def __mapping__(self) -> Dict[str, grpclib.const.Handler]: + return { + "/arista.inventory.v1.DeviceDecommissioningConfigService/GetOne": grpclib.const.Handler( + self.__rpc_get_one, + grpclib.const.Cardinality.UNARY_UNARY, + DeviceDecommissioningConfigRequest, + DeviceDecommissioningConfigResponse, + ), + "/arista.inventory.v1.DeviceDecommissioningConfigService/GetSome": grpclib.const.Handler( + self.__rpc_get_some, + grpclib.const.Cardinality.UNARY_STREAM, + DeviceDecommissioningConfigSomeRequest, + 
DeviceDecommissioningConfigSomeResponse, + ), + "/arista.inventory.v1.DeviceDecommissioningConfigService/GetAll": grpclib.const.Handler( + self.__rpc_get_all, + grpclib.const.Cardinality.UNARY_STREAM, + DeviceDecommissioningConfigStreamRequest, + DeviceDecommissioningConfigStreamResponse, + ), + "/arista.inventory.v1.DeviceDecommissioningConfigService/Subscribe": grpclib.const.Handler( + self.__rpc_subscribe, + grpclib.const.Cardinality.UNARY_STREAM, + DeviceDecommissioningConfigStreamRequest, + DeviceDecommissioningConfigStreamResponse, + ), + "/arista.inventory.v1.DeviceDecommissioningConfigService/GetMeta": grpclib.const.Handler( + self.__rpc_get_meta, + grpclib.const.Cardinality.UNARY_UNARY, + DeviceDecommissioningConfigStreamRequest, + MetaResponse, + ), + "/arista.inventory.v1.DeviceDecommissioningConfigService/SubscribeMeta": grpclib.const.Handler( + self.__rpc_subscribe_meta, + grpclib.const.Cardinality.UNARY_STREAM, + DeviceDecommissioningConfigStreamRequest, + MetaResponse, + ), + "/arista.inventory.v1.DeviceDecommissioningConfigService/Set": grpclib.const.Handler( + self.__rpc_set, + grpclib.const.Cardinality.UNARY_UNARY, + DeviceDecommissioningConfigSetRequest, + DeviceDecommissioningConfigSetResponse, + ), + "/arista.inventory.v1.DeviceDecommissioningConfigService/SetSome": grpclib.const.Handler( + self.__rpc_set_some, + grpclib.const.Cardinality.UNARY_STREAM, + DeviceDecommissioningConfigSetSomeRequest, + DeviceDecommissioningConfigSetSomeResponse, + ), + "/arista.inventory.v1.DeviceDecommissioningConfigService/Delete": grpclib.const.Handler( + self.__rpc_delete, + grpclib.const.Cardinality.UNARY_UNARY, + DeviceDecommissioningConfigDeleteRequest, + DeviceDecommissioningConfigDeleteResponse, + ), + "/arista.inventory.v1.DeviceDecommissioningConfigService/DeleteSome": grpclib.const.Handler( + self.__rpc_delete_some, + grpclib.const.Cardinality.UNARY_STREAM, + DeviceDecommissioningConfigDeleteSomeRequest, + DeviceDecommissioningConfigDeleteSomeResponse, 
+ ), + "/arista.inventory.v1.DeviceDecommissioningConfigService/DeleteAll": grpclib.const.Handler( + self.__rpc_delete_all, + grpclib.const.Cardinality.UNARY_STREAM, + DeviceDecommissioningConfigDeleteAllRequest, + DeviceDecommissioningConfigDeleteAllResponse, + ), + "/arista.inventory.v1.DeviceDecommissioningConfigService/GetAllBatched": grpclib.const.Handler( + self.__rpc_get_all_batched, + grpclib.const.Cardinality.UNARY_STREAM, + DeviceDecommissioningConfigBatchedStreamRequest, + DeviceDecommissioningConfigBatchedStreamResponse, + ), + "/arista.inventory.v1.DeviceDecommissioningConfigService/SubscribeBatched": grpclib.const.Handler( + self.__rpc_subscribe_batched, + grpclib.const.Cardinality.UNARY_STREAM, + DeviceDecommissioningConfigBatchedStreamRequest, + DeviceDecommissioningConfigBatchedStreamResponse, + ), + } + + +class DeviceOnboardingServiceBase(ServiceBase): + + async def get_one( + self, device_onboarding_request: "DeviceOnboardingRequest" + ) -> "DeviceOnboardingResponse": + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def get_some( + self, device_onboarding_some_request: "DeviceOnboardingSomeRequest" + ) -> AsyncIterator["DeviceOnboardingSomeResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def get_all( + self, device_onboarding_stream_request: "DeviceOnboardingStreamRequest" + ) -> AsyncIterator["DeviceOnboardingStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def subscribe( + self, device_onboarding_stream_request: "DeviceOnboardingStreamRequest" + ) -> AsyncIterator["DeviceOnboardingStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def get_meta( + self, device_onboarding_stream_request: "DeviceOnboardingStreamRequest" + ) -> "MetaResponse": + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def subscribe_meta( + self, device_onboarding_stream_request: "DeviceOnboardingStreamRequest" + ) 
-> AsyncIterator["MetaResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def get_all_batched( + self, + device_onboarding_batched_stream_request: "DeviceOnboardingBatchedStreamRequest", + ) -> AsyncIterator["DeviceOnboardingBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def subscribe_batched( + self, + device_onboarding_batched_stream_request: "DeviceOnboardingBatchedStreamRequest", + ) -> AsyncIterator["DeviceOnboardingBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def __rpc_get_one( + self, + stream: "grpclib.server.Stream[DeviceOnboardingRequest, DeviceOnboardingResponse]", + ) -> None: + request = await stream.recv_message() + response = await self.get_one(request) + await stream.send_message(response) + + async def __rpc_get_some( + self, + stream: "grpclib.server.Stream[DeviceOnboardingSomeRequest, DeviceOnboardingSomeResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.get_some, + stream, + request, + ) + + async def __rpc_get_all( + self, + stream: "grpclib.server.Stream[DeviceOnboardingStreamRequest, DeviceOnboardingStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.get_all, + stream, + request, + ) + + async def __rpc_subscribe( + self, + stream: "grpclib.server.Stream[DeviceOnboardingStreamRequest, DeviceOnboardingStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.subscribe, + stream, + request, + ) + + async def __rpc_get_meta( + self, + stream: "grpclib.server.Stream[DeviceOnboardingStreamRequest, MetaResponse]", + ) -> None: + request = await stream.recv_message() + response = await self.get_meta(request) + await stream.send_message(response) + + async def __rpc_subscribe_meta( + self, + stream: 
"grpclib.server.Stream[DeviceOnboardingStreamRequest, MetaResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.subscribe_meta, + stream, + request, + ) + + async def __rpc_get_all_batched( + self, + stream: "grpclib.server.Stream[DeviceOnboardingBatchedStreamRequest, DeviceOnboardingBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.get_all_batched, + stream, + request, + ) + + async def __rpc_subscribe_batched( + self, + stream: "grpclib.server.Stream[DeviceOnboardingBatchedStreamRequest, DeviceOnboardingBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.subscribe_batched, + stream, + request, + ) + + def __mapping__(self) -> Dict[str, grpclib.const.Handler]: + return { + "/arista.inventory.v1.DeviceOnboardingService/GetOne": grpclib.const.Handler( + self.__rpc_get_one, + grpclib.const.Cardinality.UNARY_UNARY, + DeviceOnboardingRequest, + DeviceOnboardingResponse, + ), + "/arista.inventory.v1.DeviceOnboardingService/GetSome": grpclib.const.Handler( + self.__rpc_get_some, + grpclib.const.Cardinality.UNARY_STREAM, + DeviceOnboardingSomeRequest, + DeviceOnboardingSomeResponse, + ), + "/arista.inventory.v1.DeviceOnboardingService/GetAll": grpclib.const.Handler( + self.__rpc_get_all, + grpclib.const.Cardinality.UNARY_STREAM, + DeviceOnboardingStreamRequest, + DeviceOnboardingStreamResponse, + ), + "/arista.inventory.v1.DeviceOnboardingService/Subscribe": grpclib.const.Handler( + self.__rpc_subscribe, + grpclib.const.Cardinality.UNARY_STREAM, + DeviceOnboardingStreamRequest, + DeviceOnboardingStreamResponse, + ), + "/arista.inventory.v1.DeviceOnboardingService/GetMeta": grpclib.const.Handler( + self.__rpc_get_meta, + grpclib.const.Cardinality.UNARY_UNARY, + DeviceOnboardingStreamRequest, + MetaResponse, + ), + 
"/arista.inventory.v1.DeviceOnboardingService/SubscribeMeta": grpclib.const.Handler( + self.__rpc_subscribe_meta, + grpclib.const.Cardinality.UNARY_STREAM, + DeviceOnboardingStreamRequest, + MetaResponse, + ), + "/arista.inventory.v1.DeviceOnboardingService/GetAllBatched": grpclib.const.Handler( + self.__rpc_get_all_batched, + grpclib.const.Cardinality.UNARY_STREAM, + DeviceOnboardingBatchedStreamRequest, + DeviceOnboardingBatchedStreamResponse, + ), + "/arista.inventory.v1.DeviceOnboardingService/SubscribeBatched": grpclib.const.Handler( + self.__rpc_subscribe_batched, + grpclib.const.Cardinality.UNARY_STREAM, + DeviceOnboardingBatchedStreamRequest, + DeviceOnboardingBatchedStreamResponse, + ), + } + + +class DeviceOnboardingConfigServiceBase(ServiceBase): + + async def get_one( + self, device_onboarding_config_request: "DeviceOnboardingConfigRequest" + ) -> "DeviceOnboardingConfigResponse": + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def get_some( + self, device_onboarding_config_some_request: "DeviceOnboardingConfigSomeRequest" + ) -> AsyncIterator["DeviceOnboardingConfigSomeResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def get_all( + self, + device_onboarding_config_stream_request: "DeviceOnboardingConfigStreamRequest", + ) -> AsyncIterator["DeviceOnboardingConfigStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def subscribe( + self, + device_onboarding_config_stream_request: "DeviceOnboardingConfigStreamRequest", + ) -> AsyncIterator["DeviceOnboardingConfigStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def get_meta( + self, + device_onboarding_config_stream_request: "DeviceOnboardingConfigStreamRequest", + ) -> "MetaResponse": + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def subscribe_meta( + self, + device_onboarding_config_stream_request: "DeviceOnboardingConfigStreamRequest", + ) -> 
AsyncIterator["MetaResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def set( + self, device_onboarding_config_set_request: "DeviceOnboardingConfigSetRequest" + ) -> "DeviceOnboardingConfigSetResponse": + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def set_some( + self, + device_onboarding_config_set_some_request: "DeviceOnboardingConfigSetSomeRequest", + ) -> AsyncIterator["DeviceOnboardingConfigSetSomeResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def delete( + self, + device_onboarding_config_delete_request: "DeviceOnboardingConfigDeleteRequest", + ) -> "DeviceOnboardingConfigDeleteResponse": + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def delete_some( + self, + device_onboarding_config_delete_some_request: "DeviceOnboardingConfigDeleteSomeRequest", + ) -> AsyncIterator["DeviceOnboardingConfigDeleteSomeResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def delete_all( + self, + device_onboarding_config_delete_all_request: "DeviceOnboardingConfigDeleteAllRequest", + ) -> AsyncIterator["DeviceOnboardingConfigDeleteAllResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def get_all_batched( + self, + device_onboarding_config_batched_stream_request: "DeviceOnboardingConfigBatchedStreamRequest", + ) -> AsyncIterator["DeviceOnboardingConfigBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def subscribe_batched( + self, + device_onboarding_config_batched_stream_request: "DeviceOnboardingConfigBatchedStreamRequest", + ) -> AsyncIterator["DeviceOnboardingConfigBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def __rpc_get_one( + self, + stream: "grpclib.server.Stream[DeviceOnboardingConfigRequest, DeviceOnboardingConfigResponse]", + ) -> None: + request = await stream.recv_message() + 
response = await self.get_one(request) + await stream.send_message(response) + + async def __rpc_get_some( + self, + stream: "grpclib.server.Stream[DeviceOnboardingConfigSomeRequest, DeviceOnboardingConfigSomeResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.get_some, + stream, + request, + ) + + async def __rpc_get_all( + self, + stream: "grpclib.server.Stream[DeviceOnboardingConfigStreamRequest, DeviceOnboardingConfigStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.get_all, + stream, + request, + ) + + async def __rpc_subscribe( + self, + stream: "grpclib.server.Stream[DeviceOnboardingConfigStreamRequest, DeviceOnboardingConfigStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.subscribe, + stream, + request, + ) + + async def __rpc_get_meta( + self, + stream: "grpclib.server.Stream[DeviceOnboardingConfigStreamRequest, MetaResponse]", + ) -> None: + request = await stream.recv_message() + response = await self.get_meta(request) + await stream.send_message(response) + + async def __rpc_subscribe_meta( + self, + stream: "grpclib.server.Stream[DeviceOnboardingConfigStreamRequest, MetaResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.subscribe_meta, + stream, + request, + ) + + async def __rpc_set( + self, + stream: "grpclib.server.Stream[DeviceOnboardingConfigSetRequest, DeviceOnboardingConfigSetResponse]", + ) -> None: + request = await stream.recv_message() + response = await self.set(request) + await stream.send_message(response) + + async def __rpc_set_some( + self, + stream: "grpclib.server.Stream[DeviceOnboardingConfigSetSomeRequest, DeviceOnboardingConfigSetSomeResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + 
self.set_some, + stream, + request, + ) + + async def __rpc_delete( + self, + stream: "grpclib.server.Stream[DeviceOnboardingConfigDeleteRequest, DeviceOnboardingConfigDeleteResponse]", + ) -> None: + request = await stream.recv_message() + response = await self.delete(request) + await stream.send_message(response) + + async def __rpc_delete_some( + self, + stream: "grpclib.server.Stream[DeviceOnboardingConfigDeleteSomeRequest, DeviceOnboardingConfigDeleteSomeResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.delete_some, + stream, + request, + ) + + async def __rpc_delete_all( + self, + stream: "grpclib.server.Stream[DeviceOnboardingConfigDeleteAllRequest, DeviceOnboardingConfigDeleteAllResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.delete_all, + stream, + request, + ) + + async def __rpc_get_all_batched( + self, + stream: "grpclib.server.Stream[DeviceOnboardingConfigBatchedStreamRequest, DeviceOnboardingConfigBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.get_all_batched, + stream, + request, + ) + + async def __rpc_subscribe_batched( + self, + stream: "grpclib.server.Stream[DeviceOnboardingConfigBatchedStreamRequest, DeviceOnboardingConfigBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.subscribe_batched, + stream, + request, + ) + + def __mapping__(self) -> Dict[str, grpclib.const.Handler]: + return { + "/arista.inventory.v1.DeviceOnboardingConfigService/GetOne": grpclib.const.Handler( + self.__rpc_get_one, + grpclib.const.Cardinality.UNARY_UNARY, + DeviceOnboardingConfigRequest, + DeviceOnboardingConfigResponse, + ), + "/arista.inventory.v1.DeviceOnboardingConfigService/GetSome": grpclib.const.Handler( + self.__rpc_get_some, + 
grpclib.const.Cardinality.UNARY_STREAM, + DeviceOnboardingConfigSomeRequest, + DeviceOnboardingConfigSomeResponse, + ), + "/arista.inventory.v1.DeviceOnboardingConfigService/GetAll": grpclib.const.Handler( + self.__rpc_get_all, + grpclib.const.Cardinality.UNARY_STREAM, + DeviceOnboardingConfigStreamRequest, + DeviceOnboardingConfigStreamResponse, + ), + "/arista.inventory.v1.DeviceOnboardingConfigService/Subscribe": grpclib.const.Handler( + self.__rpc_subscribe, + grpclib.const.Cardinality.UNARY_STREAM, + DeviceOnboardingConfigStreamRequest, + DeviceOnboardingConfigStreamResponse, + ), + "/arista.inventory.v1.DeviceOnboardingConfigService/GetMeta": grpclib.const.Handler( + self.__rpc_get_meta, + grpclib.const.Cardinality.UNARY_UNARY, + DeviceOnboardingConfigStreamRequest, + MetaResponse, + ), + "/arista.inventory.v1.DeviceOnboardingConfigService/SubscribeMeta": grpclib.const.Handler( + self.__rpc_subscribe_meta, + grpclib.const.Cardinality.UNARY_STREAM, + DeviceOnboardingConfigStreamRequest, + MetaResponse, + ), + "/arista.inventory.v1.DeviceOnboardingConfigService/Set": grpclib.const.Handler( + self.__rpc_set, + grpclib.const.Cardinality.UNARY_UNARY, + DeviceOnboardingConfigSetRequest, + DeviceOnboardingConfigSetResponse, + ), + "/arista.inventory.v1.DeviceOnboardingConfigService/SetSome": grpclib.const.Handler( + self.__rpc_set_some, + grpclib.const.Cardinality.UNARY_STREAM, + DeviceOnboardingConfigSetSomeRequest, + DeviceOnboardingConfigSetSomeResponse, + ), + "/arista.inventory.v1.DeviceOnboardingConfigService/Delete": grpclib.const.Handler( + self.__rpc_delete, + grpclib.const.Cardinality.UNARY_UNARY, + DeviceOnboardingConfigDeleteRequest, + DeviceOnboardingConfigDeleteResponse, + ), + "/arista.inventory.v1.DeviceOnboardingConfigService/DeleteSome": grpclib.const.Handler( + self.__rpc_delete_some, + grpclib.const.Cardinality.UNARY_STREAM, + DeviceOnboardingConfigDeleteSomeRequest, + DeviceOnboardingConfigDeleteSomeResponse, + ), + 
"/arista.inventory.v1.DeviceOnboardingConfigService/DeleteAll": grpclib.const.Handler( + self.__rpc_delete_all, + grpclib.const.Cardinality.UNARY_STREAM, + DeviceOnboardingConfigDeleteAllRequest, + DeviceOnboardingConfigDeleteAllResponse, + ), + "/arista.inventory.v1.DeviceOnboardingConfigService/GetAllBatched": grpclib.const.Handler( + self.__rpc_get_all_batched, + grpclib.const.Cardinality.UNARY_STREAM, + DeviceOnboardingConfigBatchedStreamRequest, + DeviceOnboardingConfigBatchedStreamResponse, + ), + "/arista.inventory.v1.DeviceOnboardingConfigService/SubscribeBatched": grpclib.const.Handler( + self.__rpc_subscribe_batched, + grpclib.const.Cardinality.UNARY_STREAM, + DeviceOnboardingConfigBatchedStreamRequest, + DeviceOnboardingConfigBatchedStreamResponse, + ), + } + + +class ProvisionedDeviceServiceBase(ServiceBase): + + async def get_one( + self, provisioned_device_request: "ProvisionedDeviceRequest" + ) -> "ProvisionedDeviceResponse": + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def get_some( + self, provisioned_device_some_request: "ProvisionedDeviceSomeRequest" + ) -> AsyncIterator["ProvisionedDeviceSomeResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def get_all( + self, provisioned_device_stream_request: "ProvisionedDeviceStreamRequest" + ) -> AsyncIterator["ProvisionedDeviceStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def subscribe( + self, provisioned_device_stream_request: "ProvisionedDeviceStreamRequest" + ) -> AsyncIterator["ProvisionedDeviceStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def get_meta( + self, provisioned_device_stream_request: "ProvisionedDeviceStreamRequest" + ) -> "MetaResponse": + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def subscribe_meta( + self, provisioned_device_stream_request: "ProvisionedDeviceStreamRequest" + ) -> AsyncIterator["MetaResponse"]: + 
raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def get_all_batched( + self, + provisioned_device_batched_stream_request: "ProvisionedDeviceBatchedStreamRequest", + ) -> AsyncIterator["ProvisionedDeviceBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def subscribe_batched( + self, + provisioned_device_batched_stream_request: "ProvisionedDeviceBatchedStreamRequest", + ) -> AsyncIterator["ProvisionedDeviceBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def __rpc_get_one( + self, + stream: "grpclib.server.Stream[ProvisionedDeviceRequest, ProvisionedDeviceResponse]", + ) -> None: + request = await stream.recv_message() + response = await self.get_one(request) + await stream.send_message(response) + + async def __rpc_get_some( + self, + stream: "grpclib.server.Stream[ProvisionedDeviceSomeRequest, ProvisionedDeviceSomeResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.get_some, + stream, + request, + ) + + async def __rpc_get_all( + self, + stream: "grpclib.server.Stream[ProvisionedDeviceStreamRequest, ProvisionedDeviceStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.get_all, + stream, + request, + ) + + async def __rpc_subscribe( + self, + stream: "grpclib.server.Stream[ProvisionedDeviceStreamRequest, ProvisionedDeviceStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.subscribe, + stream, + request, + ) + + async def __rpc_get_meta( + self, + stream: "grpclib.server.Stream[ProvisionedDeviceStreamRequest, MetaResponse]", + ) -> None: + request = await stream.recv_message() + response = await self.get_meta(request) + await stream.send_message(response) + + async def __rpc_subscribe_meta( + self, + stream: 
"grpclib.server.Stream[ProvisionedDeviceStreamRequest, MetaResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.subscribe_meta, + stream, + request, + ) + + async def __rpc_get_all_batched( + self, + stream: "grpclib.server.Stream[ProvisionedDeviceBatchedStreamRequest, ProvisionedDeviceBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.get_all_batched, + stream, + request, + ) + + async def __rpc_subscribe_batched( + self, + stream: "grpclib.server.Stream[ProvisionedDeviceBatchedStreamRequest, ProvisionedDeviceBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.subscribe_batched, + stream, + request, + ) + + def __mapping__(self) -> Dict[str, grpclib.const.Handler]: + return { + "/arista.inventory.v1.ProvisionedDeviceService/GetOne": grpclib.const.Handler( + self.__rpc_get_one, + grpclib.const.Cardinality.UNARY_UNARY, + ProvisionedDeviceRequest, + ProvisionedDeviceResponse, + ), + "/arista.inventory.v1.ProvisionedDeviceService/GetSome": grpclib.const.Handler( + self.__rpc_get_some, + grpclib.const.Cardinality.UNARY_STREAM, + ProvisionedDeviceSomeRequest, + ProvisionedDeviceSomeResponse, + ), + "/arista.inventory.v1.ProvisionedDeviceService/GetAll": grpclib.const.Handler( + self.__rpc_get_all, + grpclib.const.Cardinality.UNARY_STREAM, + ProvisionedDeviceStreamRequest, + ProvisionedDeviceStreamResponse, + ), + "/arista.inventory.v1.ProvisionedDeviceService/Subscribe": grpclib.const.Handler( + self.__rpc_subscribe, + grpclib.const.Cardinality.UNARY_STREAM, + ProvisionedDeviceStreamRequest, + ProvisionedDeviceStreamResponse, + ), + "/arista.inventory.v1.ProvisionedDeviceService/GetMeta": grpclib.const.Handler( + self.__rpc_get_meta, + grpclib.const.Cardinality.UNARY_UNARY, + ProvisionedDeviceStreamRequest, + MetaResponse, + ), + 
"/arista.inventory.v1.ProvisionedDeviceService/SubscribeMeta": grpclib.const.Handler( + self.__rpc_subscribe_meta, + grpclib.const.Cardinality.UNARY_STREAM, + ProvisionedDeviceStreamRequest, + MetaResponse, + ), + "/arista.inventory.v1.ProvisionedDeviceService/GetAllBatched": grpclib.const.Handler( + self.__rpc_get_all_batched, + grpclib.const.Cardinality.UNARY_STREAM, + ProvisionedDeviceBatchedStreamRequest, + ProvisionedDeviceBatchedStreamResponse, + ), + "/arista.inventory.v1.ProvisionedDeviceService/SubscribeBatched": grpclib.const.Handler( + self.__rpc_subscribe_batched, + grpclib.const.Cardinality.UNARY_STREAM, + ProvisionedDeviceBatchedStreamRequest, + ProvisionedDeviceBatchedStreamResponse, + ), + } diff --git a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/redirector/__init__.py b/python-avd/pyavd/_cv/api/arista/lifecycle/__init__.py similarity index 100% rename from ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/redirector/__init__.py rename to python-avd/pyavd/_cv/api/arista/lifecycle/__init__.py diff --git a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/lifecycle/v1/__init__.py b/python-avd/pyavd/_cv/api/arista/lifecycle/v1/__init__.py similarity index 97% rename from ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/lifecycle/v1/__init__.py rename to python-avd/pyavd/_cv/api/arista/lifecycle/v1/__init__.py index a99ff5c999a..0c09eb035ca 100644 --- a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/lifecycle/v1/__init__.py +++ b/python-avd/pyavd/_cv/api/arista/lifecycle/v1/__init__.py @@ -16,17 +16,9 @@ Optional, ) -try: - import aristaproto - import grpclib - from aristaproto.grpc.grpclib_server import ServiceBase -except ImportError: - HAS_ARISTAPROTO = False - from ansible_collections.arista.avd.plugins.plugin_utils.cv_client.mocked_classes import mocked_aristaproto as aristaproto - from 
ansible_collections.arista.avd.plugins.plugin_utils.cv_client.mocked_classes import mocked_grpclib as grpclib - ServiceBase = object -else: - HAS_ARISTAPROTO = True +import aristaproto +import grpclib +from aristaproto.grpc.grpclib_server import ServiceBase from .... import fmp as ___fmp__ from ... import ( diff --git a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/serviceaccount/__init__.py b/python-avd/pyavd/_cv/api/arista/redirector/__init__.py similarity index 100% rename from ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/serviceaccount/__init__.py rename to python-avd/pyavd/_cv/api/arista/redirector/__init__.py diff --git a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/redirector/v1/__init__.py b/python-avd/pyavd/_cv/api/arista/redirector/v1/__init__.py similarity index 74% rename from ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/redirector/v1/__init__.py rename to python-avd/pyavd/_cv/api/arista/redirector/v1/__init__.py index 43dc64589da..2d4b69a1662 100644 --- a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/redirector/v1/__init__.py +++ b/python-avd/pyavd/_cv/api/arista/redirector/v1/__init__.py @@ -16,17 +16,9 @@ Optional, ) -try: - import aristaproto - import grpclib - from aristaproto.grpc.grpclib_server import ServiceBase -except ImportError: - HAS_ARISTAPROTO = False - from ansible_collections.arista.avd.plugins.plugin_utils.cv_client.mocked_classes import mocked_aristaproto as aristaproto - from ansible_collections.arista.avd.plugins.plugin_utils.cv_client.mocked_classes import mocked_grpclib as grpclib - ServiceBase = object -else: - HAS_ARISTAPROTO = True +import aristaproto +import grpclib +from aristaproto.grpc.grpclib_server import ServiceBase from .... import fmp as ___fmp__ from ... 
import ( @@ -222,6 +214,57 @@ class AssignmentStreamResponse(aristaproto.Message): """ +@dataclass(eq=False, repr=False) +class AssignmentBatchedStreamRequest(aristaproto.Message): + partial_eq_filter: List["Assignment"] = aristaproto.message_field(1) + """ + PartialEqFilter provides a way to server-side filter a GetAll/Subscribe. + This requires all provided fields to be equal to the response. + + While transparent to users, this field also allows services to optimize internal + subscriptions if filter(s) are sufficiently specific. + """ + + time: "__time__.TimeBounds" = aristaproto.message_field(3) + """ + TimeRange allows limiting response data to within a specified time window. + If this field is populated, at least one of the two time fields are required. + + For GetAll, the fields start and end can be used as follows: + + * end: Returns the state of each Assignment at end. + * Each Assignment response is fully-specified (all fields set). + * start: Returns the state of each Assignment at start, followed by updates until now. + * Each Assignment response at start is fully-specified, but updates may be partial. + * start and end: Returns the state of each Assignment at start, followed by updates + until end. + * Each Assignment response at start is fully-specified, but updates until end may + be partial. + + This field is not allowed in the Subscribe RPC. + """ + + max_messages: Optional[int] = aristaproto.message_field( + 4, wraps=aristaproto.TYPE_UINT32 + ) + """ + MaxMessages limits the maximum number of messages that can be contained in one batch. + MaxMessages is required to be at least 1. + The maximum number of messages in a batch is min(max_messages, INTERNAL_BATCH_LIMIT) + INTERNAL_BATCH_LIMIT is set based on the maximum message size. 
+ """ + + +@dataclass(eq=False, repr=False) +class AssignmentBatchedStreamResponse(aristaproto.Message): + responses: List["AssignmentStreamResponse"] = aristaproto.message_field(1) + """ + Values are the values deemed relevant to the initiating request. + The length of this structure is guaranteed to be between (inclusive) 1 and + min(req.max_messages, INTERNAL_BATCH_LIMIT). + """ + + class AssignmentServiceStub(aristaproto.ServiceStub): async def get_one( self, @@ -329,6 +372,42 @@ async def subscribe_meta( ): yield response + async def get_all_batched( + self, + assignment_batched_stream_request: "AssignmentBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["AssignmentBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.redirector.v1.AssignmentService/GetAllBatched", + assignment_batched_stream_request, + AssignmentBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def subscribe_batched( + self, + assignment_batched_stream_request: "AssignmentBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["AssignmentBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.redirector.v1.AssignmentService/SubscribeBatched", + assignment_batched_stream_request, + AssignmentBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + class AssignmentServiceBase(ServiceBase): @@ -362,6 +441,16 @@ async def subscribe_meta( ) -> AsyncIterator["MetaResponse"]: raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def get_all_batched( + self, assignment_batched_stream_request: "AssignmentBatchedStreamRequest" + ) -> AsyncIterator["AssignmentBatchedStreamResponse"]: + raise 
grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def subscribe_batched( + self, assignment_batched_stream_request: "AssignmentBatchedStreamRequest" + ) -> AsyncIterator["AssignmentBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def __rpc_get_one( self, stream: "grpclib.server.Stream[AssignmentRequest, AssignmentResponse]" ) -> None: @@ -419,6 +508,28 @@ async def __rpc_subscribe_meta( request, ) + async def __rpc_get_all_batched( + self, + stream: "grpclib.server.Stream[AssignmentBatchedStreamRequest, AssignmentBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.get_all_batched, + stream, + request, + ) + + async def __rpc_subscribe_batched( + self, + stream: "grpclib.server.Stream[AssignmentBatchedStreamRequest, AssignmentBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.subscribe_batched, + stream, + request, + ) + def __mapping__(self) -> Dict[str, grpclib.const.Handler]: return { "/arista.redirector.v1.AssignmentService/GetOne": grpclib.const.Handler( @@ -457,4 +568,16 @@ def __mapping__(self) -> Dict[str, grpclib.const.Handler]: AssignmentStreamRequest, MetaResponse, ), + "/arista.redirector.v1.AssignmentService/GetAllBatched": grpclib.const.Handler( + self.__rpc_get_all_batched, + grpclib.const.Cardinality.UNARY_STREAM, + AssignmentBatchedStreamRequest, + AssignmentBatchedStreamResponse, + ), + "/arista.redirector.v1.AssignmentService/SubscribeBatched": grpclib.const.Handler( + self.__rpc_subscribe_batched, + grpclib.const.Cardinality.UNARY_STREAM, + AssignmentBatchedStreamRequest, + AssignmentBatchedStreamResponse, + ), } diff --git a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/studio/__init__.py b/python-avd/pyavd/_cv/api/arista/serviceaccount/__init__.py similarity index 100% rename from 
ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/studio/__init__.py rename to python-avd/pyavd/_cv/api/arista/serviceaccount/__init__.py diff --git a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/serviceaccount/v1/__init__.py b/python-avd/pyavd/_cv/api/arista/serviceaccount/v1/__init__.py similarity index 98% rename from ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/serviceaccount/v1/__init__.py rename to python-avd/pyavd/_cv/api/arista/serviceaccount/v1/__init__.py index e9d72ad55b2..f05dd4f717a 100644 --- a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/serviceaccount/v1/__init__.py +++ b/python-avd/pyavd/_cv/api/arista/serviceaccount/v1/__init__.py @@ -19,17 +19,9 @@ Optional, ) -try: - import aristaproto - import grpclib - from aristaproto.grpc.grpclib_server import ServiceBase -except ImportError: - HAS_ARISTAPROTO = False - from ansible_collections.arista.avd.plugins.plugin_utils.cv_client.mocked_classes import mocked_aristaproto as aristaproto - from ansible_collections.arista.avd.plugins.plugin_utils.cv_client.mocked_classes import mocked_grpclib as grpclib - ServiceBase = object -else: - HAS_ARISTAPROTO = True +import aristaproto +import grpclib +from aristaproto.grpc.grpclib_server import ServiceBase from .... import fmp as ___fmp__ from ... import ( @@ -538,13 +530,21 @@ class AccountConfigDeleteSomeResponse(aristaproto.Message): @dataclass(eq=False, repr=False) class AccountConfigDeleteAllRequest(aristaproto.Message): - pass + partial_eq_filter: List["AccountConfig"] = aristaproto.message_field(1) + """ + PartialEqFilter provides a way to server-side filter a DeleteAll. + This requires all provided fields to be equal to the response. + A filtered DeleteAll will use GetAll with filter to find things to delete. 
+ """ @dataclass(eq=False, repr=False) class AccountConfigDeleteAllResponse(aristaproto.Message): type: "___fmp__.DeleteError" = aristaproto.enum_field(1) - """This describes the class of delete error.""" + """ + This describes the class of delete error. + A DeleteAllResponse is only sent when there is an error. + """ error: Optional[str] = aristaproto.message_field(2, wraps=aristaproto.TYPE_STRING) """This indicates the error message from the delete failure.""" @@ -870,13 +870,21 @@ class TokenConfigDeleteSomeResponse(aristaproto.Message): @dataclass(eq=False, repr=False) class TokenConfigDeleteAllRequest(aristaproto.Message): - pass + partial_eq_filter: List["TokenConfig"] = aristaproto.message_field(1) + """ + PartialEqFilter provides a way to server-side filter a DeleteAll. + This requires all provided fields to be equal to the response. + A filtered DeleteAll will use GetAll with filter to find things to delete. + """ @dataclass(eq=False, repr=False) class TokenConfigDeleteAllResponse(aristaproto.Message): type: "___fmp__.DeleteError" = aristaproto.enum_field(1) - """This describes the class of delete error.""" + """ + This describes the class of delete error. + A DeleteAllResponse is only sent when there is an error. 
+ """ error: Optional[str] = aristaproto.message_field(2, wraps=aristaproto.TYPE_STRING) """This indicates the error message from the delete failure.""" diff --git a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/swg/__init__.py b/python-avd/pyavd/_cv/api/arista/studio/__init__.py similarity index 100% rename from ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/swg/__init__.py rename to python-avd/pyavd/_cv/api/arista/studio/__init__.py diff --git a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/studio/v1/__init__.py b/python-avd/pyavd/_cv/api/arista/studio/v1/__init__.py similarity index 80% rename from ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/studio/v1/__init__.py rename to python-avd/pyavd/_cv/api/arista/studio/v1/__init__.py index 424ff83cc58..7901666a037 100644 --- a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/studio/v1/__init__.py +++ b/python-avd/pyavd/_cv/api/arista/studio/v1/__init__.py @@ -16,17 +16,9 @@ Optional, ) -try: - import aristaproto - import grpclib - from aristaproto.grpc.grpclib_server import ServiceBase -except ImportError: - HAS_ARISTAPROTO = False - from ansible_collections.arista.avd.plugins.plugin_utils.cv_client.mocked_classes import mocked_aristaproto as aristaproto - from ansible_collections.arista.avd.plugins.plugin_utils.cv_client.mocked_classes import mocked_grpclib as grpclib - ServiceBase = object -else: - HAS_ARISTAPROTO = True +import aristaproto +import grpclib +from aristaproto.grpc.grpclib_server import ServiceBase from .... import fmp as ___fmp__ from ... import ( @@ -1450,6 +1442,57 @@ class AssignedTagsStreamResponse(aristaproto.Message): """ +@dataclass(eq=False, repr=False) +class AssignedTagsBatchedStreamRequest(aristaproto.Message): + partial_eq_filter: List["AssignedTags"] = aristaproto.message_field(1) + """ + PartialEqFilter provides a way to server-side filter a GetAll/Subscribe. 
+ This requires all provided fields to be equal to the response. + + While transparent to users, this field also allows services to optimize internal + subscriptions if filter(s) are sufficiently specific. + """ + + time: "__time__.TimeBounds" = aristaproto.message_field(3) + """ + TimeRange allows limiting response data to within a specified time window. + If this field is populated, at least one of the two time fields are required. + + For GetAll, the fields start and end can be used as follows: + + * end: Returns the state of each AssignedTags at end. + * Each AssignedTags response is fully-specified (all fields set). + * start: Returns the state of each AssignedTags at start, followed by updates until now. + * Each AssignedTags response at start is fully-specified, but updates may be partial. + * start and end: Returns the state of each AssignedTags at start, followed by updates + until end. + * Each AssignedTags response at start is fully-specified, but updates until end may + be partial. + + This field is not allowed in the Subscribe RPC. + """ + + max_messages: Optional[int] = aristaproto.message_field( + 4, wraps=aristaproto.TYPE_UINT32 + ) + """ + MaxMessages limits the maximum number of messages that can be contained in one batch. + MaxMessages is required to be at least 1. + The maximum number of messages in a batch is min(max_messages, INTERNAL_BATCH_LIMIT) + INTERNAL_BATCH_LIMIT is set based on the maximum message size. + """ + + +@dataclass(eq=False, repr=False) +class AssignedTagsBatchedStreamResponse(aristaproto.Message): + responses: List["AssignedTagsStreamResponse"] = aristaproto.message_field(1) + """ + Values are the values deemed relevant to the initiating request. + The length of this structure is guaranteed to be between (inclusive) 1 and + min(req.max_messages, INTERNAL_BATCH_LIMIT). 
+ """ + + @dataclass(eq=False, repr=False) class AssignedTagsConfigRequest(aristaproto.Message): key: "StudioKey" = aristaproto.message_field(1) @@ -1565,6 +1608,57 @@ class AssignedTagsConfigStreamResponse(aristaproto.Message): """ +@dataclass(eq=False, repr=False) +class AssignedTagsConfigBatchedStreamRequest(aristaproto.Message): + partial_eq_filter: List["AssignedTagsConfig"] = aristaproto.message_field(1) + """ + PartialEqFilter provides a way to server-side filter a GetAll/Subscribe. + This requires all provided fields to be equal to the response. + + While transparent to users, this field also allows services to optimize internal + subscriptions if filter(s) are sufficiently specific. + """ + + time: "__time__.TimeBounds" = aristaproto.message_field(3) + """ + TimeRange allows limiting response data to within a specified time window. + If this field is populated, at least one of the two time fields are required. + + For GetAll, the fields start and end can be used as follows: + + * end: Returns the state of each AssignedTagsConfig at end. + * Each AssignedTagsConfig response is fully-specified (all fields set). + * start: Returns the state of each AssignedTagsConfig at start, followed by updates until now. + * Each AssignedTagsConfig response at start is fully-specified, but updates may be partial. + * start and end: Returns the state of each AssignedTagsConfig at start, followed by updates + until end. + * Each AssignedTagsConfig response at start is fully-specified, but updates until end may + be partial. + + This field is not allowed in the Subscribe RPC. + """ + + max_messages: Optional[int] = aristaproto.message_field( + 4, wraps=aristaproto.TYPE_UINT32 + ) + """ + MaxMessages limits the maximum number of messages that can be contained in one batch. + MaxMessages is required to be at least 1. + The maximum number of messages in a batch is min(max_messages, INTERNAL_BATCH_LIMIT) + INTERNAL_BATCH_LIMIT is set based on the maximum message size. 
+ """ + + +@dataclass(eq=False, repr=False) +class AssignedTagsConfigBatchedStreamResponse(aristaproto.Message): + responses: List["AssignedTagsConfigStreamResponse"] = aristaproto.message_field(1) + """ + Values are the values deemed relevant to the initiating request. + The length of this structure is guaranteed to be between (inclusive) 1 and + min(req.max_messages, INTERNAL_BATCH_LIMIT). + """ + + @dataclass(eq=False, repr=False) class AssignedTagsConfigSetRequest(aristaproto.Message): value: "AssignedTagsConfig" = aristaproto.message_field(1) @@ -1794,6 +1888,57 @@ class AutofillActionStreamResponse(aristaproto.Message): """ +@dataclass(eq=False, repr=False) +class AutofillActionBatchedStreamRequest(aristaproto.Message): + partial_eq_filter: List["AutofillAction"] = aristaproto.message_field(1) + """ + PartialEqFilter provides a way to server-side filter a GetAll/Subscribe. + This requires all provided fields to be equal to the response. + + While transparent to users, this field also allows services to optimize internal + subscriptions if filter(s) are sufficiently specific. + """ + + time: "__time__.TimeBounds" = aristaproto.message_field(3) + """ + TimeRange allows limiting response data to within a specified time window. + If this field is populated, at least one of the two time fields are required. + + For GetAll, the fields start and end can be used as follows: + + * end: Returns the state of each AutofillAction at end. + * Each AutofillAction response is fully-specified (all fields set). + * start: Returns the state of each AutofillAction at start, followed by updates until now. + * Each AutofillAction response at start is fully-specified, but updates may be partial. + * start and end: Returns the state of each AutofillAction at start, followed by updates + until end. + * Each AutofillAction response at start is fully-specified, but updates until end may + be partial. + + This field is not allowed in the Subscribe RPC. 
+ """ + + max_messages: Optional[int] = aristaproto.message_field( + 4, wraps=aristaproto.TYPE_UINT32 + ) + """ + MaxMessages limits the maximum number of messages that can be contained in one batch. + MaxMessages is required to be at least 1. + The maximum number of messages in a batch is min(max_messages, INTERNAL_BATCH_LIMIT) + INTERNAL_BATCH_LIMIT is set based on the maximum message size. + """ + + +@dataclass(eq=False, repr=False) +class AutofillActionBatchedStreamResponse(aristaproto.Message): + responses: List["AutofillActionStreamResponse"] = aristaproto.message_field(1) + """ + Values are the values deemed relevant to the initiating request. + The length of this structure is guaranteed to be between (inclusive) 1 and + min(req.max_messages, INTERNAL_BATCH_LIMIT). + """ + + @dataclass(eq=False, repr=False) class AutofillActionConfigRequest(aristaproto.Message): key: "AutofillActionKey" = aristaproto.message_field(1) @@ -1909,6 +2054,57 @@ class AutofillActionConfigStreamResponse(aristaproto.Message): """ +@dataclass(eq=False, repr=False) +class AutofillActionConfigBatchedStreamRequest(aristaproto.Message): + partial_eq_filter: List["AutofillActionConfig"] = aristaproto.message_field(1) + """ + PartialEqFilter provides a way to server-side filter a GetAll/Subscribe. + This requires all provided fields to be equal to the response. + + While transparent to users, this field also allows services to optimize internal + subscriptions if filter(s) are sufficiently specific. + """ + + time: "__time__.TimeBounds" = aristaproto.message_field(3) + """ + TimeRange allows limiting response data to within a specified time window. + If this field is populated, at least one of the two time fields are required. + + For GetAll, the fields start and end can be used as follows: + + * end: Returns the state of each AutofillActionConfig at end. + * Each AutofillActionConfig response is fully-specified (all fields set). 
+ * start: Returns the state of each AutofillActionConfig at start, followed by updates until now. + * Each AutofillActionConfig response at start is fully-specified, but updates may be partial. + * start and end: Returns the state of each AutofillActionConfig at start, followed by updates + until end. + * Each AutofillActionConfig response at start is fully-specified, but updates until end may + be partial. + + This field is not allowed in the Subscribe RPC. + """ + + max_messages: Optional[int] = aristaproto.message_field( + 4, wraps=aristaproto.TYPE_UINT32 + ) + """ + MaxMessages limits the maximum number of messages that can be contained in one batch. + MaxMessages is required to be at least 1. + The maximum number of messages in a batch is min(max_messages, INTERNAL_BATCH_LIMIT) + INTERNAL_BATCH_LIMIT is set based on the maximum message size. + """ + + +@dataclass(eq=False, repr=False) +class AutofillActionConfigBatchedStreamResponse(aristaproto.Message): + responses: List["AutofillActionConfigStreamResponse"] = aristaproto.message_field(1) + """ + Values are the values deemed relevant to the initiating request. + The length of this structure is guaranteed to be between (inclusive) 1 and + min(req.max_messages, INTERNAL_BATCH_LIMIT). + """ + + @dataclass(eq=False, repr=False) class AutofillActionConfigSetRequest(aristaproto.Message): value: "AutofillActionConfig" = aristaproto.message_field(1) @@ -2140,6 +2336,57 @@ class InputsStreamResponse(aristaproto.Message): """ +@dataclass(eq=False, repr=False) +class InputsBatchedStreamRequest(aristaproto.Message): + partial_eq_filter: List["Inputs"] = aristaproto.message_field(1) + """ + PartialEqFilter provides a way to server-side filter a GetAll/Subscribe. + This requires all provided fields to be equal to the response. + + While transparent to users, this field also allows services to optimize internal + subscriptions if filter(s) are sufficiently specific. 
+ """ + + time: "__time__.TimeBounds" = aristaproto.message_field(3) + """ + TimeRange allows limiting response data to within a specified time window. + If this field is populated, at least one of the two time fields are required. + + For GetAll, the fields start and end can be used as follows: + + * end: Returns the state of each Inputs at end. + * Each Inputs response is fully-specified (all fields set). + * start: Returns the state of each Inputs at start, followed by updates until now. + * Each Inputs response at start is fully-specified, but updates may be partial. + * start and end: Returns the state of each Inputs at start, followed by updates + until end. + * Each Inputs response at start is fully-specified, but updates until end may + be partial. + + This field is not allowed in the Subscribe RPC. + """ + + max_messages: Optional[int] = aristaproto.message_field( + 4, wraps=aristaproto.TYPE_UINT32 + ) + """ + MaxMessages limits the maximum number of messages that can be contained in one batch. + MaxMessages is required to be at least 1. + The maximum number of messages in a batch is min(max_messages, INTERNAL_BATCH_LIMIT) + INTERNAL_BATCH_LIMIT is set based on the maximum message size. + """ + + +@dataclass(eq=False, repr=False) +class InputsBatchedStreamResponse(aristaproto.Message): + responses: List["InputsStreamResponse"] = aristaproto.message_field(1) + """ + Values are the values deemed relevant to the initiating request. + The length of this structure is guaranteed to be between (inclusive) 1 and + min(req.max_messages, INTERNAL_BATCH_LIMIT). 
+ """ + + @dataclass(eq=False, repr=False) class InputsConfigRequest(aristaproto.Message): key: "InputsKey" = aristaproto.message_field(1) @@ -2253,6 +2500,57 @@ class InputsConfigStreamResponse(aristaproto.Message): """ +@dataclass(eq=False, repr=False) +class InputsConfigBatchedStreamRequest(aristaproto.Message): + partial_eq_filter: List["InputsConfig"] = aristaproto.message_field(1) + """ + PartialEqFilter provides a way to server-side filter a GetAll/Subscribe. + This requires all provided fields to be equal to the response. + + While transparent to users, this field also allows services to optimize internal + subscriptions if filter(s) are sufficiently specific. + """ + + time: "__time__.TimeBounds" = aristaproto.message_field(3) + """ + TimeRange allows limiting response data to within a specified time window. + If this field is populated, at least one of the two time fields are required. + + For GetAll, the fields start and end can be used as follows: + + * end: Returns the state of each InputsConfig at end. + * Each InputsConfig response is fully-specified (all fields set). + * start: Returns the state of each InputsConfig at start, followed by updates until now. + * Each InputsConfig response at start is fully-specified, but updates may be partial. + * start and end: Returns the state of each InputsConfig at start, followed by updates + until end. + * Each InputsConfig response at start is fully-specified, but updates until end may + be partial. + + This field is not allowed in the Subscribe RPC. + """ + + max_messages: Optional[int] = aristaproto.message_field( + 4, wraps=aristaproto.TYPE_UINT32 + ) + """ + MaxMessages limits the maximum number of messages that can be contained in one batch. + MaxMessages is required to be at least 1. + The maximum number of messages in a batch is min(max_messages, INTERNAL_BATCH_LIMIT) + INTERNAL_BATCH_LIMIT is set based on the maximum message size. 
+ """ + + +@dataclass(eq=False, repr=False) +class InputsConfigBatchedStreamResponse(aristaproto.Message): + responses: List["InputsConfigStreamResponse"] = aristaproto.message_field(1) + """ + Values are the values deemed relevant to the initiating request. + The length of this structure is guaranteed to be between (inclusive) 1 and + min(req.max_messages, INTERNAL_BATCH_LIMIT). + """ + + @dataclass(eq=False, repr=False) class InputsConfigSetRequest(aristaproto.Message): value: "InputsConfig" = aristaproto.message_field(1) @@ -2480,6 +2778,57 @@ class SecretInputStreamResponse(aristaproto.Message): """ +@dataclass(eq=False, repr=False) +class SecretInputBatchedStreamRequest(aristaproto.Message): + partial_eq_filter: List["SecretInput"] = aristaproto.message_field(1) + """ + PartialEqFilter provides a way to server-side filter a GetAll/Subscribe. + This requires all provided fields to be equal to the response. + + While transparent to users, this field also allows services to optimize internal + subscriptions if filter(s) are sufficiently specific. + """ + + time: "__time__.TimeBounds" = aristaproto.message_field(3) + """ + TimeRange allows limiting response data to within a specified time window. + If this field is populated, at least one of the two time fields are required. + + For GetAll, the fields start and end can be used as follows: + + * end: Returns the state of each SecretInput at end. + * Each SecretInput response is fully-specified (all fields set). + * start: Returns the state of each SecretInput at start, followed by updates until now. + * Each SecretInput response at start is fully-specified, but updates may be partial. + * start and end: Returns the state of each SecretInput at start, followed by updates + until end. + * Each SecretInput response at start is fully-specified, but updates until end may + be partial. + + This field is not allowed in the Subscribe RPC. 
+ """ + + max_messages: Optional[int] = aristaproto.message_field( + 4, wraps=aristaproto.TYPE_UINT32 + ) + """ + MaxMessages limits the maximum number of messages that can be contained in one batch. + MaxMessages is required to be at least 1. + The maximum number of messages in a batch is min(max_messages, INTERNAL_BATCH_LIMIT) + INTERNAL_BATCH_LIMIT is set based on the maximum message size. + """ + + +@dataclass(eq=False, repr=False) +class SecretInputBatchedStreamResponse(aristaproto.Message): + responses: List["SecretInputStreamResponse"] = aristaproto.message_field(1) + """ + Values are the values deemed relevant to the initiating request. + The length of this structure is guaranteed to be between (inclusive) 1 and + min(req.max_messages, INTERNAL_BATCH_LIMIT). + """ + + @dataclass(eq=False, repr=False) class StudioRequest(aristaproto.Message): key: "StudioKey" = aristaproto.message_field(1) @@ -2593,6 +2942,57 @@ class StudioStreamResponse(aristaproto.Message): """ +@dataclass(eq=False, repr=False) +class StudioBatchedStreamRequest(aristaproto.Message): + partial_eq_filter: List["Studio"] = aristaproto.message_field(1) + """ + PartialEqFilter provides a way to server-side filter a GetAll/Subscribe. + This requires all provided fields to be equal to the response. + + While transparent to users, this field also allows services to optimize internal + subscriptions if filter(s) are sufficiently specific. + """ + + time: "__time__.TimeBounds" = aristaproto.message_field(3) + """ + TimeRange allows limiting response data to within a specified time window. + If this field is populated, at least one of the two time fields are required. + + For GetAll, the fields start and end can be used as follows: + + * end: Returns the state of each Studio at end. + * Each Studio response is fully-specified (all fields set). + * start: Returns the state of each Studio at start, followed by updates until now. 
+ * Each Studio response at start is fully-specified, but updates may be partial. + * start and end: Returns the state of each Studio at start, followed by updates + until end. + * Each Studio response at start is fully-specified, but updates until end may + be partial. + + This field is not allowed in the Subscribe RPC. + """ + + max_messages: Optional[int] = aristaproto.message_field( + 4, wraps=aristaproto.TYPE_UINT32 + ) + """ + MaxMessages limits the maximum number of messages that can be contained in one batch. + MaxMessages is required to be at least 1. + The maximum number of messages in a batch is min(max_messages, INTERNAL_BATCH_LIMIT) + INTERNAL_BATCH_LIMIT is set based on the maximum message size. + """ + + +@dataclass(eq=False, repr=False) +class StudioBatchedStreamResponse(aristaproto.Message): + responses: List["StudioStreamResponse"] = aristaproto.message_field(1) + """ + Values are the values deemed relevant to the initiating request. + The length of this structure is guaranteed to be between (inclusive) 1 and + min(req.max_messages, INTERNAL_BATCH_LIMIT). + """ + + @dataclass(eq=False, repr=False) class StudioConfigRequest(aristaproto.Message): key: "StudioKey" = aristaproto.message_field(1) @@ -2706,6 +3106,57 @@ class StudioConfigStreamResponse(aristaproto.Message): """ +@dataclass(eq=False, repr=False) +class StudioConfigBatchedStreamRequest(aristaproto.Message): + partial_eq_filter: List["StudioConfig"] = aristaproto.message_field(1) + """ + PartialEqFilter provides a way to server-side filter a GetAll/Subscribe. + This requires all provided fields to be equal to the response. + + While transparent to users, this field also allows services to optimize internal + subscriptions if filter(s) are sufficiently specific. + """ + + time: "__time__.TimeBounds" = aristaproto.message_field(3) + """ + TimeRange allows limiting response data to within a specified time window. + If this field is populated, at least one of the two time fields are required. 
+ + For GetAll, the fields start and end can be used as follows: + + * end: Returns the state of each StudioConfig at end. + * Each StudioConfig response is fully-specified (all fields set). + * start: Returns the state of each StudioConfig at start, followed by updates until now. + * Each StudioConfig response at start is fully-specified, but updates may be partial. + * start and end: Returns the state of each StudioConfig at start, followed by updates + until end. + * Each StudioConfig response at start is fully-specified, but updates until end may + be partial. + + This field is not allowed in the Subscribe RPC. + """ + + max_messages: Optional[int] = aristaproto.message_field( + 4, wraps=aristaproto.TYPE_UINT32 + ) + """ + MaxMessages limits the maximum number of messages that can be contained in one batch. + MaxMessages is required to be at least 1. + The maximum number of messages in a batch is min(max_messages, INTERNAL_BATCH_LIMIT) + INTERNAL_BATCH_LIMIT is set based on the maximum message size. + """ + + +@dataclass(eq=False, repr=False) +class StudioConfigBatchedStreamResponse(aristaproto.Message): + responses: List["StudioConfigStreamResponse"] = aristaproto.message_field(1) + """ + Values are the values deemed relevant to the initiating request. + The length of this structure is guaranteed to be between (inclusive) 1 and + min(req.max_messages, INTERNAL_BATCH_LIMIT). + """ + + @dataclass(eq=False, repr=False) class StudioConfigSetRequest(aristaproto.Message): value: "StudioConfig" = aristaproto.message_field(1) @@ -2916,20 +3367,71 @@ class StudioSummaryStreamRequest(aristaproto.Message): class StudioSummaryStreamResponse(aristaproto.Message): value: "StudioSummary" = aristaproto.message_field(1) """ - Value is a value deemed relevant to the initiating request. - This structure will always have its key-field populated. Which other fields are - populated, and why, depends on the value of Operation and what triggered this notification. 
+ Value is a value deemed relevant to the initiating request. + This structure will always have its key-field populated. Which other fields are + populated, and why, depends on the value of Operation and what triggered this notification. + """ + + time: datetime = aristaproto.message_field(2) + """Time holds the timestamp of this StudioSummary's last modification.""" + + type: "__subscriptions__.Operation" = aristaproto.enum_field(3) + """ + Operation indicates how the StudioSummary value in this response should be considered. + Under non-subscribe requests, this value should always be INITIAL. In a subscription, + once all initial data is streamed and the client begins to receive modification updates, + you should not see INITIAL again. + """ + + +@dataclass(eq=False, repr=False) +class StudioSummaryBatchedStreamRequest(aristaproto.Message): + partial_eq_filter: List["StudioSummary"] = aristaproto.message_field(1) + """ + PartialEqFilter provides a way to server-side filter a GetAll/Subscribe. + This requires all provided fields to be equal to the response. + + While transparent to users, this field also allows services to optimize internal + subscriptions if filter(s) are sufficiently specific. + """ + + time: "__time__.TimeBounds" = aristaproto.message_field(3) + """ + TimeRange allows limiting response data to within a specified time window. + If this field is populated, at least one of the two time fields are required. + + For GetAll, the fields start and end can be used as follows: + + * end: Returns the state of each StudioSummary at end. + * Each StudioSummary response is fully-specified (all fields set). + * start: Returns the state of each StudioSummary at start, followed by updates until now. + * Each StudioSummary response at start is fully-specified, but updates may be partial. + * start and end: Returns the state of each StudioSummary at start, followed by updates + until end. 
+ * Each StudioSummary response at start is fully-specified, but updates until end may + be partial. + + This field is not allowed in the Subscribe RPC. + """ + + max_messages: Optional[int] = aristaproto.message_field( + 4, wraps=aristaproto.TYPE_UINT32 + ) + """ + MaxMessages limits the maximum number of messages that can be contained in one batch. + MaxMessages is required to be at least 1. + The maximum number of messages in a batch is min(max_messages, INTERNAL_BATCH_LIMIT) + INTERNAL_BATCH_LIMIT is set based on the maximum message size. """ - time: datetime = aristaproto.message_field(2) - """Time holds the timestamp of this StudioSummary's last modification.""" - type: "__subscriptions__.Operation" = aristaproto.enum_field(3) +@dataclass(eq=False, repr=False) +class StudioSummaryBatchedStreamResponse(aristaproto.Message): + responses: List["StudioSummaryStreamResponse"] = aristaproto.message_field(1) """ - Operation indicates how the StudioSummary value in this response should be considered. - Under non-subscribe requests, this value should always be INITIAL. In a subscription, - once all initial data is streamed and the client begins to receive modification updates, - you should not see INITIAL again. + Values are the values deemed relevant to the initiating request. + The length of this structure is guaranteed to be between (inclusive) 1 and + min(req.max_messages, INTERNAL_BATCH_LIMIT). 
""" @@ -3040,6 +3542,42 @@ async def subscribe_meta( ): yield response + async def get_all_batched( + self, + assigned_tags_batched_stream_request: "AssignedTagsBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["AssignedTagsBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.studio.v1.AssignedTagsService/GetAllBatched", + assigned_tags_batched_stream_request, + AssignedTagsBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def subscribe_batched( + self, + assigned_tags_batched_stream_request: "AssignedTagsBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["AssignedTagsBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.studio.v1.AssignedTagsService/SubscribeBatched", + assigned_tags_batched_stream_request, + AssignedTagsBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + class AssignedTagsConfigServiceStub(aristaproto.ServiceStub): async def get_one( @@ -3236,6 +3774,42 @@ async def delete_all( ): yield response + async def get_all_batched( + self, + assigned_tags_config_batched_stream_request: "AssignedTagsConfigBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["AssignedTagsConfigBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.studio.v1.AssignedTagsConfigService/GetAllBatched", + assigned_tags_config_batched_stream_request, + AssignedTagsConfigBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def subscribe_batched( + self, + 
assigned_tags_config_batched_stream_request: "AssignedTagsConfigBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["AssignedTagsConfigBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.studio.v1.AssignedTagsConfigService/SubscribeBatched", + assigned_tags_config_batched_stream_request, + AssignedTagsConfigBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + class AutofillActionServiceStub(aristaproto.ServiceStub): async def get_one( @@ -3344,6 +3918,42 @@ async def subscribe_meta( ): yield response + async def get_all_batched( + self, + autofill_action_batched_stream_request: "AutofillActionBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["AutofillActionBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.studio.v1.AutofillActionService/GetAllBatched", + autofill_action_batched_stream_request, + AutofillActionBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def subscribe_batched( + self, + autofill_action_batched_stream_request: "AutofillActionBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["AutofillActionBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.studio.v1.AutofillActionService/SubscribeBatched", + autofill_action_batched_stream_request, + AutofillActionBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + class AutofillActionConfigServiceStub(aristaproto.ServiceStub): async def get_one( @@ -3540,6 +4150,42 @@ async def delete_all( ): yield 
response + async def get_all_batched( + self, + autofill_action_config_batched_stream_request: "AutofillActionConfigBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["AutofillActionConfigBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.studio.v1.AutofillActionConfigService/GetAllBatched", + autofill_action_config_batched_stream_request, + AutofillActionConfigBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def subscribe_batched( + self, + autofill_action_config_batched_stream_request: "AutofillActionConfigBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["AutofillActionConfigBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.studio.v1.AutofillActionConfigService/SubscribeBatched", + autofill_action_config_batched_stream_request, + AutofillActionConfigBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + class InputsServiceStub(aristaproto.ServiceStub): async def get_one( @@ -3648,6 +4294,42 @@ async def subscribe_meta( ): yield response + async def get_all_batched( + self, + inputs_batched_stream_request: "InputsBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["InputsBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.studio.v1.InputsService/GetAllBatched", + inputs_batched_stream_request, + InputsBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def subscribe_batched( + self, + inputs_batched_stream_request: "InputsBatchedStreamRequest", + *, 
+ timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["InputsBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.studio.v1.InputsService/SubscribeBatched", + inputs_batched_stream_request, + InputsBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + class InputsConfigServiceStub(aristaproto.ServiceStub): async def get_one( @@ -3844,6 +4526,42 @@ async def delete_all( ): yield response + async def get_all_batched( + self, + inputs_config_batched_stream_request: "InputsConfigBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["InputsConfigBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.studio.v1.InputsConfigService/GetAllBatched", + inputs_config_batched_stream_request, + InputsConfigBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def subscribe_batched( + self, + inputs_config_batched_stream_request: "InputsConfigBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["InputsConfigBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.studio.v1.InputsConfigService/SubscribeBatched", + inputs_config_batched_stream_request, + InputsConfigBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + class SecretInputServiceStub(aristaproto.ServiceStub): async def get_one( @@ -3952,6 +4670,42 @@ async def subscribe_meta( ): yield response + async def get_all_batched( + self, + secret_input_batched_stream_request: "SecretInputBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: 
Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["SecretInputBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.studio.v1.SecretInputService/GetAllBatched", + secret_input_batched_stream_request, + SecretInputBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def subscribe_batched( + self, + secret_input_batched_stream_request: "SecretInputBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["SecretInputBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.studio.v1.SecretInputService/SubscribeBatched", + secret_input_batched_stream_request, + SecretInputBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + class StudioServiceStub(aristaproto.ServiceStub): async def get_one( @@ -4060,6 +4814,42 @@ async def subscribe_meta( ): yield response + async def get_all_batched( + self, + studio_batched_stream_request: "StudioBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["StudioBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.studio.v1.StudioService/GetAllBatched", + studio_batched_stream_request, + StudioBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def subscribe_batched( + self, + studio_batched_stream_request: "StudioBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["StudioBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.studio.v1.StudioService/SubscribeBatched", + 
studio_batched_stream_request, + StudioBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + class StudioConfigServiceStub(aristaproto.ServiceStub): async def get_one( @@ -4256,6 +5046,42 @@ async def delete_all( ): yield response + async def get_all_batched( + self, + studio_config_batched_stream_request: "StudioConfigBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["StudioConfigBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.studio.v1.StudioConfigService/GetAllBatched", + studio_config_batched_stream_request, + StudioConfigBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def subscribe_batched( + self, + studio_config_batched_stream_request: "StudioConfigBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["StudioConfigBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.studio.v1.StudioConfigService/SubscribeBatched", + studio_config_batched_stream_request, + StudioConfigBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + class StudioSummaryServiceStub(aristaproto.ServiceStub): async def get_one( @@ -4344,20 +5170,56 @@ async def get_meta( timeout=timeout, deadline=deadline, metadata=metadata, - ) + ) + + async def subscribe_meta( + self, + studio_summary_stream_request: "StudioSummaryStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["MetaResponse"]: + async for response in self._unary_stream( + "/arista.studio.v1.StudioSummaryService/SubscribeMeta", + studio_summary_stream_request, + 
MetaResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response + + async def get_all_batched( + self, + studio_summary_batched_stream_request: "StudioSummaryBatchedStreamRequest", + *, + timeout: Optional[float] = None, + deadline: Optional["Deadline"] = None, + metadata: Optional["MetadataLike"] = None + ) -> AsyncIterator["StudioSummaryBatchedStreamResponse"]: + async for response in self._unary_stream( + "/arista.studio.v1.StudioSummaryService/GetAllBatched", + studio_summary_batched_stream_request, + StudioSummaryBatchedStreamResponse, + timeout=timeout, + deadline=deadline, + metadata=metadata, + ): + yield response - async def subscribe_meta( + async def subscribe_batched( self, - studio_summary_stream_request: "StudioSummaryStreamRequest", + studio_summary_batched_stream_request: "StudioSummaryBatchedStreamRequest", *, timeout: Optional[float] = None, deadline: Optional["Deadline"] = None, metadata: Optional["MetadataLike"] = None - ) -> AsyncIterator["MetaResponse"]: + ) -> AsyncIterator["StudioSummaryBatchedStreamResponse"]: async for response in self._unary_stream( - "/arista.studio.v1.StudioSummaryService/SubscribeMeta", - studio_summary_stream_request, - MetaResponse, + "/arista.studio.v1.StudioSummaryService/SubscribeBatched", + studio_summary_batched_stream_request, + StudioSummaryBatchedStreamResponse, timeout=timeout, deadline=deadline, metadata=metadata, @@ -4397,6 +5259,16 @@ async def subscribe_meta( ) -> AsyncIterator["MetaResponse"]: raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def get_all_batched( + self, assigned_tags_batched_stream_request: "AssignedTagsBatchedStreamRequest" + ) -> AsyncIterator["AssignedTagsBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def subscribe_batched( + self, assigned_tags_batched_stream_request: "AssignedTagsBatchedStreamRequest" + ) -> AsyncIterator["AssignedTagsBatchedStreamResponse"]: + raise 
grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def __rpc_get_one( self, stream: "grpclib.server.Stream[AssignedTagsRequest, AssignedTagsResponse]" ) -> None: @@ -4454,6 +5326,28 @@ async def __rpc_subscribe_meta( request, ) + async def __rpc_get_all_batched( + self, + stream: "grpclib.server.Stream[AssignedTagsBatchedStreamRequest, AssignedTagsBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.get_all_batched, + stream, + request, + ) + + async def __rpc_subscribe_batched( + self, + stream: "grpclib.server.Stream[AssignedTagsBatchedStreamRequest, AssignedTagsBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.subscribe_batched, + stream, + request, + ) + def __mapping__(self) -> Dict[str, grpclib.const.Handler]: return { "/arista.studio.v1.AssignedTagsService/GetOne": grpclib.const.Handler( @@ -4492,6 +5386,18 @@ def __mapping__(self) -> Dict[str, grpclib.const.Handler]: AssignedTagsStreamRequest, MetaResponse, ), + "/arista.studio.v1.AssignedTagsService/GetAllBatched": grpclib.const.Handler( + self.__rpc_get_all_batched, + grpclib.const.Cardinality.UNARY_STREAM, + AssignedTagsBatchedStreamRequest, + AssignedTagsBatchedStreamResponse, + ), + "/arista.studio.v1.AssignedTagsService/SubscribeBatched": grpclib.const.Handler( + self.__rpc_subscribe_batched, + grpclib.const.Cardinality.UNARY_STREAM, + AssignedTagsBatchedStreamRequest, + AssignedTagsBatchedStreamResponse, + ), } @@ -4554,6 +5460,18 @@ async def delete_all( ) -> AsyncIterator["AssignedTagsConfigDeleteAllResponse"]: raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def get_all_batched( + self, + assigned_tags_config_batched_stream_request: "AssignedTagsConfigBatchedStreamRequest", + ) -> AsyncIterator["AssignedTagsConfigBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def 
subscribe_batched( + self, + assigned_tags_config_batched_stream_request: "AssignedTagsConfigBatchedStreamRequest", + ) -> AsyncIterator["AssignedTagsConfigBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def __rpc_get_one( self, stream: "grpclib.server.Stream[AssignedTagsConfigRequest, AssignedTagsConfigResponse]", @@ -4663,6 +5581,28 @@ async def __rpc_delete_all( request, ) + async def __rpc_get_all_batched( + self, + stream: "grpclib.server.Stream[AssignedTagsConfigBatchedStreamRequest, AssignedTagsConfigBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.get_all_batched, + stream, + request, + ) + + async def __rpc_subscribe_batched( + self, + stream: "grpclib.server.Stream[AssignedTagsConfigBatchedStreamRequest, AssignedTagsConfigBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.subscribe_batched, + stream, + request, + ) + def __mapping__(self) -> Dict[str, grpclib.const.Handler]: return { "/arista.studio.v1.AssignedTagsConfigService/GetOne": grpclib.const.Handler( @@ -4731,6 +5671,18 @@ def __mapping__(self) -> Dict[str, grpclib.const.Handler]: AssignedTagsConfigDeleteAllRequest, AssignedTagsConfigDeleteAllResponse, ), + "/arista.studio.v1.AssignedTagsConfigService/GetAllBatched": grpclib.const.Handler( + self.__rpc_get_all_batched, + grpclib.const.Cardinality.UNARY_STREAM, + AssignedTagsConfigBatchedStreamRequest, + AssignedTagsConfigBatchedStreamResponse, + ), + "/arista.studio.v1.AssignedTagsConfigService/SubscribeBatched": grpclib.const.Handler( + self.__rpc_subscribe_batched, + grpclib.const.Cardinality.UNARY_STREAM, + AssignedTagsConfigBatchedStreamRequest, + AssignedTagsConfigBatchedStreamResponse, + ), } @@ -4766,6 +5718,18 @@ async def subscribe_meta( ) -> AsyncIterator["MetaResponse"]: raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) 
+ async def get_all_batched( + self, + autofill_action_batched_stream_request: "AutofillActionBatchedStreamRequest", + ) -> AsyncIterator["AutofillActionBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def subscribe_batched( + self, + autofill_action_batched_stream_request: "AutofillActionBatchedStreamRequest", + ) -> AsyncIterator["AutofillActionBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def __rpc_get_one( self, stream: "grpclib.server.Stream[AutofillActionRequest, AutofillActionResponse]", @@ -4824,6 +5788,28 @@ async def __rpc_subscribe_meta( request, ) + async def __rpc_get_all_batched( + self, + stream: "grpclib.server.Stream[AutofillActionBatchedStreamRequest, AutofillActionBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.get_all_batched, + stream, + request, + ) + + async def __rpc_subscribe_batched( + self, + stream: "grpclib.server.Stream[AutofillActionBatchedStreamRequest, AutofillActionBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.subscribe_batched, + stream, + request, + ) + def __mapping__(self) -> Dict[str, grpclib.const.Handler]: return { "/arista.studio.v1.AutofillActionService/GetOne": grpclib.const.Handler( @@ -4862,6 +5848,18 @@ def __mapping__(self) -> Dict[str, grpclib.const.Handler]: AutofillActionStreamRequest, MetaResponse, ), + "/arista.studio.v1.AutofillActionService/GetAllBatched": grpclib.const.Handler( + self.__rpc_get_all_batched, + grpclib.const.Cardinality.UNARY_STREAM, + AutofillActionBatchedStreamRequest, + AutofillActionBatchedStreamResponse, + ), + "/arista.studio.v1.AutofillActionService/SubscribeBatched": grpclib.const.Handler( + self.__rpc_subscribe_batched, + grpclib.const.Cardinality.UNARY_STREAM, + AutofillActionBatchedStreamRequest, + 
AutofillActionBatchedStreamResponse, + ), } @@ -4925,6 +5923,18 @@ async def delete_all( ) -> AsyncIterator["AutofillActionConfigDeleteAllResponse"]: raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def get_all_batched( + self, + autofill_action_config_batched_stream_request: "AutofillActionConfigBatchedStreamRequest", + ) -> AsyncIterator["AutofillActionConfigBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def subscribe_batched( + self, + autofill_action_config_batched_stream_request: "AutofillActionConfigBatchedStreamRequest", + ) -> AsyncIterator["AutofillActionConfigBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def __rpc_get_one( self, stream: "grpclib.server.Stream[AutofillActionConfigRequest, AutofillActionConfigResponse]", @@ -5034,6 +6044,28 @@ async def __rpc_delete_all( request, ) + async def __rpc_get_all_batched( + self, + stream: "grpclib.server.Stream[AutofillActionConfigBatchedStreamRequest, AutofillActionConfigBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.get_all_batched, + stream, + request, + ) + + async def __rpc_subscribe_batched( + self, + stream: "grpclib.server.Stream[AutofillActionConfigBatchedStreamRequest, AutofillActionConfigBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.subscribe_batched, + stream, + request, + ) + def __mapping__(self) -> Dict[str, grpclib.const.Handler]: return { "/arista.studio.v1.AutofillActionConfigService/GetOne": grpclib.const.Handler( @@ -5102,6 +6134,18 @@ def __mapping__(self) -> Dict[str, grpclib.const.Handler]: AutofillActionConfigDeleteAllRequest, AutofillActionConfigDeleteAllResponse, ), + "/arista.studio.v1.AutofillActionConfigService/GetAllBatched": grpclib.const.Handler( + self.__rpc_get_all_batched, + 
grpclib.const.Cardinality.UNARY_STREAM, + AutofillActionConfigBatchedStreamRequest, + AutofillActionConfigBatchedStreamResponse, + ), + "/arista.studio.v1.AutofillActionConfigService/SubscribeBatched": grpclib.const.Handler( + self.__rpc_subscribe_batched, + grpclib.const.Cardinality.UNARY_STREAM, + AutofillActionConfigBatchedStreamRequest, + AutofillActionConfigBatchedStreamResponse, + ), } @@ -5135,6 +6179,16 @@ async def subscribe_meta( ) -> AsyncIterator["MetaResponse"]: raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def get_all_batched( + self, inputs_batched_stream_request: "InputsBatchedStreamRequest" + ) -> AsyncIterator["InputsBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def subscribe_batched( + self, inputs_batched_stream_request: "InputsBatchedStreamRequest" + ) -> AsyncIterator["InputsBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def __rpc_get_one( self, stream: "grpclib.server.Stream[InputsRequest, InputsResponse]" ) -> None: @@ -5189,6 +6243,28 @@ async def __rpc_subscribe_meta( request, ) + async def __rpc_get_all_batched( + self, + stream: "grpclib.server.Stream[InputsBatchedStreamRequest, InputsBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.get_all_batched, + stream, + request, + ) + + async def __rpc_subscribe_batched( + self, + stream: "grpclib.server.Stream[InputsBatchedStreamRequest, InputsBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.subscribe_batched, + stream, + request, + ) + def __mapping__(self) -> Dict[str, grpclib.const.Handler]: return { "/arista.studio.v1.InputsService/GetOne": grpclib.const.Handler( @@ -5227,6 +6303,18 @@ def __mapping__(self) -> Dict[str, grpclib.const.Handler]: InputsStreamRequest, MetaResponse, ), + 
"/arista.studio.v1.InputsService/GetAllBatched": grpclib.const.Handler( + self.__rpc_get_all_batched, + grpclib.const.Cardinality.UNARY_STREAM, + InputsBatchedStreamRequest, + InputsBatchedStreamResponse, + ), + "/arista.studio.v1.InputsService/SubscribeBatched": grpclib.const.Handler( + self.__rpc_subscribe_batched, + grpclib.const.Cardinality.UNARY_STREAM, + InputsBatchedStreamRequest, + InputsBatchedStreamResponse, + ), } @@ -5287,6 +6375,16 @@ async def delete_all( ) -> AsyncIterator["InputsConfigDeleteAllResponse"]: raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def get_all_batched( + self, inputs_config_batched_stream_request: "InputsConfigBatchedStreamRequest" + ) -> AsyncIterator["InputsConfigBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def subscribe_batched( + self, inputs_config_batched_stream_request: "InputsConfigBatchedStreamRequest" + ) -> AsyncIterator["InputsConfigBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def __rpc_get_one( self, stream: "grpclib.server.Stream[InputsConfigRequest, InputsConfigResponse]" ) -> None: @@ -5393,6 +6491,28 @@ async def __rpc_delete_all( request, ) + async def __rpc_get_all_batched( + self, + stream: "grpclib.server.Stream[InputsConfigBatchedStreamRequest, InputsConfigBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.get_all_batched, + stream, + request, + ) + + async def __rpc_subscribe_batched( + self, + stream: "grpclib.server.Stream[InputsConfigBatchedStreamRequest, InputsConfigBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.subscribe_batched, + stream, + request, + ) + def __mapping__(self) -> Dict[str, grpclib.const.Handler]: return { "/arista.studio.v1.InputsConfigService/GetOne": grpclib.const.Handler( @@ -5461,6 +6581,18 @@ def 
__mapping__(self) -> Dict[str, grpclib.const.Handler]: InputsConfigDeleteAllRequest, InputsConfigDeleteAllResponse, ), + "/arista.studio.v1.InputsConfigService/GetAllBatched": grpclib.const.Handler( + self.__rpc_get_all_batched, + grpclib.const.Cardinality.UNARY_STREAM, + InputsConfigBatchedStreamRequest, + InputsConfigBatchedStreamResponse, + ), + "/arista.studio.v1.InputsConfigService/SubscribeBatched": grpclib.const.Handler( + self.__rpc_subscribe_batched, + grpclib.const.Cardinality.UNARY_STREAM, + InputsConfigBatchedStreamRequest, + InputsConfigBatchedStreamResponse, + ), } @@ -5496,6 +6628,16 @@ async def subscribe_meta( ) -> AsyncIterator["MetaResponse"]: raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def get_all_batched( + self, secret_input_batched_stream_request: "SecretInputBatchedStreamRequest" + ) -> AsyncIterator["SecretInputBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def subscribe_batched( + self, secret_input_batched_stream_request: "SecretInputBatchedStreamRequest" + ) -> AsyncIterator["SecretInputBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def __rpc_get_one( self, stream: "grpclib.server.Stream[SecretInputRequest, SecretInputResponse]" ) -> None: @@ -5553,6 +6695,28 @@ async def __rpc_subscribe_meta( request, ) + async def __rpc_get_all_batched( + self, + stream: "grpclib.server.Stream[SecretInputBatchedStreamRequest, SecretInputBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.get_all_batched, + stream, + request, + ) + + async def __rpc_subscribe_batched( + self, + stream: "grpclib.server.Stream[SecretInputBatchedStreamRequest, SecretInputBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.subscribe_batched, + stream, + request, + ) + def __mapping__(self) -> 
Dict[str, grpclib.const.Handler]: return { "/arista.studio.v1.SecretInputService/GetOne": grpclib.const.Handler( @@ -5591,6 +6755,18 @@ def __mapping__(self) -> Dict[str, grpclib.const.Handler]: SecretInputStreamRequest, MetaResponse, ), + "/arista.studio.v1.SecretInputService/GetAllBatched": grpclib.const.Handler( + self.__rpc_get_all_batched, + grpclib.const.Cardinality.UNARY_STREAM, + SecretInputBatchedStreamRequest, + SecretInputBatchedStreamResponse, + ), + "/arista.studio.v1.SecretInputService/SubscribeBatched": grpclib.const.Handler( + self.__rpc_subscribe_batched, + grpclib.const.Cardinality.UNARY_STREAM, + SecretInputBatchedStreamRequest, + SecretInputBatchedStreamResponse, + ), } @@ -5624,6 +6800,16 @@ async def subscribe_meta( ) -> AsyncIterator["MetaResponse"]: raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def get_all_batched( + self, studio_batched_stream_request: "StudioBatchedStreamRequest" + ) -> AsyncIterator["StudioBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def subscribe_batched( + self, studio_batched_stream_request: "StudioBatchedStreamRequest" + ) -> AsyncIterator["StudioBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def __rpc_get_one( self, stream: "grpclib.server.Stream[StudioRequest, StudioResponse]" ) -> None: @@ -5678,6 +6864,28 @@ async def __rpc_subscribe_meta( request, ) + async def __rpc_get_all_batched( + self, + stream: "grpclib.server.Stream[StudioBatchedStreamRequest, StudioBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.get_all_batched, + stream, + request, + ) + + async def __rpc_subscribe_batched( + self, + stream: "grpclib.server.Stream[StudioBatchedStreamRequest, StudioBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.subscribe_batched, + 
stream, + request, + ) + def __mapping__(self) -> Dict[str, grpclib.const.Handler]: return { "/arista.studio.v1.StudioService/GetOne": grpclib.const.Handler( @@ -5716,6 +6924,18 @@ def __mapping__(self) -> Dict[str, grpclib.const.Handler]: StudioStreamRequest, MetaResponse, ), + "/arista.studio.v1.StudioService/GetAllBatched": grpclib.const.Handler( + self.__rpc_get_all_batched, + grpclib.const.Cardinality.UNARY_STREAM, + StudioBatchedStreamRequest, + StudioBatchedStreamResponse, + ), + "/arista.studio.v1.StudioService/SubscribeBatched": grpclib.const.Handler( + self.__rpc_subscribe_batched, + grpclib.const.Cardinality.UNARY_STREAM, + StudioBatchedStreamRequest, + StudioBatchedStreamResponse, + ), } @@ -5776,6 +6996,16 @@ async def delete_all( ) -> AsyncIterator["StudioConfigDeleteAllResponse"]: raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def get_all_batched( + self, studio_config_batched_stream_request: "StudioConfigBatchedStreamRequest" + ) -> AsyncIterator["StudioConfigBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def subscribe_batched( + self, studio_config_batched_stream_request: "StudioConfigBatchedStreamRequest" + ) -> AsyncIterator["StudioConfigBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def __rpc_get_one( self, stream: "grpclib.server.Stream[StudioConfigRequest, StudioConfigResponse]" ) -> None: @@ -5882,6 +7112,28 @@ async def __rpc_delete_all( request, ) + async def __rpc_get_all_batched( + self, + stream: "grpclib.server.Stream[StudioConfigBatchedStreamRequest, StudioConfigBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.get_all_batched, + stream, + request, + ) + + async def __rpc_subscribe_batched( + self, + stream: "grpclib.server.Stream[StudioConfigBatchedStreamRequest, StudioConfigBatchedStreamResponse]", + ) -> None: + request = await 
stream.recv_message() + await self._call_rpc_handler_server_stream( + self.subscribe_batched, + stream, + request, + ) + def __mapping__(self) -> Dict[str, grpclib.const.Handler]: return { "/arista.studio.v1.StudioConfigService/GetOne": grpclib.const.Handler( @@ -5950,6 +7202,18 @@ def __mapping__(self) -> Dict[str, grpclib.const.Handler]: StudioConfigDeleteAllRequest, StudioConfigDeleteAllResponse, ), + "/arista.studio.v1.StudioConfigService/GetAllBatched": grpclib.const.Handler( + self.__rpc_get_all_batched, + grpclib.const.Cardinality.UNARY_STREAM, + StudioConfigBatchedStreamRequest, + StudioConfigBatchedStreamResponse, + ), + "/arista.studio.v1.StudioConfigService/SubscribeBatched": grpclib.const.Handler( + self.__rpc_subscribe_batched, + grpclib.const.Cardinality.UNARY_STREAM, + StudioConfigBatchedStreamRequest, + StudioConfigBatchedStreamResponse, + ), } @@ -5985,6 +7249,16 @@ async def subscribe_meta( ) -> AsyncIterator["MetaResponse"]: raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def get_all_batched( + self, studio_summary_batched_stream_request: "StudioSummaryBatchedStreamRequest" + ) -> AsyncIterator["StudioSummaryBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def subscribe_batched( + self, studio_summary_batched_stream_request: "StudioSummaryBatchedStreamRequest" + ) -> AsyncIterator["StudioSummaryBatchedStreamResponse"]: + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + async def __rpc_get_one( self, stream: "grpclib.server.Stream[StudioSummaryRequest, StudioSummaryResponse]", @@ -6043,6 +7317,28 @@ async def __rpc_subscribe_meta( request, ) + async def __rpc_get_all_batched( + self, + stream: "grpclib.server.Stream[StudioSummaryBatchedStreamRequest, StudioSummaryBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.get_all_batched, + stream, + request, + ) + + async def 
__rpc_subscribe_batched( + self, + stream: "grpclib.server.Stream[StudioSummaryBatchedStreamRequest, StudioSummaryBatchedStreamResponse]", + ) -> None: + request = await stream.recv_message() + await self._call_rpc_handler_server_stream( + self.subscribe_batched, + stream, + request, + ) + def __mapping__(self) -> Dict[str, grpclib.const.Handler]: return { "/arista.studio.v1.StudioSummaryService/GetOne": grpclib.const.Handler( @@ -6081,4 +7377,16 @@ def __mapping__(self) -> Dict[str, grpclib.const.Handler]: StudioSummaryStreamRequest, MetaResponse, ), + "/arista.studio.v1.StudioSummaryService/GetAllBatched": grpclib.const.Handler( + self.__rpc_get_all_batched, + grpclib.const.Cardinality.UNARY_STREAM, + StudioSummaryBatchedStreamRequest, + StudioSummaryBatchedStreamResponse, + ), + "/arista.studio.v1.StudioSummaryService/SubscribeBatched": grpclib.const.Handler( + self.__rpc_subscribe_batched, + grpclib.const.Cardinality.UNARY_STREAM, + StudioSummaryBatchedStreamRequest, + StudioSummaryBatchedStreamResponse, + ), } diff --git a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/subscriptions/__init__.py b/python-avd/pyavd/_cv/api/arista/subscriptions/__init__.py similarity index 84% rename from ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/subscriptions/__init__.py rename to python-avd/pyavd/_cv/api/arista/subscriptions/__init__.py index c7e21fe8ff3..633e7bb9348 100644 --- a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/subscriptions/__init__.py +++ b/python-avd/pyavd/_cv/api/arista/subscriptions/__init__.py @@ -8,13 +8,7 @@ from dataclasses import dataclass -try: - import aristaproto -except ImportError: - HAS_ARISTAPROTO = False - from ansible_collections.arista.avd.plugins.plugin_utils.cv_client.mocked_classes import mocked_aristaproto as aristaproto -else: - HAS_ARISTAPROTO = True +import aristaproto class Operation(aristaproto.Enum): diff --git 
a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/tag/__init__.py b/python-avd/pyavd/_cv/api/arista/swg/__init__.py similarity index 100% rename from ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/tag/__init__.py rename to python-avd/pyavd/_cv/api/arista/swg/__init__.py diff --git a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/swg/v1/__init__.py b/python-avd/pyavd/_cv/api/arista/swg/v1/__init__.py similarity index 99% rename from ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/swg/v1/__init__.py rename to python-avd/pyavd/_cv/api/arista/swg/v1/__init__.py index 8f53adf49db..a75e66fd970 100644 --- a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/swg/v1/__init__.py +++ b/python-avd/pyavd/_cv/api/arista/swg/v1/__init__.py @@ -16,17 +16,9 @@ Optional, ) -try: - import aristaproto - import grpclib - from aristaproto.grpc.grpclib_server import ServiceBase -except ImportError: - HAS_ARISTAPROTO = False - from ansible_collections.arista.avd.plugins.plugin_utils.cv_client.mocked_classes import mocked_aristaproto as aristaproto - from ansible_collections.arista.avd.plugins.plugin_utils.cv_client.mocked_classes import mocked_grpclib as grpclib - ServiceBase = object -else: - HAS_ARISTAPROTO = True +import aristaproto +import grpclib +from aristaproto.grpc.grpclib_server import ServiceBase from .... import fmp as ___fmp__ from ... 
import ( diff --git a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/workspace/__init__.py b/python-avd/pyavd/_cv/api/arista/tag/__init__.py similarity index 100% rename from ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/workspace/__init__.py rename to python-avd/pyavd/_cv/api/arista/tag/__init__.py diff --git a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/tag/v2/__init__.py b/python-avd/pyavd/_cv/api/arista/tag/v2/__init__.py similarity index 99% rename from ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/tag/v2/__init__.py rename to python-avd/pyavd/_cv/api/arista/tag/v2/__init__.py index 764b7c8bf36..1193d76bc7c 100644 --- a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/tag/v2/__init__.py +++ b/python-avd/pyavd/_cv/api/arista/tag/v2/__init__.py @@ -16,17 +16,9 @@ Optional, ) -try: - import aristaproto - import grpclib - from aristaproto.grpc.grpclib_server import ServiceBase -except ImportError: - HAS_ARISTAPROTO = False - from ansible_collections.arista.avd.plugins.plugin_utils.cv_client.mocked_classes import mocked_aristaproto as aristaproto - from ansible_collections.arista.avd.plugins.plugin_utils.cv_client.mocked_classes import mocked_grpclib as grpclib - ServiceBase = object -else: - HAS_ARISTAPROTO = True +import aristaproto +import grpclib +from aristaproto.grpc.grpclib_server import ServiceBase from .... import fmp as ___fmp__ from ... 
import ( diff --git a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/time/__init__.py b/python-avd/pyavd/_cv/api/arista/time/__init__.py similarity index 69% rename from ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/time/__init__.py rename to python-avd/pyavd/_cv/api/arista/time/__init__.py index 67aed021668..ffd33402025 100644 --- a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/time/__init__.py +++ b/python-avd/pyavd/_cv/api/arista/time/__init__.py @@ -9,13 +9,7 @@ from dataclasses import dataclass from datetime import datetime -try: - import aristaproto -except ImportError: - HAS_ARISTAPROTO = False - from ansible_collections.arista.avd.plugins.plugin_utils.cv_client.mocked_classes import mocked_aristaproto as aristaproto -else: - HAS_ARISTAPROTO = True +import aristaproto @dataclass(eq=False, repr=False) diff --git a/python-avd/pyavd/_cv/api/arista/workspace/__init__.py b/python-avd/pyavd/_cv/api/arista/workspace/__init__.py new file mode 100644 index 00000000000..b17ca7c745d --- /dev/null +++ b/python-avd/pyavd/_cv/api/arista/workspace/__init__.py @@ -0,0 +1,3 @@ +# Copyright (c) 2024 Arista Networks, Inc. +# Use of this source code is governed by the Apache License 2.0 +# that can be found in the LICENSE file. 
diff --git a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/workspace/v1/__init__.py b/python-avd/pyavd/_cv/api/arista/workspace/v1/__init__.py similarity index 99% rename from ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/workspace/v1/__init__.py rename to python-avd/pyavd/_cv/api/arista/workspace/v1/__init__.py index 095a316e086..5cb57b2a4ed 100644 --- a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/arista/workspace/v1/__init__.py +++ b/python-avd/pyavd/_cv/api/arista/workspace/v1/__init__.py @@ -16,17 +16,9 @@ Optional, ) -try: - import aristaproto - import grpclib - from aristaproto.grpc.grpclib_server import ServiceBase -except ImportError: - HAS_ARISTAPROTO = False - from ansible_collections.arista.avd.plugins.plugin_utils.cv_client.mocked_classes import mocked_aristaproto as aristaproto - from ansible_collections.arista.avd.plugins.plugin_utils.cv_client.mocked_classes import mocked_grpclib as grpclib - ServiceBase = object -else: - HAS_ARISTAPROTO = True +import aristaproto +import grpclib +from aristaproto.grpc.grpclib_server import ServiceBase from .... import fmp as ___fmp__ from ... 
import ( diff --git a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/fmp/__init__.py b/python-avd/pyavd/_cv/api/fmp/__init__.py similarity index 98% rename from ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/fmp/__init__.py rename to python-avd/pyavd/_cv/api/fmp/__init__.py index ab25968c8bf..d4c15f5c4c4 100644 --- a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/api/fmp/__init__.py +++ b/python-avd/pyavd/_cv/api/fmp/__init__.py @@ -12,13 +12,7 @@ List, ) -try: - import aristaproto -except ImportError: - HAS_ARISTAPROTO = False - from ansible_collections.arista.avd.plugins.plugin_utils.cv_client.mocked_classes import mocked_aristaproto as aristaproto -else: - HAS_ARISTAPROTO = True +import aristaproto class SortDirection(aristaproto.Enum): diff --git a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/client/cv_client.py b/python-avd/pyavd/_cv/client/__init__.py similarity index 95% rename from ansible_collections/arista/avd/plugins/plugin_utils/cv_client/client/cv_client.py rename to python-avd/pyavd/_cv/client/__init__.py index 7e84eb841fb..eac44211c5a 100644 --- a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/client/cv_client.py +++ b/python-avd/pyavd/_cv/client/__init__.py @@ -5,19 +5,8 @@ import ssl -try: - from requests import JSONDecodeError, post -except ImportError: - HAS_REQUESTS = False -else: - HAS_REQUESTS = True - -try: - from grpclib.client import Channel -except ImportError: - HAS_GRPCLIB = False -else: - HAS_GRPCLIB = True +from grpclib.client import Channel +from requests import JSONDecodeError, post from .change_control import ChangeControlMixin from .configlet import ConfigletMixin diff --git a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/client/change_control.py b/python-avd/pyavd/_cv/client/change_control.py similarity index 99% rename from ansible_collections/arista/avd/plugins/plugin_utils/cv_client/client/change_control.py rename to 
python-avd/pyavd/_cv/client/change_control.py index fa67d497abd..2395afe4b01 100644 --- a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/client/change_control.py +++ b/python-avd/pyavd/_cv/client/change_control.py @@ -29,7 +29,7 @@ if TYPE_CHECKING: from aristaproto import _DateTime - from .cv_client import CVClient + from . import CVClient LOGGER = getLogger(__name__) diff --git a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/client/configlet.py b/python-avd/pyavd/_cv/client/configlet.py similarity index 99% rename from ansible_collections/arista/avd/plugins/plugin_utils/cv_client/client/configlet.py rename to python-avd/pyavd/_cv/client/configlet.py index 1328b8155b0..373c7d8063c 100644 --- a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/client/configlet.py +++ b/python-avd/pyavd/_cv/client/configlet.py @@ -31,7 +31,7 @@ from .exceptions import get_cv_client_exception if TYPE_CHECKING: - from .cv_client import CVClient + from . import CVClient ASSIGNMENT_MATCH_POLICY_MAP = { diff --git a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/client/constants.py b/python-avd/pyavd/_cv/client/constants.py similarity index 100% rename from ansible_collections/arista/avd/plugins/plugin_utils/cv_client/client/constants.py rename to python-avd/pyavd/_cv/client/constants.py diff --git a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/client/exceptions.py b/python-avd/pyavd/_cv/client/exceptions.py similarity index 87% rename from ansible_collections/arista/avd/plugins/plugin_utils/cv_client/client/exceptions.py rename to python-avd/pyavd/_cv/client/exceptions.py index 7f14869ba5e..a1f02d86ac4 100644 --- a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/client/exceptions.py +++ b/python-avd/pyavd/_cv/client/exceptions.py @@ -5,13 +5,8 @@ from asyncio.exceptions import TimeoutError -try: - from grpclib.const import Status - from grpclib.exceptions import GRPCError -except ImportError: - 
HAS_GRPCLIB = False -else: - HAS_GRPCLIB = True +from grpclib.const import Status +from grpclib.exceptions import GRPCError def get_cv_client_exception(exception: Exception, cv_client_details: str | None = None) -> Exception | None: @@ -24,11 +19,8 @@ def get_cv_client_exception(exception: Exception, cv_client_details: str | None Returns: None if If the exception is unmatched, otherwise an instance of the relevant CVClientException subclass. """ - if not HAS_GRPCLIB: - raise RuntimeError("Missing Python library 'grpclib'") - if isinstance(exception, GRPCError): - status, message, details = exception.args + status = exception.args[0] if status == Status.NOT_FOUND: return CVResourceNotFound(cv_client_details, *exception.args) if status == Status.CANCELLED: diff --git a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/client/inventory.py b/python-avd/pyavd/_cv/client/inventory.py similarity index 98% rename from ansible_collections/arista/avd/plugins/plugin_utils/cv_client/client/inventory.py rename to python-avd/pyavd/_cv/client/inventory.py index 6bb4c8fe4cc..ee543278f20 100644 --- a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/client/inventory.py +++ b/python-avd/pyavd/_cv/client/inventory.py @@ -11,7 +11,7 @@ from .exceptions import get_cv_client_exception if TYPE_CHECKING: - from .cv_client import CVClient + from . 
import CVClient class InventoryMixin: diff --git a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/client/studio.py b/python-avd/pyavd/_cv/client/studio.py similarity index 98% rename from ansible_collections/arista/avd/plugins/plugin_utils/cv_client/client/studio.py rename to python-avd/pyavd/_cv/client/studio.py index d9fd175b692..d2c197ff294 100644 --- a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/client/studio.py +++ b/python-avd/pyavd/_cv/client/studio.py @@ -32,7 +32,7 @@ from .exceptions import CVResourceNotFound, get_cv_client_exception if TYPE_CHECKING: - from .cv_client import CVClient + from . import CVClient LOGGER = getLogger(__name__) @@ -78,7 +78,7 @@ async def get_studio( try: response = await client.get_one(request, metadata=self._metadata, timeout=timeout) return response.value - except Exception as e: + except Exception as e: # pylint: disable=broad-exception-caught e = get_cv_client_exception(e, f"Studio ID '{studio_id}, Workspace ID '{workspace_id}'") or e if isinstance(e, CVResourceNotFound): # Continue execution if we did not find any state in the workspace. @@ -270,10 +270,9 @@ async def get_studio_inputs_with_path( # We only get a response if the inputs are set/changed in the workspace. if response.value.inputs is not None: return json.loads(response.value.inputs) - else: - return default_value + return default_value - except Exception as e: + except Exception as e: # pylint: disable=broad-exception-caught e = get_cv_client_exception(e, f"Studio ID '{studio_id}, Workspace ID '{workspace_id}', Path '{input_path}'") or e if isinstance(e, CVResourceNotFound) and workspace_id != "": # Ignore this error, since it simply means we have to check if inputs got deleted in this workspace or fetch from mainline as last resort. 
@@ -319,15 +318,13 @@ async def get_studio_inputs_with_path( response = await client.get_one(request, metadata=self._metadata, timeout=timeout) if response.value.inputs is not None: return json.loads(response.value.inputs) - else: - return default_value - except Exception as e: + return default_value + except Exception as e: # pylint: disable=broad-exception-caught e = get_cv_client_exception(e, f"Studio ID '{studio_id}, Workspace ID '{workspace_id}', Path '{input_path}'") or e if isinstance(e, CVResourceNotFound): # Ignore this error, since it simply means we no inputs are in the studio so we will return the default value. return default_value - else: - raise e + raise e async def set_studio_inputs( self: CVClient, diff --git a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/client/swg.py b/python-avd/pyavd/_cv/client/swg.py similarity index 99% rename from ansible_collections/arista/avd/plugins/plugin_utils/cv_client/client/swg.py rename to python-avd/pyavd/_cv/client/swg.py index 37242cac1dc..fb37d457ba2 100644 --- a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/client/swg.py +++ b/python-avd/pyavd/_cv/client/swg.py @@ -21,7 +21,7 @@ from .exceptions import get_cv_client_exception if TYPE_CHECKING: - from .cv_client import CVClient + from . import CVClient LOGGER = getLogger(__name__) diff --git a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/client/tag.py b/python-avd/pyavd/_cv/client/tag.py similarity index 99% rename from ansible_collections/arista/avd/plugins/plugin_utils/cv_client/client/tag.py rename to python-avd/pyavd/_cv/client/tag.py index 32ffd5fa047..1332c672772 100644 --- a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/client/tag.py +++ b/python-avd/pyavd/_cv/client/tag.py @@ -30,7 +30,7 @@ from .exceptions import get_cv_client_exception if TYPE_CHECKING: - from .cv_client import CVClient + from . 
import CVClient ELEMENT_TYPE_MAP = { "device": ElementType.DEVICE, diff --git a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/client/utils.py b/python-avd/pyavd/_cv/client/utils.py similarity index 93% rename from ansible_collections/arista/avd/plugins/plugin_utils/cv_client/client/utils.py rename to python-avd/pyavd/_cv/client/utils.py index f6db1631ffa..9c0bcf1b7ee 100644 --- a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/client/utils.py +++ b/python-avd/pyavd/_cv/client/utils.py @@ -6,7 +6,7 @@ from typing import TYPE_CHECKING, Any, Callable if TYPE_CHECKING: - from .cv_client import CVClient + from . import CVClient class UtilsMixin: @@ -24,8 +24,8 @@ def _remove_item_from_list(itm, lst: list, matcher: Callable) -> None: Ignore if we are told to remove an item that is not present. This happens if you add a tag in a workspace and then remove it again. """ - for index in range(len(lst)): - if matcher(lst[index], itm): + for index, item in enumerate(lst): + if matcher(item, itm): lst.pop(index) return @@ -36,8 +36,8 @@ def _upsert_item_in_list(itm, lst: list, matcher: Callable) -> None: Used for Tags and TagAssignments. """ - for index in range(len(lst)): - if matcher(lst[index], itm): + for index, item in enumerate(lst): + if matcher(item, itm): lst[index] = itm return @@ -59,8 +59,7 @@ def _set_value_from_path(self: CVClient, path: list[str], data: list | dict, val if isinstance(value, dict) and isinstance(data, dict): data.update(value) return - else: - raise RuntimeError(f"Path '{path}', value type '{type(value)}' cannot be set on data type '{type(data)}'") + raise RuntimeError(f"Path '{path}', value type '{type(value)}' cannot be set on data type '{type(data)}'") # Convert '0' to 0. 
path = [int(element) if str(element).isnumeric() else element for element in path] if len(path) == 1: diff --git a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/client/workspace.py b/python-avd/pyavd/_cv/client/workspace.py similarity index 99% rename from ansible_collections/arista/avd/plugins/plugin_utils/cv_client/client/workspace.py rename to python-avd/pyavd/_cv/client/workspace.py index 92190d3f2d1..d14341d1898 100644 --- a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/client/workspace.py +++ b/python-avd/pyavd/_cv/client/workspace.py @@ -25,7 +25,7 @@ from .exceptions import get_cv_client_exception if TYPE_CHECKING: - from .cv_client import CVClient + from . import CVClient LOGGER = getLogger(__name__) diff --git a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/extra_cv_protos/arista/swg.v1/services.gen.proto b/python-avd/pyavd/_cv/extra_cv_protos/arista/swg.v1/services.gen.proto similarity index 100% rename from ansible_collections/arista/avd/plugins/plugin_utils/cv_client/extra_cv_protos/arista/swg.v1/services.gen.proto rename to python-avd/pyavd/_cv/extra_cv_protos/arista/swg.v1/services.gen.proto diff --git a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/extra_cv_protos/arista/swg.v1/swg.proto b/python-avd/pyavd/_cv/extra_cv_protos/arista/swg.v1/swg.proto similarity index 100% rename from ansible_collections/arista/avd/plugins/plugin_utils/cv_client/extra_cv_protos/arista/swg.v1/swg.proto rename to python-avd/pyavd/_cv/extra_cv_protos/arista/swg.v1/swg.proto diff --git a/python-avd/pyavd/_cv/workflows/__init__.py b/python-avd/pyavd/_cv/workflows/__init__.py new file mode 100644 index 00000000000..b17ca7c745d --- /dev/null +++ b/python-avd/pyavd/_cv/workflows/__init__.py @@ -0,0 +1,3 @@ +# Copyright (c) 2024 Arista Networks, Inc. +# Use of this source code is governed by the Apache License 2.0 +# that can be found in the LICENSE file. 
diff --git a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/workflows/create_workspace_on_cv.py b/python-avd/pyavd/_cv/workflows/create_workspace_on_cv.py similarity index 100% rename from ansible_collections/arista/avd/plugins/plugin_utils/cv_client/workflows/create_workspace_on_cv.py rename to python-avd/pyavd/_cv/workflows/create_workspace_on_cv.py diff --git a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/workflows/deploy_configs_to_cv.py b/python-avd/pyavd/_cv/workflows/deploy_configs_to_cv.py similarity index 100% rename from ansible_collections/arista/avd/plugins/plugin_utils/cv_client/workflows/deploy_configs_to_cv.py rename to python-avd/pyavd/_cv/workflows/deploy_configs_to_cv.py diff --git a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/workflows/deploy_cv_pathfinder_metadata_to_cv.py b/python-avd/pyavd/_cv/workflows/deploy_cv_pathfinder_metadata_to_cv.py similarity index 100% rename from ansible_collections/arista/avd/plugins/plugin_utils/cv_client/workflows/deploy_cv_pathfinder_metadata_to_cv.py rename to python-avd/pyavd/_cv/workflows/deploy_cv_pathfinder_metadata_to_cv.py diff --git a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/workflows/deploy_studio_inputs_to_cv.py b/python-avd/pyavd/_cv/workflows/deploy_studio_inputs_to_cv.py similarity index 100% rename from ansible_collections/arista/avd/plugins/plugin_utils/cv_client/workflows/deploy_studio_inputs_to_cv.py rename to python-avd/pyavd/_cv/workflows/deploy_studio_inputs_to_cv.py diff --git a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/workflows/deploy_tags_to_cv.py b/python-avd/pyavd/_cv/workflows/deploy_tags_to_cv.py similarity index 100% rename from ansible_collections/arista/avd/plugins/plugin_utils/cv_client/workflows/deploy_tags_to_cv.py rename to python-avd/pyavd/_cv/workflows/deploy_tags_to_cv.py diff --git a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/workflows/deploy_to_cv.py 
b/python-avd/pyavd/_cv/workflows/deploy_to_cv.py similarity index 99% rename from ansible_collections/arista/avd/plugins/plugin_utils/cv_client/workflows/deploy_to_cv.py rename to python-avd/pyavd/_cv/workflows/deploy_to_cv.py index dec82153d4d..bb2c7d9f904 100644 --- a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/workflows/deploy_to_cv.py +++ b/python-avd/pyavd/_cv/workflows/deploy_to_cv.py @@ -42,7 +42,7 @@ async def deploy_to_cv( cv_pathfinder_metadata: list[CVPathfinderMetadata] | None = None, skip_missing_devices: bool = False, strict_tags: bool = True, - timeouts: CVTimeOuts | None = None, + timeouts: CVTimeOuts | None = None, # pylint: disable=unused-argument ) -> DeployToCvResult: """ Deploy various objects to CloudVision. diff --git a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/workflows/finalize_change_control_on_cv.py b/python-avd/pyavd/_cv/workflows/finalize_change_control_on_cv.py similarity index 100% rename from ansible_collections/arista/avd/plugins/plugin_utils/cv_client/workflows/finalize_change_control_on_cv.py rename to python-avd/pyavd/_cv/workflows/finalize_change_control_on_cv.py diff --git a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/workflows/finalize_workspace_on_cv.py b/python-avd/pyavd/_cv/workflows/finalize_workspace_on_cv.py similarity index 97% rename from ansible_collections/arista/avd/plugins/plugin_utils/cv_client/workflows/finalize_workspace_on_cv.py rename to python-avd/pyavd/_cv/workflows/finalize_workspace_on_cv.py index 6ba4e5ef3dd..d9ed0a3fa86 100644 --- a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/workflows/finalize_workspace_on_cv.py +++ b/python-avd/pyavd/_cv/workflows/finalize_workspace_on_cv.py @@ -31,7 +31,7 @@ async def finalize_workspace_on_cv(workspace: CVWorkspace, cv_client: CVClient) LOGGER.info("finalize_workspace_on_cv: %s", workspace) - if workspace.requested_state == workspace.state or workspace.requested_state == "pending": + if 
workspace.requested_state in (workspace.state, "pending"): return workspace_config = await cv_client.build_workspace(workspace_id=workspace.id) diff --git a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/workflows/models.py b/python-avd/pyavd/_cv/workflows/models.py similarity index 100% rename from ansible_collections/arista/avd/plugins/plugin_utils/cv_client/workflows/models.py rename to python-avd/pyavd/_cv/workflows/models.py diff --git a/ansible_collections/arista/avd/plugins/plugin_utils/cv_client/workflows/verify_devices_on_cv.py b/python-avd/pyavd/_cv/workflows/verify_devices_on_cv.py similarity index 100% rename from ansible_collections/arista/avd/plugins/plugin_utils/cv_client/workflows/verify_devices_on_cv.py rename to python-avd/pyavd/_cv/workflows/verify_devices_on_cv.py diff --git a/python-avd/pyavd/_eos_designs/structured_config/network_services/utils_zscaler.py b/python-avd/pyavd/_eos_designs/structured_config/network_services/utils_zscaler.py index 63a34d85874..df9eb36a3be 100644 --- a/python-avd/pyavd/_eos_designs/structured_config/network_services/utils_zscaler.py +++ b/python-avd/pyavd/_eos_designs/structured_config/network_services/utils_zscaler.py @@ -8,14 +8,14 @@ from logging import getLogger from typing import TYPE_CHECKING -from ....vendor.cv_client.client import CVClient -from ....vendor.cv_client.workflows.models import CVDevice -from ....vendor.cv_client.workflows.verify_devices_on_cv import verify_devices_in_cloudvision_inventory +from ...._cv.client import CVClient +from ...._cv.workflows.models import CVDevice +from ...._cv.workflows.verify_devices_on_cv import verify_devices_in_cloudvision_inventory from ....vendor.errors import AristaAvdError from ....vendor.utils import get if TYPE_CHECKING: - from ....vendor.cv_client.api.arista.swg.v1 import Location, VpnEndpoint + from ...._cv.api.arista.swg.v1 import Location, VpnEndpoint from . 
import AvdStructuredConfigNetworkServices LOGGER = getLogger(__name__) From 485ed57145d2730b34a71b49f113356417b54bb0 Mon Sep 17 00:00:00 2001 From: Claus Holbech Date: Thu, 20 Jun 2024 13:02:58 +0200 Subject: [PATCH 02/11] Cleanup after rebase --- .../arista/avd/plugins/filter/decrypt.py | 11 ----------- python-avd/Makefile | 1 - .../workflows/deploy_cv_pathfinder_metadata_to_cv.py | 5 ++--- 3 files changed, 2 insertions(+), 15 deletions(-) diff --git a/ansible_collections/arista/avd/plugins/filter/decrypt.py b/ansible_collections/arista/avd/plugins/filter/decrypt.py index 1f318a40fe8..3179eebd5fa 100644 --- a/ansible_collections/arista/avd/plugins/filter/decrypt.py +++ b/ansible_collections/arista/avd/plugins/filter/decrypt.py @@ -16,17 +16,6 @@ orig_exc=e, ) ) -# TODO: Remove the below import once cv_client plugin utils have been moved to pyavd -try: - from pyavd._utils.password_utils.password import simple_7_decrypt # noqa: F401; pylint: disable=unused-import -except ImportError as e: - simple_7_decrypt = RaiseOnUse( - AnsibleFilterError( - "The 'simple_7_decrypt' plugin utils requires the 'pyavd' Python library. 
Got import error", - orig_exc=e, - ) - ) - DOCUMENTATION = r""" --- diff --git a/python-avd/Makefile b/python-avd/Makefile index 0086bb3848c..58a99e9a837 100644 --- a/python-avd/Makefile +++ b/python-avd/Makefile @@ -85,7 +85,6 @@ fix-libs: ## Fix/remove various Ansible specifics things from python files find $(PACKAGE_DIR) -name '*.py' -exec sed -i -e 's/ansible_collections\.arista\.avd\.plugins\.filter.add_md_toc/$(PYAVD_FILTER_IMPORT)\.add_md_toc/g' {} + find $(PACKAGE_DIR) -name '*.py' -exec sed -i -e 's/ansible_collections\.arista\.avd\.plugins\.filter.convert_dicts/$(PYAVD_FILTER_IMPORT)\.convert_dicts/g' {} + find $(PACKAGE_DIR) -name '*.py' -exec sed -i -e 's/ansible_collections\.arista\.avd\.plugins\.filter.default/$(PYAVD_FILTER_IMPORT)\.default/g' {} + - find $(PACKAGE_DIR) -name '*.py' -exec sed -i -e 's/ansible_collections\.arista\.avd\.plugins\.filter.decrypt import simple_7_decrypt/pyavd._utils.password_utils.password import simple_7_decrypt/g' {} + find $(PACKAGE_DIR) -name '*.py' -exec sed -i -e 's/ansible_collections\.arista\.avd\.plugins\.filter.decrypt/$(PYAVD_FILTER_IMPORT)\.decrypt/g' {} + find $(PACKAGE_DIR) -name '*.py' -exec sed -i -e 's/ansible_collections\.arista\.avd\.plugins\.filter.encrypt/$(PYAVD_FILTER_IMPORT)\.encrypt/g' {} + find $(PACKAGE_DIR) -name '*.py' -exec sed -i -e 's/ansible_collections\.arista\.avd\.plugins\.filter.is_in_filter/$(PYAVD_FILTER_IMPORT)\.is_in_filter/g' {} + diff --git a/python-avd/pyavd/_cv/workflows/deploy_cv_pathfinder_metadata_to_cv.py b/python-avd/pyavd/_cv/workflows/deploy_cv_pathfinder_metadata_to_cv.py index a9cd0ece7c1..8d98d1437c5 100644 --- a/python-avd/pyavd/_cv/workflows/deploy_cv_pathfinder_metadata_to_cv.py +++ b/python-avd/pyavd/_cv/workflows/deploy_cv_pathfinder_metadata_to_cv.py @@ -6,9 +6,8 @@ from copy import deepcopy from logging import getLogger -from ansible_collections.arista.avd.plugins.filter.decrypt import simple_7_decrypt - -from ...utils.get import get, get_v2 +from 
..._utils.password_utils.password import simple_7_decrypt +from ...vendor.utils.get import get, get_v2 from ..api.arista.studio.v1 import InputSchema from ..client import CVClient from ..client.exceptions import CVResourceNotFound From e27a70a1f12daadf4521b5c84b79f285ea9f853f Mon Sep 17 00:00:00 2001 From: Claus Holbech Date: Thu, 20 Jun 2024 13:18:12 +0200 Subject: [PATCH 03/11] fix pylint --- python-avd/pyavd/_cv/client/swg.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/python-avd/pyavd/_cv/client/swg.py b/python-avd/pyavd/_cv/client/swg.py index fb37d457ba2..b9568b2b2b6 100644 --- a/python-avd/pyavd/_cv/client/swg.py +++ b/python-avd/pyavd/_cv/client/swg.py @@ -94,7 +94,7 @@ async def wait_for_swg_endpoint_status( EndpointStatus object matching the device_id """ if start_time is None: - start_time = datetime().now() + start_time = datetime.now() request = EndpointStatusStreamRequest( partial_eq_filter=[ EndpointStatus( From 61f67928d7a9db98707823362e80ea3103227021 Mon Sep 17 00:00:00 2001 From: Claus Holbech Date: Thu, 20 Jun 2024 13:31:32 +0200 Subject: [PATCH 04/11] fix tox --- python-avd/Makefile | 4 ++-- python-avd/pyproject.toml | 1 + python-avd/requirements.txt | 3 ++- 3 files changed, 5 insertions(+), 3 deletions(-) diff --git a/python-avd/Makefile b/python-avd/Makefile index 58a99e9a837..fbb99f5c3d2 100644 --- a/python-avd/Makefile +++ b/python-avd/Makefile @@ -118,7 +118,7 @@ copy-test-data: ## Copy inventory files from AVD collection Molecule scenario. ## Excluding prompt because the unsafe marker is lost during the export of hostvars. 
## Excluding custom-templates since there is no jinja support in pyavd - ANSIBLE_COLLECTIONS_PATHS="..:/usr/share/ansible/collections" ansible-playbook \ + ANSIBLE_COLLECTIONS_PATH="..:/usr/share/ansible/collections" ansible-playbook \ --limit 'all:!prompt:!custom-templates' \ -i $(ANSIBLE_AVD_DIR)/ansible_collections/arista/avd/molecule/eos_cli_config_gen/inventory/hosts.ini \ $(SCRIPTS_DIR)/export_test_vars.yml \ @@ -133,7 +133,7 @@ copy-test-data: ## Copy inventory files from AVD collection Molecule scenario. ## EOS_DESIGNS - ANSIBLE_COLLECTIONS_PATHS="..:/usr/share/ansible/collections" ansible-playbook \ + ANSIBLE_COLLECTIONS_PATH="..:/usr/share/ansible/collections" ansible-playbook \ -i $(ANSIBLE_AVD_DIR)/ansible_collections/arista/avd/molecule/eos_designs_unit_tests/inventory/hosts.yml $(SCRIPTS_DIR)/export_test_vars.yml \ -e testdir=eos_designs_unit_tests \ -f 10 diff --git a/python-avd/pyproject.toml b/python-avd/pyproject.toml index 150bbf3bd49..0ff94086b94 100644 --- a/python-avd/pyproject.toml +++ b/python-avd/pyproject.toml @@ -37,6 +37,7 @@ requires = [ "pydantic>=2.3.0", "eval-type-backport>=0.2.0", # Only needed for 3.9 "cryptography>=38.0.4", + "aristaproto>=0.1.1", ] build-backend = "custom_build_backend" backend-path = ["scripts"] diff --git a/python-avd/requirements.txt b/python-avd/requirements.txt index 7984b349d43..5a8bcb3d781 100644 --- a/python-avd/requirements.txt +++ b/python-avd/requirements.txt @@ -1,4 +1,5 @@ -deepmerge>=1.1.0 +aristaproto>=0.1.1 cryptography>=38.0.4 +deepmerge>=1.1.0 jinja2>=3.0 jsonschema>=4.10.3 From 3682748ccfc6a434d0c70c44c27d950c78f20c7d Mon Sep 17 00:00:00 2001 From: Claus Holbech Date: Thu, 20 Jun 2024 13:34:28 +0200 Subject: [PATCH 05/11] fix ansible sanity test --- ansible_collections/arista/avd/plugins/action/cv_workflow.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/ansible_collections/arista/avd/plugins/action/cv_workflow.py b/ansible_collections/arista/avd/plugins/action/cv_workflow.py index 
7dcd1ea286f..2f2e6ca25a5 100644 --- a/ansible_collections/arista/avd/plugins/action/cv_workflow.py +++ b/ansible_collections/arista/avd/plugins/action/cv_workflow.py @@ -38,6 +38,9 @@ HAS_PYAVD = True except ImportError: HAS_PYAVD = False + import typing + + CVTimeOuts = typing.Any LOGGER = logging.getLogger("ansible_collections.arista.avd") LOGGING_LEVELS = ["DEBUG", "INFO", "ERROR", "WARNING", "CRITICAL"] From f9adb3a073f9577da0a050ae4ad26470b8123bbd Mon Sep 17 00:00:00 2001 From: Claus Holbech Date: Thu, 20 Jun 2024 13:39:09 +0200 Subject: [PATCH 06/11] fix ansible sanity test --- .../arista/avd/plugins/action/cv_workflow.py | 73 +++++++++---------- 1 file changed, 36 insertions(+), 37 deletions(-) diff --git a/ansible_collections/arista/avd/plugins/action/cv_workflow.py b/ansible_collections/arista/avd/plugins/action/cv_workflow.py index 2f2e6ca25a5..70094ac3688 100644 --- a/ansible_collections/arista/avd/plugins/action/cv_workflow.py +++ b/ansible_collections/arista/avd/plugins/action/cv_workflow.py @@ -38,51 +38,50 @@ HAS_PYAVD = True except ImportError: HAS_PYAVD = False - import typing - CVTimeOuts = typing.Any LOGGER = logging.getLogger("ansible_collections.arista.avd") LOGGING_LEVELS = ["DEBUG", "INFO", "ERROR", "WARNING", "CRITICAL"] -ARGUMENT_SPEC = { - "configuration_dir": {"type": "str", "required": True}, - "structured_config_dir": {"type": "str", "required": True}, - "structured_config_suffix": {"type": "str", "default": "yml"}, - "device_list": {"type": "list", "elements": "str", "required": True}, - "strict_tags": {"type": "bool", "required": False, "default": False}, - "skip_missing_devices": {"type": "bool", "required": False, "default": False}, - "configlet_name_template": {"type": "str", "default": "AVD-${hostname}"}, - "cv_servers": {"type": "list", "elements": "str", "required": True}, - "cv_token": {"type": "str", "secret": True, "required": True}, - "cv_verify_certs": {"type": "bool", "default": True}, - "workspace": { - "type": "dict", - 
"options": { - "name": {"type": "str", "required": False}, - "description": {"type": "str", "required": False}, - "id": {"type": "str", "required": False}, - "requested_state": {"type": "str", "default": "built", "choices": ["pending", "built", "submitted", "abandoned", "deleted"]}, - "force": {"type": "bool", "default": False}, +if HAS_PYAVD: + ARGUMENT_SPEC = { + "configuration_dir": {"type": "str", "required": True}, + "structured_config_dir": {"type": "str", "required": True}, + "structured_config_suffix": {"type": "str", "default": "yml"}, + "device_list": {"type": "list", "elements": "str", "required": True}, + "strict_tags": {"type": "bool", "required": False, "default": False}, + "skip_missing_devices": {"type": "bool", "required": False, "default": False}, + "configlet_name_template": {"type": "str", "default": "AVD-${hostname}"}, + "cv_servers": {"type": "list", "elements": "str", "required": True}, + "cv_token": {"type": "str", "secret": True, "required": True}, + "cv_verify_certs": {"type": "bool", "default": True}, + "workspace": { + "type": "dict", + "options": { + "name": {"type": "str", "required": False}, + "description": {"type": "str", "required": False}, + "id": {"type": "str", "required": False}, + "requested_state": {"type": "str", "default": "built", "choices": ["pending", "built", "submitted", "abandoned", "deleted"]}, + "force": {"type": "bool", "default": False}, + }, }, - }, - "change_control": { - "type": "dict", - "options": { - "name": {"type": "str", "required": False}, - "description": {"type": "str", "required": False}, - "requested_state": {"type": "str", "default": "pending approval", "choices": ["pending approval", "approved", "running", "completed"]}, + "change_control": { + "type": "dict", + "options": { + "name": {"type": "str", "required": False}, + "description": {"type": "str", "required": False}, + "requested_state": {"type": "str", "default": "pending approval", "choices": ["pending approval", "approved", "running", 
"completed"]}, + }, }, - }, - "timeouts": { - "type": "dict", - "options": { - "workspace_build_timeout": {"type": "float", "default": CVTimeOuts.workspace_build_timeout}, - "change_control_creation_timeout": {"type": "float", "default": CVTimeOuts.change_control_creation_timeout}, + "timeouts": { + "type": "dict", + "options": { + "workspace_build_timeout": {"type": "float", "default": CVTimeOuts.workspace_build_timeout}, + "change_control_creation_timeout": {"type": "float", "default": CVTimeOuts.change_control_creation_timeout}, + }, }, - }, - "return_details": {"type": "bool", "required": False, "default": False}, -} + "return_details": {"type": "bool", "required": False, "default": False}, + } class ActionModule(ActionBase): From 06f14379f6a5af1441fadc16fccd75e24c5d68b8 Mon Sep 17 00:00:00 2001 From: Claus Holbech Date: Thu, 20 Jun 2024 13:55:37 +0200 Subject: [PATCH 07/11] perhaps fixing tox --- python-avd/tox.ini | 2 ++ 1 file changed, 2 insertions(+) diff --git a/python-avd/tox.ini b/python-avd/tox.ini index ca22a4f4f89..1f9d9b4bbed 100644 --- a/python-avd/tox.ini +++ b/python-avd/tox.ini @@ -29,6 +29,7 @@ deps = PyYAML>=6.0.0 pydantic>=2.3.0 referencing>=0.35.0 + aristaproto>=0.1.1 commands = make test-dep # posargs allows to run only a specific test using @@ -42,6 +43,7 @@ deps = pydantic>=2.3.0 coverage[toml] referencing>=0.35.0 + aristaproto>=0.1.1 commands = coverage erase make test-dep From efb2c259953eceef8f2a5a3019741111357c858e Mon Sep 17 00:00:00 2001 From: Claus Holbech Date: Thu, 20 Jun 2024 14:53:15 +0200 Subject: [PATCH 08/11] fix requirements --- python-avd/requirements.txt | 1 + python-avd/tox.ini | 2 -- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/python-avd/requirements.txt b/python-avd/requirements.txt index 5a8bcb3d781..939556a5546 100644 --- a/python-avd/requirements.txt +++ b/python-avd/requirements.txt @@ -3,3 +3,4 @@ cryptography>=38.0.4 deepmerge>=1.1.0 jinja2>=3.0 jsonschema>=4.10.3 +requests>=2.27.0 diff --git 
a/python-avd/tox.ini b/python-avd/tox.ini index 1f9d9b4bbed..ca22a4f4f89 100644 --- a/python-avd/tox.ini +++ b/python-avd/tox.ini @@ -29,7 +29,6 @@ deps = PyYAML>=6.0.0 pydantic>=2.3.0 referencing>=0.35.0 - aristaproto>=0.1.1 commands = make test-dep # posargs allows to run only a specific test using @@ -43,7 +42,6 @@ deps = pydantic>=2.3.0 coverage[toml] referencing>=0.35.0 - aristaproto>=0.1.1 commands = coverage erase make test-dep From 6be4aac7f5ea745eb65d0584fa6ffc2b623bb85c Mon Sep 17 00:00:00 2001 From: Claus Holbech Date: Thu, 20 Jun 2024 15:01:21 +0200 Subject: [PATCH 09/11] accept lower coverage because of cv api --- python-avd/pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/python-avd/pyproject.toml b/python-avd/pyproject.toml index 0ff94086b94..f7eb61b42fd 100644 --- a/python-avd/pyproject.toml +++ b/python-avd/pyproject.toml @@ -75,7 +75,7 @@ branch = true [tool.coverage.report] show_missing = true -fail_under = 89 +fail_under = 88 include = [ "pyavd/*" ] From e53b9017de49dbedfb7f69dade47f980099eadea Mon Sep 17 00:00:00 2001 From: Claus Holbech Date: Fri, 21 Jun 2024 08:30:49 +0200 Subject: [PATCH 10/11] update coverage target and exclude generated code --- python-avd/pyproject.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/python-avd/pyproject.toml b/python-avd/pyproject.toml index f7eb61b42fd..9c793679c81 100644 --- a/python-avd/pyproject.toml +++ b/python-avd/pyproject.toml @@ -75,12 +75,12 @@ branch = true [tool.coverage.report] show_missing = true -fail_under = 88 +fail_under = 91 include = [ "pyavd/*" ] omit = [ - "pyavd/vendor/cv_client/*" + "pyavd/_cv/api/*" ] # Regexes for lines to exclude from consideration exclude_also = [ From 368436fea872a2a730b0bfea33573837ad50114d Mon Sep 17 00:00:00 2001 From: Claus Holbech Date: Fri, 21 Jun 2024 08:38:22 +0200 Subject: [PATCH 11/11] update coverage target and exclude generated code --- python-avd/pyproject.toml | 2 +- 1 file changed, 1 
insertion(+), 1 deletion(-) diff --git a/python-avd/pyproject.toml b/python-avd/pyproject.toml index 9c793679c81..0b272691d59 100644 --- a/python-avd/pyproject.toml +++ b/python-avd/pyproject.toml @@ -75,7 +75,7 @@ branch = true [tool.coverage.report] show_missing = true -fail_under = 91 +fail_under = 90 include = [ "pyavd/*" ]