diff --git a/.github/workflows/kuksa-client.yml b/.github/workflows/kuksa-client.yml index d23f53f..a29f41b 100644 --- a/.github/workflows/kuksa-client.yml +++ b/.github/workflows/kuksa-client.yml @@ -132,13 +132,18 @@ jobs: - name: Checkout kuksa-python-sdk uses: actions/checkout@v4 with: + fetch-depth: 0 submodules: 'true' - name: Install pip run: | python -m pip --quiet --no-input install --upgrade pip + - name: Initiate submodules + run: | + git submodule update --remote - name: Install dependencies with pip run: | cd kuksa-client + python3 -m proto pip install -r requirements.txt -e . pip install -r test-requirements.txt - name: Run tests diff --git a/.gitmodules b/.gitmodules index ade3e35..a3cfce0 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,3 +1,6 @@ -[submodule "submodules/kuksa-databroker"] + + +[submodule "kuksa-databroker"] path = submodules/kuksa-databroker - url = https://github.com/eclipse-kuksa/kuksa-databroker + url = https://github.com/eclipse-kuksa/kuksa-databroker.git + branch = feature/databroker-api-v2 diff --git a/docs/building.md b/docs/building.md index 8401a44..60ff816 100644 --- a/docs/building.md +++ b/docs/building.md @@ -7,6 +7,12 @@ git submodule update --init cd kuksa-client ``` +Hint: If you want to use another branch than master exchange the first command with + +```console +git submodule update --recursive --remote --init +``` + First we suggest you create a dedicated [python virtual environment](https://docs.python.org/3/library/venv.html) for kuksa-client: ```console @@ -15,6 +21,12 @@ python3 -m venv ~/.venv/kuksa-client source ~/.venv/kuksa-client/bin/activate # Run this every time you want to activate kuksa-client's virtual environment ``` +To use the right api interfaces of databroker run the following: +```console +python3 -m proto +``` +This should copy the corresponding proto files to the kuksa-client directory. + Your prompt should change to somehting indicating you are in the virutal environment now, e.g. ```console diff --git a/kuksa-client/Dockerfile b/kuksa-client/Dockerfile index 4f72368..73b7e3c 100644 --- a/kuksa-client/Dockerfile +++ b/kuksa-client/Dockerfile @@ -18,6 +18,9 @@ RUN pip install --upgrade pip build pyinstaller # We must copy the whole repository otherwise version lookup by tag would not work COPY . /kuksa-python-sdk/ WORKDIR /kuksa-python-sdk/kuksa-client +RUN git submodule update --recursive --remote --init +# install files from submodules to kuksa-client repo to generate protos out of it +RUN python3 -m proto RUN python3 -m build # We install globally on build container, so pyinstaller can easily gather all files diff --git a/kuksa-client/kuksa/__init__.py b/kuksa-client/kuksa/__init__.py deleted file mode 100644 index 720b14f..0000000 --- a/kuksa-client/kuksa/__init__.py +++ /dev/null @@ -1,12 +0,0 @@ -# /******************************************************************************** -# * Copyright (c) 2023 Contributors to the Eclipse Foundation -# * -# * See the NOTICE file(s) distributed with this work for additional -# * information regarding copyright ownership. 
-# * -# * This program and the accompanying materials are made available under the -# * terms of the Apache License 2.0 which is available at -# * http://www.apache.org/licenses/LICENSE-2.0 -# * -# * SPDX-License-Identifier: Apache-2.0 -# ********************************************************************************/ diff --git a/kuksa-client/kuksa/val/__init__.py b/kuksa-client/kuksa/val/__init__.py deleted file mode 100644 index 720b14f..0000000 --- a/kuksa-client/kuksa/val/__init__.py +++ /dev/null @@ -1,12 +0,0 @@ -# /******************************************************************************** -# * Copyright (c) 2023 Contributors to the Eclipse Foundation -# * -# * See the NOTICE file(s) distributed with this work for additional -# * information regarding copyright ownership. -# * -# * This program and the accompanying materials are made available under the -# * terms of the Apache License 2.0 which is available at -# * http://www.apache.org/licenses/LICENSE-2.0 -# * -# * SPDX-License-Identifier: Apache-2.0 -# ********************************************************************************/ diff --git a/kuksa-client/kuksa/val/v1/README.md b/kuksa-client/kuksa/val/v1/README.md deleted file mode 120000 index 07f51a3..0000000 --- a/kuksa-client/kuksa/val/v1/README.md +++ /dev/null @@ -1 +0,0 @@ -../../../../submodules/kuksa-databroker/proto/kuksa/val/v1/README.md \ No newline at end of file diff --git a/kuksa-client/kuksa/val/v1/__init__.py b/kuksa-client/kuksa/val/v1/__init__.py deleted file mode 100644 index 720b14f..0000000 --- a/kuksa-client/kuksa/val/v1/__init__.py +++ /dev/null @@ -1,12 +0,0 @@ -# /******************************************************************************** -# * Copyright (c) 2023 Contributors to the Eclipse Foundation -# * -# * See the NOTICE file(s) distributed with this work for additional -# * information regarding copyright ownership. 
-# * -# * This program and the accompanying materials are made available under the -# * terms of the Apache License 2.0 which is available at -# * http://www.apache.org/licenses/LICENSE-2.0 -# * -# * SPDX-License-Identifier: Apache-2.0 -# ********************************************************************************/ diff --git a/kuksa-client/kuksa/val/v1/types.proto b/kuksa-client/kuksa/val/v1/types.proto deleted file mode 120000 index 446f09c..0000000 --- a/kuksa-client/kuksa/val/v1/types.proto +++ /dev/null @@ -1 +0,0 @@ -../../../../submodules/kuksa-databroker/proto/kuksa/val/v1/types.proto \ No newline at end of file diff --git a/kuksa-client/kuksa/val/v1/val.proto b/kuksa-client/kuksa/val/v1/val.proto deleted file mode 120000 index 3b80543..0000000 --- a/kuksa-client/kuksa/val/v1/val.proto +++ /dev/null @@ -1 +0,0 @@ -../../../../submodules/kuksa-databroker/proto/kuksa/val/v1/val.proto \ No newline at end of file diff --git a/kuksa-client/kuksa_client/__main__.py b/kuksa-client/kuksa_client/__main__.py index 2b29623..69a83e0 100755 --- a/kuksa-client/kuksa_client/__main__.py +++ b/kuksa-client/kuksa_client/__main__.py @@ -136,13 +136,14 @@ def path_completer(self, text, line, begidx, endidx): def subscribeCallback(self, logPath, resp): if logPath is None: - self.async_alert( - highlight( - json.dumps(json.loads(resp), indent=2), - lexers.JsonLexer(), - formatters.TerminalFormatter(), + with self.terminal_lock: + self.async_alert( + highlight( + json.dumps(json.loads(resp), indent=2), + lexers.JsonLexer(), + formatters.TerminalFormatter(), + ) ) - ) else: with logPath.open("a", encoding="utf-8") as logFile: logFile.write(resp + "\n") diff --git a/kuksa-client/kuksa_client/cli_backend/grpc.py b/kuksa-client/kuksa_client/cli_backend/grpc.py index 11ab1ae..85081b4 100644 --- a/kuksa-client/kuksa_client/cli_backend/grpc.py +++ b/kuksa-client/kuksa_client/cli_backend/grpc.py @@ -81,9 +81,18 @@ def __init__(self, config): self.run = False self.AttrDict = { - "value": (kuksa_client.grpc.Field.VALUE, kuksa_client.grpc.View.CURRENT_VALUE), - "targetValue": (kuksa_client.grpc.Field.ACTUATOR_TARGET, kuksa_client.grpc.View.TARGET_VALUE), - "metadata": (kuksa_client.grpc.Field.METADATA, kuksa_client.grpc.View.METADATA), + "value": ( + kuksa_client.grpc.Field.VALUE, + kuksa_client.grpc.View.CURRENT_VALUE, + ), + "targetValue": ( + kuksa_client.grpc.Field.ACTUATOR_TARGET, + kuksa_client.grpc.View.TARGET_VALUE, + ), + "metadata": ( + kuksa_client.grpc.Field.METADATA, + kuksa_client.grpc.View.METADATA, + ), } def connection_established(self) -> bool: @@ -112,8 +121,10 @@ def getValue(self, path: str, attribute="value", timeout=5): def getValues(self, paths: Iterable[str], attribute="value", timeout=5): if attribute in self.AttrDict: field, view = self.AttrDict[attribute] - entries = [kuksa_client.grpc.EntryRequest( - path=path, view=view, fields=(field,)) for path in paths] + entries = [ + kuksa_client.grpc.EntryRequest(path=path, view=view, fields=(field,)) + for path in paths + ] requestArgs = {'entries': entries} return self._sendReceiveMsg(("get", requestArgs), timeout) @@ -127,15 +138,19 @@ def setValues(self, updates: Dict[str, Any], attribute="value", timeout=5): if attribute in self.AttrDict: field, _ = self.AttrDict[attribute] entry_updates = [] + try_v2 = False for path, value in updates.items(): if field is kuksa_client.grpc.Field.VALUE: entry = kuksa_client.grpc.DataEntry( - path=path, value=kuksa_client.grpc.Datapoint(value=value)) + path=path, + 
value=kuksa_client.grpc.Datapoint(value=value), + ) + try_v2 = True elif field is kuksa_client.grpc.Field.ACTUATOR_TARGET: entry = kuksa_client.grpc.DataEntry( - path=path, actuator_target=kuksa_client.grpc.Datapoint( - value=value), + path=path, + actuator_target=kuksa_client.grpc.Datapoint(value=value), ) elif field is kuksa_client.grpc.Field.METADATA: try: @@ -143,12 +158,13 @@ def setValues(self, updates: Dict[str, Any], attribute="value", timeout=5): except json.JSONDecodeError: return json.dumps({"error": "Metadata value needs to be a valid JSON object"}) entry = kuksa_client.grpc.DataEntry( - path=path, metadata=kuksa_client.grpc.Metadata.from_dict( - metadata_dict), + path=path, + metadata=kuksa_client.grpc.Metadata.from_dict(metadata_dict), ) - entry_updates.append(kuksa_client.grpc.EntryUpdate( - entry=entry, fields=(field,))) - requestArgs = {'updates': entry_updates} + entry_updates.append( + kuksa_client.grpc.EntryUpdate(entry=entry, fields=(field,)) + ) + requestArgs = {"updates": entry_updates, "try_v2": try_v2} return self._sendReceiveMsg(("set", requestArgs), timeout) return json.dumps({"error": "Invalid Attribute"}) @@ -175,11 +191,14 @@ def subscribe(self, path: str, callback, attribute="value", timeout=5): def subscribeMultiple(self, paths: Iterable[str], callback, attribute="value", timeout=5): if attribute in self.AttrDict: field, view = self.AttrDict[attribute] - entries = [kuksa_client.grpc.SubscribeEntry( - path=path, view=view, fields=(field,)) for path in paths] + entries = [ + kuksa_client.grpc.SubscribeEntry(path=path, view=view, fields=(field,)) + for path in paths + ] requestArgs = { - 'entries': entries, - 'callback': callback_wrapper(callback), + "entries": entries, + "try_v2": True, + "callback": callback_wrapper(callback), } return self._sendReceiveMsg(("subscribe", requestArgs), timeout) @@ -222,8 +241,7 @@ def _sendReceiveMsg(self, req, timeout): # Async function to handle the gRPC calls async def _grpcHandler(self, vss_client: kuksa_client.grpc.aio.VSSClient): self.run = True - subscriber_manager = kuksa_client.grpc.aio.SubscriberManager( - vss_client) + subscriber_manager = kuksa_client.grpc.aio.SubscriberManager(vss_client) self.grpc_connection_established = True while self.run: try: @@ -273,7 +291,9 @@ def updateVSSTree(self, jsonStr, timeout=5): async def mainLoop(self): if self.insecure: - async with kuksa_client.grpc.aio.VSSClient(self.serverIP, self.serverPort, token=self.token) as vss_client: + async with kuksa_client.grpc.aio.VSSClient( + self.serverIP, self.serverPort, token=self.token + ) as vss_client: logger.info("gRPC channel connected.") await self._grpcHandler(vss_client) else: @@ -282,7 +302,7 @@ async def mainLoop(self): self.serverPort, root_certificates=self.cacertificate, tls_server_name=self.tls_server_name, - token=self.token + token=self.token, ) as vss_client: logger.info("Secure gRPC channel connected.") await self._grpcHandler(vss_client) diff --git a/kuksa-client/kuksa_client/grpc/__init__.py b/kuksa-client/kuksa_client/grpc/__init__.py index 173c558..9ae1adc 100644 --- a/kuksa-client/kuksa_client/grpc/__init__.py +++ b/kuksa-client/kuksa_client/grpc/__init__.py @@ -36,75 +36,78 @@ import grpc from grpc import RpcError -from kuksa.val.v1 import types_pb2 -from kuksa.val.v1 import val_pb2 -from kuksa.val.v1 import val_pb2_grpc +from kuksa.val.v1 import types_pb2 as types_v1 +from kuksa.val.v1 import val_pb2 as val_v1 +from kuksa.val.v1 import val_pb2_grpc as val_grpc_v1 +from kuksa.val.v2 import types_pb2 as types_v2 +from 
kuksa.val.v2 import val_pb2 as val_v2 +from kuksa.val.v2 import val_pb2_grpc as val_grpc_v2 logger = logging.getLogger(__name__) class DataType(enum.IntEnum): - UNSPECIFIED = types_pb2.DATA_TYPE_UNSPECIFIED - STRING = types_pb2.DATA_TYPE_STRING - BOOLEAN = types_pb2.DATA_TYPE_BOOLEAN - INT8 = types_pb2.DATA_TYPE_INT8 - INT16 = types_pb2.DATA_TYPE_INT16 - INT32 = types_pb2.DATA_TYPE_INT32 - INT64 = types_pb2.DATA_TYPE_INT64 - UINT8 = types_pb2.DATA_TYPE_UINT8 - UINT16 = types_pb2.DATA_TYPE_UINT16 - UINT32 = types_pb2.DATA_TYPE_UINT32 - UINT64 = types_pb2.DATA_TYPE_UINT64 - FLOAT = types_pb2.DATA_TYPE_FLOAT - DOUBLE = types_pb2.DATA_TYPE_DOUBLE - TIMESTAMP = types_pb2.DATA_TYPE_TIMESTAMP - STRING_ARRAY = types_pb2.DATA_TYPE_STRING_ARRAY - BOOLEAN_ARRAY = types_pb2.DATA_TYPE_BOOLEAN_ARRAY - INT8_ARRAY = types_pb2.DATA_TYPE_INT8_ARRAY - INT16_ARRAY = types_pb2.DATA_TYPE_INT16_ARRAY - INT32_ARRAY = types_pb2.DATA_TYPE_INT32_ARRAY - INT64_ARRAY = types_pb2.DATA_TYPE_INT64_ARRAY - UINT8_ARRAY = types_pb2.DATA_TYPE_UINT8_ARRAY - UINT16_ARRAY = types_pb2.DATA_TYPE_UINT16_ARRAY - UINT32_ARRAY = types_pb2.DATA_TYPE_UINT32_ARRAY - UINT64_ARRAY = types_pb2.DATA_TYPE_UINT64_ARRAY - FLOAT_ARRAY = types_pb2.DATA_TYPE_FLOAT_ARRAY - DOUBLE_ARRAY = types_pb2.DATA_TYPE_DOUBLE_ARRAY - TIMESTAMP_ARRAY = types_pb2.DATA_TYPE_TIMESTAMP_ARRAY + UNSPECIFIED = types_v1.DATA_TYPE_UNSPECIFIED + STRING = types_v1.DATA_TYPE_STRING + BOOLEAN = types_v1.DATA_TYPE_BOOLEAN + INT8 = types_v1.DATA_TYPE_INT8 + INT16 = types_v1.DATA_TYPE_INT16 + INT32 = types_v1.DATA_TYPE_INT32 + INT64 = types_v1.DATA_TYPE_INT64 + UINT8 = types_v1.DATA_TYPE_UINT8 + UINT16 = types_v1.DATA_TYPE_UINT16 + UINT32 = types_v1.DATA_TYPE_UINT32 + UINT64 = types_v1.DATA_TYPE_UINT64 + FLOAT = types_v1.DATA_TYPE_FLOAT + DOUBLE = types_v1.DATA_TYPE_DOUBLE + TIMESTAMP = types_v1.DATA_TYPE_TIMESTAMP + STRING_ARRAY = types_v1.DATA_TYPE_STRING_ARRAY + BOOLEAN_ARRAY = types_v1.DATA_TYPE_BOOLEAN_ARRAY + INT8_ARRAY = types_v1.DATA_TYPE_INT8_ARRAY + INT16_ARRAY = types_v1.DATA_TYPE_INT16_ARRAY + INT32_ARRAY = types_v1.DATA_TYPE_INT32_ARRAY + INT64_ARRAY = types_v1.DATA_TYPE_INT64_ARRAY + UINT8_ARRAY = types_v1.DATA_TYPE_UINT8_ARRAY + UINT16_ARRAY = types_v1.DATA_TYPE_UINT16_ARRAY + UINT32_ARRAY = types_v1.DATA_TYPE_UINT32_ARRAY + UINT64_ARRAY = types_v1.DATA_TYPE_UINT64_ARRAY + FLOAT_ARRAY = types_v1.DATA_TYPE_FLOAT_ARRAY + DOUBLE_ARRAY = types_v1.DATA_TYPE_DOUBLE_ARRAY + TIMESTAMP_ARRAY = types_v1.DATA_TYPE_TIMESTAMP_ARRAY class EntryType(enum.IntEnum): - UNSPECIFIED = types_pb2.ENTRY_TYPE_UNSPECIFIED - ATTRIBUTE = types_pb2.ENTRY_TYPE_ATTRIBUTE - SENSOR = types_pb2.ENTRY_TYPE_SENSOR - ACTUATOR = types_pb2.ENTRY_TYPE_ACTUATOR + UNSPECIFIED = types_v1.ENTRY_TYPE_UNSPECIFIED + ATTRIBUTE = types_v1.ENTRY_TYPE_ATTRIBUTE + SENSOR = types_v1.ENTRY_TYPE_SENSOR + ACTUATOR = types_v1.ENTRY_TYPE_ACTUATOR class View(enum.IntEnum): - UNSPECIFIED = types_pb2.VIEW_UNSPECIFIED - CURRENT_VALUE = types_pb2.VIEW_CURRENT_VALUE - TARGET_VALUE = types_pb2.VIEW_TARGET_VALUE - METADATA = types_pb2.VIEW_METADATA - FIELDS = types_pb2.VIEW_FIELDS - ALL = types_pb2.VIEW_ALL + UNSPECIFIED = types_v1.VIEW_UNSPECIFIED + CURRENT_VALUE = types_v1.VIEW_CURRENT_VALUE + TARGET_VALUE = types_v1.VIEW_TARGET_VALUE + METADATA = types_v1.VIEW_METADATA + FIELDS = types_v1.VIEW_FIELDS + ALL = types_v1.VIEW_ALL class Field(enum.IntEnum): - UNSPECIFIED = types_pb2.FIELD_UNSPECIFIED - PATH = types_pb2.FIELD_PATH - VALUE = types_pb2.FIELD_VALUE - ACTUATOR_TARGET = types_pb2.FIELD_ACTUATOR_TARGET - METADATA = 
types_pb2.FIELD_METADATA - METADATA_DATA_TYPE = types_pb2.FIELD_METADATA_DATA_TYPE - METADATA_DESCRIPTION = types_pb2.FIELD_METADATA_DESCRIPTION - METADATA_ENTRY_TYPE = types_pb2.FIELD_METADATA_ENTRY_TYPE - METADATA_COMMENT = types_pb2.FIELD_METADATA_COMMENT - METADATA_DEPRECATION = types_pb2.FIELD_METADATA_DEPRECATION - METADATA_UNIT = types_pb2.FIELD_METADATA_UNIT - METADATA_VALUE_RESTRICTION = types_pb2.FIELD_METADATA_VALUE_RESTRICTION - METADATA_ACTUATOR = types_pb2.FIELD_METADATA_ACTUATOR - METADATA_SENSOR = types_pb2.FIELD_METADATA_SENSOR - METADATA_ATTRIBUTE = types_pb2.FIELD_METADATA_ATTRIBUTE + UNSPECIFIED = types_v1.FIELD_UNSPECIFIED + PATH = types_v1.FIELD_PATH + VALUE = types_v1.FIELD_VALUE + ACTUATOR_TARGET = types_v1.FIELD_ACTUATOR_TARGET + METADATA = types_v1.FIELD_METADATA + METADATA_DATA_TYPE = types_v1.FIELD_METADATA_DATA_TYPE + METADATA_DESCRIPTION = types_v1.FIELD_METADATA_DESCRIPTION + METADATA_ENTRY_TYPE = types_v1.FIELD_METADATA_ENTRY_TYPE + METADATA_COMMENT = types_v1.FIELD_METADATA_COMMENT + METADATA_DEPRECATION = types_v1.FIELD_METADATA_DEPRECATION + METADATA_UNIT = types_v1.FIELD_METADATA_UNIT + METADATA_VALUE_RESTRICTION = types_v1.FIELD_METADATA_VALUE_RESTRICTION + METADATA_ACTUATOR = types_v1.FIELD_METADATA_ACTUATOR + METADATA_SENSOR = types_v1.FIELD_METADATA_SENSOR + METADATA_ATTRIBUTE = types_v1.FIELD_METADATA_ATTRIBUTE class MetadataField(enum.Enum): @@ -132,10 +135,17 @@ def from_grpc_error(cls, error: RpcError): grpc_code, grpc_reason = error.code().value # TODO: Maybe details could hold an actual Error and/or repeated DataEntryError protobuf messages. # This would allow 'code' to be an actual HTTP/VISS status code not a gRPC one. - return cls(error={'code': grpc_code, 'reason': grpc_reason, 'message': error.details()}, errors=[]) + return cls( + error={ + "code": grpc_code, + "reason": grpc_reason, + "message": error.details(), + }, + errors=[], + ) def to_dict(self) -> Dict[str, Any]: - return {'error': self.error, 'errors': self.errors} + return {"error": self.error, "errors": self.errors} @dataclasses.dataclass @@ -157,39 +167,47 @@ class Metadata: # No support for entry_specific for now. 
@classmethod - def from_message(cls, message: types_pb2.Metadata): - metadata = cls(data_type=DataType(message.data_type), - entry_type=EntryType(message.entry_type)) - for field in ('description', 'comment', 'deprecation', 'unit'): + def from_message(cls, message: types_v1.Metadata): + metadata = cls( + data_type=DataType(message.data_type), + entry_type=EntryType(message.entry_type), + ) + for field in ("description", "comment", "deprecation", "unit"): if message.HasField(field): setattr(metadata, field, getattr(message, field)) - if message.HasField('value_restriction'): - restriction_type = message.value_restriction.WhichOneof('type') + if message.HasField("value_restriction"): + restriction_type = message.value_restriction.WhichOneof("type") # Make sure that a type actually is set if restriction_type: - value_restriction = getattr( - message.value_restriction, restriction_type) + value_restriction = getattr(message.value_restriction, restriction_type) metadata.value_restriction = ValueRestriction() # All types except string support min/max - if restriction_type != 'string': - for field in ('min', 'max'): + if restriction_type != "string": + for field in ("min", "max"): if value_restriction.HasField(field): - setattr(metadata.value_restriction, field, - getattr(value_restriction, field)) + setattr( + metadata.value_restriction, + field, + getattr(value_restriction, field), + ) if value_restriction.allowed_values: metadata.value_restriction.allowed_values = list( - value_restriction.allowed_values) + value_restriction.allowed_values + ) return metadata # pylint: disable=too-many-branches - def to_message(self, value_type: DataType = DataType.UNSPECIFIED) -> types_pb2.Metadata: + def to_message( + self, value_type: DataType = DataType.UNSPECIFIED + ) -> types_v1.Metadata: """ to_message/from_message aligned to use None rather than empty list for representing allowed values in value restrictions """ - message = types_pb2.Metadata( - data_type=self.data_type.value, entry_type=self.entry_type.value) - for field in ('description', 'comment', 'deprecation', 'unit'): + message = types_v1.Metadata( + data_type=self.data_type.value, entry_type=self.entry_type.value + ) + for field in ("description", "comment", "deprecation", "unit"): field_value = getattr(self, field, None) if field_value is not None: setattr(message, field, field_value) @@ -206,14 +224,15 @@ def to_message(self, value_type: DataType = DataType.UNSPECIFIED) -> types_pb2.M ): if self.value_restriction.min is not None: message.value_restriction.signed.min = int( - self.value_restriction.min) + self.value_restriction.min + ) if self.value_restriction.max is not None: message.value_restriction.signed.max = int( - self.value_restriction.max) + self.value_restriction.max + ) if self.value_restriction.allowed_values: message.value_restriction.signed.allowed_values.extend( - (int(value) - for value in self.value_restriction.allowed_values), + (int(value) for value in self.value_restriction.allowed_values), ) elif value_type in ( DataType.UINT8, @@ -227,14 +246,15 @@ def to_message(self, value_type: DataType = DataType.UNSPECIFIED) -> types_pb2.M ): if self.value_restriction.min is not None: message.value_restriction.unsigned.min = int( - self.value_restriction.min) + self.value_restriction.min + ) if self.value_restriction.max is not None: message.value_restriction.unsigned.max = int( - self.value_restriction.max) + self.value_restriction.max + ) if self.value_restriction.allowed_values: 
message.value_restriction.unsigned.allowed_values.extend( - (int(value) - for value in self.value_restriction.allowed_values), + (int(value) for value in self.value_restriction.allowed_values), ) elif value_type in ( DataType.FLOAT, @@ -244,14 +264,18 @@ def to_message(self, value_type: DataType = DataType.UNSPECIFIED) -> types_pb2.M ): if self.value_restriction.min is not None: message.value_restriction.floating_point.min = float( - self.value_restriction.min) + self.value_restriction.min + ) if self.value_restriction.max is not None: message.value_restriction.floating_point.max = float( - self.value_restriction.max) + self.value_restriction.max + ) if self.value_restriction.allowed_values: message.value_restriction.floating_point.allowed_values.extend( - (float(value) - for value in self.value_restriction.allowed_values), + ( + float(value) + for value in self.value_restriction.allowed_values + ), ) elif value_type in ( DataType.STRING, @@ -259,54 +283,57 @@ def to_message(self, value_type: DataType = DataType.UNSPECIFIED) -> types_pb2.M ): if self.value_restriction.allowed_values: message.value_restriction.string.allowed_values.extend( - (str(value) - for value in self.value_restriction.allowed_values), + (str(value) for value in self.value_restriction.allowed_values), ) else: raise ValueError( - f"Cannot set value_restriction from data type {value_type.name}") + f"Cannot set value_restriction from data type {value_type.name}" + ) return message + # pylint: enable=too-many-branches @classmethod def from_dict(cls, metadata_dict: Dict[str, Any]): - data_type = metadata_dict.get('data_type', DataType.UNSPECIFIED) + data_type = metadata_dict.get("data_type", DataType.UNSPECIFIED) if isinstance(data_type, str): data_type = getattr(DataType, data_type) else: data_type = DataType(data_type) - entry_type = metadata_dict.get('entry_type', EntryType.UNSPECIFIED) + entry_type = metadata_dict.get("entry_type", EntryType.UNSPECIFIED) if isinstance(entry_type, str): entry_type = getattr(EntryType, entry_type) else: entry_type = EntryType(entry_type) instance = cls(data_type=data_type, entry_type=entry_type) - for field in ('description', 'comment', 'deprecation', 'unit'): + for field in ("description", "comment", "deprecation", "unit"): field_value = metadata_dict.get(field, None) if field_value is not None: setattr(instance, field, str(field_value)) - value_restriction = metadata_dict.get('value_restriction') + value_restriction = metadata_dict.get("value_restriction") if value_restriction is not None: instance.value_restriction = ValueRestriction() - for field in ('min', 'max', 'allowed_values'): + for field in ("min", "max", "allowed_values"): field_value = value_restriction.get(field) if field_value is not None: setattr(instance.value_restriction, field, field_value) return instance def to_dict(self) -> Dict[str, Any]: - out_dict = {'data_type': self.data_type.name, - 'entry_type': self.entry_type.name} - for field in ('description', 'comment', 'deprecation', 'unit'): + out_dict = { + "data_type": self.data_type.name, + "entry_type": self.entry_type.name, + } + for field in ("description", "comment", "deprecation", "unit"): field_value = getattr(self, field, None) if field_value is not None: out_dict[field] = field_value if self.value_restriction is not None: - out_dict['value_restriction'] = {} - for field in ('min', 'max', 'allowed_values'): + out_dict["value_restriction"] = {} + for field in ("min", "max", "allowed_values"): field_value = getattr(self.value_restriction, field, None) if 
field_value is not None: - out_dict['value_restriction'][field] = field_value + out_dict["value_restriction"][field] = field_value return out_dict @@ -316,32 +343,34 @@ class Datapoint: timestamp: Optional[datetime.datetime] = None @classmethod - def from_message(cls, message: types_pb2.Datapoint): + def from_message(cls, message: types_v1.Datapoint): """ Return internal Datapoint representation or None on error """ - if message.WhichOneof('value') is None: + if message.WhichOneof("value") is None: logger.warning("No value provided in datapoint!") return None - if message.HasField('timestamp'): + if message.HasField("timestamp"): # gRPC timestamp supports date up to including year 9999 # If timestamp by any reason contains a larger number for seconds than supported # you may get an overflow error try: timestamp = message.timestamp.ToDatetime( - tzinfo=datetime.timezone.utc, - ) + tzinfo=datetime.timezone.utc, + ) except ValueError: - logger.error("Timestamp %d out of accepted range, value ignored!", - message.timestamp.seconds) + logger.error( + "Timestamp %d out of accepted range, value ignored!", + message.timestamp.seconds, + ) return None else: timestamp = None return cls( - value=getattr(message, message.WhichOneof('value')), + value=getattr(message, message.WhichOneof("value")), timestamp=timestamp, ) @@ -351,7 +380,7 @@ def cast_array_values(cast, array): Note that input value to this function is not the same as given if you use kuksa-client command line as parts (e.g. surrounding quotes) are removed by shell, and then do_setValue also do some magic. """ - array = array.strip('[]') + array = array.strip("[]") # Split the input string into separate values # First alternative, not quotes including escaped single or double quote, ends at comma, whitespace or EOL @@ -359,22 +388,22 @@ def cast_array_values(cast, array): # double quote # Third is similar but for single quote # Using raw strings with surrounding single/double quotes to minimize need for escapes - pattern = r'(?:\\"|\\' + \ - r"'|[^'" + r'",])+|"(?:\\"|[^"])*"|' + \ - r"'(?:\\'|[^'])*'" + pattern = ( + r'(?:\\"|\\' + r"'|[^'" + r'",])+|"(?:\\"|[^"])*"|' + r"'(?:\\'|[^'])*'" + ) values = re.findall(pattern, array) for item in values: # We may in some cases match blanks, that is intended as we want to be able to write arguments like # My Way # ... 
without quotes - if item.strip() == '': + if item.strip() == "": # skip pass else: yield cast(item) def cast_bool(value) -> bool: - if value in ('False', 'false', 'F', 'f'): + if value in ("False", "false", "F", "f"): value = 0 return bool(value) @@ -391,12 +420,12 @@ def cast_str(value) -> str: if new_val.startswith("'") and new_val.endswith("'"): new_val = new_val[1:-1] # Replace escaped quotes with normal quotes - new_val = new_val.replace('\\\"', '\"') - new_val = new_val.replace("\\\'", "\'") + new_val = new_val.replace('\\"', '"') + new_val = new_val.replace("\\'", "'") return new_val - def to_message(self, value_type: DataType) -> types_pb2.Datapoint: - message = types_pb2.Datapoint() + def v1_to_message(self, value_type: DataType) -> types_v1.Datapoint: + message = types_v1.Datapoint() def set_array_attr(obj, attr, values): array = getattr(obj, attr) @@ -404,42 +433,78 @@ def set_array_attr(obj, attr, values): array.values.extend(values) field, set_field, cast_field = { - DataType.INT8: ('int32', setattr, int), - DataType.INT16: ('int32', setattr, int), - DataType.INT32: ('int32', setattr, int), - DataType.UINT8: ('uint32', setattr, int), - DataType.UINT16: ('uint32', setattr, int), - DataType.UINT32: ('uint32', setattr, int), - DataType.UINT64: ('uint64', setattr, int), - DataType.INT64: ('int64', setattr, int), - DataType.FLOAT: ('float', setattr, float), - DataType.DOUBLE: ('double', setattr, float), - DataType.BOOLEAN: ('bool', setattr, Datapoint.cast_bool), - DataType.STRING: ('string', setattr, Datapoint.cast_str), - DataType.INT8_ARRAY: ('int32_array', set_array_attr, - lambda array: Datapoint.cast_array_values(int, array)), - DataType.INT16_ARRAY: ('int32_array', set_array_attr, - lambda array: Datapoint.cast_array_values(int, array)), - DataType.INT32_ARRAY: ('int32_array', set_array_attr, - lambda array: Datapoint.cast_array_values(int, array)), - DataType.UINT8_ARRAY: ('uint32_array', set_array_attr, - lambda array: Datapoint.cast_array_values(int, array)), - DataType.UINT16_ARRAY: ('uint32_array', set_array_attr, - lambda array: Datapoint.cast_array_values(int, array)), - DataType.UINT32_ARRAY: ('uint32_array', set_array_attr, - lambda array: Datapoint.cast_array_values(int, array)), - DataType.UINT64_ARRAY: ('uint64_array', set_array_attr, - lambda array: Datapoint.cast_array_values(int, array)), - DataType.INT64_ARRAY: ('int64_array', set_array_attr, - lambda array: Datapoint.cast_array_values(int, array)), - DataType.FLOAT_ARRAY: ('float_array', set_array_attr, - lambda array: Datapoint.cast_array_values(float, array)), - DataType.DOUBLE_ARRAY: ('double_array', set_array_attr, - lambda array: Datapoint.cast_array_values(float, array)), - DataType.BOOLEAN_ARRAY: ('bool_array', set_array_attr, - lambda array: Datapoint.cast_array_values(Datapoint.cast_bool, array)), - DataType.STRING_ARRAY: ('string_array', set_array_attr, - lambda array: Datapoint.cast_array_values(Datapoint.cast_str, array)), + DataType.INT8: ("int32", setattr, int), + DataType.INT16: ("int32", setattr, int), + DataType.INT32: ("int32", setattr, int), + DataType.UINT8: ("uint32", setattr, int), + DataType.UINT16: ("uint32", setattr, int), + DataType.UINT32: ("uint32", setattr, int), + DataType.UINT64: ("uint64", setattr, int), + DataType.INT64: ("int64", setattr, int), + DataType.FLOAT: ("float", setattr, float), + DataType.DOUBLE: ("double", setattr, float), + DataType.BOOLEAN: ("bool", setattr, Datapoint.cast_bool), + DataType.STRING: ("string", setattr, Datapoint.cast_str), + DataType.INT8_ARRAY: 
( + "int32_array", + set_array_attr, + lambda array: Datapoint.cast_array_values(int, array), + ), + DataType.INT16_ARRAY: ( + "int32_array", + set_array_attr, + lambda array: Datapoint.cast_array_values(int, array), + ), + DataType.INT32_ARRAY: ( + "int32_array", + set_array_attr, + lambda array: Datapoint.cast_array_values(int, array), + ), + DataType.UINT8_ARRAY: ( + "uint32_array", + set_array_attr, + lambda array: Datapoint.cast_array_values(int, array), + ), + DataType.UINT16_ARRAY: ( + "uint32_array", + set_array_attr, + lambda array: Datapoint.cast_array_values(int, array), + ), + DataType.UINT32_ARRAY: ( + "uint32_array", + set_array_attr, + lambda array: Datapoint.cast_array_values(int, array), + ), + DataType.UINT64_ARRAY: ( + "uint64_array", + set_array_attr, + lambda array: Datapoint.cast_array_values(int, array), + ), + DataType.INT64_ARRAY: ( + "int64_array", + set_array_attr, + lambda array: Datapoint.cast_array_values(int, array), + ), + DataType.FLOAT_ARRAY: ( + "float_array", + set_array_attr, + lambda array: Datapoint.cast_array_values(float, array), + ), + DataType.DOUBLE_ARRAY: ( + "double_array", + set_array_attr, + lambda array: Datapoint.cast_array_values(float, array), + ), + DataType.BOOLEAN_ARRAY: ( + "bool_array", + set_array_attr, + lambda array: Datapoint.cast_array_values(Datapoint.cast_bool, array), + ), + DataType.STRING_ARRAY: ( + "string_array", + set_array_attr, + lambda array: Datapoint.cast_array_values(Datapoint.cast_str, array), + ), }.get(value_type, (None, None, None)) if self.value is not None: if all((field, set_field, cast_field)): @@ -453,12 +518,108 @@ def set_array_attr(obj, attr, values): message.timestamp.FromDatetime(self.timestamp) return message + def v2_to_message(self, value_type: DataType) -> types_v2.Datapoint: + message = types_v2.Datapoint() + value = types_v2.Value() + + def set_array_attr(obj, attr, values): + array = getattr(obj, attr) + array.Clear() + array.values.extend(values) + + field, set_field, cast_field = { + DataType.INT8: ("int32", setattr, int), + DataType.INT16: ("int32", setattr, int), + DataType.INT32: ("int32", setattr, int), + DataType.UINT8: ("uint32", setattr, int), + DataType.UINT16: ("uint32", setattr, int), + DataType.UINT32: ("uint32", setattr, int), + DataType.UINT64: ("uint64", setattr, int), + DataType.INT64: ("int64", setattr, int), + DataType.FLOAT: ("float", setattr, float), + DataType.DOUBLE: ("double", setattr, float), + DataType.BOOLEAN: ("bool", setattr, Datapoint.cast_bool), + DataType.STRING: ("string", setattr, Datapoint.cast_str), + DataType.INT8_ARRAY: ( + "int32_array", + set_array_attr, + lambda array: Datapoint.cast_array_values(int, array), + ), + DataType.INT16_ARRAY: ( + "int32_array", + set_array_attr, + lambda array: Datapoint.cast_array_values(int, array), + ), + DataType.INT32_ARRAY: ( + "int32_array", + set_array_attr, + lambda array: Datapoint.cast_array_values(int, array), + ), + DataType.UINT8_ARRAY: ( + "uint32_array", + set_array_attr, + lambda array: Datapoint.cast_array_values(int, array), + ), + DataType.UINT16_ARRAY: ( + "uint32_array", + set_array_attr, + lambda array: Datapoint.cast_array_values(int, array), + ), + DataType.UINT32_ARRAY: ( + "uint32_array", + set_array_attr, + lambda array: Datapoint.cast_array_values(int, array), + ), + DataType.UINT64_ARRAY: ( + "uint64_array", + set_array_attr, + lambda array: Datapoint.cast_array_values(int, array), + ), + DataType.INT64_ARRAY: ( + "int64_array", + set_array_attr, + lambda array: Datapoint.cast_array_values(int, 
array), + ), + DataType.FLOAT_ARRAY: ( + "float_array", + set_array_attr, + lambda array: Datapoint.cast_array_values(float, array), + ), + DataType.DOUBLE_ARRAY: ( + "double_array", + set_array_attr, + lambda array: Datapoint.cast_array_values(float, array), + ), + DataType.BOOLEAN_ARRAY: ( + "bool_array", + set_array_attr, + lambda array: Datapoint.cast_array_values(Datapoint.cast_bool, array), + ), + DataType.STRING_ARRAY: ( + "string_array", + set_array_attr, + lambda array: Datapoint.cast_array_values(Datapoint.cast_str, array), + ), + }.get(value_type, (None, None, None)) + if self.value is not None: + if all((field, set_field, cast_field)): + set_field(value, field, cast_field(self.value)) + message.value.CopyFrom(value) + else: + # Either DataType.TIMESTAMP, DataType.TIMESTAMP_ARRAY or DataType.UNSPECIFIED... + raise ValueError( + f"Cannot determine which field to set with data type {value_type} from value {self.value}", + ) + if self.timestamp is not None: + message.timestamp.FromDatetime(self.timestamp) + return message + def to_dict(self) -> Dict[str, Any]: out_dict = {} if self.value is not None: - out_dict['value'] = self.value + out_dict["value"] = self.value if self.timestamp is not None: - out_dict['timestamp'] = self.timestamp.isoformat() + out_dict["timestamp"] = self.timestamp.isoformat() return out_dict @@ -473,37 +634,38 @@ class DataEntry: value_type: DataType = DataType.UNSPECIFIED @classmethod - def from_message(cls, message: types_pb2.DataEntry): - entry_kwargs = {'path': message.path} - if message.HasField('value'): - entry_kwargs['value'] = Datapoint.from_message(message.value) - if message.HasField('actuator_target'): - entry_kwargs['actuator_target'] = Datapoint.from_message( - message.actuator_target) - if message.HasField('metadata'): - entry_kwargs['metadata'] = Metadata.from_message(message.metadata) + def from_message(cls, message: types_v1.DataEntry): + entry_kwargs = {"path": message.path} + if message.HasField("value"): + entry_kwargs["value"] = Datapoint.from_message(message.value) + if message.HasField("actuator_target"): + entry_kwargs["actuator_target"] = Datapoint.from_message( + message.actuator_target + ) + if message.HasField("metadata"): + entry_kwargs["metadata"] = Metadata.from_message(message.metadata) return cls(**entry_kwargs) - def to_message(self) -> types_pb2.DataEntry: - message = types_pb2.DataEntry(path=self.path) + def to_message(self) -> types_v1.DataEntry: + message = types_v1.DataEntry(path=self.path) if self.value is not None: - message.value.MergeFrom(self.value.to_message(self.value_type)) + message.value.MergeFrom(self.value.v1_to_message(self.value_type)) if self.actuator_target is not None: message.actuator_target.MergeFrom( - self.actuator_target.to_message(self.value_type)) + self.actuator_target.v1_to_message(self.value_type) + ) if self.metadata is not None: - message.metadata.MergeFrom( - self.metadata.to_message(self.value_type)) + message.metadata.MergeFrom(self.metadata.to_message(self.value_type)) return message def to_dict(self) -> Dict[str, Any]: - out_dict = {'path': self.path} + out_dict = {"path": self.path} if self.value is not None: - out_dict['value'] = self.value.to_dict() + out_dict["value"] = self.value.to_dict() if self.actuator_target is not None: - out_dict['actuator_target'] = self.actuator_target.to_dict() + out_dict["actuator_target"] = self.actuator_target.to_dict() if self.metadata is not None: - out_dict['metadata'] = self.metadata.to_dict() + out_dict["metadata"] = self.metadata.to_dict() 
return out_dict @@ -520,19 +682,43 @@ class EntryUpdate: fields: Iterable[Field] @classmethod - def from_message(cls, message: val_pb2.EntryUpdate): + def from_message(cls, message: val_v1.EntryUpdate): return cls( entry=DataEntry.from_message(message.entry), fields=[Field(field) for field in message.fields], ) - def to_message(self) -> val_pb2.EntryUpdate: - message = val_pb2.EntryUpdate(entry=self.entry.to_message()) + @classmethod + def from_tuple(cls, path: str, dp: types_v2.Datapoint): + # we assume here that only one field of Value is set -> we use the first entry. + # This should always be the case. + data = dp.value.ListFields() + field_descriptor, value = data[0] + field_name = field_descriptor.name + value = getattr(dp.value, field_name) + if dp.timestamp.seconds == 0 and dp.timestamp.nanos == 0: + timestamp = None + else: + timestamp = dp.timestamp.ToDatetime( + tzinfo=datetime.timezone.utc, + ) + return cls( + entry=DataEntry( + path=path, value=Datapoint(value=value, timestamp=timestamp) + ), + fields=[Field(value=types_v1.FIELD_VALUE)], + ) + + def to_message(self) -> val_v1.EntryUpdate: + message = val_v1.EntryUpdate(entry=self.entry.to_message()) message.fields.extend(field.value for field in self.fields) return message def to_dict(self) -> Dict[str, Any]: - return {'entry': self.entry.to_dict(), 'fields': [field.name for field in self.fields]} + return { + "entry": self.entry.to_dict(), + "fields": [field.name for field in self.fields], + } @dataclasses.dataclass @@ -548,11 +734,12 @@ class ServerInfo: version: str @classmethod - def from_message(cls, message: val_pb2.GetServerInfoResponse): + def from_message(cls, message: val_v1.GetServerInfoResponse): return cls(name=message.name, version=message.version) class BaseVSSClient: + def __init__( self, host: str, @@ -561,50 +748,58 @@ def __init__( root_certificates: Optional[Path] = None, ensure_startup_connection: bool = True, connected: bool = False, - tls_server_name: Optional[str] = None + tls_server_name: Optional[str] = None, ): self.authorization_header = self.get_authorization_header(token) - self.target_host = f'{host}:{port}' + self.target_host = f"{host}:{port}" self.root_certificates = root_certificates self.tls_server_name = tls_server_name self.ensure_startup_connection = ensure_startup_connection self.connected = connected - self.client_stub = None + self.client_stub_v1 = None + self.client_stub_v2 = None def _load_creds(self) -> Optional[grpc.ChannelCredentials]: if self.root_certificates: logger.info(f"Using TLS with Root CA from {self.root_certificates}") root_certificates = self.root_certificates.read_bytes() return grpc.ssl_channel_credentials(root_certificates) - logger.info("No Root CA present, it will not be possible to use a secure connection!") + logger.info( + "No Root CA present, it will not be possible to use a secure connection!" 
+ ) return None - def _prepare_get_request(self, entries: Iterable[EntryRequest]) -> val_pb2.GetRequest: - req = val_pb2.GetRequest(entries=[]) + def _prepare_get_request( + self, entries: Iterable[EntryRequest] + ) -> val_v1.GetRequest: + req = val_v1.GetRequest(entries=[]) for entry in entries: - entry_request = val_pb2.EntryRequest( - path=entry.path, view=entry.view.value, fields=[]) + entry_request = val_v1.EntryRequest( + path=entry.path, view=entry.view.value, fields=[] + ) for field in entry.fields: entry_request.fields.append(field.value) req.entries.append(entry_request) logger.debug("%s: %s", type(req).__name__, req) return req - def _process_get_response(self, response: val_pb2.GetResponse) -> List[DataEntry]: + def _process_get_response(self, response: val_v1.GetResponse) -> List[DataEntry]: logger.debug("%s: %s", type(response).__name__, response) self._raise_if_invalid(response) return [DataEntry.from_message(entry) for entry in response.entries] - def _get_paths_with_required_type(self, updates: Collection[EntryUpdate]) -> Dict[str, DataType]: + def _get_paths_with_required_type( + self, updates: Collection[EntryUpdate] + ) -> Dict[str, DataType]: paths_with_required_type = {} for update in updates: metadata = update.entry.metadata # We need a data type in order to set sensor/actuator value or metadata's value restriction if ( - Field.ACTUATOR_TARGET in update.fields or - Field.VALUE in update.fields or - (metadata is not None and metadata.value_restriction is not None) + Field.ACTUATOR_TARGET in update.fields + or Field.VALUE in update.fields + or (metadata is not None and metadata.value_restriction is not None) ): # If the update holds a new data type, we assume it will be applied before # setting the sensor/actuator value. @@ -614,9 +809,11 @@ def _get_paths_with_required_type(self, updates: Collection[EntryUpdate]) -> Dic return paths_with_required_type def _prepare_set_request( - self, updates: Collection[EntryUpdate], paths_with_required_type: Dict[str, DataType], - ) -> val_pb2.SetRequest: - req = val_pb2.SetRequest(updates=[]) + self, + updates: Collection[EntryUpdate], + paths_with_required_type: Dict[str, DataType], + ) -> val_v1.SetRequest: + req = val_v1.SetRequest(updates=[]) for update in updates: value_type = paths_with_required_type.get(update.entry.path) if value_type is not None: @@ -625,43 +822,77 @@ def _prepare_set_request( logger.debug("%s: %s", type(req).__name__, req) return req - def _process_set_response(self, response: val_pb2.SetResponse) -> None: + def _prepare_publish_value_request( + self, + update: EntryUpdate, + paths_with_required_type: Dict[str, DataType], + ) -> val_v2.PublishValueRequest: + value_type = paths_with_required_type.get(update.entry.path) + if value_type is not None: + update.entry.value_type = value_type + req = val_v2.PublishValueRequest( + signal_id=types_v2.SignalID(path=update.entry.path), + data_point=update.entry.value.v2_to_message(update.entry.value_type), + ) + logger.debug("%s: %s", type(req).__name__, req) + return req + + def _process_set_response(self, response: val_v1.SetResponse) -> None: logger.debug("%s: %s", type(response).__name__, response) self._raise_if_invalid(response) def _prepare_subscribe_request( - self, entries: Iterable[SubscribeEntry], - ) -> val_pb2.SubscribeRequest: - req = val_pb2.SubscribeRequest() + self, + entries: Iterable[SubscribeEntry], + ) -> val_v1.SubscribeRequest: + req = val_v1.SubscribeRequest() for entry in entries: - entry_request = val_pb2.SubscribeEntry( - path=entry.path, 
view=entry.view.value, fields=[]) + entry_request = val_v1.SubscribeEntry( + path=entry.path, view=entry.view.value, fields=[] + ) for field in entry.fields: entry_request.fields.append(field.value) req.entries.append(entry_request) logger.debug("%s: %s", type(req).__name__, req) return req + def _prepare_subscribev2_request( + self, + entries: Iterable[SubscribeEntry], + ) -> val_v2.SubscribeRequest: + paths = [] + for entry in entries: + paths.append(entry.path) + req = val_v2.SubscribeRequest(signal_paths=paths) + logger.debug("%s: %s", type(req).__name__, req) + return req + def _raise_if_invalid(self, response): - if response.HasField('error'): + if response.HasField("error"): error = json_format.MessageToDict( - response.error, preserving_proto_field_name=True) + response.error, preserving_proto_field_name=True + ) else: error = {} if response.errors: - errors = [json_format.MessageToDict( - err, preserving_proto_field_name=True) for err in response.errors] + errors = [ + json_format.MessageToDict(err, preserving_proto_field_name=True) + for err in response.errors + ] else: errors = [] raise_error = False - if (error and error.get('code') != 200): + if error and error.get("code") != 200: raise_error = True else: for sub_error in errors: - if 'error' in sub_error: - if sub_error['error'].get('code') != 200: - logger.debug("Sub-error %d but no top level error", sub_error['error'].get('code')) + if "error" in sub_error: + if sub_error["error"].get("code") != 200: + logger.debug( + "Sub-error %d but no top level error", + sub_error["error"].get("code"), + ) raise_error = True else: logger.error("No error field for sub-error") @@ -707,7 +938,10 @@ def wrapper(self, *args, **kwargs): else: # This shall normally not happen if you use the client as context manager # as then a connect will happen automatically when you enter the context - raise Exception("Server not connected! Call connect() before using this command!") + raise Exception( + "Server not connected! Call connect() before using this command!" 
+ ) + return wrapper def connect(self, target_host=None): @@ -719,7 +953,7 @@ def connect(self, target_host=None): logger.info("Establishing secure channel") if self.tls_server_name: logger.info(f"Using TLS server name {self.tls_server_name}") - options = [('grpc.ssl_target_name_override', self.tls_server_name)] + options = [("grpc.ssl_target_name_override", self.tls_server_name)] channel = grpc.secure_channel(target_host, creds, options) else: logger.debug("Not providing explicit TLS server name") @@ -729,19 +963,23 @@ def connect(self, target_host=None): channel = grpc.insecure_channel(target_host) self.channel = self.exit_stack.enter_context(channel) - self.client_stub = val_pb2_grpc.VALStub(self.channel) + self.client_stub_v1 = val_grpc_v1.VALStub(self.channel) + self.client_stub_v2 = val_grpc_v2.VALStub(self.channel) self.connected = True if self.ensure_startup_connection: logger.debug("Connected to server: %s", self.get_server_info()) def disconnect(self): self.exit_stack.close() - self.client_stub = None + self.client_stub_v1 = None + self.client_stub_v2 = None self.channel = None self.connected = False @check_connected - def get_current_values(self, paths: Iterable[str], **rpc_kwargs) -> Dict[str, Datapoint]: + def get_current_values( + self, paths: Iterable[str], **rpc_kwargs + ) -> Dict[str, Datapoint]: """ Parameters: rpc_kwargs @@ -754,12 +992,17 @@ def get_current_values(self, paths: Iterable[str], **rpc_kwargs) -> Dict[str, Da speed_value = current_values['Vehicle.Speed'].value """ entries = self.get( - entries=(EntryRequest(path, View.CURRENT_VALUE, (Field.VALUE,)) for path in paths), **rpc_kwargs, + entries=( + EntryRequest(path, View.CURRENT_VALUE, (Field.VALUE,)) for path in paths + ), + **rpc_kwargs, ) return {entry.path: entry.value for entry in entries} @check_connected - def get_target_values(self, paths: Iterable[str], **rpc_kwargs) -> Dict[str, Datapoint]: + def get_target_values( + self, paths: Iterable[str], **rpc_kwargs + ) -> Dict[str, Datapoint]: """ Parameters: rpc_kwargs @@ -770,14 +1013,25 @@ def get_target_values(self, paths: Iterable[str], **rpc_kwargs) -> Dict[str, Dat ]) is_abs_to_become_active = target_values['Vehicle.ADAS.ABS.IsActive'].value """ - entries = self.get(entries=( - EntryRequest(path, View.TARGET_VALUE, (Field.ACTUATOR_TARGET,), - ) for path in paths), **rpc_kwargs) + entries = self.get( + entries=( + EntryRequest( + path, + View.TARGET_VALUE, + (Field.ACTUATOR_TARGET,), + ) + for path in paths + ), + **rpc_kwargs, + ) return {entry.path: entry.actuator_target for entry in entries} @check_connected def get_metadata( - self, paths: Iterable[str], field: MetadataField = MetadataField.ALL, **rpc_kwargs, + self, + paths: Iterable[str], + field: MetadataField = MetadataField.ALL, + **rpc_kwargs, ) -> Dict[str, Metadata]: """ Parameters: @@ -791,7 +1045,11 @@ def get_metadata( speed_unit = metadata['Vehicle.Speed'].unit """ entries = self.get( - entries=(EntryRequest(path, View.METADATA, (Field(field.value),)) for path in paths), **rpc_kwargs, + entries=( + EntryRequest(path, View.METADATA, (Field(field.value),)) + for path in paths + ), + **rpc_kwargs, ) return {entry.path: entry.metadata for entry in entries} @@ -808,8 +1066,11 @@ def set_current_values(self, updates: Dict[str, Datapoint], **rpc_kwargs) -> Non }) """ self.set( - updates=[EntryUpdate(DataEntry(path, value=dp), (Field.VALUE,)) - for path, dp in updates.items()], + updates=[ + EntryUpdate(DataEntry(path, value=dp), (Field.VALUE,)) + for path, dp in updates.items() + ], + 
try_v2=True, **rpc_kwargs, ) @@ -823,13 +1084,23 @@ def set_target_values(self, updates: Dict[str, Datapoint], **rpc_kwargs) -> None client.set_target_values( {'Vehicle.ADAS.ABS.IsActive': Datapoint(True)}) """ - self.set(updates=[EntryUpdate( - DataEntry(path, actuator_target=dp), (Field.ACTUATOR_TARGET,), - ) for path, dp in updates.items()], **rpc_kwargs) + self.set( + updates=[ + EntryUpdate( + DataEntry(path, actuator_target=dp), + (Field.ACTUATOR_TARGET,), + ) + for path, dp in updates.items() + ], + **rpc_kwargs, + ) @check_connected def set_metadata( - self, updates: Dict[str, Metadata], field: MetadataField = MetadataField.ALL, **rpc_kwargs, + self, + updates: Dict[str, Metadata], + field: MetadataField = MetadataField.ALL, + **rpc_kwargs, ) -> None: """ Parameters: @@ -840,12 +1111,21 @@ def set_metadata( 'Vehicle.Cabin.Door.Row1.Left.Shade.Position': Metadata(data_type=DataType.FLOAT), }) """ - self.set(updates=[EntryUpdate( - DataEntry(path, metadata=md), (Field(field.value),), - ) for path, md in updates.items()], **rpc_kwargs) + self.set( + updates=[ + EntryUpdate( + DataEntry(path, metadata=md), + (Field(field.value),), + ) + for path, md in updates.items() + ], + **rpc_kwargs, + ) @check_connected - def subscribe_current_values(self, paths: Iterable[str], **rpc_kwargs) -> Iterator[Dict[str, Datapoint]]: + def subscribe_current_values( + self, paths: Iterable[str], **rpc_kwargs + ) -> Iterator[Dict[str, Datapoint]]: """ Parameters: rpc_kwargs @@ -858,14 +1138,19 @@ def subscribe_current_values(self, paths: Iterable[str], **rpc_kwargs) -> Iterat print(f"Current value for {path} is now: {dp.value}") """ for updates in self.subscribe( - entries=(SubscribeEntry(path, View.CURRENT_VALUE, (Field.VALUE,)) - for path in paths), + entries=( + SubscribeEntry(path, View.CURRENT_VALUE, (Field.VALUE,)) + for path in paths + ), + try_v2=True, **rpc_kwargs, ): yield {update.entry.path: update.entry.value for update in updates} @check_connected - def subscribe_target_values(self, paths: Iterable[str], **rpc_kwargs) -> Iterator[Dict[str, Datapoint]]: + def subscribe_target_values( + self, paths: Iterable[str], **rpc_kwargs + ) -> Iterator[Dict[str, Datapoint]]: """ Parameters: rpc_kwargs @@ -878,15 +1163,20 @@ def subscribe_target_values(self, paths: Iterable[str], **rpc_kwargs) -> Iterato print(f"Target value for {path} is now: {dp.value}") """ for updates in self.subscribe( - entries=(SubscribeEntry(path, View.TARGET_VALUE, - (Field.ACTUATOR_TARGET,)) for path in paths), + entries=( + SubscribeEntry(path, View.TARGET_VALUE, (Field.ACTUATOR_TARGET,)) + for path in paths + ), **rpc_kwargs, ): - yield {update.entry.path: update.entry.actuator_target for update in updates} + yield { + update.entry.path: update.entry.actuator_target for update in updates + } @check_connected def subscribe_metadata( - self, paths: Iterable[str], + self, + paths: Iterable[str], field: MetadataField = MetadataField.ALL, **rpc_kwargs, ) -> Iterator[Dict[str, Metadata]]: @@ -903,8 +1193,10 @@ def subscribe_metadata( print(f"Metadata for {path} are now: {md.to_dict()}") """ for updates in self.subscribe( - entries=(SubscribeEntry(path, View.METADATA, (Field(field.value),)) - for path in paths), + entries=( + SubscribeEntry(path, View.METADATA, (Field(field.value),)) + for path in paths + ), **rpc_kwargs, ): yield {update.entry.path: update.entry.metadata for update in updates} @@ -917,38 +1209,72 @@ def get(self, entries: Iterable[EntryRequest], **rpc_kwargs) -> List[DataEntry]: grpc.*MultiCallable kwargs e.g. 
timeout, metadata, credentials. """ rpc_kwargs["metadata"] = self.generate_metadata_header( - rpc_kwargs.get("metadata")) + rpc_kwargs.get("metadata") + ) req = self._prepare_get_request(entries) try: - resp = self.client_stub.Get(req, **rpc_kwargs) + resp = self.client_stub_v1.Get(req, **rpc_kwargs) except RpcError as exc: raise VSSClientError.from_grpc_error(exc) from exc return self._process_get_response(resp) @check_connected - def set(self, updates: Collection[EntryUpdate], **rpc_kwargs) -> None: + def set( + self, updates: Collection[EntryUpdate], try_v2: bool = True, **rpc_kwargs + ) -> None: """ Parameters: rpc_kwargs grpc.*MultiCallable kwargs e.g. timeout, metadata, credentials. """ rpc_kwargs["metadata"] = self.generate_metadata_header( - rpc_kwargs.get("metadata")) + rpc_kwargs.get("metadata") + ) paths_with_required_type = self._get_paths_with_required_type(updates) paths_without_type = [ - path for path, data_type in paths_with_required_type.items() if data_type is DataType.UNSPECIFIED + path + for path, data_type in paths_with_required_type.items() + if data_type is DataType.UNSPECIFIED ] paths_with_required_type.update( - self.get_value_types(paths_without_type, **rpc_kwargs)) - req = self._prepare_set_request(updates, paths_with_required_type) - try: - resp = self.client_stub.Set(req, **rpc_kwargs) - except RpcError as exc: - raise VSSClientError.from_grpc_error(exc) from exc - self._process_set_response(resp) + self.get_value_types(paths_without_type, **rpc_kwargs) + ) + if try_v2: + logger.debug("Trying v2") + if len(updates) == 0: + raise VSSClientError( + error={ + "code": grpc.StatusCode.INVALID_ARGUMENT.value[0], + "reason": grpc.StatusCode.INVALID_ARGUMENT.value[1], + "message": "No datapoints requested", + }, + errors=[], + ) + for update in updates: + req = self._prepare_publish_value_request( + update, paths_with_required_type + ) + try: + resp = self.client_stub_v2.PublishValueRequest(req, **rpc_kwargs) + except RpcError as exc: + if exc.code() == grpc.StatusCode.UNIMPLEMENTED: + logger.debug("v2 not available fall back to v1 instead") + self.set(updates) + else: + raise VSSClientError.from_grpc_error(exc) from exc + else: + logger.debug("Trying v1") + req = self._prepare_set_request(updates, paths_with_required_type) + try: + resp = self.client_stub_v1.Set(req, **rpc_kwargs) + except RpcError as exc: + raise VSSClientError.from_grpc_error(exc) from exc + self._process_set_response(resp) @check_connected - def subscribe(self, entries: Iterable[SubscribeEntry], **rpc_kwargs) -> Iterator[List[EntryUpdate]]: + def subscribe( + self, entries: Iterable[SubscribeEntry], try_v2: bool = True, **rpc_kwargs + ) -> Iterator[List[EntryUpdate]]: """ Parameters: rpc_kwargs @@ -956,15 +1282,35 @@ def subscribe(self, entries: Iterable[SubscribeEntry], **rpc_kwargs) -> Iterator """ rpc_kwargs["metadata"] = self.generate_metadata_header( - rpc_kwargs.get("metadata")) - req = self._prepare_subscribe_request(entries) - resp_stream = self.client_stub.Subscribe(req, **rpc_kwargs) - try: - for resp in resp_stream: - logger.debug("%s: %s", type(resp).__name__, resp) - yield [EntryUpdate.from_message(update) for update in resp.updates] - except RpcError as exc: - raise VSSClientError.from_grpc_error(exc) from exc + rpc_kwargs.get("metadata") + ) + if try_v2: + logger.debug("Trying v2") + req = self._prepare_subscribev2_request(entries) + resp_stream = self.client_stub_v2.Subscribe(req, **rpc_kwargs) + try: + for resp in resp_stream: + logger.debug("%s: %s", type(resp).__name__, resp) + 
yield [ + EntryUpdate.from_tuple(path, dp) + for path, dp in resp.entries.items() + ] + except RpcError as exc: + if exc.code() == grpc.StatusCode.UNIMPLEMENTED: + logger.debug("v2 not available fall back to v1 instead") + self.subscribe(entries) + else: + raise VSSClientError.from_grpc_error(exc) from exc + else: + logger.debug("Trying v1") + req = self._prepare_subscribe_request(entries) + resp_stream = self.client_stub_v1.Subscribe(req, **rpc_kwargs) + try: + for resp in resp_stream: + logger.debug("%s: %s", type(resp).__name__, resp) + yield [EntryUpdate.from_message(update) for update in resp.updates] + except RpcError as exc: + raise VSSClientError.from_grpc_error(exc) from exc @check_connected def authorize(self, token: str, **rpc_kwargs) -> str: @@ -976,10 +1322,12 @@ def authorize(self, token: str, **rpc_kwargs) -> str: string containing the actual token """ rpc_kwargs["metadata"] = self.generate_metadata_header( - metadata=rpc_kwargs.get("metadata"), header=self.get_authorization_header(token)) - req = val_pb2.GetServerInfoRequest() + metadata=rpc_kwargs.get("metadata"), + header=self.get_authorization_header(token), + ) + req = val_v1.GetServerInfoRequest() try: - resp = self.client_stub.GetServerInfo(req, **rpc_kwargs) + resp = self.client_stub_v1.GetServerInfo(req, **rpc_kwargs) except RpcError as exc: raise VSSClientError.from_grpc_error(exc) from exc logger.debug("%s: %s", type(resp).__name__, resp) @@ -994,11 +1342,12 @@ def get_server_info(self, **rpc_kwargs) -> Optional[ServerInfo]: grpc.*MultiCallable kwargs e.g. timeout, metadata, credentials. """ rpc_kwargs["metadata"] = self.generate_metadata_header( - metadata=rpc_kwargs.get("metadata")) - req = val_pb2.GetServerInfoRequest() + metadata=rpc_kwargs.get("metadata") + ) + req = val_v1.GetServerInfoRequest() logger.debug("%s: %s", type(req).__name__, req) try: - resp = self.client_stub.GetServerInfo(req, **rpc_kwargs) + resp = self.client_stub_v1.GetServerInfo(req, **rpc_kwargs) logger.debug("%s: %s", type(resp).__name__, resp) return ServerInfo.from_message(resp) except RpcError as exc: @@ -1009,7 +1358,9 @@ def get_server_info(self, **rpc_kwargs) -> Optional[ServerInfo]: return None @check_connected - def get_value_types(self, paths: Collection[str], **rpc_kwargs) -> Dict[str, DataType]: + def get_value_types( + self, paths: Collection[str], **rpc_kwargs + ) -> Dict[str, DataType]: """ Parameters: rpc_kwargs @@ -1017,10 +1368,14 @@ def get_value_types(self, paths: Collection[str], **rpc_kwargs) -> Dict[str, Dat req = self._prepare_get_request(entries) """ if paths: - entry_requests = (EntryRequest( - path=path, view=View.METADATA, fields=( - Field.METADATA_DATA_TYPE,), - ) for path in paths) + entry_requests = ( + EntryRequest( + path=path, + view=View.METADATA, + fields=(Field.METADATA_DATA_TYPE,), + ) + for path in paths + ) entries = self.get(entries=entry_requests, **rpc_kwargs) return {entry.path: DataType(entry.metadata.data_type) for entry in entries} return {} diff --git a/kuksa-client/kuksa_client/grpc/aio.py b/kuksa-client/kuksa_client/grpc/aio.py index 6661aab..2571b2c 100644 --- a/kuksa-client/kuksa_client/grpc/aio.py +++ b/kuksa-client/kuksa_client/grpc/aio.py @@ -31,8 +31,9 @@ import grpc from grpc.aio import AioRpcError -from kuksa.val.v1 import val_pb2 -from kuksa.val.v1 import val_pb2_grpc +from kuksa.val.v1 import val_pb2 as val_v1 +from kuksa.val.v1 import val_pb2_grpc as val_grpc_v1 +from kuksa.val.v2 import val_pb2_grpc as val_grpc_v2 from . import BaseVSSClient from . 
import Datapoint @@ -72,7 +73,7 @@ async def connect(self, target_host=None): logger.info("Establishing secure channel") if self.tls_server_name: logger.info(f"Using TLS server name {self.tls_server_name}") - options = [('grpc.ssl_target_name_override', self.tls_server_name)] + options = [("grpc.ssl_target_name_override", self.tls_server_name)] channel = grpc.aio.secure_channel(target_host, creds, options) else: logger.debug("Not providing explicit TLS server name") @@ -80,15 +81,18 @@ async def connect(self, target_host=None): else: logger.info("Establishing insecure channel") channel = grpc.aio.insecure_channel(target_host) + self.channel = await self.exit_stack.enter_async_context(channel) - self.client_stub = val_pb2_grpc.VALStub(self.channel) + self.client_stub_v1 = val_grpc_v1.VALStub(self.channel) + self.client_stub_v2 = val_grpc_v2.VALStub(self.channel) self.connected = True if self.ensure_startup_connection: logger.debug("Connected to server: %s", await self.get_server_info()) async def disconnect(self): await self.exit_stack.aclose() - self.client_stub = None + self.client_stub_v1 = None + self.client_stub_v2 = None self.channel = None self.connected = False @@ -97,19 +101,24 @@ def check_connected_async(func): Decorator to verify that there is a connection before calling underlying method For generator methods use check_connected_async_iter """ + async def wrapper(self, *args, **kwargs): if self.connected: return await func(self, *args, **kwargs) else: # This shall normally not happen if you use the client as context manager # as then a connect will happen automatically when you enter the context - raise Exception("Server not connected! Call connect() before using this command!") + raise Exception( + "Server not connected! Call connect() before using this command!" + ) + return wrapper def check_connected_async_iter(func): """ Decorator for generator methods to verify that there is a connection before calling underlying method """ + async def wrapper(self, *args, **kwargs): if self.connected: async for v in func(self, *args, **kwargs): @@ -117,11 +126,16 @@ async def wrapper(self, *args, **kwargs): else: # This shall normally not happen if you use the client as context manager # as then a connect will happen automatically when you enter the context - raise Exception("Server not connected! Call connect() before using this command!") + raise Exception( + "Server not connected! Call connect() before using this command!" 
+ ) + return wrapper @check_connected_async - async def get_current_values(self, paths: Iterable[str], **rpc_kwargs) -> Dict[str, Datapoint]: + async def get_current_values( + self, paths: Iterable[str], **rpc_kwargs + ) -> Dict[str, Datapoint]: """ Parameters: rpc_kwargs @@ -134,14 +148,17 @@ async def get_current_values(self, paths: Iterable[str], **rpc_kwargs) -> Dict[s speed_value = current_values['Vehicle.Speed'].value """ entries = await self.get( - entries=(EntryRequest(path, View.CURRENT_VALUE, (Field.VALUE,)) - for path in paths), + entries=( + EntryRequest(path, View.CURRENT_VALUE, (Field.VALUE,)) for path in paths + ), **rpc_kwargs, ) return {entry.path: entry.value for entry in entries} @check_connected_async - async def get_target_values(self, paths: Iterable[str], **rpc_kwargs) -> Dict[str, Datapoint]: + async def get_target_values( + self, paths: Iterable[str], **rpc_kwargs + ) -> Dict[str, Datapoint]: """ Parameters: rpc_kwargs @@ -152,15 +169,25 @@ async def get_target_values(self, paths: Iterable[str], **rpc_kwargs) -> Dict[st ]) is_abs_to_become_active = target_values['Vehicle.ADAS.ABS.IsActive'].value """ - entries = await self.get(entries=( - EntryRequest(path, View.TARGET_VALUE, (Field.ACTUATOR_TARGET,), - **rpc_kwargs, - ) for path in paths)) + entries = await self.get( + entries=( + EntryRequest( + path, + View.TARGET_VALUE, + (Field.ACTUATOR_TARGET,), + **rpc_kwargs, + ) + for path in paths + ) + ) return {entry.path: entry.actuator_target for entry in entries} @check_connected_async async def get_metadata( - self, paths: Iterable[str], field: MetadataField = MetadataField.ALL, **rpc_kwargs, + self, + paths: Iterable[str], + field: MetadataField = MetadataField.ALL, + **rpc_kwargs, ) -> Dict[str, Metadata]: """ Parameters: @@ -174,14 +201,18 @@ async def get_metadata( speed_unit = metadata['Vehicle.Speed'].unit """ entries = await self.get( - entries=(EntryRequest(path, View.METADATA, (Field(field.value),)) - for path in paths), + entries=( + EntryRequest(path, View.METADATA, (Field(field.value),)) + for path in paths + ), **rpc_kwargs, ) return {entry.path: entry.metadata for entry in entries} @check_connected_async - async def set_current_values(self, updates: Dict[str, Datapoint], **rpc_kwargs) -> None: + async def set_current_values( + self, updates: Dict[str, Datapoint], **rpc_kwargs + ) -> None: """ Parameters: rpc_kwargs @@ -192,14 +223,20 @@ async def set_current_values(self, updates: Dict[str, Datapoint], **rpc_kwargs) 'Vehicle.ADAS.ABS.IsActive': Datapoint(False), }) """ + logger.info("Setting current value") await self.set( - updates=[EntryUpdate(DataEntry(path, value=dp), (Field.VALUE,)) - for path, dp in updates.items()], + updates=[ + EntryUpdate(DataEntry(path, value=dp), (Field.VALUE,)) + for path, dp in updates.items() + ], + try_v2=True, **rpc_kwargs, ) @check_connected_async - async def set_target_values(self, updates: Dict[str, Datapoint], **rpc_kwargs) -> None: + async def set_target_values( + self, updates: Dict[str, Datapoint], **rpc_kwargs + ) -> None: """ Parameters: rpc_kwargs @@ -207,13 +244,23 @@ async def set_target_values(self, updates: Dict[str, Datapoint], **rpc_kwargs) - Example: await client.set_target_values({'Vehicle.ADAS.ABS.IsActive': Datapoint(True)}) """ - await self.set(updates=[EntryUpdate( - DataEntry(path, actuator_target=dp), (Field.ACTUATOR_TARGET,), - ) for path, dp in updates.items()], **rpc_kwargs) + await self.set( + updates=[ + EntryUpdate( + DataEntry(path, actuator_target=dp), + (Field.ACTUATOR_TARGET,), + ) + 
for path, dp in updates.items() + ], + **rpc_kwargs, + ) @check_connected_async async def set_metadata( - self, updates: Dict[str, Metadata], field: MetadataField = MetadataField.ALL, **rpc_kwargs, + self, + updates: Dict[str, Metadata], + field: MetadataField = MetadataField.ALL, + **rpc_kwargs, ) -> None: """ Parameters: @@ -224,12 +271,21 @@ async def set_metadata( 'Vehicle.Cabin.Door.Row1.Left.Shade.Position': Metadata(data_type=DataType.FLOAT), }) """ - await self.set(updates=[EntryUpdate( - DataEntry(path, metadata=md), (Field(field.value),), - ) for path, md in updates.items()], **rpc_kwargs) + await self.set( + updates=[ + EntryUpdate( + DataEntry(path, metadata=md), + (Field(field.value),), + ) + for path, md in updates.items() + ], + **rpc_kwargs, + ) @check_connected_async_iter - async def subscribe_current_values(self, paths: Iterable[str], **rpc_kwargs) -> AsyncIterator[Dict[str, Datapoint]]: + async def subscribe_current_values( + self, paths: Iterable[str], **rpc_kwargs + ) -> AsyncIterator[Dict[str, Datapoint]]: """ Parameters: rpc_kwargs @@ -242,14 +298,19 @@ async def subscribe_current_values(self, paths: Iterable[str], **rpc_kwargs) -> print(f"Current value for {path} is now: {dp.value}") """ async for updates in self.subscribe( - entries=(SubscribeEntry(path, View.CURRENT_VALUE, (Field.VALUE,)) - for path in paths), + entries=( + SubscribeEntry(path, View.CURRENT_VALUE, (Field.VALUE,)) + for path in paths + ), + try_v2=True, **rpc_kwargs, ): yield {update.entry.path: update.entry.value for update in updates} @check_connected_async_iter - async def subscribe_target_values(self, paths: Iterable[str], **rpc_kwargs) -> AsyncIterator[Dict[str, Datapoint]]: + async def subscribe_target_values( + self, paths: Iterable[str], **rpc_kwargs + ) -> AsyncIterator[Dict[str, Datapoint]]: """ Parameters: rpc_kwargs @@ -262,15 +323,21 @@ async def subscribe_target_values(self, paths: Iterable[str], **rpc_kwargs) -> A print(f"Target value for {path} is now: {dp.value}") """ async for updates in self.subscribe( - entries=(SubscribeEntry(path, View.TARGET_VALUE, - (Field.ACTUATOR_TARGET,)) for path in paths), + entries=( + SubscribeEntry(path, View.TARGET_VALUE, (Field.ACTUATOR_TARGET,)) + for path in paths + ), + try_v2=True, **rpc_kwargs, ): - yield {update.entry.path: update.entry.actuator_target for update in updates} + yield { + update.entry.path: update.entry.actuator_target for update in updates + } @check_connected_async_iter async def subscribe_metadata( - self, paths: Iterable[str], + self, + paths: Iterable[str], field: MetadataField = MetadataField.ALL, **rpc_kwargs, ) -> AsyncIterator[Dict[str, Metadata]]: @@ -287,69 +354,128 @@ async def subscribe_metadata( print(f"Metadata for {path} are now: {md.to_dict()}") """ async for updates in self.subscribe( - entries=(SubscribeEntry(path, View.METADATA, (Field(field.value),)) - for path in paths), + entries=( + SubscribeEntry(path, View.METADATA, (Field(field.value),)) + for path in paths + ), **rpc_kwargs, ): yield {update.entry.path: update.entry.metadata for update in updates} @check_connected_async - async def get(self, entries: Iterable[EntryRequest], **rpc_kwargs) -> List[DataEntry]: + async def get( + self, entries: Iterable[EntryRequest], **rpc_kwargs + ) -> List[DataEntry]: """ Parameters: rpc_kwargs grpc.*MultiCallable kwargs e.g. timeout, metadata, credentials. 
""" rpc_kwargs["metadata"] = self.generate_metadata_header( - rpc_kwargs.get("metadata")) + rpc_kwargs.get("metadata") + ) req = self._prepare_get_request(entries) try: - resp = await self.client_stub.Get(req, **rpc_kwargs) + resp = await self.client_stub_v1.Get(req, **rpc_kwargs) except AioRpcError as exc: raise VSSClientError.from_grpc_error(exc) from exc return self._process_get_response(resp) @check_connected_async - async def set(self, updates: Collection[EntryUpdate], **rpc_kwargs) -> None: + async def set( + self, updates: Collection[EntryUpdate], try_v2: bool = False, **rpc_kwargs + ) -> None: """ Parameters: rpc_kwargs grpc.*MultiCallable kwargs e.g. timeout, metadata, credentials. """ rpc_kwargs["metadata"] = self.generate_metadata_header( - rpc_kwargs.get("metadata")) + rpc_kwargs.get("metadata") + ) paths_with_required_type = self._get_paths_with_required_type(updates) paths_without_type = [ - path for path, data_type in paths_with_required_type.items() if data_type is DataType.UNSPECIFIED + path + for path, data_type in paths_with_required_type.items() + if data_type is DataType.UNSPECIFIED ] - paths_with_required_type.update(await self.get_value_types(paths_without_type, **rpc_kwargs)) - req = self._prepare_set_request(updates, paths_with_required_type) - try: - resp = await self.client_stub.Set(req, **rpc_kwargs) - except AioRpcError as exc: - raise VSSClientError.from_grpc_error(exc) from exc - self._process_set_response(resp) + paths_with_required_type.update( + await self.get_value_types(paths_without_type, **rpc_kwargs) + ) + if try_v2: + logger.debug("Trying v2") + if len(updates) == 0: + raise VSSClientError( + error={ + "code": grpc.StatusCode.INVALID_ARGUMENT.value[0], + "reason": grpc.StatusCode.INVALID_ARGUMENT.value[1], + "message": "No datapoints requested", + }, + errors=[], + ) + for update in updates: + req = self._prepare_publish_value_request( + update, paths_with_required_type + ) + try: + resp = await self.client_stub_v2.PublishValue(req, **rpc_kwargs) + except AioRpcError as exc: + if exc.code() == grpc.StatusCode.UNIMPLEMENTED: + logger.debug("v2 not available fall back to v1 instead") + await self.set(updates) + else: + raise VSSClientError.from_grpc_error(exc) from exc + else: + logger.debug("Trying v1") + req = self._prepare_set_request(updates, paths_with_required_type) + try: + resp = await self.client_stub_v1.Set(req, **rpc_kwargs) + except AioRpcError as exc: + raise VSSClientError.from_grpc_error(exc) from exc + self._process_set_response(resp) @check_connected_async_iter - async def subscribe(self, - entries: Iterable[SubscribeEntry], - **rpc_kwargs, - ) -> AsyncIterator[List[EntryUpdate]]: + async def subscribe( + self, + entries: Iterable[SubscribeEntry], + try_v2: bool = False, + **rpc_kwargs, + ) -> AsyncIterator[List[EntryUpdate]]: """ Parameters: rpc_kwargs grpc.*MultiCallable kwargs e.g. timeout, metadata, credentials. 
""" rpc_kwargs["metadata"] = self.generate_metadata_header( - rpc_kwargs.get("metadata")) - req = self._prepare_subscribe_request(entries) - resp_stream = self.client_stub.Subscribe(req, **rpc_kwargs) - try: - async for resp in resp_stream: - logger.debug("%s: %s", type(resp).__name__, resp) - yield [EntryUpdate.from_message(update) for update in resp.updates] - except AioRpcError as exc: - raise VSSClientError.from_grpc_error(exc) from exc + rpc_kwargs.get("metadata") + ) + if try_v2: + logger.debug("Trying v2") + req = self._prepare_subscribev2_request(entries) + resp_stream = self.client_stub_v2.Subscribe(req, **rpc_kwargs) + try: + async for resp in resp_stream: + logger.debug("%s: %s", type(resp).__name__, resp) + yield [ + EntryUpdate.from_tuple(path, dp) + for path, dp in resp.entries.items() + ] + except AioRpcError as exc: + if exc.code() == grpc.StatusCode.UNIMPLEMENTED: + logger.debug("v2 not available fall back to v1 instead") + await self.subscribe(entries) + else: + raise VSSClientError.from_grpc_error(exc) from exc + else: + logger.debug("Trying v1") + req = self._prepare_subscribe_request(entries) + resp_stream = self.client_stub_v1.Subscribe(req, **rpc_kwargs) + try: + async for resp in resp_stream: + logger.debug("%s: %s", type(resp).__name__, resp) + yield [EntryUpdate.from_message(update) for update in resp.updates] + except AioRpcError as exc: + raise VSSClientError.from_grpc_error(exc) from exc @check_connected_async async def authorize(self, token: str, **rpc_kwargs) -> str: @@ -361,10 +487,12 @@ async def authorize(self, token: str, **rpc_kwargs) -> str: string containing the actual token """ rpc_kwargs["metadata"] = self.generate_metadata_header( - metadata=rpc_kwargs.get("metadata"), header=self.get_authorization_header(token)) - req = val_pb2.GetServerInfoRequest() + metadata=rpc_kwargs.get("metadata"), + header=self.get_authorization_header(token), + ) + req = val_v1.GetServerInfoRequest() try: - resp = await self.client_stub.GetServerInfo(req, **rpc_kwargs) + resp = await self.client_stub_v1.GetServerInfo(req, **rpc_kwargs) except AioRpcError as exc: raise VSSClientError.from_grpc_error(exc) from exc logger.debug("%s: %s", type(resp).__name__, resp) @@ -379,11 +507,12 @@ async def get_server_info(self, **rpc_kwargs) -> Optional[ServerInfo]: grpc.*MultiCallable kwargs e.g. timeout, metadata, credentials. """ rpc_kwargs["metadata"] = self.generate_metadata_header( - metadata=rpc_kwargs.get("metadata")) - req = val_pb2.GetServerInfoRequest() + metadata=rpc_kwargs.get("metadata") + ) + req = val_v1.GetServerInfoRequest() logger.debug("%s: %s", type(req).__name__, req) try: - resp = await self.client_stub.GetServerInfo(req, **rpc_kwargs) + resp = await self.client_stub_v1.GetServerInfo(req, **rpc_kwargs) logger.debug("%s: %s", type(resp).__name__, resp) return ServerInfo.from_message(resp) except AioRpcError as exc: @@ -394,17 +523,23 @@ async def get_server_info(self, **rpc_kwargs) -> Optional[ServerInfo]: return None @check_connected_async - async def get_value_types(self, paths: Collection[str], **rpc_kwargs) -> Dict[str, DataType]: + async def get_value_types( + self, paths: Collection[str], **rpc_kwargs + ) -> Dict[str, DataType]: """ Parameters: rpc_kwargs grpc.*MultiCallable kwargs e.g. timeout, metadata, credentials. 
""" if paths: - entry_requests = (EntryRequest( - path=path, view=View.METADATA, fields=( - Field.METADATA_DATA_TYPE,), - ) for path in paths) + entry_requests = ( + EntryRequest( + path=path, + view=View.METADATA, + fields=(Field.METADATA_DATA_TYPE,), + ) + for path in paths + ) entries = await self.get(entries=entry_requests, **rpc_kwargs) return {entry.path: DataType(entry.metadata.data_type) for entry in entries} return {} @@ -415,16 +550,18 @@ def __init__(self, client: VSSClient): self.client = client self.subscribers = {} - async def add_subscriber(self, - subscribe_response_stream: AsyncIterator[List[EntryUpdate]], - callback: Callable[[Iterable[EntryUpdate]], None], - ) -> uuid.UUID: + async def add_subscriber( + self, + subscribe_response_stream: AsyncIterator[List[EntryUpdate]], + callback: Callable[[Iterable[EntryUpdate]], None], + ) -> uuid.UUID: # We expect the first SubscribeResponse to be immediately available and to only hold a status await subscribe_response_stream.__aiter__().__anext__() # pylint: disable=unnecessary-dunder-call sub_id = uuid.uuid4() new_sub_task = asyncio.create_task( - self._subscriber_loop(subscribe_response_stream, callback)) + self._subscriber_loop(subscribe_response_stream, callback) + ) self.subscribers[sub_id] = new_sub_task return sub_id @@ -433,7 +570,8 @@ async def remove_subscriber(self, subscription_id: uuid.UUID): subscriber_task = self.subscribers.pop(subscription_id) except KeyError as exc: raise ValueError( - f"Could not find subscription {str(subscription_id)}") from exc + f"Could not find subscription {str(subscription_id)}" + ) from exc subscriber_task.cancel() try: await subscriber_task diff --git a/kuksa-client/proto.py b/kuksa-client/proto.py new file mode 100644 index 0000000..1a9f44e --- /dev/null +++ b/kuksa-client/proto.py @@ -0,0 +1,32 @@ +# /******************************************************************************** +# * Copyright (c) 2024 Contributors to the Eclipse Foundation +# * +# * See the NOTICE file(s) distributed with this work for additional +# * information regarding copyright ownership. 
+# * +# * This program and the accompanying materials are made available under the +# * terms of the Apache License 2.0 which is available at +# * http://www.apache.org/licenses/LICENSE-2.0 +# * +# * SPDX-License-Identifier: Apache-2.0 +# ********************************************************************************/ + +import shutil +import os + +# this needs to be adapted once the submodules name or structure changes +PROTO_PATH = os.path.abspath("../submodules/kuksa-databroker/proto") + + +def main(): + for root, dirs, files in os.walk(PROTO_PATH): + for directory in dirs: + # Create an __init__.py file in each subdirectory + init_file = os.path.join(root, directory, "__init__.py") + with open(init_file, "w") as file: + file.write("# This file marks the directory as a Python module") + shutil.copytree(PROTO_PATH, os.getcwd(), dirs_exist_ok=True) + + +if __name__ == "__main__": + main() diff --git a/kuksa-client/requirements.txt b/kuksa-client/requirements.txt index cf5c757..2586140 100644 --- a/kuksa-client/requirements.txt +++ b/kuksa-client/requirements.txt @@ -10,15 +10,15 @@ cmd2==1.5.0 # via kuksa_client (setup.cfg) colorama==0.4.6 # via cmd2 -grpcio==1.64.1 +grpcio==1.66.2 # via grpcio-tools -grpcio-tools==1.64.1 +grpcio-tools==1.66.2 # via kuksa_client (setup.cfg) jsonpath-ng==1.6.1 # via kuksa_client (setup.cfg) ply==3.11 # via jsonpath-ng -protobuf==5.27.1 +protobuf==5.27.2 # via grpcio-tools pygments==2.18.0 # via kuksa_client (setup.cfg) diff --git a/kuksa-client/setup.py b/kuksa-client/setup.py index fcea7cb..fdaafe0 100644 --- a/kuksa-client/setup.py +++ b/kuksa-client/setup.py @@ -11,12 +11,14 @@ # * SPDX-License-Identifier: Apache-2.0 # ********************************************************************************/ import setuptools + try: from setuptools.command import build except ImportError: from distutils.command import build # pylint: disable=deprecated-module from setuptools.command import build_py from setuptools.command import sdist +from setuptools.command.develop import develop as _develop class BuildPackageProtos(setuptools.Command): @@ -36,7 +38,8 @@ def finalize_options(self): def run(self): from grpc_tools import command # pylint: disable=import-outside-toplevel - command.build_package_protos('.', strict_mode=True) + + command.build_package_protos(".", strict_mode=True) class BuildCommand(BuildPackageProtos, build.build): @@ -51,11 +54,19 @@ class SDistCommand(BuildPackageProtos, sdist.sdist): ... 
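# Illustrative sketch (outside the patch hunks above): regenerating the gRPC
# bindings by hand, mirroring what the build_pb2/develop commands in this
# setup.py do. It assumes the .proto files have already been copied next to
# the package by `python3 -m proto` and that grpcio-tools is installed.
from grpc_tools import command

if __name__ == "__main__":
    # Compiles every .proto found under the current directory into
    # *_pb2.py / *_pb2_grpc.py modules; with strict_mode a failing protoc
    # run raises instead of only printing a warning.
    command.build_package_protos(".", strict_mode=True)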
+class DevelopCommand(BuildPackageProtos, _develop): + + def run(self): + self.run_command("build_pb2") + super().run() + + setuptools.setup( cmdclass={ - 'build': BuildCommand, - 'build_pb2': BuildPackageProtosCommand, - 'build_py': BuildPyCommand, # Used for editable installs but also for building wheels - 'sdist': SDistCommand, + "build": BuildCommand, + "build_pb2": BuildPackageProtosCommand, + "build_py": BuildPyCommand, # Used for editable installs but also for building wheels + "sdist": SDistCommand, + "develop": DevelopCommand, # Also handle editable installs } ) diff --git a/kuksa-client/test-requirements.txt b/kuksa-client/test-requirements.txt index 4f5621a..97b7a2f 100644 --- a/kuksa-client/test-requirements.txt +++ b/kuksa-client/test-requirements.txt @@ -18,9 +18,9 @@ dill==0.3.8 # via pylint exceptiongroup==1.2.1 # via pytest -grpcio==1.64.1 +grpcio==1.66.2 # via grpcio-tools -grpcio-tools==1.64.1 +grpcio-tools==1.66.2 # via kuksa_client (setup.cfg) iniconfig==2.0.0 # via pytest @@ -38,7 +38,7 @@ pluggy==1.5.0 # via pytest ply==3.11 # via jsonpath-ng -protobuf==5.27.1 +protobuf==5.27.2 # via grpcio-tools pygments==2.18.0 # via kuksa_client (setup.cfg) diff --git a/kuksa-client/tests/conftest.py b/kuksa-client/tests/conftest.py index 16fa22a..9fe35a7 100644 --- a/kuksa-client/tests/conftest.py +++ b/kuksa-client/tests/conftest.py @@ -22,7 +22,8 @@ import pytest import pytest_asyncio -from kuksa.val.v1 import val_pb2_grpc +from kuksa.val.v1 import val_pb2_grpc as val_v1 +from kuksa.val.v2 import val_pb2_grpc as val_v2 import tests @@ -32,22 +33,32 @@ def resources_path_fixture(): return pathlib.Path(tests.__path__[0]) / 'resources' -@pytest.fixture(name='val_servicer', scope='function') -def val_servicer_fixture(mocker): - servicer = val_pb2_grpc.VALServicer() - mocker.patch.object(servicer, 'Get', spec=True) - mocker.patch.object(servicer, 'Set', spec=True) - mocker.patch.object(servicer, 'Subscribe', spec=True) - mocker.patch.object(servicer, 'GetServerInfo', spec=True) +@pytest.fixture(name="val_servicer_v1", scope="function") +def val_servicer_v1_fixture(mocker): + servicer_v1 = val_v1.VALServicer() + mocker.patch.object(servicer_v1, "Get", spec=True) + mocker.patch.object(servicer_v1, "Set", spec=True) + mocker.patch.object(servicer_v1, "Subscribe", spec=True) + mocker.patch.object(servicer_v1, "GetServerInfo", spec=True) - return servicer + return servicer_v1 -@pytest_asyncio.fixture(name='val_server', scope='function') -async def val_server_fixture(unused_tcp_port, val_servicer): +@pytest.fixture(name="val_servicer_v2", scope="function") +def val_servicer_v2_fixture(mocker): + servicer_v2 = val_v2.VALServicer() + mocker.patch.object(servicer_v2, "PublishValue", spec=True) + mocker.patch.object(servicer_v2, "Subscribe", spec=True) + + return servicer_v2 + + +@pytest_asyncio.fixture(name="val_server", scope="function") +async def val_server_fixture(unused_tcp_port, val_servicer_v1, val_servicer_v2): server = grpc.aio.server() - val_pb2_grpc.add_VALServicer_to_server(val_servicer, server) - server.add_insecure_port(f'127.0.0.1:{unused_tcp_port}') + val_v1.add_VALServicer_to_server(val_servicer_v1, server) + val_v2.add_VALServicer_to_server(val_servicer_v2, server) + server.add_insecure_port(f"127.0.0.1:{unused_tcp_port}") await server.start() try: yield server @@ -55,18 +66,26 @@ async def val_server_fixture(unused_tcp_port, val_servicer): await server.stop(grace=2.0) -@pytest_asyncio.fixture(name='secure_val_server', scope='function') -async def 
secure_val_server_fixture(unused_tcp_port, resources_path, val_servicer): +@pytest_asyncio.fixture(name="secure_val_server", scope="function") +async def secure_val_server_fixture( + unused_tcp_port, resources_path, val_servicer_v1, val_servicer_v2 +): server = grpc.aio.server() - val_pb2_grpc.add_VALServicer_to_server(val_servicer, server) - server.add_secure_port(f'localhost:{unused_tcp_port}', grpc.ssl_server_credentials( - private_key_certificate_chain_pairs=[( - (resources_path / 'test-server.key').read_bytes(), - (resources_path / 'test-server.pem').read_bytes(), - )], - root_certificates=(resources_path / 'test-ca.pem').read_bytes(), - require_client_auth=False, - )) + val_v1.add_VALServicer_to_server(val_servicer_v1, server) + val_v2.add_VALServicer_to_server(val_servicer_v2, server) + server.add_secure_port( + f"localhost:{unused_tcp_port}", + grpc.ssl_server_credentials( + private_key_certificate_chain_pairs=[ + ( + (resources_path / "test-server.key").read_bytes(), + (resources_path / "test-server.pem").read_bytes(), + ) + ], + root_certificates=(resources_path / "test-ca.pem").read_bytes(), + require_client_auth=False, + ), + ) await server.start() try: yield server diff --git a/kuksa-client/tests/test_grpc.py b/kuksa-client/tests/test_grpc.py index 687801f..806f787 100644 --- a/kuksa-client/tests/test_grpc.py +++ b/kuksa-client/tests/test_grpc.py @@ -26,8 +26,17 @@ import grpc.aio import pytest -from kuksa.val.v1 import val_pb2 -from kuksa.val.v1 import types_pb2 +from typing import Dict + +from kuksa.val.v1 import types_pb2 as types_v1 + +from kuksa.val.v1 import val_pb2 as val_v1 + +# from kuksa.val.v1 import val_pb2_grpc as val_grpc_v1 +from kuksa.val.v2 import types_pb2 as types_v2 +from kuksa.val.v2 import val_pb2 as val_v2 + +# from kuksa.val.v2 import val_pb2_grpc as val_grpc_v2 import kuksa_client.grpc from kuksa_client.grpc import Datapoint @@ -71,10 +80,10 @@ def test_from_grpc_error(self): assert client_error.errors == expected_client_error.errors def test_to_dict(self): - error = types_pb2.Error( - code=404, reason='not_found', message="Does.Not.Exist not found") - errors = (types_pb2.DataEntryError( - path='Does.Not.Exist', error=error),) + error = types_v1.Error( + code=404, reason="not_found", message="Does.Not.Exist not found" + ) + errors = (types_v1.DataEntryError(path="Does.Not.Exist", error=error),) error = json_format.MessageToDict( error, preserving_proto_field_name=True) errors = [json_format.MessageToDict( @@ -90,12 +99,14 @@ def test_to_dict(self): class TestMetadata: def test_to_message_empty(self): - assert Metadata().to_message() == types_pb2.Metadata() + assert Metadata().to_message() == types_v1.Metadata() def test_to_message_value_restriction_without_value_type(self): with pytest.raises(ValueError) as exc_info: - assert Metadata(value_restriction=ValueRestriction() - ).to_message() == types_pb2.Metadata() + assert ( + Metadata(value_restriction=ValueRestriction()).to_message() + == types_v1.Metadata() + ) assert exc_info.value.args == ( "Cannot set value_restriction from data type UNSPECIFIED",) @@ -130,13 +141,16 @@ def test_to_from_message_signed_value_restriction(self, value_type, min_value, m allowed_values = None if (min_value, max_value, allowed_values) == (None, None, None): - expected_message = types_pb2.Metadata() + expected_message = types_v1.Metadata() output_metadata = Metadata() else: - expected_message = types_pb2.Metadata(value_restriction=types_pb2.ValueRestriction( - signed=types_pb2.ValueRestrictionInt( - min=min_value, 
max=max_value, allowed_values=allowed_values), - )) + expected_message = types_v1.Metadata( + value_restriction=types_v1.ValueRestriction( + signed=types_v1.ValueRestrictionInt( + min=min_value, max=max_value, allowed_values=allowed_values + ), + ) + ) output_metadata = Metadata(value_restriction=ValueRestriction( min=min_value, max=max_value, allowed_values=allowed_values, )) @@ -173,13 +187,16 @@ def test_to_from_message_unsigned_value_restriction(self, value_type, min_value, allowed_values = None if (min_value, max_value, allowed_values) == (None, None, None): - expected_message = types_pb2.Metadata() + expected_message = types_v1.Metadata() output_metadata = Metadata() else: - expected_message = types_pb2.Metadata(value_restriction=types_pb2.ValueRestriction( - unsigned=types_pb2.ValueRestrictionUint( - min=min_value, max=max_value, allowed_values=allowed_values), - )) + expected_message = types_v1.Metadata( + value_restriction=types_v1.ValueRestriction( + unsigned=types_v1.ValueRestrictionUint( + min=min_value, max=max_value, allowed_values=allowed_values + ), + ) + ) output_metadata = Metadata(value_restriction=ValueRestriction( min=min_value, max=max_value, allowed_values=allowed_values, )) @@ -214,13 +231,16 @@ def test_to_from_message_float_value_restriction(self, value_type, min_value, ma allowed_values = None if (min_value, max_value, allowed_values) == (None, None, None): - expected_message = types_pb2.Metadata() + expected_message = types_v1.Metadata() output_metadata = Metadata() else: - expected_message = types_pb2.Metadata(value_restriction=types_pb2.ValueRestriction( - floating_point=types_pb2.ValueRestrictionFloat( - min=min_value, max=max_value, allowed_values=allowed_values), - )) + expected_message = types_v1.Metadata( + value_restriction=types_v1.ValueRestriction( + floating_point=types_v1.ValueRestrictionFloat( + min=min_value, max=max_value, allowed_values=allowed_values + ), + ) + ) output_metadata = Metadata(value_restriction=ValueRestriction( min=min_value, max=max_value, allowed_values=allowed_values, )) @@ -241,13 +261,16 @@ def test_to_from_message_string_value_restriction(self, value_type, allowed_valu allowed_values = None if allowed_values is None: - expected_message = types_pb2.Metadata() + expected_message = types_v1.Metadata() output_metadata = Metadata() else: - expected_message = types_pb2.Metadata(value_restriction=types_pb2.ValueRestriction( - string=types_pb2.ValueRestrictionString( - allowed_values=allowed_values), - )) + expected_message = types_v1.Metadata( + value_restriction=types_v1.ValueRestriction( + string=types_v1.ValueRestrictionString( + allowed_values=allowed_values + ), + ) + ) output_metadata = Metadata(value_restriction=ValueRestriction( allowed_values=allowed_values, )) @@ -260,7 +283,7 @@ def test_metadata_from_message_value_restriction_no_type(self): This intends to cover the case when the proto message has a value restriction, but no contents (type not specified) """ - input_message = types_pb2.Metadata(value_restriction=types_pb2.ValueRestriction()) + input_message = types_v1.Metadata(value_restriction=types_v1.ValueRestriction()) expected_metadata = Metadata() assert Metadata.from_message(input_message) == expected_metadata @@ -336,25 +359,78 @@ def test_to_dict(self, init_kwargs, metadata_dict): class TestDatapoint: - @pytest.mark.parametrize('value_type, init_args, message', [ - (DataType.BOOLEAN, (None,), types_pb2.Datapoint()), - (DataType.BOOLEAN, ('False',), types_pb2.Datapoint(bool=False)), - (DataType.BOOLEAN, 
('false',), types_pb2.Datapoint(bool=False)), - (DataType.BOOLEAN, ('F',), types_pb2.Datapoint(bool=False)), - (DataType.BOOLEAN, ('f',), types_pb2.Datapoint(bool=False)), - (DataType.BOOLEAN, (True, datetime.datetime(2022, 11, 16, tzinfo=datetime.timezone.utc)), types_pb2.Datapoint( - bool=True, timestamp=timestamp_pb2.Timestamp(seconds=1668556800), - )), - (DataType.INT8_ARRAY, ('[-128, 127]',), types_pb2.Datapoint( - int32_array=types_pb2.Int32Array(values=[-128, 127]))), - ]) - def test_to_message(self, value_type, init_args, message): - assert Datapoint(*init_args).to_message(value_type) == message + + @pytest.mark.parametrize( + "value_type, init_args, message_v1, message_v2", + [ + (DataType.BOOLEAN, (None,), types_v1.Datapoint(), types_v2.Datapoint()), + ( + DataType.BOOLEAN, + ("False",), + types_v1.Datapoint(bool=False), + types_v2.Datapoint(value=types_v2.Value(bool=False)), + ), + ( + DataType.BOOLEAN, + ("false",), + types_v1.Datapoint(bool=False), + types_v2.Datapoint(value=types_v2.Value(bool=False)), + ), + ( + DataType.BOOLEAN, + ("F",), + types_v1.Datapoint(bool=False), + types_v2.Datapoint(value=types_v2.Value(bool=False)), + ), + ( + DataType.BOOLEAN, + ("f",), + types_v1.Datapoint(bool=False), + types_v2.Datapoint(value=types_v2.Value(bool=False)), + ), + ( + DataType.BOOLEAN, + (True, datetime.datetime(2022, 11, 16, tzinfo=datetime.timezone.utc)), + types_v1.Datapoint( + bool=True, + timestamp=timestamp_pb2.Timestamp(seconds=1668556800), + ), + types_v2.Datapoint( + value=types_v2.Value(bool=True), + timestamp=timestamp_pb2.Timestamp(seconds=1668556800), + ), + ), + ( + DataType.INT8_ARRAY, + ("[-128, 127]",), + types_v1.Datapoint(int32_array=types_v1.Int32Array(values=[-128, 127])), + types_v2.Datapoint( + value=types_v2.Value( + int32_array=types_v2.Int32Array(values=[-128, 127]) + ), + ), + ), + ], + ) + def test_to_message(self, value_type, init_args, message_v1, message_v2): + assert Datapoint(*init_args).v1_to_message(value_type) == message_v1 + assert Datapoint(*init_args).v2_to_message(value_type) == message_v2 @pytest.mark.parametrize('value_type', [DataType.UNSPECIFIED, DataType.TIMESTAMP, DataType.TIMESTAMP_ARRAY]) - def test_to_message_unsupported_value_type(self, value_type): + def test_v1_to_message_unsupported_value_type(self, value_type): + with pytest.raises(ValueError) as exc_info: + Datapoint(42).v1_to_message(value_type) + assert exc_info.value.args[0].startswith( + "Cannot determine which field to set with data type" + ) + + @pytest.mark.parametrize( + "value_type", + [DataType.UNSPECIFIED, DataType.TIMESTAMP, DataType.TIMESTAMP_ARRAY], + ) + def test_v2_to_message_unsupported_value_type(self, value_type): with pytest.raises(ValueError) as exc_info: - Datapoint(42).to_message(value_type) + Datapoint(42).v2_to_message(value_type) assert exc_info.value.args[0].startswith( 'Cannot determine which field to set with data type') @@ -400,15 +476,19 @@ def test_to_dict(self, entry, fields, update_dict): @pytest.mark.asyncio class TestVSSClient: - @pytest.mark.usefixtures('secure_val_server') - async def test_secure_connection(self, unused_tcp_port, resources_path, val_servicer): - val_servicer.GetServerInfo.return_value = val_pb2.GetServerInfoResponse( - name='test_server', version='1.2.3') + + @pytest.mark.usefixtures("secure_val_server") + async def test_secure_connection( + self, unused_tcp_port, resources_path, val_servicer_v1 + ): + val_servicer_v1.GetServerInfo.return_value = val_v1.GetServerInfoResponse( + name="test_server", version="1.2.3" + ) 
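# Illustrative sketch (outside the patch hunks above): the message-shape
# difference checked by the updated test_to_message parametrization --
# kuksa.val.v2 wraps the typed field in a Value message, while kuksa.val.v1
# sets the typed field directly on the Datapoint.
from kuksa.val.v1 import types_pb2 as types_v1
from kuksa.val.v2 import types_pb2 as types_v2

from kuksa_client.grpc import Datapoint, DataType

dp = Datapoint("false")
assert dp.v1_to_message(DataType.BOOLEAN) == types_v1.Datapoint(bool=False)
assert dp.v2_to_message(DataType.BOOLEAN) == types_v2.Datapoint(
    value=types_v2.Value(bool=False)
)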
async with VSSClient('localhost', unused_tcp_port, root_certificates=resources_path / 'test-ca.pem', ensure_startup_connection=True ): - assert val_servicer.GetServerInfo.call_count == 1 + assert val_servicer_v1.GetServerInfo.call_count == 1 async def test_get_current_values(self, mocker, unused_tcp_port): client = VSSClient('127.0.0.1', unused_tcp_port) @@ -665,64 +745,85 @@ async def subscribe_response_stream(**kwargs): 'Vehicle.Chassis.Height': Metadata(entry_type=EntryType.ATTRIBUTE), } - @pytest.mark.usefixtures('val_server') - async def test_get_some_entries(self, unused_tcp_port, val_servicer): - val_servicer.Get.return_value = val_pb2.GetResponse(entries=[ - types_pb2.DataEntry( - path='Vehicle.Speed', - value=types_pb2.Datapoint( - timestamp=timestamp_pb2.Timestamp( - seconds=1667837915, nanos=247307674), - float=42.0, + @pytest.mark.usefixtures("val_server") + async def test_get_some_entries(self, unused_tcp_port, val_servicer_v1): + val_servicer_v1.Get.return_value = val_v1.GetResponse( + entries=[ + types_v1.DataEntry( + path="Vehicle.Speed", + value=types_v1.Datapoint( + timestamp=timestamp_pb2.Timestamp( + seconds=1667837915, nanos=247307674 + ), + float=42.0, + ), ), - ), - types_pb2.DataEntry(path='Vehicle.ADAS.ABS.IsActive', - actuator_target=types_pb2.Datapoint(bool=True)), - types_pb2.DataEntry( - path='Vehicle.Chassis.Height', - metadata=types_pb2.Metadata( - data_type=types_pb2.DATA_TYPE_UINT16, - entry_type=types_pb2.ENTRY_TYPE_ATTRIBUTE, - description="Overall vehicle height, in mm.", - comment="No comment.", - deprecation="V2.1 moved to Vehicle.Height", - unit="mm", + types_v1.DataEntry( + path="Vehicle.ADAS.ABS.IsActive", + actuator_target=types_v1.Datapoint(bool=True), ), - ), - types_pb2.DataEntry( - path='Vehicle.Chassis.Height', metadata=types_pb2.Metadata(data_type=types_pb2.DATA_TYPE_UINT16), - ), - types_pb2.DataEntry( - path='Vehicle.Chassis.Height', metadata=types_pb2.Metadata(entry_type=types_pb2.ENTRY_TYPE_ATTRIBUTE), - ), - types_pb2.DataEntry( - path='Vehicle.Chassis.Height', - metadata=types_pb2.Metadata( - description="Overall vehicle height, in mm."), - ), - types_pb2.DataEntry( - path='Vehicle.Chassis.Height', metadata=types_pb2.Metadata(comment="No comment."), - ), - types_pb2.DataEntry( - path='Vehicle.Chassis.Height', metadata=types_pb2.Metadata(deprecation="V2.1 moved to Vehicle.Height"), - ), - types_pb2.DataEntry(path='Vehicle.Chassis.Height', - metadata=types_pb2.Metadata(unit="mm")), - types_pb2.DataEntry( - path='Vehicle.CurrentLocation.Heading', - metadata=types_pb2.Metadata(value_restriction=types_pb2.ValueRestriction( - floating_point=types_pb2.ValueRestrictionFloat( - min=0, max=360), - )), - ), - types_pb2.DataEntry( - path='Dummy.With.Allowed.Values', - metadata=types_pb2.Metadata(value_restriction=types_pb2.ValueRestriction( - signed=types_pb2.ValueRestrictionInt( - allowed_values=[12, 42, 666]), - )), - ), - ]) + types_v1.DataEntry( + path="Vehicle.Chassis.Height", + metadata=types_v1.Metadata( + data_type=types_v1.DATA_TYPE_UINT16, + entry_type=types_v1.ENTRY_TYPE_ATTRIBUTE, + description="Overall vehicle height, in mm.", + comment="No comment.", + deprecation="V2.1 moved to Vehicle.Height", + unit="mm", + ), + ), + types_v1.DataEntry( + path="Vehicle.Chassis.Height", + metadata=types_v1.Metadata(data_type=types_v1.DATA_TYPE_UINT16), + ), + types_v1.DataEntry( + path="Vehicle.Chassis.Height", + metadata=types_v1.Metadata( + entry_type=types_v1.ENTRY_TYPE_ATTRIBUTE + ), + ), + types_v1.DataEntry( + path="Vehicle.Chassis.Height", + 
metadata=types_v1.Metadata( + description="Overall vehicle height, in mm." + ), + ), + types_v1.DataEntry( + path="Vehicle.Chassis.Height", + metadata=types_v1.Metadata(comment="No comment."), + ), + types_v1.DataEntry( + path="Vehicle.Chassis.Height", + metadata=types_v1.Metadata( + deprecation="V2.1 moved to Vehicle.Height" + ), + ), + types_v1.DataEntry( + path="Vehicle.Chassis.Height", metadata=types_v1.Metadata(unit="mm") + ), + types_v1.DataEntry( + path="Vehicle.CurrentLocation.Heading", + metadata=types_v1.Metadata( + value_restriction=types_v1.ValueRestriction( + floating_point=types_v1.ValueRestrictionFloat( + min=0, max=360 + ), + ) + ), + ), + types_v1.DataEntry( + path="Dummy.With.Allowed.Values", + metadata=types_v1.Metadata( + value_restriction=types_v1.ValueRestriction( + signed=types_v1.ValueRestrictionInt( + allowed_values=[12, 42, 666] + ), + ) + ), + ), + ] + ) async with VSSClient('127.0.0.1', unused_tcp_port, ensure_startup_connection=False) as client: entries = await client.get(entries=(entry for entry in ( # generator is intentional as get accepts Iterable @@ -784,66 +885,74 @@ async def test_get_some_entries(self, unused_tcp_port, val_servicer): allowed_values=[12, 42, 666]), )), ] - assert val_servicer.Get.call_args[0][0].entries == val_pb2.GetRequest(entries=( - val_pb2.EntryRequest( - path='Vehicle.Speed', view=types_pb2.VIEW_CURRENT_VALUE, fields=(types_pb2.FIELD_VALUE,), - ), - val_pb2.EntryRequest( - path='Vehicle.ADAS.ABS.IsActive', - view=types_pb2.VIEW_TARGET_VALUE, - fields=(types_pb2.FIELD_ACTUATOR_TARGET,), - ), - val_pb2.EntryRequest( - path='Vehicle.Chassis.Height', - view=types_pb2.VIEW_METADATA, - fields=(types_pb2.FIELD_METADATA,), - ), - val_pb2.EntryRequest( - path='Vehicle.Chassis.Height', - view=types_pb2.VIEW_METADATA, - fields=(types_pb2.FIELD_METADATA_DATA_TYPE,), - ), - val_pb2.EntryRequest( - path='Vehicle.Chassis.Height', - view=types_pb2.VIEW_METADATA, - fields=(types_pb2.FIELD_METADATA_DESCRIPTION,), - ), - val_pb2.EntryRequest( - path='Vehicle.Chassis.Height', - view=types_pb2.VIEW_METADATA, - fields=(types_pb2.FIELD_METADATA_ENTRY_TYPE,), - ), - val_pb2.EntryRequest( - path='Vehicle.Chassis.Height', - view=types_pb2.VIEW_METADATA, - fields=(types_pb2.FIELD_METADATA_COMMENT,), - ), - val_pb2.EntryRequest( - path='Vehicle.Chassis.Height', - view=types_pb2.VIEW_METADATA, - fields=(types_pb2.FIELD_METADATA_DEPRECATION,), - ), - val_pb2.EntryRequest( - path='Vehicle.Chassis.Height', - view=types_pb2.VIEW_METADATA, - fields=(types_pb2.FIELD_METADATA_UNIT,), - ), - val_pb2.EntryRequest( - path='Vehicle.CurrentLocation.Heading', - view=types_pb2.VIEW_METADATA, - fields=(types_pb2.FIELD_METADATA_VALUE_RESTRICTION,), - ), - val_pb2.EntryRequest( - path='Dummy.With.Allowed.Values', - view=types_pb2.VIEW_METADATA, - fields=(types_pb2.FIELD_METADATA_VALUE_RESTRICTION,), - ), - )).entries + assert ( + val_servicer_v1.Get.call_args[0][0].entries + == val_v1.GetRequest( + entries=( + val_v1.EntryRequest( + path="Vehicle.Speed", + view=types_v1.VIEW_CURRENT_VALUE, + fields=(types_v1.FIELD_VALUE,), + ), + val_v1.EntryRequest( + path="Vehicle.ADAS.ABS.IsActive", + view=types_v1.VIEW_TARGET_VALUE, + fields=(types_v1.FIELD_ACTUATOR_TARGET,), + ), + val_v1.EntryRequest( + path="Vehicle.Chassis.Height", + view=types_v1.VIEW_METADATA, + fields=(types_v1.FIELD_METADATA,), + ), + val_v1.EntryRequest( + path="Vehicle.Chassis.Height", + view=types_v1.VIEW_METADATA, + fields=(types_v1.FIELD_METADATA_DATA_TYPE,), + ), + val_v1.EntryRequest( + 
path="Vehicle.Chassis.Height", + view=types_v1.VIEW_METADATA, + fields=(types_v1.FIELD_METADATA_DESCRIPTION,), + ), + val_v1.EntryRequest( + path="Vehicle.Chassis.Height", + view=types_v1.VIEW_METADATA, + fields=(types_v1.FIELD_METADATA_ENTRY_TYPE,), + ), + val_v1.EntryRequest( + path="Vehicle.Chassis.Height", + view=types_v1.VIEW_METADATA, + fields=(types_v1.FIELD_METADATA_COMMENT,), + ), + val_v1.EntryRequest( + path="Vehicle.Chassis.Height", + view=types_v1.VIEW_METADATA, + fields=(types_v1.FIELD_METADATA_DEPRECATION,), + ), + val_v1.EntryRequest( + path="Vehicle.Chassis.Height", + view=types_v1.VIEW_METADATA, + fields=(types_v1.FIELD_METADATA_UNIT,), + ), + val_v1.EntryRequest( + path="Vehicle.CurrentLocation.Heading", + view=types_v1.VIEW_METADATA, + fields=(types_v1.FIELD_METADATA_VALUE_RESTRICTION,), + ), + val_v1.EntryRequest( + path="Dummy.With.Allowed.Values", + view=types_v1.VIEW_METADATA, + fields=(types_v1.FIELD_METADATA_VALUE_RESTRICTION,), + ), + ) + ).entries + ) - @pytest.mark.usefixtures('val_server') - async def test_get_no_entries_requested(self, unused_tcp_port, val_servicer): - val_servicer.Get.side_effect = generate_error( - grpc.StatusCode.INVALID_ARGUMENT, 'No datapoints requested') + @pytest.mark.usefixtures("val_server") + async def test_get_no_entries_requested(self, unused_tcp_port, val_servicer_v1): + val_servicer_v1.Get.side_effect = generate_error( + grpc.StatusCode.INVALID_ARGUMENT, "No datapoints requested" + ) async with VSSClient('127.0.0.1', unused_tcp_port, ensure_startup_connection=False) as client: with pytest.raises(kuksa_client.grpc.VSSClientError) as exc_info: await client.get(entries=[]) @@ -853,14 +962,16 @@ async def test_get_no_entries_requested(self, unused_tcp_port, val_servicer): 'reason': grpc.StatusCode.INVALID_ARGUMENT.value[1], 'message': 'No datapoints requested', }, errors=[]).args - assert val_servicer.Get.call_args[0][0] == val_pb2.GetRequest() - - @pytest.mark.usefixtures('val_server') - async def test_get_unset_entries(self, unused_tcp_port, val_servicer): - val_servicer.Get.return_value = val_pb2.GetResponse(entries=[ - types_pb2.DataEntry(path='Vehicle.Speed'), - types_pb2.DataEntry(path='Vehicle.ADAS.ABS.IsActive'), - ]) + assert val_servicer_v1.Get.call_args[0][0] == val_v1.GetRequest() + + @pytest.mark.usefixtures("val_server") + async def test_get_unset_entries(self, unused_tcp_port, val_servicer_v1): + val_servicer_v1.Get.return_value = val_v1.GetResponse( + entries=[ + types_v1.DataEntry(path="Vehicle.Speed"), + types_v1.DataEntry(path="Vehicle.ADAS.ABS.IsActive"), + ] + ) async with VSSClient('127.0.0.1', unused_tcp_port, ensure_startup_connection=False) as client: entries = await client.get(entries=( EntryRequest('Vehicle.Speed', @@ -871,14 +982,15 @@ async def test_get_unset_entries(self, unused_tcp_port, val_servicer): assert entries == [DataEntry('Vehicle.Speed'), DataEntry( 'Vehicle.ADAS.ABS.IsActive')] - @pytest.mark.usefixtures('val_server') - async def test_get_nonexistent_entries(self, unused_tcp_port, val_servicer): - error = types_pb2.Error( - code=404, reason='not_found', message="Does.Not.Exist not found") - errors = (types_pb2.DataEntryError( - path='Does.Not.Exist', error=error),) - val_servicer.Get.return_value = val_pb2.GetResponse( - error=error, errors=errors) + @pytest.mark.usefixtures("val_server") + async def test_get_nonexistent_entries(self, unused_tcp_port, val_servicer_v1): + error = types_v1.Error( + code=404, reason="not_found", message="Does.Not.Exist not found" + ) + errors = 
(types_v1.DataEntryError(path="Does.Not.Exist", error=error),) + val_servicer_v1.Get.return_value = val_v1.GetResponse( + error=error, errors=errors + ) async with VSSClient('127.0.0.1', unused_tcp_port, ensure_startup_connection=False) as client: with pytest.raises(VSSClientError): await client.get(entries=( @@ -886,134 +998,328 @@ async def test_get_nonexistent_entries(self, unused_tcp_port, val_servicer): View.CURRENT_VALUE, (Field.VALUE,)), )) - @pytest.mark.usefixtures('val_server') - async def test_set_some_updates(self, unused_tcp_port, val_servicer): - val_servicer.Get.return_value = val_pb2.GetResponse(entries=( - types_pb2.DataEntry( - path='Vehicle.Speed', metadata=types_pb2.Metadata(data_type=types_pb2.DATA_TYPE_FLOAT), - ), - types_pb2.DataEntry( - path='Vehicle.ADAS.ABS.IsActive', - metadata=types_pb2.Metadata( - data_type=types_pb2.DATA_TYPE_BOOLEAN), - ), - types_pb2.DataEntry( - path='Vehicle.Cabin.Door.Row1.Left.Shade.Position', - metadata=types_pb2.Metadata( - data_type=types_pb2.DATA_TYPE_UINT8), - ), - )) - val_servicer.Set.return_value = val_pb2.SetResponse() - async with VSSClient('127.0.0.1', unused_tcp_port, ensure_startup_connection=False) as client: - await client.set(updates=[ - EntryUpdate(DataEntry('Vehicle.Speed', - value=Datapoint(value=42.0)), (Field.VALUE,)), - EntryUpdate(DataEntry( - 'Vehicle.ADAS.ABS.IsActive', actuator_target=Datapoint(value=False), - ), (Field.ACTUATOR_TARGET,)), - EntryUpdate(DataEntry('Vehicle.ADAS.CruiseControl.Error', metadata=Metadata( - data_type=DataType.BOOLEAN, - entry_type=EntryType.SENSOR, - description="Indicates if cruise control system incurred and error condition.", - comment="No comment", - deprecation="Never to be deprecated", - unit=None, - value_restriction=None, - )), (Field.METADATA,)), - EntryUpdate(DataEntry('Vehicle.ADAS.CruiseControl.Error', metadata=Metadata( - data_type=DataType.BOOLEAN, - )), (Field.METADATA_DATA_TYPE,)), - EntryUpdate(DataEntry('Vehicle.ADAS.CruiseControl.Error', metadata=Metadata( - description="Indicates if cruise control system incurred and error condition.", - )), (Field.METADATA_DESCRIPTION,)), - EntryUpdate(DataEntry('Vehicle.ADAS.CruiseControl.Error', metadata=Metadata( - entry_type=EntryType.SENSOR, - )), (Field.METADATA_ENTRY_TYPE,)), - EntryUpdate(DataEntry('Vehicle.ADAS.CruiseControl.Error', metadata=Metadata( - comment="No comment", - )), (Field.METADATA_COMMENT,)), - EntryUpdate(DataEntry('Vehicle.ADAS.CruiseControl.Error', metadata=Metadata( - deprecation="Never to be deprecated", - )), (Field.METADATA_DEPRECATION,)), - EntryUpdate(DataEntry('Vehicle.Cabin.Door.Row1.Left.Shade.Position', metadata=Metadata( - unit='percent', - )), (Field.METADATA_UNIT,)), - EntryUpdate(DataEntry('Vehicle.Cabin.Door.Row1.Left.Shade.Position', metadata=Metadata( - value_restriction=ValueRestriction(min=0, max=100), - )), (Field.METADATA_VALUE_RESTRICTION,)), - ]) - assert val_servicer.Get.call_count == 1 - assert val_servicer.Get.call_args[0][0].entries == val_pb2.GetRequest(entries=( - val_pb2.EntryRequest(path='Vehicle.Speed', view=View.METADATA, fields=( - Field.METADATA_DATA_TYPE,)), - val_pb2.EntryRequest( - path='Vehicle.ADAS.ABS.IsActive', view=View.METADATA, fields=(Field.METADATA_DATA_TYPE,), + @pytest.mark.usefixtures("val_server") + async def test_set_some_updates_v1(self, unused_tcp_port, val_servicer_v1): + val_servicer_v1.Get.return_value = val_v1.GetResponse( + entries=( + types_v1.DataEntry( + path="Vehicle.Speed", + metadata=types_v1.Metadata(data_type=types_v1.DATA_TYPE_FLOAT), 
+ ), + types_v1.DataEntry( + path="Vehicle.ADAS.ABS.IsActive", + metadata=types_v1.Metadata(data_type=types_v1.DATA_TYPE_BOOLEAN), ), - val_pb2.EntryRequest( - path='Vehicle.Cabin.Door.Row1.Left.Shade.Position', - view=View.METADATA, - fields=(Field.METADATA_DATA_TYPE,), + types_v1.DataEntry( + path="Vehicle.Cabin.Door.Row1.Left.Shade.Position", + metadata=types_v1.Metadata(data_type=types_v1.DATA_TYPE_UINT8), ), - )).entries - assert val_servicer.Set.call_args[0][0].updates == val_pb2.SetRequest(updates=( - val_pb2.EntryUpdate(entry=types_pb2.DataEntry( - path='Vehicle.Speed', value=types_pb2.Datapoint(float=42.0), - ), fields=(types_pb2.FIELD_VALUE,)), - val_pb2.EntryUpdate(entry=types_pb2.DataEntry( - path='Vehicle.ADAS.ABS.IsActive', actuator_target=types_pb2.Datapoint(bool=False), - ), fields=(types_pb2.FIELD_ACTUATOR_TARGET,)), - val_pb2.EntryUpdate(entry=types_pb2.DataEntry( - path='Vehicle.ADAS.CruiseControl.Error', - metadata=types_pb2.Metadata( - data_type=types_pb2.DATA_TYPE_BOOLEAN, - entry_type=types_pb2.ENTRY_TYPE_SENSOR, - description="Indicates if cruise control system incurred and error condition.", - comment="No comment", - deprecation="Never to be deprecated", + ) + ) + val_servicer_v1.Set.return_value = val_v1.SetResponse() + async with VSSClient( + "127.0.0.1", unused_tcp_port, ensure_startup_connection=False + ) as client: + await client.set( + updates=[ + EntryUpdate( + DataEntry("Vehicle.Speed", value=Datapoint(value=42.0)), + (Field.VALUE,), + ), + EntryUpdate( + DataEntry( + "Vehicle.ADAS.ABS.IsActive", + actuator_target=Datapoint(value=False), + ), + (Field.ACTUATOR_TARGET,), + ), + EntryUpdate( + DataEntry( + "Vehicle.ADAS.CruiseControl.Error", + metadata=Metadata( + data_type=DataType.BOOLEAN, + entry_type=EntryType.SENSOR, + description="Indicates if cruise control system incurred and error condition.", + comment="No comment", + deprecation="Never to be deprecated", + unit=None, + value_restriction=None, + ), + ), + (Field.METADATA,), + ), + EntryUpdate( + DataEntry( + "Vehicle.ADAS.CruiseControl.Error", + metadata=Metadata( + data_type=DataType.BOOLEAN, + ), + ), + (Field.METADATA_DATA_TYPE,), + ), + EntryUpdate( + DataEntry( + "Vehicle.ADAS.CruiseControl.Error", + metadata=Metadata( + description="Indicates if cruise control system incurred and error condition.", + ), + ), + (Field.METADATA_DESCRIPTION,), + ), + EntryUpdate( + DataEntry( + "Vehicle.ADAS.CruiseControl.Error", + metadata=Metadata( + entry_type=EntryType.SENSOR, + ), + ), + (Field.METADATA_ENTRY_TYPE,), + ), + EntryUpdate( + DataEntry( + "Vehicle.ADAS.CruiseControl.Error", + metadata=Metadata( + comment="No comment", + ), + ), + (Field.METADATA_COMMENT,), + ), + EntryUpdate( + DataEntry( + "Vehicle.ADAS.CruiseControl.Error", + metadata=Metadata( + deprecation="Never to be deprecated", + ), + ), + (Field.METADATA_DEPRECATION,), + ), + EntryUpdate( + DataEntry( + "Vehicle.Cabin.Door.Row1.Left.Shade.Position", + metadata=Metadata( + unit="percent", + ), + ), + (Field.METADATA_UNIT,), ), - ), fields=(types_pb2.FIELD_METADATA,)), - val_pb2.EntryUpdate(entry=types_pb2.DataEntry( - path='Vehicle.ADAS.CruiseControl.Error', - metadata=types_pb2.Metadata( - data_type=types_pb2.DATA_TYPE_BOOLEAN), - ), fields=(types_pb2.FIELD_METADATA_DATA_TYPE,)), - val_pb2.EntryUpdate(entry=types_pb2.DataEntry( - path='Vehicle.ADAS.CruiseControl.Error', - metadata=types_pb2.Metadata( - description="Indicates if cruise control system incurred and error condition." 
+ EntryUpdate( + DataEntry( + "Vehicle.Cabin.Door.Row1.Left.Shade.Position", + metadata=Metadata( + value_restriction=ValueRestriction(min=0, max=100), + ), + ), + (Field.METADATA_VALUE_RESTRICTION,), ), - ), fields=(types_pb2.FIELD_METADATA_DESCRIPTION,)), - val_pb2.EntryUpdate(entry=types_pb2.DataEntry( - path='Vehicle.ADAS.CruiseControl.Error', - metadata=types_pb2.Metadata( - entry_type=types_pb2.ENTRY_TYPE_SENSOR), - ), fields=(types_pb2.FIELD_METADATA_ENTRY_TYPE,)), - val_pb2.EntryUpdate(entry=types_pb2.DataEntry( - path='Vehicle.ADAS.CruiseControl.Error', - metadata=types_pb2.Metadata(comment="No comment"), - ), fields=(types_pb2.FIELD_METADATA_COMMENT,)), - val_pb2.EntryUpdate(entry=types_pb2.DataEntry( - path='Vehicle.ADAS.CruiseControl.Error', - metadata=types_pb2.Metadata( - deprecation="Never to be deprecated"), - ), fields=(types_pb2.FIELD_METADATA_DEPRECATION,)), - val_pb2.EntryUpdate(entry=types_pb2.DataEntry( - path='Vehicle.Cabin.Door.Row1.Left.Shade.Position', - metadata=types_pb2.Metadata(unit="percent"), - ), fields=(types_pb2.FIELD_METADATA_UNIT,)), - val_pb2.EntryUpdate(entry=types_pb2.DataEntry( - path='Vehicle.Cabin.Door.Row1.Left.Shade.Position', - metadata=types_pb2.Metadata(value_restriction=types_pb2.ValueRestriction( - unsigned=types_pb2.ValueRestrictionUint( - min=0, max=100), - )), - ), fields=(types_pb2.FIELD_METADATA_VALUE_RESTRICTION,)), - )).updates - - @pytest.mark.usefixtures('val_server') - async def test_set_no_updates_provided(self, unused_tcp_port, val_servicer): - val_servicer.Set.side_effect = generate_error( - grpc.StatusCode.INVALID_ARGUMENT, 'No datapoints requested') + ] + ) + assert val_servicer_v1.Get.call_count == 1 + assert ( + val_servicer_v1.Get.call_args[0][0].entries + == val_v1.GetRequest( + entries=( + val_v1.EntryRequest( + path="Vehicle.Speed", + view=View.METADATA, + fields=(Field.METADATA_DATA_TYPE,), + ), + val_v1.EntryRequest( + path="Vehicle.ADAS.ABS.IsActive", + view=View.METADATA, + fields=(Field.METADATA_DATA_TYPE,), + ), + val_v1.EntryRequest( + path="Vehicle.Cabin.Door.Row1.Left.Shade.Position", + view=View.METADATA, + fields=(Field.METADATA_DATA_TYPE,), + ), + ) + ).entries + ) + assert ( + val_servicer_v1.Set.call_args[0][0].updates + == val_v1.SetRequest( + updates=( + val_v1.EntryUpdate( + entry=types_v1.DataEntry( + path="Vehicle.Speed", + value=types_v1.Datapoint(float=42.0), + ), + fields=(types_v1.FIELD_VALUE,), + ), + val_v1.EntryUpdate( + entry=types_v1.DataEntry( + path="Vehicle.ADAS.ABS.IsActive", + actuator_target=types_v1.Datapoint(bool=False), + ), + fields=(types_v1.FIELD_ACTUATOR_TARGET,), + ), + val_v1.EntryUpdate( + entry=types_v1.DataEntry( + path="Vehicle.ADAS.CruiseControl.Error", + metadata=types_v1.Metadata( + data_type=types_v1.DATA_TYPE_BOOLEAN, + entry_type=types_v1.ENTRY_TYPE_SENSOR, + description="Indicates if cruise control system incurred and error condition.", + comment="No comment", + deprecation="Never to be deprecated", + ), + ), + fields=(types_v1.FIELD_METADATA,), + ), + val_v1.EntryUpdate( + entry=types_v1.DataEntry( + path="Vehicle.ADAS.CruiseControl.Error", + metadata=types_v1.Metadata( + data_type=types_v1.DATA_TYPE_BOOLEAN + ), + ), + fields=(types_v1.FIELD_METADATA_DATA_TYPE,), + ), + val_v1.EntryUpdate( + entry=types_v1.DataEntry( + path="Vehicle.ADAS.CruiseControl.Error", + metadata=types_v1.Metadata( + description="Indicates if cruise control system incurred and error condition." 
+ ), + ), + fields=(types_v1.FIELD_METADATA_DESCRIPTION,), + ), + val_v1.EntryUpdate( + entry=types_v1.DataEntry( + path="Vehicle.ADAS.CruiseControl.Error", + metadata=types_v1.Metadata( + entry_type=types_v1.ENTRY_TYPE_SENSOR + ), + ), + fields=(types_v1.FIELD_METADATA_ENTRY_TYPE,), + ), + val_v1.EntryUpdate( + entry=types_v1.DataEntry( + path="Vehicle.ADAS.CruiseControl.Error", + metadata=types_v1.Metadata(comment="No comment"), + ), + fields=(types_v1.FIELD_METADATA_COMMENT,), + ), + val_v1.EntryUpdate( + entry=types_v1.DataEntry( + path="Vehicle.ADAS.CruiseControl.Error", + metadata=types_v1.Metadata( + deprecation="Never to be deprecated" + ), + ), + fields=(types_v1.FIELD_METADATA_DEPRECATION,), + ), + val_v1.EntryUpdate( + entry=types_v1.DataEntry( + path="Vehicle.Cabin.Door.Row1.Left.Shade.Position", + metadata=types_v1.Metadata(unit="percent"), + ), + fields=(types_v1.FIELD_METADATA_UNIT,), + ), + val_v1.EntryUpdate( + entry=types_v1.DataEntry( + path="Vehicle.Cabin.Door.Row1.Left.Shade.Position", + metadata=types_v1.Metadata( + value_restriction=types_v1.ValueRestriction( + unsigned=types_v1.ValueRestrictionUint( + min=0, max=100 + ), + ) + ), + ), + fields=(types_v1.FIELD_METADATA_VALUE_RESTRICTION,), + ), + ) + ).updates + ) + + @pytest.mark.usefixtures("val_server") + async def test_set_some_updates_v2( + self, unused_tcp_port, val_servicer_v2, val_servicer_v1 + ): + val_servicer_v1.Get.return_value = val_v1.GetResponse( + entries=( + types_v1.DataEntry( + path="Vehicle.Speed", + metadata=types_v1.Metadata(data_type=types_v1.DATA_TYPE_FLOAT), + ), + types_v1.DataEntry( + path="Vehicle.ADAS.ABS.IsActive", + metadata=types_v1.Metadata(data_type=types_v1.DATA_TYPE_BOOLEAN), + ), + ) + ) + val_servicer_v2.PublishValue.return_value = val_v2.PublishValueResponse() + _updates = [ + EntryUpdate( + DataEntry("Vehicle.Speed", value=Datapoint(value=42.0)), + (Field.VALUE,), + ), + EntryUpdate( + DataEntry( + "Vehicle.ADAS.ABS.IsActive", + value=Datapoint(value=False), + ), + (Field.VALUE,), + ), + ] + + async with VSSClient( + "127.0.0.1", unused_tcp_port, ensure_startup_connection=False + ) as client: + await client.set( + updates=_updates, + try_v2=True, + ) + assert val_servicer_v1.Get.call_count == 1 + assert ( + val_servicer_v1.Get.call_args[0][0].entries + == val_v1.GetRequest( + entries=( + val_v1.EntryRequest( + path="Vehicle.Speed", + view=View.METADATA, + fields=(Field.METADATA_DATA_TYPE,), + ), + val_v1.EntryRequest( + path="Vehicle.ADAS.ABS.IsActive", + view=View.METADATA, + fields=(Field.METADATA_DATA_TYPE,), + ), + ) + ).entries + ) + + expected_requests = [ + val_v2.PublishValueRequest( + signal_id=types_v2.SignalID(path="Vehicle.Speed"), + data_point=types_v2.Datapoint(value=types_v2.Value(float=42.0)), + ), + val_v2.PublishValueRequest( + signal_id=types_v2.SignalID(path="Vehicle.ADAS.ABS.IsActive"), + data_point=types_v2.Datapoint(value=types_v2.Value(bool=False)), + ), + ] + + assert val_servicer_v2.PublishValue.call_count == len(_updates) + + actual_requests = [ + call[0][0] for call in val_servicer_v2.PublishValue.call_args_list + ] + + for actual_request, expected_request in zip( + actual_requests, expected_requests + ): + assert actual_request == expected_request + + @pytest.mark.usefixtures("val_server") + async def test_set_no_updates_provided( + self, unused_tcp_port, val_servicer_v1, val_servicer_v2 + ): + val_servicer_v1.Set.side_effect = generate_error( + grpc.StatusCode.INVALID_ARGUMENT, "No datapoints requested" + ) async with VSSClient('127.0.0.1', 
unused_tcp_port, ensure_startup_connection=False) as client: with pytest.raises(kuksa_client.grpc.VSSClientError) as exc_info: await client.set(updates=[]) @@ -1023,44 +1329,133 @@ async def test_set_no_updates_provided(self, unused_tcp_port, val_servicer): 'reason': grpc.StatusCode.INVALID_ARGUMENT.value[1], 'message': 'No datapoints requested', }, errors=[]).args - assert val_servicer.Get.call_count == 0 - assert val_servicer.Set.call_args[0][0].updates == val_pb2.SetRequest( - ).updates - - @pytest.mark.usefixtures('val_server') - async def test_set_nonexistent_entries(self, unused_tcp_port, val_servicer): - error = types_pb2.Error( - code=404, reason='not_found', message="Does.Not.Exist not found") - errors = (types_pb2.DataEntryError( - path='Does.Not.Exist', error=error),) - val_servicer.Get.return_value = val_pb2.GetResponse( - error=error, errors=errors) - val_servicer.Set.return_value = val_pb2.SetResponse( - error=error, errors=errors) - async with VSSClient('127.0.0.1', unused_tcp_port, ensure_startup_connection=False) as client: + assert val_servicer_v1.Get.call_count == 0 + assert ( + val_servicer_v1.Set.call_args[0][0].updates + == val_v1.SetRequest().updates + ) + async with VSSClient( + "127.0.0.1", unused_tcp_port, ensure_startup_connection=False + ) as client: + with pytest.raises(kuksa_client.grpc.VSSClientError) as exc_info: + await client.set(updates=[], try_v2=True) + + assert ( + exc_info.value.args + == kuksa_client.grpc.VSSClientError( + error={ + "code": grpc.StatusCode.INVALID_ARGUMENT.value[0], + "reason": grpc.StatusCode.INVALID_ARGUMENT.value[1], + "message": "No datapoints requested", + }, + errors=[], + ).args + ) + assert val_servicer_v1.Get.call_count == 0 + assert val_servicer_v2.PublishValue.call_count == 0 + + @pytest.mark.usefixtures("val_server") + async def test_set_nonexistent_entries_v1(self, unused_tcp_port, val_servicer_v1): + error = types_v1.Error( + code=404, reason="not_found", message="Does.Not.Exist not found" + ) + errors = (types_v1.DataEntryError(path="Does.Not.Exist", error=error),) + val_servicer_v1.Get.return_value = val_v1.GetResponse( + error=error, errors=errors + ) + val_servicer_v1.Set.return_value = val_v1.SetResponse( + error=error, errors=errors + ) + async with VSSClient( + "127.0.0.1", unused_tcp_port, ensure_startup_connection=False + ) as client: + with pytest.raises(VSSClientError): + await client.set( + updates=( + EntryUpdate( + DataEntry("Does.Not.Exist", value=Datapoint(value=42.0)), + (Field.VALUE,), + ), + ), + ) + + assert val_servicer_v1.Get.call_count == 1 + assert val_servicer_v1.Set.call_count == 0 + with pytest.raises(VSSClientError): + await client.set( + updates=( + EntryUpdate( + DataEntry( + "Does.Not.Exist", + value=Datapoint(value=42.0), + metadata=Metadata(data_type=DataType.FLOAT), + ), + (Field.VALUE,), + ), + ), + ) + + assert ( + val_servicer_v1.Get.call_count == 1 + ) # Get should'nt have been called again + assert val_servicer_v1.Set.call_count == 1 + + @pytest.mark.usefixtures("val_server") + async def test_set_nonexistent_entries_v2( + self, unused_tcp_port, val_servicer_v2, val_servicer_v1 + ): + error = types_v1.Error( + code=404, reason="not_found", message="Does.Not.Exist not found" + ) + errors = (types_v1.DataEntryError(path="Does.Not.Exist", error=error),) + val_servicer_v1.Get.return_value = val_v1.GetResponse( + error=error, errors=errors + ) + val_servicer_v2.PublishValue.side_effect = generate_error( + grpc.StatusCode.NOT_FOUND, + "Does.Not.Exist not found", + ) + async with 
VSSClient( + "127.0.0.1", unused_tcp_port, ensure_startup_connection=False + ) as client: with pytest.raises(VSSClientError): - await client.set(updates=( - EntryUpdate(DataEntry('Does.Not.Exist', value=Datapoint(value=42.0)), (Field.VALUE,)),), + await client.set( + updates=( + EntryUpdate( + DataEntry("Does.Not.Exist", value=Datapoint(value=42.0)), + (Field.VALUE,), + ), + ), + try_v2=True, ) - assert val_servicer.Get.call_count == 1 - assert val_servicer.Set.call_count == 0 + assert val_servicer_v1.Get.call_count == 1 + assert val_servicer_v2.PublishValue.call_count == 0 with pytest.raises(VSSClientError): - await client.set(updates=( - EntryUpdate(DataEntry( - 'Does.Not.Exist', - value=Datapoint(value=42.0), - metadata=Metadata(data_type=DataType.FLOAT), - ), (Field.VALUE,)),), + await client.set( + updates=( + EntryUpdate( + DataEntry( + "Does.Not.Exist", + value=Datapoint(value=42.0), + metadata=Metadata(data_type=DataType.FLOAT), + ), + (Field.VALUE,), + ), + ), + try_v2=True, ) - assert val_servicer.Get.call_count == 1 # Get should'nt have been called again - assert val_servicer.Set.call_count == 1 + assert ( + val_servicer_v1.Get.call_count == 1 + ) # Get should'nt have been called again + assert val_servicer_v2.PublishValue.call_count == 1 - @pytest.mark.usefixtures('val_server') - async def test_authorize_successful(self, unused_tcp_port, val_servicer): - val_servicer.GetServerInfo.return_value = val_pb2.GetServerInfoResponse( - name='test_server', version='1.2.3') + @pytest.mark.usefixtures("val_server") + async def test_authorize_successful(self, unused_tcp_port, val_servicer_v1): + val_servicer_v1.GetServerInfo.return_value = val_v1.GetServerInfoResponse( + name="test_server", version="1.2.3" + ) async with VSSClient('127.0.0.1', unused_tcp_port, ensure_startup_connection=False) as client: # token from kuksa.val directory under jwt/provide-vehicle-speed.token token = ('eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiJ9.eyJzdWIiOiJsb2NhbCBkZXYiLCJpc3MiOiJjcmVhdGVUb2' @@ -1091,123 +1486,354 @@ async def test_authorize_successful(self, unused_tcp_port, val_servicer): assert client.authorization_header == bearer assert success == "Authenticated" - @pytest.mark.usefixtures('val_server') - async def test_authorize_unsuccessful(self, unused_tcp_port, val_servicer): - val_servicer.GetServerInfo.side_effect = generate_error( - grpc.StatusCode.UNAUTHENTICATED, 'Invalid auth token: DecodeError(\"InvalidToken\")') + @pytest.mark.usefixtures("val_server") + async def test_authorize_unsuccessful(self, unused_tcp_port, val_servicer_v1): + val_servicer_v1.GetServerInfo.side_effect = generate_error( + grpc.StatusCode.UNAUTHENTICATED, + 'Invalid auth token: DecodeError("InvalidToken")', + ) async with VSSClient('127.0.0.1', unused_tcp_port, ensure_startup_connection=False) as client: with pytest.raises(VSSClientError): await client.authorize(token='') assert client.authorization_header is None - @pytest.mark.usefixtures('val_server') - async def test_subscribe_some_entries(self, mocker, unused_tcp_port, val_servicer): + @pytest.mark.usefixtures("val_server") + async def test_subscribe_some_entries_v1( + self, mocker, unused_tcp_port, val_servicer_v1 + ): async with VSSClient('127.0.0.1', unused_tcp_port, ensure_startup_connection=False) as client: responses = ( # 1st response is subscription ack - val_pb2.SubscribeResponse(updates=[ - val_pb2.EntryUpdate(entry=types_pb2.DataEntry( - path='Vehicle.Speed', - value=types_pb2.Datapoint( - timestamp=timestamp_pb2.Timestamp( - seconds=1667837915, 
nanos=247307674), - float=42.0, - ), - ), fields=(Field.VALUE,)), - val_pb2.EntryUpdate(entry=types_pb2.DataEntry( - path='Vehicle.ADAS.ABS.IsActive', - actuator_target=types_pb2.Datapoint(bool=True), - ), fields=(Field.ACTUATOR_TARGET,)), - val_pb2.EntryUpdate(entry=types_pb2.DataEntry( - path='Vehicle.Chassis.Height', - metadata=types_pb2.Metadata( - data_type=types_pb2.DATA_TYPE_UINT16, - ), - ), fields=(Field.METADATA_DATA_TYPE,)), - ]), + val_v1.SubscribeResponse( + updates=[ + val_v1.EntryUpdate( + entry=types_v1.DataEntry( + path="Vehicle.Speed", + value=types_v1.Datapoint( + timestamp=timestamp_pb2.Timestamp( + seconds=1667837915, nanos=247307674 + ), + float=42.0, + ), + ), + fields=(Field.VALUE,), + ), + val_v1.EntryUpdate( + entry=types_v1.DataEntry( + path="Vehicle.ADAS.ABS.IsActive", + actuator_target=types_v1.Datapoint(bool=True), + ), + fields=(Field.ACTUATOR_TARGET,), + ), + val_v1.EntryUpdate( + entry=types_v1.DataEntry( + path="Vehicle.Chassis.Height", + metadata=types_v1.Metadata( + data_type=types_v1.DATA_TYPE_UINT16, + ), + ), + fields=(Field.METADATA_DATA_TYPE,), + ), + ] + ), # Remaining responses are actual events. - val_pb2.SubscribeResponse(updates=[ - val_pb2.EntryUpdate(entry=types_pb2.DataEntry( - path='Vehicle.Speed', - value=types_pb2.Datapoint( - timestamp=timestamp_pb2.Timestamp( - seconds=1667837912, nanos=247307674), - float=43.0, - ), - ), fields=(Field.VALUE,)), - ]), - val_pb2.SubscribeResponse(updates=[ - val_pb2.EntryUpdate(entry=types_pb2.DataEntry( - path='Vehicle.ADAS.ABS.IsActive', - actuator_target=types_pb2.Datapoint(bool=False), - ), fields=(Field.ACTUATOR_TARGET,)), - ]), - val_pb2.SubscribeResponse(updates=[ - val_pb2.EntryUpdate(entry=types_pb2.DataEntry( - path='Vehicle.Chassis.Height', - metadata=types_pb2.Metadata( - data_type=types_pb2.DATA_TYPE_UINT8, - ), - ), fields=(Field.METADATA_DATA_TYPE,)), - ]), + val_v1.SubscribeResponse( + updates=[ + val_v1.EntryUpdate( + entry=types_v1.DataEntry( + path="Vehicle.Speed", + value=types_v1.Datapoint( + timestamp=timestamp_pb2.Timestamp( + seconds=1667837912, nanos=247307674 + ), + float=43.0, + ), + ), + fields=(Field.VALUE,), + ), + ] + ), + val_v1.SubscribeResponse( + updates=[ + val_v1.EntryUpdate( + entry=types_v1.DataEntry( + path="Vehicle.ADAS.ABS.IsActive", + actuator_target=types_v1.Datapoint(bool=False), + ), + fields=(Field.ACTUATOR_TARGET,), + ), + ] + ), + val_v1.SubscribeResponse( + updates=[ + val_v1.EntryUpdate( + entry=types_v1.DataEntry( + path="Vehicle.Chassis.Height", + metadata=types_v1.Metadata( + data_type=types_v1.DATA_TYPE_UINT8, + ), + ), + fields=(Field.METADATA_DATA_TYPE,), + ), + ] + ), + ) + val_servicer_v1.Subscribe.return_value = ( + response for response in responses ) - val_servicer.Subscribe.return_value = ( - response for response in responses) actual_responses = [] - async for updates in client.subscribe(entries=(entry for entry in ( # generator is intentional (Iterable) - EntryRequest('Vehicle.Speed', - View.CURRENT_VALUE, (Field.VALUE,)), - EntryRequest('Vehicle.ADAS.ABS.IsActive', - View.TARGET_VALUE, (Field.ACTUATOR_TARGET,)), - EntryRequest('Vehicle.Chassis.Height', - View.METADATA, (Field.METADATA_DATA_TYPE,)), - ))): + async for updates in client.subscribe( + entries=( + entry + for entry in ( # generator is intentional (Iterable) + EntryRequest( + "Vehicle.Speed", View.CURRENT_VALUE, (Field.VALUE,) + ), + EntryRequest( + "Vehicle.ADAS.ABS.IsActive", + View.TARGET_VALUE, + (Field.ACTUATOR_TARGET,), + ), + EntryRequest( + "Vehicle.Chassis.Height", + 
View.METADATA, + (Field.METADATA_DATA_TYPE,), + ), + ) + ) + ): actual_responses.append(updates) assert actual_responses == [ [ - EntryUpdate(entry=DataEntry(path='Vehicle.Speed', value=Datapoint( - value=42.0, - timestamp=datetime.datetime( - 2022, 11, 7, 16, 18, 35, 247307, tzinfo=datetime.timezone.utc), - )), fields=[Field.VALUE]), EntryUpdate( entry=DataEntry( - path='Vehicle.ADAS.ABS.IsActive', actuator_target=Datapoint(value=True)), + path="Vehicle.Speed", + value=Datapoint( + value=42.0, + timestamp=datetime.datetime( + 2022, + 11, + 7, + 16, + 18, + 35, + 247307, + tzinfo=datetime.timezone.utc, + ), + ), + ), + fields=[Field.VALUE], + ), + EntryUpdate( + entry=DataEntry( + path="Vehicle.ADAS.ABS.IsActive", + actuator_target=Datapoint(value=True), + ), + fields=[Field.ACTUATOR_TARGET], + ), + EntryUpdate( + entry=DataEntry( + path="Vehicle.Chassis.Height", + metadata=Metadata( + data_type=DataType.UINT16, + ), + ), + fields=[Field.METADATA_DATA_TYPE], + ), + ], + [ + EntryUpdate( + entry=DataEntry( + path="Vehicle.Speed", + value=Datapoint( + value=43.0, + timestamp=datetime.datetime( + 2022, + 11, + 7, + 16, + 18, + 32, + 247307, + tzinfo=datetime.timezone.utc, + ), + ), + ), + fields=[Field.VALUE], + ) + ], + [ + EntryUpdate( + entry=DataEntry( + path="Vehicle.ADAS.ABS.IsActive", + actuator_target=Datapoint( + value=False, + ), + ), fields=[Field.ACTUATOR_TARGET], + ) + ], + [ + EntryUpdate( + entry=DataEntry( + path="Vehicle.Chassis.Height", + metadata=Metadata( + data_type=DataType.UINT8, + ), + ), + fields=[Field.METADATA_DATA_TYPE], + ) + ], + ] + + @pytest.mark.usefixtures("val_server") + async def test_subscribe_some_entries_v2( + self, mocker, unused_tcp_port, val_servicer_v2 + ): + _entries: Dict[str, types_v2.Datapoint] = { + "Vehicle.Speed": types_v2.Datapoint( + timestamp=timestamp_pb2.Timestamp(seconds=1667837915, nanos=247307674), + value=types_v2.Value(float=42.0), + ), + "Vehicle.ADAS.ABS.IsActive": types_v2.Datapoint( + value=types_v2.Value(bool=True) + ), + } + _entries_2: Dict[str, types_v2.Datapoint] = { + "Vehicle.Speed": types_v2.Datapoint( + timestamp=timestamp_pb2.Timestamp(seconds=1667837912, nanos=247307674), + value=types_v2.Value(float=43.0), + ), + "Vehicle.ADAS.ABS.IsActive": types_v2.Datapoint( + value=types_v2.Value(bool=False) + ), + } + async with VSSClient( + "127.0.0.1", unused_tcp_port, ensure_startup_connection=False + ) as client: + responses = ( + val_v2.SubscribeResponse(entries=_entries), + val_v2.SubscribeResponse(entries=_entries_2), + ) + val_servicer_v2.Subscribe.return_value = ( + response for response in responses + ) + + actual_responses = [] + async for updates in client.subscribe( + entries=( + entry + for entry in ( # generator is intentional (Iterable) + EntryRequest( + "Vehicle.Speed", View.CURRENT_VALUE, (Field.VALUE,) + ), + EntryRequest( + "Vehicle.ADAS.ABS.IsActive", + View.CURRENT_VALUE, + (Field.VALUE,), + ), + ) + ), + try_v2=True, + ): + actual_responses.append(updates) + + assert actual_responses == [ + [ + EntryUpdate( + entry=DataEntry( + path="Vehicle.Speed", + value=Datapoint( + value=42.0, + timestamp=datetime.datetime( + 2022, + 11, + 7, + 16, + 18, + 35, + 247307, + tzinfo=datetime.timezone.utc, + ), + ), + ), + fields=[Field.VALUE], + ), + EntryUpdate( + entry=DataEntry( + path="Vehicle.ADAS.ABS.IsActive", + value=Datapoint(value=True), + ), + fields=[Field.VALUE], + ), + ], + [ + EntryUpdate( + entry=DataEntry( + path="Vehicle.Speed", + value=Datapoint( + value=43.0, + timestamp=datetime.datetime( + 2022, + 11, 
+ 7, + 16, + 18, + 32, + 247307, + tzinfo=datetime.timezone.utc, + ), + ), + ), + fields=[Field.VALUE], + ), + EntryUpdate( + entry=DataEntry( + path="Vehicle.ADAS.ABS.IsActive", + value=Datapoint(value=False), + ), + fields=[Field.VALUE], ), - EntryUpdate(entry=DataEntry(path='Vehicle.Chassis.Height', metadata=Metadata( - data_type=DataType.UINT16, - )), fields=[Field.METADATA_DATA_TYPE]) ], - [EntryUpdate(entry=DataEntry(path='Vehicle.Speed', value=Datapoint( - value=43.0, - timestamp=datetime.datetime( - 2022, 11, 7, 16, 18, 32, 247307, tzinfo=datetime.timezone.utc), - )), fields=[Field.VALUE])], - [EntryUpdate(entry=DataEntry(path='Vehicle.ADAS.ABS.IsActive', actuator_target=Datapoint( - value=False, - )), fields=[Field.ACTUATOR_TARGET])], - [EntryUpdate(entry=DataEntry(path='Vehicle.Chassis.Height', metadata=Metadata( - data_type=DataType.UINT8, - )), fields=[Field.METADATA_DATA_TYPE])], ] - @pytest.mark.usefixtures('val_server') - async def test_subscribe_no_entries_requested(self, mocker, unused_tcp_port, val_servicer): - val_servicer.Subscribe.side_effect = generate_error( - grpc.StatusCode.INVALID_ARGUMENT, 'Subscription request must contain at least one entry.', + @pytest.mark.usefixtures("val_server") + async def test_subscribe_no_entries_requested( + self, mocker, unused_tcp_port, val_servicer_v1, val_servicer_v2 + ): + val_servicer_v1.Subscribe.side_effect = generate_error( + grpc.StatusCode.INVALID_ARGUMENT, + "Subscription request must contain at least one entry.", + ) + val_servicer_v2.Subscribe.side_effect = generate_error( + grpc.StatusCode.INVALID_ARGUMENT, + "Subscription request must contain at least one entry.", ) async with VSSClient('127.0.0.1', unused_tcp_port, ensure_startup_connection=False) as client: with pytest.raises(VSSClientError): async for _ in client.subscribe(entries=()): pass - @pytest.mark.usefixtures('val_server') - async def test_subscribe_nonexistent_entries(self, mocker, unused_tcp_port, val_servicer): - val_servicer.Subscribe.side_effect = generate_error( - grpc.StatusCode.INVALID_ARGUMENT, 'NotFound') + async with VSSClient( + "127.0.0.1", unused_tcp_port, ensure_startup_connection=False + ) as client: + with pytest.raises(VSSClientError): + async for _ in client.subscribe(entries=(), try_v2=True): + pass + + @pytest.mark.usefixtures("val_server") + async def test_subscribe_nonexistent_entries( + self, mocker, unused_tcp_port, val_servicer_v1, val_servicer_v2 + ): + val_servicer_v1.Subscribe.side_effect = generate_error( + grpc.StatusCode.INVALID_ARGUMENT, "NotFound" + ) + val_servicer_v2.Subscribe.side_effect = generate_error( + grpc.StatusCode.INVALID_ARGUMENT, "NotFound" + ) + async with VSSClient('127.0.0.1', unused_tcp_port, ensure_startup_connection=False) as client: with pytest.raises(VSSClientError): async for _ in client.subscribe(entries=(entry for entry in ( # generator is intentional (Iterable) @@ -1216,19 +1842,38 @@ async def test_subscribe_nonexistent_entries(self, mocker, unused_tcp_port, val_ ))): pass - @pytest.mark.usefixtures('val_server') - async def test_get_server_info(self, unused_tcp_port, val_servicer): - val_servicer.GetServerInfo.return_value = val_pb2.GetServerInfoResponse( - name='test_server', version='1.2.3') + async with VSSClient( + "127.0.0.1", unused_tcp_port, ensure_startup_connection=False + ) as client: + with pytest.raises(VSSClientError): + async for _ in client.subscribe( + entries=( + entry + for entry in ( # generator is intentional (Iterable) + EntryRequest( + "Does.Not.Exist", View.CURRENT_VALUE, 
(Field.VALUE,) + ), + ) + ), + try_v2=True, + ): + pass + + @pytest.mark.usefixtures("val_server") + async def test_get_server_info(self, unused_tcp_port, val_servicer_v1): + val_servicer_v1.GetServerInfo.return_value = val_v1.GetServerInfoResponse( + name="test_server", version="1.2.3" + ) async with VSSClient('127.0.0.1', unused_tcp_port, ensure_startup_connection=False) as client: server_info = await client.get_server_info() assert server_info == ServerInfo( name='test_server', version='1.2.3') - @pytest.mark.usefixtures('val_server') - async def test_get_server_info_unavailable(self, unused_tcp_port, val_servicer): - val_servicer.GetServerInfo.side_effect = generate_error( - grpc.StatusCode.UNAVAILABLE, 'Unavailable') + @pytest.mark.usefixtures("val_server") + async def test_get_server_info_unavailable(self, unused_tcp_port, val_servicer_v1): + val_servicer_v1.GetServerInfo.side_effect = generate_error( + grpc.StatusCode.UNAVAILABLE, "Unavailable" + ) async with VSSClient('127.0.0.1', unused_tcp_port, ensure_startup_connection=False) as client: with pytest.raises(VSSClientError): await client.get_server_info() @@ -1236,80 +1881,237 @@ async def test_get_server_info_unavailable(self, unused_tcp_port, val_servicer): @pytest.mark.asyncio class TestSubscriberManager: - @pytest.mark.usefixtures('val_server') - async def test_add_subscriber(self, mocker, unused_tcp_port, val_servicer): + + @pytest.mark.usefixtures("val_server") + async def test_add_subscriber_v1(self, mocker, unused_tcp_port, val_servicer_v1): async with VSSClient('127.0.0.1', unused_tcp_port, ensure_startup_connection=False) as client: subscriber_manager = SubscriberManager(client) responses = ( # 1st response is subscription ack - val_pb2.SubscribeResponse(updates=[ - val_pb2.EntryUpdate(entry=types_pb2.DataEntry( - path='Vehicle.Speed', - value=types_pb2.Datapoint( - timestamp=timestamp_pb2.Timestamp( - seconds=1667837915, nanos=247307674), - float=42.0, - ), - ), fields=(Field.VALUE,)), - ]), + val_v1.SubscribeResponse( + updates=[ + val_v1.EntryUpdate( + entry=types_v1.DataEntry( + path="Vehicle.Speed", + value=types_v1.Datapoint( + timestamp=timestamp_pb2.Timestamp( + seconds=1667837915, nanos=247307674 + ), + float=42.0, + ), + ), + fields=(Field.VALUE,), + ), + ] + ), # Remaining responses are actual events that should invoke callback. 
- val_pb2.SubscribeResponse(updates=[ - val_pb2.EntryUpdate(entry=types_pb2.DataEntry( - path='Vehicle.Speed', - value=types_pb2.Datapoint( - timestamp=timestamp_pb2.Timestamp( - seconds=1667837912, nanos=247307674), - float=43.0, - ), - ), fields=(Field.VALUE,)), - ]), + val_v1.SubscribeResponse( + updates=[ + val_v1.EntryUpdate( + entry=types_v1.DataEntry( + path="Vehicle.Speed", + value=types_v1.Datapoint( + timestamp=timestamp_pb2.Timestamp( + seconds=1667837912, nanos=247307674 + ), + float=43.0, + ), + ), + fields=(Field.VALUE,), + ), + ] + ), ) callback = mocker.Mock() - val_servicer.Subscribe.return_value = ( - response for response in responses) + val_servicer_v1.Subscribe.return_value = ( + response for response in responses + ) - subscribe_response_stream = client.subscribe(entries=( - EntryRequest('Vehicle.Speed', - View.CURRENT_VALUE, (Field.VALUE,)), - )) - sub_uid = await subscriber_manager.add_subscriber(subscribe_response_stream, callback=callback) + subscribe_response_stream = client.subscribe( + entries=( + EntryRequest("Vehicle.Speed", View.CURRENT_VALUE, (Field.VALUE,)), + ) + ) + sub_uid = await subscriber_manager.add_subscriber( + subscribe_response_stream, callback=callback + ) assert isinstance(sub_uid, uuid.UUID) while callback.call_count < 1: await asyncio.sleep(0.01) - actual_updates = [list(call_args[0][0]) - for call_args in callback.call_args_list] + actual_updates = [ + list(call_args[0][0]) for call_args in callback.call_args_list + ] + assert actual_updates == [ - [EntryUpdate(entry=DataEntry(path='Vehicle.Speed', value=Datapoint( - value=43.0, - timestamp=datetime.datetime( - 2022, 11, 7, 16, 18, 32, 247307, tzinfo=datetime.timezone.utc), - )), fields=[Field.VALUE])], + [ + EntryUpdate( + entry=DataEntry( + path="Vehicle.Speed", + value=Datapoint( + value=43.0, + timestamp=datetime.datetime( + 2022, + 11, + 7, + 16, + 18, + 32, + 247307, + tzinfo=datetime.timezone.utc, + ), + ), + ), + fields=[Field.VALUE], + ) + ], ] - @pytest.mark.usefixtures('val_server') - async def test_remove_subscriber(self, mocker, unused_tcp_port, val_servicer): - async with VSSClient('127.0.0.1', unused_tcp_port, ensure_startup_connection=False) as client: + @pytest.mark.usefixtures("val_server") + async def test_remove_subscriber_v1(self, mocker, unused_tcp_port, val_servicer_v1): + async with VSSClient( + "127.0.0.1", unused_tcp_port, ensure_startup_connection=False + ) as client: subscriber_manager = SubscriberManager(client) responses = ( - val_pb2.SubscribeResponse(updates=[ - val_pb2.EntryUpdate(entry=types_pb2.DataEntry( - path='Vehicle.Speed', - value=types_pb2.Datapoint( - timestamp=timestamp_pb2.Timestamp( - seconds=1667837915, nanos=247307674), - float=42.0, - ), - ), fields=(Field.VALUE,)), - ]), + val_v1.SubscribeResponse( + updates=[ + val_v1.EntryUpdate( + entry=types_v1.DataEntry( + path="Vehicle.Speed", + value=types_v1.Datapoint( + timestamp=timestamp_pb2.Timestamp( + seconds=1667837915, nanos=247307674 + ), + float=42.0, + ), + ), + fields=(Field.VALUE,), + ), + ] + ), + ) + val_servicer_v1.Subscribe.return_value = ( + response for response in responses + ) + subscribe_response_stream = client.subscribe( + entries=( + EntryRequest("Vehicle.Speed", View.CURRENT_VALUE, (Field.VALUE,)), + ) + ) + sub_uid = await subscriber_manager.add_subscriber( + subscribe_response_stream, callback=mocker.Mock() + ) + subscriber = subscriber_manager.subscribers.get(sub_uid) + + await subscriber_manager.remove_subscriber(sub_uid) + + assert 
subscriber_manager.subscribers.get(sub_uid) is None + assert subscriber.done() + + with pytest.raises(ValueError) as exc_info: + await subscriber_manager.remove_subscriber(sub_uid) + assert ( + exc_info.value.args[0] == f"Could not find subscription {str(sub_uid)}" + ) + + @pytest.mark.usefixtures("val_server") + async def test_add_subscriber_v2(self, mocker, unused_tcp_port, val_servicer_v2): + _entries: Dict[str, types_v2.Datapoint] = { + "Vehicle.Speed": types_v2.Datapoint( + timestamp=timestamp_pb2.Timestamp(seconds=1667837915, nanos=247307674), + value=types_v2.Value(float=42.0), + ), + } + _entries_2: Dict[str, types_v2.Datapoint] = { + "Vehicle.Speed": types_v2.Datapoint( + timestamp=timestamp_pb2.Timestamp(seconds=1667837912, nanos=247307674), + value=types_v2.Value(float=43.0), + ), + } + async with VSSClient( + "127.0.0.1", unused_tcp_port, ensure_startup_connection=False + ) as client: + subscriber_manager = SubscriberManager(client) + responses = ( + # 1st response is subscription ack + val_v2.SubscribeResponse(entries=_entries), + # Remaining responses are actual events that should invoke callback. + val_v2.SubscribeResponse(entries=_entries_2), + ) + callback = mocker.Mock() + val_servicer_v2.Subscribe.return_value = ( + response for response in responses + ) + + subscribe_response_stream = client.subscribe( + entries=( + EntryRequest("Vehicle.Speed", View.CURRENT_VALUE, (Field.VALUE,)), + ), + try_v2=True, + ) + sub_uid = await subscriber_manager.add_subscriber( + subscribe_response_stream, callback=callback + ) + + assert isinstance(sub_uid, uuid.UUID) + while callback.call_count < 1: + await asyncio.sleep(0.01) + actual_updates = [ + list(call_args[0][0]) for call_args in callback.call_args_list + ] + + assert actual_updates == [ + [ + EntryUpdate( + entry=DataEntry( + path="Vehicle.Speed", + value=Datapoint( + value=43.0, + timestamp=datetime.datetime( + 2022, + 11, + 7, + 16, + 18, + 32, + 247307, + tzinfo=datetime.timezone.utc, + ), + ), + ), + fields=[Field.VALUE], + ) + ], + ] + + @pytest.mark.usefixtures("val_server") + async def test_remove_subscriber_v2(self, mocker, unused_tcp_port, val_servicer_v2): + async with VSSClient( + "127.0.0.1", unused_tcp_port, ensure_startup_connection=False + ) as client: + subscriber_manager = SubscriberManager(client) + _entries: Dict[str, types_v2.Datapoint] = { + "Vehicle.Speed": types_v2.Datapoint( + timestamp=timestamp_pb2.Timestamp( + seconds=1667837915, nanos=247307674 + ), + value=types_v2.Value(float=42.0), + ), + } + responses = (val_v2.SubscribeResponse(entries=_entries),) + val_servicer_v2.Subscribe.return_value = ( + response for response in responses + ) + subscribe_response_stream = client.subscribe( + entries=( + EntryRequest("Vehicle.Speed", View.CURRENT_VALUE, (Field.VALUE,)), + ), + try_v2=True, + ) + sub_uid = await subscriber_manager.add_subscriber( + subscribe_response_stream, callback=mocker.Mock() ) - val_servicer.Subscribe.return_value = ( - response for response in responses) - subscribe_response_stream = client.subscribe(entries=( - EntryRequest('Vehicle.Speed', - View.CURRENT_VALUE, (Field.VALUE,)), - )) - sub_uid = await subscriber_manager.add_subscriber(subscribe_response_stream, callback=mocker.Mock()) subscriber = subscriber_manager.subscribers.get(sub_uid) await subscriber_manager.remove_subscriber(sub_uid) @@ -1319,4 +2121,6 @@ async def test_remove_subscriber(self, mocker, unused_tcp_port, val_servicer): with pytest.raises(ValueError) as exc_info: await subscriber_manager.remove_subscriber(sub_uid) 
-            assert exc_info.value.args[0] == f"Could not find subscription {str(sub_uid)}"
+            assert (
+                exc_info.value.args[0] == f"Could not find subscription {str(sub_uid)}"
+            )