Added struct default validation
Signed-off-by: Sebastian Schleemilch <[email protected]>
sschleemilch committed Oct 25, 2024
1 parent 038695a commit bb726cc
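
In short: every user-defined struct now gets a generated JSON schema, and 'default' values declared for struct-typed entries are validated against it via the jsonschema package. A minimal sketch of the mechanism (hypothetical schema and default value, not taken from this repository):

import jsonschema

schema = {
    "$schema": "https://json-schema.org/draft/2020-12/schema",
    "type": "object",
    "required": ["x", "y"],
    "properties": {"x": {"type": "number"}, "y": {"type": "number"}},
}

jsonschema.validate({"x": 1.0, "y": 2.0}, schema)  # conforms, returns None
jsonschema.validate({"x": 1.0}, schema)  # raises jsonschema.ValidationError ('y' is a required property)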
Showing 7 changed files with 297 additions and 20 deletions.
185 changes: 183 additions & 2 deletions poetry.lock


1 change: 1 addition & 0 deletions pyproject.toml
@@ -19,6 +19,7 @@ importlib-metadata = "^7.0"
click = "^8.1.7"
rich-click = "^1.8.3"
pydantic = "^2.8.2"
jsonschema = "^4.23.0"

[tool.poetry.group.dev.dependencies]
mypy = "*"
2 changes: 2 additions & 0 deletions src/vss_tools/datatypes.py
@@ -11,6 +11,8 @@

# Global objects to be extended by other code parts
dynamic_datatypes: Set[str] = set()
# JSON schemas of structs
struct_schemas: dict[str, dict[str, Any]] = {}
dynamic_quantities: list[str] = []
# This one contains the unit name as well as the list of allowed-datatypes
dynamic_units: dict[str, list] = {}
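
For context: struct_schemas maps a struct's fully qualified name to its generated JSON schema. Array datatypes reuse the element type's entry, so consumers strip the brackets before the lookup (illustrative FQN, assuming the registry has been populated):

schema = struct_schemas["Types.SomeStruct[]".strip("[]")]  # == struct_schemas["Types.SomeStruct"]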
11 changes: 6 additions & 5 deletions src/vss_tools/main.py
@@ -22,7 +22,7 @@
VSSDataStruct,
get_all_model_fields,
)
from vss_tools.tree import ModelValidationException, VSSNode, build_tree
from vss_tools.tree import ModelValidationException, VSSNode, add_struct_schemas, build_tree
from vss_tools.units_quantities import load_quantities, load_units
from vss_tools.vspec import InvalidSpecDuplicatedEntryException, InvalidSpecException, load_vspec

@@ -126,10 +126,6 @@ def get_types_root(types: tuple[Path, ...], include_dirs: list[Path]) -> VSSNode
else:
types_root = root

if dynamic_datatypes:
log.info(f"Dynamic datatypes added={len(dynamic_datatypes)}")
log.debug(f"Dynamic datatypes:\n{dynamic_datatypes}")

# Checking whether user defined root types e.g 'MyType'
# instead of 'Types.MyType'
if not all(["." in t for t in dynamic_datatypes]):
@@ -142,6 +138,11 @@ def get_types_root(types: tuple[Path, ...], include_dirs: list[Path]) -> VSSNode
log.critical(e)
exit(1)

if dynamic_datatypes:
log.info(f"Dynamic datatypes added={len(dynamic_datatypes)}")
log.debug(f"Dynamic datatypes:\n{dynamic_datatypes}")
add_struct_schemas(types_root)

return types_root


56 changes: 45 additions & 11 deletions src/vss_tools/model.py
@@ -9,6 +9,7 @@
from enum import Enum
from typing import Any

import jsonschema
from pydantic import (
BaseModel,
ConfigDict,
@@ -23,11 +24,13 @@
from vss_tools import log
from vss_tools.datatypes import (
Datatypes,
DatatypesException,
dynamic_quantities,
dynamic_units,
get_all_datatypes,
is_array,
resolve_datatype,
struct_schemas,
)

EXPORT_EXCLUDE_ATTRIBUTES = ["delete", "instantiate", "fqn", "arraysize", "aggregate", "is_instance"]
@@ -180,7 +183,7 @@ class VSSDataDatatype(VSSData):
max: int | float | None = None
unit: str | None = None
allowed: list[str | int | float | bool] | None = None
default: list[str | int | float | bool] | str | int | float | bool | None = None
default: Any = None

@model_validator(mode="after")
def check_type_arraysize_consistency(self) -> Self:
@@ -192,30 +195,57 @@ def check_type_arraysize_consistency(self) -> Self:
assert is_array(self.datatype), f"'arraysize' set on a non array datatype: '{self.datatype}'"
return self

    def check_min_max_valid_datatype(self) -> Self:
        if self.min is not None or self.max is not None:
            try:
                Datatypes.is_subtype_of(self.datatype, Datatypes.NUMERIC[0])
            except DatatypesException:
                raise ValueError(f"Cannot define min/max for datatype '{self.datatype}'")
            if is_array(self.datatype):
                raise ValueError("Cannot define min/max for array datatypes")
        return self

    def check_default_min_max(self) -> Self:
        if self.default is not None:
            if self.min is not None and self.default < self.min:
                raise ValueError(f"'default' smaller than 'min': {self.default}<{self.min}")
            if self.max is not None and self.default > self.max:
                raise ValueError(f"'default' greater than 'max': {self.default}>{self.max}")
        return self

def check_type_default_consistency(self) -> Self:
"""
Checks that the default value
is consistent with the given datatype
"""
if self.default is not None:
if is_array(self.datatype):
array = is_array(self.datatype)
if array:
assert isinstance(
self.default, list
), f"'default' with type '{type(self.default)}' does not match datatype '{self.datatype}'"
if self.arraysize:
assert len(self.default) == self.arraysize, "'default' array size does not match 'arraysize'"
for v in self.default:
assert Datatypes.is_datatype(v, self.datatype), f"'{v}' is not of type '{self.datatype}'"
else:
assert not isinstance(
self.default, list
), f"'default' with type '{type(self.default)}' does not match datatype '{self.datatype}'"
assert Datatypes.is_datatype(
self.default, self.datatype
), f"'{self.default}' is not of type '{self.datatype}'"

check_values = [self.default]
if array:
check_values = self.default

if Datatypes.get_type(self.datatype) is None:
for check_value in check_values:
try:
jsonschema.validate(check_value, struct_schemas[self.datatype.strip("[]")])
except jsonschema.ValidationError as e:
assert False, f"'default' with type '{self.datatype}' is not in the right format: {e}"
else:
for v in check_values:
assert Datatypes.is_datatype(v, self.datatype), f"'{v}' is not of type '{self.datatype}'"
return self

@model_validator(mode="after")
def check_default_values_allowed(self) -> Self:
"""
Checks that the given default values
@@ -235,6 +265,7 @@ def check_allowed_datatype_consistency(self) -> Self:
datatypes
"""
if self.allowed:
assert Datatypes.get_type(self.datatype), "'allowed' cannot be used with struct datatype"
for v in self.allowed:
assert Datatypes.is_datatype(v, self.datatype), f"'{v}' is not of type '{self.datatype}'"
return self
@@ -252,8 +283,11 @@ def check_allowed_min_max(self) -> Self:
def check_datatype(self) -> Self:
assert self.datatype in get_all_datatypes(self.fqn), f"'{self.datatype}' is not a valid datatype"
self.datatype = resolve_datatype(self.datatype, self.fqn)
self.check_type_default_consistency()
self.check_allowed_datatype_consistency()
self = self.check_type_default_consistency()
self = self.check_allowed_datatype_consistency()
self = self.check_default_values_allowed()
self = self.check_min_max_valid_datatype()
self = self.check_default_min_max()
return self

@field_validator("unit")
@@ -271,7 +305,7 @@ def check_datatype_matching_allowed_unit_datatypes(self) -> Self:
referenced in the unit if given
"""
if self.unit:
assert Datatypes.get_type(self.datatype), f"Cannot use 'unit' with complex datatype: '{self.datatype}'"
assert Datatypes.get_type(self.datatype), f"Cannot use 'unit' with struct datatype: '{self.datatype}'"
assert any(
Datatypes.is_subtype_of(self.datatype.rstrip("[]"), a) for a in dynamic_units[self.unit]
), f"'{self.datatype}' is not allowed for unit '{self.unit}'"
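
A note on the new default check in check_type_default_consistency: Datatypes.get_type() acts as the dispatcher. If it recognizes the datatype, every default value is checked with Datatypes.is_datatype; if it returns None, the datatype is taken to be a struct and each default entry is validated against the struct's JSON schema. A standalone sketch of the struct path (schema and values invented for illustration):

import jsonschema

schema = {"type": "object", "required": ["x"], "properties": {"x": {"type": "number"}}}
defaults = [{"x": 1.0}, {"x": 2.0}]  # default for a hypothetical struct array
for value in defaults:
    try:
        jsonschema.validate(value, schema)  # per-element validation
    except jsonschema.ValidationError as e:
        raise ValueError(f"'default' is not in the right format: {e}")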
60 changes: 59 additions & 1 deletion src/vss_tools/tree.py
@@ -15,12 +15,13 @@
from pydantic import ValidationError

from vss_tools import log
from vss_tools.datatypes import Datatypes, dynamic_datatypes
from vss_tools.datatypes import Datatypes, dynamic_datatypes, is_array, struct_schemas
from vss_tools.model import (
ModelValidationException,
VSSData,
VSSDataBranch,
VSSDataDatatype,
VSSDataProperty,
VSSDataStruct,
VSSRaw,
get_vss_raw,
@@ -530,3 +531,60 @@ def expand_string(s: str) -> list[str]:
for i in range(int(match.group(2)), int(match.group(3)) + 1):
expanded.append(s.replace(match.group(1), str(i)))
return expanded


def add_struct_schemas(types_root: VSSNode):
for node in PreOrderIter(types_root, filter_=lambda n: isinstance(n.data, VSSDataStruct)):
log.info(node)
schema = {
"$schema": "https://json-schema.org/draft/2020-12/schema",
"type": "object",
}
add_node_schema(types_root, node.get_fqn(), schema)
struct_schemas[node.get_fqn()] = schema


def add_node_schema(root: VSSNode, fqn: str, schema: dict[str, Any]) -> None:
datatype_map = {
Datatypes.UINT8[0]: "number",
Datatypes.INT8[0]: "number",
Datatypes.UINT16[0]: "number",
Datatypes.INT16[0]: "number",
Datatypes.UINT32[0]: "number",
Datatypes.INT32[0]: "number",
Datatypes.UINT64[0]: "number",
Datatypes.INT64[0]: "number",
Datatypes.FLOAT[0]: "number",
Datatypes.DOUBLE[0]: "number",
Datatypes.NUMERIC[0]: "number",
Datatypes.BOOLEAN[0]: "boolean",
}

properties: dict[str, Any] = {}
child: VSSNode
node = root.get_node_with_fqn(fqn)
if node:
for child in node.children:
if isinstance(child.data, VSSDataProperty):
array = is_array(child.data.datatype)
input_datatype = child.data.datatype.strip("[]")
datatype: str | None = None
if input_datatype in datatype_map:
datatype = datatype_map[input_datatype]
else:
d = Datatypes.get_type(input_datatype)
if d:
datatype = d[0]
if datatype:
log.debug(f"Datatype: {datatype}")
if array:
properties[child.name] = {"type": "array", "items": {"type": datatype}}
else:
properties[child.name] = {"type": datatype}
# A referenced struct
else:
properties[child.name] = {"type": "object"}
add_node_schema(root, input_datatype, properties[child.name])

schema["required"] = list(properties.keys())
schema["properties"] = properties
2 changes: 1 addition & 1 deletion tests/vspec/test_structs/test_data_type_parsing.py
@@ -321,7 +321,7 @@ def test_error_when_no_user_defined_data_types_are_provided(tmp_path):
(
"test_with_unit_on_struct_signal.vspec",
"VehicleDataTypes.vspec",
"Cannot use 'unit' with complex datatype: 'VehicleDataTypes.TestBranch1.ParentStruct'",
"Cannot use 'unit' with struct datatype: 'VehicleDataTypes.TestBranch1.ParentStruct'",
),
],
)
