From f3b4862701f96b490e80d07eb4c34d86e744ca38 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jan=20=C4=8Cern=C3=BD?= Date: Mon, 22 Apr 2024 10:32:23 +0200 Subject: [PATCH 1/9] Bundle dataclasses library Python 3.6, which is the default Python in RHEL 8, doesn't provide the `dataclasses` module, this module has been introduced in Python 3.8. To make the class work, we will bundle this module. Imported from: https://github.com/ericvsmith/dataclasses/blob/master/dataclasses.py --- openscap_report/dataclasses.py | 1184 ++++++++++++++++++++++++++++++++ 1 file changed, 1184 insertions(+) create mode 100644 openscap_report/dataclasses.py diff --git a/openscap_report/dataclasses.py b/openscap_report/dataclasses.py new file mode 100644 index 00000000..c287c0c6 --- /dev/null +++ b/openscap_report/dataclasses.py @@ -0,0 +1,1184 @@ +import re +import sys +import copy +import types +import inspect +import keyword + +__all__ = ['dataclass', + 'field', + 'Field', + 'FrozenInstanceError', + 'InitVar', + 'MISSING', + + # Helper functions. + 'fields', + 'asdict', + 'astuple', + 'make_dataclass', + 'replace', + 'is_dataclass', + ] + +# Conditions for adding methods. The boxes indicate what action the +# dataclass decorator takes. For all of these tables, when I talk +# about init=, repr=, eq=, order=, unsafe_hash=, or frozen=, I'm +# referring to the arguments to the @dataclass decorator. When +# checking if a dunder method already exists, I mean check for an +# entry in the class's __dict__. I never check to see if an attribute +# is defined in a base class. + +# Key: +# +=========+=========================================+ +# + Value | Meaning | +# +=========+=========================================+ +# | | No action: no method is added. | +# +---------+-----------------------------------------+ +# | add | Generated method is added. | +# +---------+-----------------------------------------+ +# | raise | TypeError is raised. | +# +---------+-----------------------------------------+ +# | None | Attribute is set to None. | +# +=========+=========================================+ + +# __init__ +# +# +--- init= parameter +# | +# v | | | +# | no | yes | <--- class has __init__ in __dict__? +# +=======+=======+=======+ +# | False | | | +# +-------+-------+-------+ +# | True | add | | <- the default +# +=======+=======+=======+ + +# __repr__ +# +# +--- repr= parameter +# | +# v | | | +# | no | yes | <--- class has __repr__ in __dict__? +# +=======+=======+=======+ +# | False | | | +# +-------+-------+-------+ +# | True | add | | <- the default +# +=======+=======+=======+ + + +# __setattr__ +# __delattr__ +# +# +--- frozen= parameter +# | +# v | | | +# | no | yes | <--- class has __setattr__ or __delattr__ in __dict__? +# +=======+=======+=======+ +# | False | | | <- the default +# +-------+-------+-------+ +# | True | add | raise | +# +=======+=======+=======+ +# Raise because not adding these methods would break the "frozen-ness" +# of the class. + +# __eq__ +# +# +--- eq= parameter +# | +# v | | | +# | no | yes | <--- class has __eq__ in __dict__? +# +=======+=======+=======+ +# | False | | | +# +-------+-------+-------+ +# | True | add | | <- the default +# +=======+=======+=======+ + +# __lt__ +# __le__ +# __gt__ +# __ge__ +# +# +--- order= parameter +# | +# v | | | +# | no | yes | <--- class has any comparison method in __dict__? 
+# +=======+=======+=======+ +# | False | | | <- the default +# +-------+-------+-------+ +# | True | add | raise | +# +=======+=======+=======+ +# Raise because to allow this case would interfere with using +# functools.total_ordering. + +# __hash__ + +# +------------------- unsafe_hash= parameter +# | +----------- eq= parameter +# | | +--- frozen= parameter +# | | | +# v v v | | | +# | no | yes | <--- class has explicitly defined __hash__ +# +=======+=======+=======+========+========+ +# | False | False | False | | | No __eq__, use the base class __hash__ +# +-------+-------+-------+--------+--------+ +# | False | False | True | | | No __eq__, use the base class __hash__ +# +-------+-------+-------+--------+--------+ +# | False | True | False | None | | <-- the default, not hashable +# +-------+-------+-------+--------+--------+ +# | False | True | True | add | | Frozen, so hashable, allows override +# +-------+-------+-------+--------+--------+ +# | True | False | False | add | raise | Has no __eq__, but hashable +# +-------+-------+-------+--------+--------+ +# | True | False | True | add | raise | Has no __eq__, but hashable +# +-------+-------+-------+--------+--------+ +# | True | True | False | add | raise | Not frozen, but hashable +# +-------+-------+-------+--------+--------+ +# | True | True | True | add | raise | Frozen, so hashable +# +=======+=======+=======+========+========+ +# For boxes that are blank, __hash__ is untouched and therefore +# inherited from the base class. If the base is object, then +# id-based hashing is used. +# +# Note that a class may already have __hash__=None if it specified an +# __eq__ method in the class body (not one that was created by +# @dataclass). +# +# See _hash_action (below) for a coded version of this table. + + +# Raised when an attempt is made to modify a frozen class. +class FrozenInstanceError(AttributeError): pass + +# A sentinel object for default values to signal that a default +# factory will be used. This is given a nice repr() which will appear +# in the function signature of dataclasses' constructors. +class _HAS_DEFAULT_FACTORY_CLASS: + def __repr__(self): + return '' +_HAS_DEFAULT_FACTORY = _HAS_DEFAULT_FACTORY_CLASS() + +# A sentinel object to detect if a parameter is supplied or not. Use +# a class to give it a better repr. +class _MISSING_TYPE: + pass +MISSING = _MISSING_TYPE() + +# Since most per-field metadata will be unused, create an empty +# read-only proxy that can be shared among all fields. +_EMPTY_METADATA = types.MappingProxyType({}) + +# Markers for the various kinds of fields and pseudo-fields. +class _FIELD_BASE: + def __init__(self, name): + self.name = name + def __repr__(self): + return self.name +_FIELD = _FIELD_BASE('_FIELD') +_FIELD_CLASSVAR = _FIELD_BASE('_FIELD_CLASSVAR') +_FIELD_INITVAR = _FIELD_BASE('_FIELD_INITVAR') + +# The name of an attribute on the class where we store the Field +# objects. Also used to check if a class is a Data Class. +_FIELDS = '__dataclass_fields__' + +# The name of an attribute on the class that stores the parameters to +# @dataclass. +_PARAMS = '__dataclass_params__' + +# The name of the function, that if it exists, is called at the end of +# __init__. +_POST_INIT_NAME = '__post_init__' + +# String regex that string annotations for ClassVar or InitVar must match. +# Allows "identifier.identifier[" or "identifier[". +# https://bugs.python.org/issue33453 for details. 
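+# For illustration: the string annotation "typing.ClassVar[int]" is matched
+# with group(1) == 'typing' and group(2) == 'ClassVar', while for the bare
+# form "ClassVar[int]" group(1) is None and group(2) == 'ClassVar'.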
+_MODULE_IDENTIFIER_RE = re.compile(r'^(?:\s*(\w+)\s*\.)?\s*(\w+)') + +class _InitVarMeta(type): + def __getitem__(self, params): + return self + +class InitVar(metaclass=_InitVarMeta): + pass + + +# Instances of Field are only ever created from within this module, +# and only from the field() function, although Field instances are +# exposed externally as (conceptually) read-only objects. +# +# name and type are filled in after the fact, not in __init__. +# They're not known at the time this class is instantiated, but it's +# convenient if they're available later. +# +# When cls._FIELDS is filled in with a list of Field objects, the name +# and type fields will have been populated. +class Field: + __slots__ = ('name', + 'type', + 'default', + 'default_factory', + 'repr', + 'hash', + 'init', + 'compare', + 'metadata', + '_field_type', # Private: not to be used by user code. + ) + + def __init__(self, default, default_factory, init, repr, hash, compare, + metadata): + self.name = None + self.type = None + self.default = default + self.default_factory = default_factory + self.init = init + self.repr = repr + self.hash = hash + self.compare = compare + self.metadata = (_EMPTY_METADATA + if metadata is None or len(metadata) == 0 else + types.MappingProxyType(metadata)) + self._field_type = None + + def __repr__(self): + return ('Field(' + f'name={self.name!r},' + f'type={self.type!r},' + f'default={self.default!r},' + f'default_factory={self.default_factory!r},' + f'init={self.init!r},' + f'repr={self.repr!r},' + f'hash={self.hash!r},' + f'compare={self.compare!r},' + f'metadata={self.metadata!r},' + f'_field_type={self._field_type}' + ')') + + # This is used to support the PEP 487 __set_name__ protocol in the + # case where we're using a field that contains a descriptor as a + # default value. For details on __set_name__, see + # https://www.python.org/dev/peps/pep-0487/#implementation-details. + # + # Note that in _process_class, this Field object is overwritten + # with the default value, so the end result is a descriptor that + # had __set_name__ called on it at the right time. + def __set_name__(self, owner, name): + func = getattr(type(self.default), '__set_name__', None) + if func: + # There is a __set_name__ method on the descriptor, call + # it. + func(self.default, owner, name) + + +class _DataclassParams: + __slots__ = ('init', + 'repr', + 'eq', + 'order', + 'unsafe_hash', + 'frozen', + ) + + def __init__(self, init, repr, eq, order, unsafe_hash, frozen): + self.init = init + self.repr = repr + self.eq = eq + self.order = order + self.unsafe_hash = unsafe_hash + self.frozen = frozen + + def __repr__(self): + return ('_DataclassParams(' + f'init={self.init!r},' + f'repr={self.repr!r},' + f'eq={self.eq!r},' + f'order={self.order!r},' + f'unsafe_hash={self.unsafe_hash!r},' + f'frozen={self.frozen!r}' + ')') + + +# This function is used instead of exposing Field creation directly, +# so that a type checker can be told (via overloads) that this is a +# function whose type depends on its parameters. +def field(*, default=MISSING, default_factory=MISSING, init=True, repr=True, + hash=None, compare=True, metadata=None): + """Return an object to identify dataclass fields. + + default is the default value of the field. default_factory is a + 0-argument function called to initialize a field's value. If init + is True, the field will be a parameter to the class's __init__() + function. If repr is True, the field will be included in the + object's repr(). 
If hash is True, the field will be included in + the object's hash(). If compare is True, the field will be used + in comparison functions. metadata, if specified, must be a + mapping which is stored but not otherwise examined by dataclass. + + It is an error to specify both default and default_factory. + """ + + if default is not MISSING and default_factory is not MISSING: + raise ValueError('cannot specify both default and default_factory') + return Field(default, default_factory, init, repr, hash, compare, + metadata) + + +def _tuple_str(obj_name, fields): + # Return a string representing each field of obj_name as a tuple + # member. So, if fields is ['x', 'y'] and obj_name is "self", + # return "(self.x,self.y)". + + # Special case for the 0-tuple. + if not fields: + return '()' + # Note the trailing comma, needed if this turns out to be a 1-tuple. + return f'({",".join([f"{obj_name}.{f.name}" for f in fields])},)' + + +def _create_fn(name, args, body, *, globals=None, locals=None, + return_type=MISSING): + # Note that we mutate locals when exec() is called. Caller + # beware! The only callers are internal to this module, so no + # worries about external callers. + if locals is None: + locals = {} + return_annotation = '' + if return_type is not MISSING: + locals['_return_type'] = return_type + return_annotation = '->_return_type' + args = ','.join(args) + body = '\n'.join(f' {b}' for b in body) + + # Compute the text of the entire function. + txt = f'def {name}({args}){return_annotation}:\n{body}' + + exec(txt, globals, locals) + return locals[name] + + +def _field_assign(frozen, name, value, self_name): + # If we're a frozen class, then assign to our fields in __init__ + # via object.__setattr__. Otherwise, just use a simple + # assignment. + # + # self_name is what "self" is called in this function: don't + # hard-code "self", since that might be a field name. + if frozen: + return f'object.__setattr__({self_name},{name!r},{value})' + return f'{self_name}.{name}={value}' + + +def _field_init(f, frozen, globals, self_name): + # Return the text of the line in the body of __init__ that will + # initialize this field. + + default_name = f'_dflt_{f.name}' + if f.default_factory is not MISSING: + if f.init: + # This field has a default factory. If a parameter is + # given, use it. If not, call the factory. + globals[default_name] = f.default_factory + value = (f'{default_name}() ' + f'if {f.name} is _HAS_DEFAULT_FACTORY ' + f'else {f.name}') + else: + # This is a field that's not in the __init__ params, but + # has a default factory function. It needs to be + # initialized here by calling the factory function, + # because there's no other way to initialize it. + + # For a field initialized with a default=defaultvalue, the + # class dict just has the default value + # (cls.fieldname=defaultvalue). But that won't work for a + # default factory, the factory must be called in __init__ + # and we must assign that to self.fieldname. We can't + # fall back to the class dict's value, both because it's + # not set, and because it might be different per-class + # (which, after all, is why we have a factory function!). + + globals[default_name] = f.default_factory + value = f'{default_name}()' + else: + # No default factory. + if f.init: + if f.default is MISSING: + # There's no default, just do an assignment. + value = f.name + elif f.default is not MISSING: + globals[default_name] = f.default + value = f.name + else: + # This field does not need initialization. 
Signify that + # to the caller by returning None. + return None + + # Only test this now, so that we can create variables for the + # default. However, return None to signify that we're not going + # to actually do the assignment statement for InitVars. + if f._field_type is _FIELD_INITVAR: + return None + + # Now, actually generate the field assignment. + return _field_assign(frozen, f.name, value, self_name) + + +def _init_param(f): + # Return the __init__ parameter string for this field. For + # example, the equivalent of 'x:int=3' (except instead of 'int', + # reference a variable set to int, and instead of '3', reference a + # variable set to 3). + if f.default is MISSING and f.default_factory is MISSING: + # There's no default, and no default_factory, just output the + # variable name and type. + default = '' + elif f.default is not MISSING: + # There's a default, this will be the name that's used to look + # it up. + default = f'=_dflt_{f.name}' + elif f.default_factory is not MISSING: + # There's a factory function. Set a marker. + default = '=_HAS_DEFAULT_FACTORY' + return f'{f.name}:_type_{f.name}{default}' + + +def _init_fn(fields, frozen, has_post_init, self_name): + # fields contains both real fields and InitVar pseudo-fields. + + # Make sure we don't have fields without defaults following fields + # with defaults. This actually would be caught when exec-ing the + # function source code, but catching it here gives a better error + # message, and future-proofs us in case we build up the function + # using ast. + seen_default = False + for f in fields: + # Only consider fields in the __init__ call. + if f.init: + if not (f.default is MISSING and f.default_factory is MISSING): + seen_default = True + elif seen_default: + raise TypeError(f'non-default argument {f.name!r} ' + 'follows default argument') + + globals = {'MISSING': MISSING, + '_HAS_DEFAULT_FACTORY': _HAS_DEFAULT_FACTORY} + + body_lines = [] + for f in fields: + line = _field_init(f, frozen, globals, self_name) + # line is None means that this field doesn't require + # initialization (it's a pseudo-field). Just skip it. + if line: + body_lines.append(line) + + # Does this class have a post-init function? + if has_post_init: + params_str = ','.join(f.name for f in fields + if f._field_type is _FIELD_INITVAR) + body_lines.append(f'{self_name}.{_POST_INIT_NAME}({params_str})') + + # If no body lines, use 'pass'. + if not body_lines: + body_lines = ['pass'] + + locals = {f'_type_{f.name}': f.type for f in fields} + return _create_fn('__init__', + [self_name] + [_init_param(f) for f in fields if f.init], + body_lines, + locals=locals, + globals=globals, + return_type=None) + + +def _repr_fn(fields): + return _create_fn('__repr__', + ('self',), + ['return self.__class__.__qualname__ + f"(' + + ', '.join([f"{f.name}={{self.{f.name}!r}}" + for f in fields]) + + ')"']) + + +def _frozen_get_del_attr(cls, fields): + # XXX: globals is modified on the first call to _create_fn, then + # the modified version is used in the second call. Is this okay? + globals = {'cls': cls, + 'FrozenInstanceError': FrozenInstanceError} + if fields: + fields_str = '(' + ','.join(repr(f.name) for f in fields) + ',)' + else: + # Special case for the zero-length tuple. 
+ fields_str = '()' + return (_create_fn('__setattr__', + ('self', 'name', 'value'), + (f'if type(self) is cls or name in {fields_str}:', + ' raise FrozenInstanceError(f"cannot assign to field {name!r}")', + f'super(cls, self).__setattr__(name, value)'), + globals=globals), + _create_fn('__delattr__', + ('self', 'name'), + (f'if type(self) is cls or name in {fields_str}:', + ' raise FrozenInstanceError(f"cannot delete field {name!r}")', + f'super(cls, self).__delattr__(name)'), + globals=globals), + ) + + +def _cmp_fn(name, op, self_tuple, other_tuple): + # Create a comparison function. If the fields in the object are + # named 'x' and 'y', then self_tuple is the string + # '(self.x,self.y)' and other_tuple is the string + # '(other.x,other.y)'. + + return _create_fn(name, + ('self', 'other'), + [ 'if other.__class__ is self.__class__:', + f' return {self_tuple}{op}{other_tuple}', + 'return NotImplemented']) + + +def _hash_fn(fields): + self_tuple = _tuple_str('self', fields) + return _create_fn('__hash__', + ('self',), + [f'return hash({self_tuple})']) + + +def _is_classvar(a_type, typing): + # This test uses a typing internal class, but it's the best way to + # test if this is a ClassVar. + return type(a_type) is typing._ClassVar + + +def _is_initvar(a_type, dataclasses): + # The module we're checking against is the module we're + # currently in (dataclasses.py). + return a_type is dataclasses.InitVar + + +def _is_type(annotation, cls, a_module, a_type, is_type_predicate): + # Given a type annotation string, does it refer to a_type in + # a_module? For example, when checking that annotation denotes a + # ClassVar, then a_module is typing, and a_type is + # typing.ClassVar. + + # It's possible to look up a_module given a_type, but it involves + # looking in sys.modules (again!), and seems like a waste since + # the caller already knows a_module. + + # - annotation is a string type annotation + # - cls is the class that this annotation was found in + # - a_module is the module we want to match + # - a_type is the type in that module we want to match + # - is_type_predicate is a function called with (obj, a_module) + # that determines if obj is of the desired type. + + # Since this test does not do a local namespace lookup (and + # instead only a module (global) lookup), there are some things it + # gets wrong. + + # With string annotations, cv0 will be detected as a ClassVar: + # CV = ClassVar + # @dataclass + # class C0: + # cv0: CV + + # But in this example cv1 will not be detected as a ClassVar: + # @dataclass + # class C1: + # CV = ClassVar + # cv1: CV + + # In C1, the code in this function (_is_type) will look up "CV" in + # the module and not find it, so it will not consider cv1 as a + # ClassVar. This is a fairly obscure corner case, and the best + # way to fix it would be to eval() the string "CV" with the + # correct global and local namespaces. However that would involve + # a eval() penalty for every single field of every dataclass + # that's defined. It was judged not worth it. + + match = _MODULE_IDENTIFIER_RE.match(annotation) + if match: + ns = None + module_name = match.group(1) + if not module_name: + # No module name, assume the class's module did + # "from .dataclasses import InitVar". + ns = sys.modules.get(cls.__module__).__dict__ + else: + # Look up module_name in the class's module. 
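+            # For example, for the string annotation "typing.ClassVar[int]",
+            # module_name is "typing"; the annotation only qualifies if that
+            # name, looked up in the class's module, is bound to a_module
+            # (e.g. the real typing module).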
+ module = sys.modules.get(cls.__module__) + if module and module.__dict__.get(module_name) is a_module: + ns = sys.modules.get(a_type.__module__).__dict__ + if ns and is_type_predicate(ns.get(match.group(2)), a_module): + return True + return False + + +def _get_field(cls, a_name, a_type): + # Return a Field object for this field name and type. ClassVars + # and InitVars are also returned, but marked as such (see + # f._field_type). + + # If the default value isn't derived from Field, then it's only a + # normal default value. Convert it to a Field(). + default = getattr(cls, a_name, MISSING) + if isinstance(default, Field): + f = default + else: + if isinstance(default, types.MemberDescriptorType): + # This is a field in __slots__, so it has no default value. + default = MISSING + f = field(default=default) + + # Only at this point do we know the name and the type. Set them. + f.name = a_name + f.type = a_type + + # Assume it's a normal field until proven otherwise. We're next + # going to decide if it's a ClassVar or InitVar, everything else + # is just a normal field. + f._field_type = _FIELD + + # In addition to checking for actual types here, also check for + # string annotations. get_type_hints() won't always work for us + # (see https://github.com/python/typing/issues/508 for example), + # plus it's expensive and would require an eval for every string + # annotation. So, make a best effort to see if this is a ClassVar + # or InitVar using regex's and checking that the thing referenced + # is actually of the correct type. + + # For the complete discussion, see https://bugs.python.org/issue33453 + + # If typing has not been imported, then it's impossible for any + # annotation to be a ClassVar. So, only look for ClassVar if + # typing has been imported by any module (not necessarily cls's + # module). + typing = sys.modules.get('typing') + if typing: + if (_is_classvar(a_type, typing) + or (isinstance(f.type, str) + and _is_type(f.type, cls, typing, typing.ClassVar, + _is_classvar))): + f._field_type = _FIELD_CLASSVAR + + # If the type is InitVar, or if it's a matching string annotation, + # then it's an InitVar. + if f._field_type is _FIELD: + # The module we're checking against is the module we're + # currently in (dataclasses.py). + dataclasses = sys.modules[__name__] + if (_is_initvar(a_type, dataclasses) + or (isinstance(f.type, str) + and _is_type(f.type, cls, dataclasses, dataclasses.InitVar, + _is_initvar))): + f._field_type = _FIELD_INITVAR + + # Validations for individual fields. This is delayed until now, + # instead of in the Field() constructor, since only here do we + # know the field name, which allows for better error reporting. + + # Special restrictions for ClassVar and InitVar. + if f._field_type in (_FIELD_CLASSVAR, _FIELD_INITVAR): + if f.default_factory is not MISSING: + raise TypeError(f'field {f.name} cannot have a ' + 'default factory') + # Should I check for other field settings? default_factory + # seems the most serious to check for. Maybe add others. For + # example, how about init=False (or really, + # init=)? It makes no sense for + # ClassVar and InitVar to specify init=. + + # For real fields, disallow mutable defaults for known types. + if f._field_type is _FIELD and isinstance(f.default, (list, dict, set)): + raise ValueError(f'mutable default {type(f.default)} for field ' + f'{f.name} is not allowed: use default_factory') + + return f + + +def _set_new_attribute(cls, name, value): + # Never overwrites an existing attribute. 
Returns True if the + # attribute already exists. + if name in cls.__dict__: + return True + setattr(cls, name, value) + return False + + +# Decide if/how we're going to create a hash function. Key is +# (unsafe_hash, eq, frozen, does-hash-exist). Value is the action to +# take. The common case is to do nothing, so instead of providing a +# function that is a no-op, use None to signify that. + +def _hash_set_none(cls, fields): + return None + +def _hash_add(cls, fields): + flds = [f for f in fields if (f.compare if f.hash is None else f.hash)] + return _hash_fn(flds) + +def _hash_exception(cls, fields): + # Raise an exception. + raise TypeError(f'Cannot overwrite attribute __hash__ ' + f'in class {cls.__name__}') + +# +# +-------------------------------------- unsafe_hash? +# | +------------------------------- eq? +# | | +------------------------ frozen? +# | | | +---------------- has-explicit-hash? +# | | | | +# | | | | +------- action +# | | | | | +# v v v v v +_hash_action = {(False, False, False, False): None, + (False, False, False, True ): None, + (False, False, True, False): None, + (False, False, True, True ): None, + (False, True, False, False): _hash_set_none, + (False, True, False, True ): None, + (False, True, True, False): _hash_add, + (False, True, True, True ): None, + (True, False, False, False): _hash_add, + (True, False, False, True ): _hash_exception, + (True, False, True, False): _hash_add, + (True, False, True, True ): _hash_exception, + (True, True, False, False): _hash_add, + (True, True, False, True ): _hash_exception, + (True, True, True, False): _hash_add, + (True, True, True, True ): _hash_exception, + } +# See https://bugs.python.org/issue32929#msg312829 for an if-statement +# version of this table. + + +def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen): + # Now that dicts retain insertion order, there's no reason to use + # an ordered dict. I am leveraging that ordering here, because + # derived class fields overwrite base class fields, but the order + # is defined by the base class, which is found first. + fields = {} + + setattr(cls, _PARAMS, _DataclassParams(init, repr, eq, order, + unsafe_hash, frozen)) + + # Find our base classes in reverse MRO order, and exclude + # ourselves. In reversed order so that more derived classes + # override earlier field definitions in base classes. As long as + # we're iterating over them, see if any are frozen. + any_frozen_base = False + has_dataclass_bases = False + for b in cls.__mro__[-1:0:-1]: + # Only process classes that have been processed by our + # decorator. That is, they have a _FIELDS attribute. + base_fields = getattr(b, _FIELDS, None) + if base_fields: + has_dataclass_bases = True + for f in base_fields.values(): + fields[f.name] = f + if getattr(b, _PARAMS).frozen: + any_frozen_base = True + + # Annotations that are defined in this class (not in base + # classes). If __annotations__ isn't present, then this class + # adds no new annotations. We use this to compute fields that are + # added by this class. + # + # Fields are found from cls_annotations, which is guaranteed to be + # ordered. Default values are from class attributes, if a field + # has a default. If the default value is a Field(), then it + # contains additional info beyond (and possibly including) the + # actual default value. Pseudo-fields ClassVars and InitVars are + # included, despite the fact that they're not real fields. That's + # dealt with later. 
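+    # For example, a class body containing "x: int = 3" and "y: str" gives
+    # cls.__dict__['__annotations__'] == {'x': int, 'y': str}; the default for
+    # x is simply the class attribute cls.x (== 3), and y has no default.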
+ cls_annotations = cls.__dict__.get('__annotations__', {}) + + # Now find fields in our class. While doing so, validate some + # things, and set the default values (as class attributes) where + # we can. + cls_fields = [_get_field(cls, name, type) + for name, type in cls_annotations.items()] + for f in cls_fields: + fields[f.name] = f + + # If the class attribute (which is the default value for this + # field) exists and is of type 'Field', replace it with the + # real default. This is so that normal class introspection + # sees a real default value, not a Field. + if isinstance(getattr(cls, f.name, None), Field): + if f.default is MISSING: + # If there's no default, delete the class attribute. + # This happens if we specify field(repr=False), for + # example (that is, we specified a field object, but + # no default value). Also if we're using a default + # factory. The class attribute should not be set at + # all in the post-processed class. + delattr(cls, f.name) + else: + setattr(cls, f.name, f.default) + + # Do we have any Field members that don't also have annotations? + for name, value in cls.__dict__.items(): + if isinstance(value, Field) and not name in cls_annotations: + raise TypeError(f'{name!r} is a field but has no type annotation') + + # Check rules that apply if we are derived from any dataclasses. + if has_dataclass_bases: + # Raise an exception if any of our bases are frozen, but we're not. + if any_frozen_base and not frozen: + raise TypeError('cannot inherit non-frozen dataclass from a ' + 'frozen one') + + # Raise an exception if we're frozen, but none of our bases are. + if not any_frozen_base and frozen: + raise TypeError('cannot inherit frozen dataclass from a ' + 'non-frozen one') + + # Remember all of the fields on our class (including bases). This + # also marks this class as being a dataclass. + setattr(cls, _FIELDS, fields) + + # Was this class defined with an explicit __hash__? Note that if + # __eq__ is defined in this class, then python will automatically + # set __hash__ to None. This is a heuristic, as it's possible + # that such a __hash__ == None was not auto-generated, but it + # close enough. + class_hash = cls.__dict__.get('__hash__', MISSING) + has_explicit_hash = not (class_hash is MISSING or + (class_hash is None and '__eq__' in cls.__dict__)) + + # If we're generating ordering methods, we must be generating the + # eq methods. + if order and not eq: + raise ValueError('eq must be true if order is true') + + if init: + # Does this class have a post-init function? + has_post_init = hasattr(cls, _POST_INIT_NAME) + + # Include InitVars and regular fields (so, not ClassVars). + flds = [f for f in fields.values() + if f._field_type in (_FIELD, _FIELD_INITVAR)] + _set_new_attribute(cls, '__init__', + _init_fn(flds, + frozen, + has_post_init, + # The name to use for the "self" + # param in __init__. Use "self" + # if possible. + '__dataclass_self__' if 'self' in fields + else 'self', + )) + + # Get the fields as a list, and include only real fields. This is + # used in all of the following methods. + field_list = [f for f in fields.values() if f._field_type is _FIELD] + + if repr: + flds = [f for f in field_list if f.repr] + _set_new_attribute(cls, '__repr__', _repr_fn(flds)) + + if eq: + # Create _eq__ method. There's no need for a __ne__ method, + # since python will call __eq__ and negate it. 
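+        # For two compare-enabled fields named x and y, the generated method
+        # is roughly:
+        #   def __eq__(self, other):
+        #       if other.__class__ is self.__class__:
+        #           return (self.x,self.y,)==(other.x,other.y,)
+        #       return NotImplemented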
+ flds = [f for f in field_list if f.compare] + self_tuple = _tuple_str('self', flds) + other_tuple = _tuple_str('other', flds) + _set_new_attribute(cls, '__eq__', + _cmp_fn('__eq__', '==', + self_tuple, other_tuple)) + + if order: + # Create and set the ordering methods. + flds = [f for f in field_list if f.compare] + self_tuple = _tuple_str('self', flds) + other_tuple = _tuple_str('other', flds) + for name, op in [('__lt__', '<'), + ('__le__', '<='), + ('__gt__', '>'), + ('__ge__', '>='), + ]: + if _set_new_attribute(cls, name, + _cmp_fn(name, op, self_tuple, other_tuple)): + raise TypeError(f'Cannot overwrite attribute {name} ' + f'in class {cls.__name__}. Consider using ' + 'functools.total_ordering') + + if frozen: + for fn in _frozen_get_del_attr(cls, field_list): + if _set_new_attribute(cls, fn.__name__, fn): + raise TypeError(f'Cannot overwrite attribute {fn.__name__} ' + f'in class {cls.__name__}') + + # Decide if/how we're going to create a hash function. + hash_action = _hash_action[bool(unsafe_hash), + bool(eq), + bool(frozen), + has_explicit_hash] + if hash_action: + # No need to call _set_new_attribute here, since by the time + # we're here the overwriting is unconditional. + cls.__hash__ = hash_action(cls, field_list) + + if not getattr(cls, '__doc__'): + # Create a class doc-string. + cls.__doc__ = (cls.__name__ + + str(inspect.signature(cls)).replace(' -> None', '')) + + return cls + + +# _cls should never be specified by keyword, so start it with an +# underscore. The presence of _cls is used to detect if this +# decorator is being called with parameters or not. +def dataclass(_cls=None, *, init=True, repr=True, eq=True, order=False, + unsafe_hash=False, frozen=False): + """Returns the same class as was passed in, with dunder methods + added based on the fields defined in the class. + + Examines PEP 526 __annotations__ to determine fields. + + If init is true, an __init__() method is added to the class. If + repr is true, a __repr__() method is added. If order is true, rich + comparison dunder methods are added. If unsafe_hash is true, a + __hash__() method function is added. If frozen is true, fields may + not be assigned to after instance creation. + """ + + def wrap(cls): + return _process_class(cls, init, repr, eq, order, unsafe_hash, frozen) + + # See if we're being called as @dataclass or @dataclass(). + if _cls is None: + # We're called with parens. + return wrap + + # We're called as @dataclass without parens. + return wrap(_cls) + + +def fields(class_or_instance): + """Return a tuple describing the fields of this dataclass. + + Accepts a dataclass or an instance of one. Tuple elements are of + type Field. + """ + + # Might it be worth caching this, per class? + try: + fields = getattr(class_or_instance, _FIELDS) + except AttributeError: + raise TypeError('must be called with a dataclass type or instance') + + # Exclude pseudo-fields. Note that fields is sorted by insertion + # order, so the order of the tuple is as the fields were defined. + return tuple(f for f in fields.values() if f._field_type is _FIELD) + + +def _is_dataclass_instance(obj): + """Returns True if obj is an instance of a dataclass.""" + return not isinstance(obj, type) and hasattr(obj, _FIELDS) + + +def is_dataclass(obj): + """Returns True if obj is a dataclass or an instance of a + dataclass.""" + return hasattr(obj, _FIELDS) + + +def asdict(obj, *, dict_factory=dict): + """Return the fields of a dataclass instance as a new dictionary mapping + field names to field values. 
+ + Example usage: + + @dataclass + class C: + x: int + y: int + + c = C(1, 2) + assert asdict(c) == {'x': 1, 'y': 2} + + If given, 'dict_factory' will be used instead of built-in dict. + The function applies recursively to field values that are + dataclass instances. This will also look into built-in containers: + tuples, lists, and dicts. + """ + if not _is_dataclass_instance(obj): + raise TypeError("asdict() should be called on dataclass instances") + return _asdict_inner(obj, dict_factory) + + +def _asdict_inner(obj, dict_factory): + if _is_dataclass_instance(obj): + result = [] + for f in fields(obj): + value = _asdict_inner(getattr(obj, f.name), dict_factory) + result.append((f.name, value)) + return dict_factory(result) + elif isinstance(obj, (list, tuple)): + return type(obj)(_asdict_inner(v, dict_factory) for v in obj) + elif isinstance(obj, dict): + return type(obj)((_asdict_inner(k, dict_factory), _asdict_inner(v, dict_factory)) + for k, v in obj.items()) + else: + return copy.deepcopy(obj) + + +def astuple(obj, *, tuple_factory=tuple): + """Return the fields of a dataclass instance as a new tuple of field values. + + Example usage:: + + @dataclass + class C: + x: int + y: int + + c = C(1, 2) + assert astuple(c) == (1, 2) + + If given, 'tuple_factory' will be used instead of built-in tuple. + The function applies recursively to field values that are + dataclass instances. This will also look into built-in containers: + tuples, lists, and dicts. + """ + + if not _is_dataclass_instance(obj): + raise TypeError("astuple() should be called on dataclass instances") + return _astuple_inner(obj, tuple_factory) + + +def _astuple_inner(obj, tuple_factory): + if _is_dataclass_instance(obj): + result = [] + for f in fields(obj): + value = _astuple_inner(getattr(obj, f.name), tuple_factory) + result.append(value) + return tuple_factory(result) + elif isinstance(obj, (list, tuple)): + return type(obj)(_astuple_inner(v, tuple_factory) for v in obj) + elif isinstance(obj, dict): + return type(obj)((_astuple_inner(k, tuple_factory), _astuple_inner(v, tuple_factory)) + for k, v in obj.items()) + else: + return copy.deepcopy(obj) + + +def make_dataclass(cls_name, fields, *, bases=(), namespace=None, init=True, + repr=True, eq=True, order=False, unsafe_hash=False, + frozen=False): + """Return a new dynamically created dataclass. + + The dataclass name will be 'cls_name'. 'fields' is an iterable + of either (name), (name, type) or (name, type, Field) objects. If type is + omitted, use the string 'typing.Any'. Field objects are created by + the equivalent of calling 'field(name, type [, Field-info])'. + + C = make_dataclass('C', ['x', ('y', int), ('z', int, field(init=False))], bases=(Base,)) + + is equivalent to: + + @dataclass + class C(Base): + x: 'typing.Any' + y: int + z: int = field(init=False) + + For the bases and namespace parameters, see the builtin type() function. + + The parameters init, repr, eq, order, unsafe_hash, and frozen are passed to + dataclass(). + """ + + if namespace is None: + namespace = {} + else: + # Copy namespace since we're going to mutate it. + namespace = namespace.copy() + + # While we're looking through the field names, validate that they + # are identifiers, are not keywords, and not duplicates. 
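+    # For example, make_dataclass('C', ['x', 'class']) and
+    # make_dataclass('C', ['x', 'x']) both raise TypeError below.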
+ seen = set() + anns = {} + for item in fields: + if isinstance(item, str): + name = item + tp = 'typing.Any' + elif len(item) == 2: + name, tp, = item + elif len(item) == 3: + name, tp, spec = item + namespace[name] = spec + else: + raise TypeError(f'Invalid field: {item!r}') + + if not isinstance(name, str) or not name.isidentifier(): + raise TypeError(f'Field names must be valid identifiers: {name!r}') + if keyword.iskeyword(name): + raise TypeError(f'Field names must not be keywords: {name!r}') + if name in seen: + raise TypeError(f'Field name duplicated: {name!r}') + + seen.add(name) + anns[name] = tp + + namespace['__annotations__'] = anns + # We use `types.new_class()` instead of simply `type()` to allow dynamic creation + # of generic dataclassses. + cls = types.new_class(cls_name, bases, {}, lambda ns: ns.update(namespace)) + return dataclass(cls, init=init, repr=repr, eq=eq, order=order, + unsafe_hash=unsafe_hash, frozen=frozen) + + +def replace(obj, **changes): + """Return a new object replacing specified fields with new values. + + This is especially useful for frozen classes. Example usage: + + @dataclass(frozen=True) + class C: + x: int + y: int + + c = C(1, 2) + c1 = replace(c, x=3) + assert c1.x == 3 and c1.y == 2 + """ + + # We're going to mutate 'changes', but that's okay because it's a + # new dict, even if called with 'replace(obj, **my_changes)'. + + if not _is_dataclass_instance(obj): + raise TypeError("replace() should be called on dataclass instances") + + # It's an error to have init=False fields in 'changes'. + # If a field is not in 'changes', read its value from the provided obj. + + for f in getattr(obj, _FIELDS).values(): + # Only consider normal fields or InitVars. + if f._field_type is _FIELD_CLASSVAR: + continue + + if not f.init: + # Error if this field is specified in changes. + if f.name in changes: + raise ValueError(f'field {f.name} is declared with ' + 'init=False, it cannot be specified with ' + 'replace()') + continue + + if f.name not in changes: + if f._field_type is _FIELD_INITVAR: + raise ValueError(f"InitVar {f.name!r} " + 'must be specified with replace()') + changes[f.name] = getattr(obj, f.name) + + # Create the new object, which calls __init__() and + # __post_init__() (if defined), using all of the init fields we've + # added and/or left in 'changes'. If there are values supplied in + # changes that aren't fields, this will correctly raise a + # TypeError. + return obj.__class__(**changes) From 597be6844ae93b415acf438702ae704456cb241c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jan=20=C4=8Cern=C3=BD?= Date: Mon, 22 Apr 2024 10:48:25 +0200 Subject: [PATCH 2/9] Make openscap-report compatible with Python 3.6 This commit introduces changes necessary to make our code compatbile with Python 3.6, the default Python version shipped in RHEL 8. 
The most important changes are: - using bundled dataclasses if not available - do not use contextlib.nullcontext - do not use importlib.metadata --- docs/manual/installation.rst | 2 +- openscap_report/__init__.py | 17 +++++++++++------ openscap_report/debug_settings.py | 5 ++++- .../data_structures/cpe_logical_test.py | 5 ++++- .../data_structures/cpe_platform.py | 5 ++++- .../data_structures/group.py | 5 ++++- .../data_structures/identifier.py | 5 ++++- .../data_structures/oval_definition.py | 5 ++++- .../data_structures/oval_node.py | 5 ++++- .../data_structures/oval_object.py | 5 ++++- .../data_structures/oval_reference.py | 5 ++++- .../data_structures/oval_state.py | 5 ++++- .../data_structures/oval_test.py | 5 ++++- .../data_structures/oval_variable.py | 5 ++++- .../data_structures/profile_info.py | 5 ++++- .../data_structures/reference.py | 5 ++++- .../data_structures/remediation.py | 5 ++++- .../data_structures/report.py | 5 ++++- .../data_structures/result_of_scan.py | 5 ++++- .../scap_results_parser/data_structures/rule.py | 5 ++++- .../data_structures/warning.py | 5 ++++- .../parsers/oval_result_parser.py | 5 ++++- .../scap_results_parser/parsers/rule_parser.py | 5 ++++- setup.py | 2 +- tests/test_utils.py | 5 ++++- tests/unit_tests/test_scap_result_parser.py | 12 +++++++++++- tox.ini | 3 +-- 27 files changed, 113 insertions(+), 33 deletions(-) diff --git a/docs/manual/installation.rst b/docs/manual/installation.rst index 7a2af172..c6c256ac 100644 --- a/docs/manual/installation.rst +++ b/docs/manual/installation.rst @@ -56,7 +56,7 @@ Installation from source Requirements: -* Python 3.8+ +* Python 3.6+ * `lxml`_ * `jinja2`_ diff --git a/openscap_report/__init__.py b/openscap_report/__init__.py index be7fcf72..e23a8a1e 100644 --- a/openscap_report/__init__.py +++ b/openscap_report/__init__.py @@ -1,11 +1,16 @@ # Copyright 2022, Red Hat, Inc. # SPDX-License-Identifier: LGPL-2.1-or-later -from importlib.metadata import PackageNotFoundError, version -from os import path - DISTRIBUTION_NAME = "openscap-report" try: - __version__ = version(DISTRIBUTION_NAME) -except PackageNotFoundError: - __version__ = f"Version is unavailable. Please install {DISTRIBUTION_NAME}!" + from importlib.metadata import PackageNotFoundError, version + try: + __version__ = version(DISTRIBUTION_NAME) + except PackageNotFoundError: + __version__ = f"Version is unavailable. Please install {DISTRIBUTION_NAME}!" +except ImportError: + import pkg_resources + try: + __version__ = pkg_resources.get_distribution(DISTRIBUTION_NAME).version + except pkg_resources.DistributionNotFound: + __version__ = f"Version is unavailable. Please install {DISTRIBUTION_NAME}!" diff --git a/openscap_report/debug_settings.py b/openscap_report/debug_settings.py index 27a5f055..c9b80902 100644 --- a/openscap_report/debug_settings.py +++ b/openscap_report/debug_settings.py @@ -1,7 +1,10 @@ # Copyright 2022, Red Hat, Inc. 
# SPDX-License-Identifier: LGPL-2.1-or-later -from dataclasses import dataclass +try: + from dataclasses import dataclass +except ImportError: + from openscap_report.dataclasses import dataclass @dataclass diff --git a/openscap_report/scap_results_parser/data_structures/cpe_logical_test.py b/openscap_report/scap_results_parser/data_structures/cpe_logical_test.py index 9c5c9a6b..7826d7d7 100644 --- a/openscap_report/scap_results_parser/data_structures/cpe_logical_test.py +++ b/openscap_report/scap_results_parser/data_structures/cpe_logical_test.py @@ -2,7 +2,10 @@ # SPDX-License-Identifier: LGPL-2.1-or-later from collections import Counter -from dataclasses import asdict, dataclass, field +try: + from dataclasses import asdict, dataclass, field +except ImportError: + from openscap_report.dataclasses import asdict, dataclass, field from typing import List from .cpe_result_eval import EMPTY_RESULT, OVAL_RESULT_TO_CPE_RESULT, CpeResult diff --git a/openscap_report/scap_results_parser/data_structures/cpe_platform.py b/openscap_report/scap_results_parser/data_structures/cpe_platform.py index ed8ed333..04a75ea5 100644 --- a/openscap_report/scap_results_parser/data_structures/cpe_platform.py +++ b/openscap_report/scap_results_parser/data_structures/cpe_platform.py @@ -1,7 +1,10 @@ # Copyright 2022, Red Hat, Inc. # SPDX-License-Identifier: LGPL-2.1-or-later -from dataclasses import asdict, dataclass +try: + from dataclasses import asdict, dataclass +except ImportError: + from openscap_report.dataclasses import asdict, dataclass from .cpe_logical_test import LogicalTest diff --git a/openscap_report/scap_results_parser/data_structures/group.py b/openscap_report/scap_results_parser/data_structures/group.py index 4018659d..99404ad3 100644 --- a/openscap_report/scap_results_parser/data_structures/group.py +++ b/openscap_report/scap_results_parser/data_structures/group.py @@ -1,7 +1,10 @@ # Copyright 2022, Red Hat, Inc. # SPDX-License-Identifier: LGPL-2.1-or-later -from dataclasses import asdict, dataclass, field +try: + from dataclasses import asdict, dataclass, field +except ImportError: + from openscap_report.dataclasses import asdict, dataclass, field from typing import List GROUP_JSON_KEYS = [ diff --git a/openscap_report/scap_results_parser/data_structures/identifier.py b/openscap_report/scap_results_parser/data_structures/identifier.py index 8b0f0210..8c134a8d 100644 --- a/openscap_report/scap_results_parser/data_structures/identifier.py +++ b/openscap_report/scap_results_parser/data_structures/identifier.py @@ -1,7 +1,10 @@ # Copyright 2022, Red Hat, Inc. # SPDX-License-Identifier: LGPL-2.1-or-later -from dataclasses import asdict, dataclass +try: + from dataclasses import asdict, dataclass +except ImportError: + from openscap_report.dataclasses import asdict, dataclass IDENTIFIER_JSON_KEYS = [ "system", diff --git a/openscap_report/scap_results_parser/data_structures/oval_definition.py b/openscap_report/scap_results_parser/data_structures/oval_definition.py index 9179afe5..45b4ff1a 100644 --- a/openscap_report/scap_results_parser/data_structures/oval_definition.py +++ b/openscap_report/scap_results_parser/data_structures/oval_definition.py @@ -1,7 +1,10 @@ # Copyright 2022, Red Hat, Inc. 
# SPDX-License-Identifier: LGPL-2.1-or-later -from dataclasses import asdict, dataclass, field +try: + from dataclasses import asdict, dataclass, field +except ImportError: + from openscap_report.dataclasses import asdict, dataclass, field from typing import List from .oval_node import OvalNode diff --git a/openscap_report/scap_results_parser/data_structures/oval_node.py b/openscap_report/scap_results_parser/data_structures/oval_node.py index e07b5973..47970b9f 100644 --- a/openscap_report/scap_results_parser/data_structures/oval_node.py +++ b/openscap_report/scap_results_parser/data_structures/oval_node.py @@ -4,7 +4,10 @@ import json import logging from collections import Counter -from dataclasses import asdict, dataclass, field +try: + from dataclasses import asdict, dataclass, field +except ImportError: + from openscap_report.dataclasses import asdict, dataclass, field from typing import List from .oval_result_eval import (EMPTY_RESULT, FULL_RESULT_TO_SHORT_RESULT, diff --git a/openscap_report/scap_results_parser/data_structures/oval_object.py b/openscap_report/scap_results_parser/data_structures/oval_object.py index 2d55d7ab..fd8aebe6 100644 --- a/openscap_report/scap_results_parser/data_structures/oval_object.py +++ b/openscap_report/scap_results_parser/data_structures/oval_object.py @@ -1,7 +1,10 @@ # Copyright 2022, Red Hat, Inc. # SPDX-License-Identifier: LGPL-2.1-or-later -from dataclasses import asdict, dataclass, field +try: + from dataclasses import asdict, dataclass, field +except ImportError: + from openscap_report.dataclasses import asdict, dataclass, field from typing import Dict diff --git a/openscap_report/scap_results_parser/data_structures/oval_reference.py b/openscap_report/scap_results_parser/data_structures/oval_reference.py index d9c46878..fb539256 100644 --- a/openscap_report/scap_results_parser/data_structures/oval_reference.py +++ b/openscap_report/scap_results_parser/data_structures/oval_reference.py @@ -1,7 +1,10 @@ # Copyright 2022, Red Hat, Inc. # SPDX-License-Identifier: LGPL-2.1-or-later -from dataclasses import asdict, dataclass +try: + from dataclasses import asdict, dataclass +except ImportError: + from openscap_report.dataclasses import asdict, dataclass @dataclass diff --git a/openscap_report/scap_results_parser/data_structures/oval_state.py b/openscap_report/scap_results_parser/data_structures/oval_state.py index 6cab8f01..bbd5a500 100644 --- a/openscap_report/scap_results_parser/data_structures/oval_state.py +++ b/openscap_report/scap_results_parser/data_structures/oval_state.py @@ -1,7 +1,10 @@ # Copyright 2022, Red Hat, Inc. # SPDX-License-Identifier: LGPL-2.1-or-later -from dataclasses import asdict, dataclass, field +try: + from dataclasses import asdict, dataclass, field +except ImportError: + from openscap_report.dataclasses import asdict, dataclass, field from typing import Dict diff --git a/openscap_report/scap_results_parser/data_structures/oval_test.py b/openscap_report/scap_results_parser/data_structures/oval_test.py index 79a3a74d..cd824d1d 100644 --- a/openscap_report/scap_results_parser/data_structures/oval_test.py +++ b/openscap_report/scap_results_parser/data_structures/oval_test.py @@ -1,7 +1,10 @@ # Copyright 2022, Red Hat, Inc. 
# SPDX-License-Identifier: LGPL-2.1-or-later -from dataclasses import asdict, dataclass, field +try: + from dataclasses import asdict, dataclass, field +except ImportError: + from openscap_report.dataclasses import asdict, dataclass, field from typing import Dict, List, Union from .oval_object import OvalObject diff --git a/openscap_report/scap_results_parser/data_structures/oval_variable.py b/openscap_report/scap_results_parser/data_structures/oval_variable.py index c8229ae3..cacf6c0b 100644 --- a/openscap_report/scap_results_parser/data_structures/oval_variable.py +++ b/openscap_report/scap_results_parser/data_structures/oval_variable.py @@ -1,7 +1,10 @@ # Copyright 2022, Red Hat, Inc. # SPDX-License-Identifier: LGPL-2.1-or-later -from dataclasses import asdict, dataclass, field +try: + from dataclasses import asdict, dataclass, field +except ImportError: + from openscap_report.dataclasses import asdict, dataclass, field from typing import Dict diff --git a/openscap_report/scap_results_parser/data_structures/profile_info.py b/openscap_report/scap_results_parser/data_structures/profile_info.py index 322618f0..b6c4d70f 100644 --- a/openscap_report/scap_results_parser/data_structures/profile_info.py +++ b/openscap_report/scap_results_parser/data_structures/profile_info.py @@ -1,7 +1,10 @@ # Copyright 2022, Red Hat, Inc. # SPDX-License-Identifier: LGPL-2.1-or-later -from dataclasses import asdict, dataclass, field +try: + from dataclasses import asdict, dataclass, field +except ImportError: + from openscap_report.dataclasses import asdict, dataclass, field from typing import Dict, List PROFILE_JSON_KEYS = [ diff --git a/openscap_report/scap_results_parser/data_structures/reference.py b/openscap_report/scap_results_parser/data_structures/reference.py index e3374dfd..65cc42df 100644 --- a/openscap_report/scap_results_parser/data_structures/reference.py +++ b/openscap_report/scap_results_parser/data_structures/reference.py @@ -1,7 +1,10 @@ # Copyright 2022, Red Hat, Inc. # SPDX-License-Identifier: LGPL-2.1-or-later -from dataclasses import asdict, dataclass +try: + from dataclasses import asdict, dataclass +except ImportError: + from openscap_report.dataclasses import asdict, dataclass from typing import List REFERENCE_JSON_KEYS = [ diff --git a/openscap_report/scap_results_parser/data_structures/remediation.py b/openscap_report/scap_results_parser/data_structures/remediation.py index dabd192e..42cd3dcd 100644 --- a/openscap_report/scap_results_parser/data_structures/remediation.py +++ b/openscap_report/scap_results_parser/data_structures/remediation.py @@ -1,7 +1,10 @@ # Copyright 2022, Red Hat, Inc. 
# SPDX-License-Identifier: LGPL-2.1-or-later -from dataclasses import asdict, dataclass +try: + from dataclasses import asdict, dataclass +except ImportError: + from openscap_report.dataclasses import asdict, dataclass REMEDIATION_JSON_KEYS = [ "remediation_id", diff --git a/openscap_report/scap_results_parser/data_structures/report.py b/openscap_report/scap_results_parser/data_structures/report.py index 45ad6092..4236c9e7 100644 --- a/openscap_report/scap_results_parser/data_structures/report.py +++ b/openscap_report/scap_results_parser/data_structures/report.py @@ -2,7 +2,10 @@ # SPDX-License-Identifier: LGPL-2.1-or-later import logging -from dataclasses import asdict, dataclass, field +try: + from dataclasses import asdict, dataclass, field +except ImportError: + from openscap_report.dataclasses import asdict, dataclass, field from typing import Dict from ..exceptions import MissingProcessableRules diff --git a/openscap_report/scap_results_parser/data_structures/result_of_scan.py b/openscap_report/scap_results_parser/data_structures/result_of_scan.py index e65daee4..38912d76 100644 --- a/openscap_report/scap_results_parser/data_structures/result_of_scan.py +++ b/openscap_report/scap_results_parser/data_structures/result_of_scan.py @@ -1,7 +1,10 @@ # Copyright 2022, Red Hat, Inc. # SPDX-License-Identifier: LGPL-2.1-or-later -from dataclasses import asdict, dataclass, field +try: + from dataclasses import asdict, dataclass, field +except ImportError: + from openscap_report.dataclasses import asdict, dataclass, field from typing import Dict, List SCAN_JSON_KEYS = [ diff --git a/openscap_report/scap_results_parser/data_structures/rule.py b/openscap_report/scap_results_parser/data_structures/rule.py index 9c849893..689d1932 100644 --- a/openscap_report/scap_results_parser/data_structures/rule.py +++ b/openscap_report/scap_results_parser/data_structures/rule.py @@ -1,7 +1,10 @@ # Copyright 2022, Red Hat, Inc. # SPDX-License-Identifier: LGPL-2.1-or-later -from dataclasses import asdict, dataclass, field +try: + from dataclasses import asdict, dataclass, field +except ImportError: + from openscap_report.dataclasses import asdict, dataclass, field from typing import Dict, List from .cpe_platform import Platform diff --git a/openscap_report/scap_results_parser/data_structures/warning.py b/openscap_report/scap_results_parser/data_structures/warning.py index 79eb85b5..36f7fa5a 100644 --- a/openscap_report/scap_results_parser/data_structures/warning.py +++ b/openscap_report/scap_results_parser/data_structures/warning.py @@ -1,6 +1,9 @@ # Copyright 2022, Red Hat, Inc. 
# SPDX-License-Identifier: LGPL-2.1-or-later -from dataclasses import asdict, dataclass +try: + from dataclasses import asdict, dataclass +except ImportError: + from openscap_report.dataclasses import asdict, dataclass WARNING_JSON_KEYS = [ "text", diff --git a/openscap_report/scap_results_parser/parsers/oval_result_parser.py b/openscap_report/scap_results_parser/parsers/oval_result_parser.py index 298d523f..92e1eb7e 100644 --- a/openscap_report/scap_results_parser/parsers/oval_result_parser.py +++ b/openscap_report/scap_results_parser/parsers/oval_result_parser.py @@ -3,7 +3,10 @@ import logging import uuid -from dataclasses import dataclass +try: + from dataclasses import dataclass +except ImportError: + from openscap_report.dataclasses import dataclass from lxml.etree import Element diff --git a/openscap_report/scap_results_parser/parsers/rule_parser.py b/openscap_report/scap_results_parser/parsers/rule_parser.py index 0112e68a..b150d77c 100644 --- a/openscap_report/scap_results_parser/parsers/rule_parser.py +++ b/openscap_report/scap_results_parser/parsers/rule_parser.py @@ -2,7 +2,10 @@ # SPDX-License-Identifier: LGPL-2.1-or-later import collections -from dataclasses import replace +try: + from dataclasses import replace +except ImportError: + from openscap_report.dataclasses import replace from ..data_structures import Identifier, Reference, Rule, RuleWarning from ..namespaces import NAMESPACES diff --git a/setup.py b/setup.py index 5c6c8030..ba42f33a 100644 --- a/setup.py +++ b/setup.py @@ -30,5 +30,5 @@ def get_long_description(): 'oscap-report=openscap_report.cli:main', ], }, - python_requires='>=3.8', + python_requires='>=3.6', ) diff --git a/tests/test_utils.py b/tests/test_utils.py index c637bb5b..aaba827e 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -3,7 +3,10 @@ import argparse import tempfile -from dataclasses import replace +try: + from dataclasses import replace +except ImportError: + from openscap_report.dataclasses import replace from pathlib import Path try: diff --git a/tests/unit_tests/test_scap_result_parser.py b/tests/unit_tests/test_scap_result_parser.py index 40e06ab8..467f1861 100644 --- a/tests/unit_tests/test_scap_result_parser.py +++ b/tests/unit_tests/test_scap_result_parser.py @@ -2,7 +2,6 @@ # SPDX-License-Identifier: LGPL-2.1-or-later import logging -from contextlib import nullcontext as does_not_raise import pytest from lxml.etree import XMLSyntaxError @@ -56,6 +55,17 @@ def test_validation(file_path, result): assert parser.validate(ARF_SCHEMAS_PATH) == result +class does_not_raise(object): + def __init__(self, enter_result=None): + self.enter_result = enter_result + + def __enter__(self): + return self.enter_result + + def __exit__(self, *excinfo): + pass + + @pytest.mark.unit_test @pytest.mark.parametrize("file_path, expectation, e_msg", [ (PATH_TO_ARF, does_not_raise(), ""), diff --git a/tox.ini b/tox.ini index 8d55872c..292909ca 100644 --- a/tox.ini +++ b/tox.ini @@ -4,7 +4,7 @@ # and then run "tox" from this directory. 
[tox] -envlist = clean, py3, py38, report, code_style +envlist = clean, py3, py36, py38, report, code_style [pytest] markers = @@ -17,7 +17,6 @@ commands = py.test --cov=openscap_report --cov-append --cov-report=term-missing -m "not integration_test" py.test --cov-report=term-missing -m "not unit_test" deps = - pytest pytest-cov jsonschema -rrequirements.txt
From fbc1fc8ec436c4107484c6a0741411562dc058ab Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jan=20=C4=8Cern=C3=BD?= Date: Mon, 22 Apr 2024 11:17:29 +0200 Subject: [PATCH 3/9] Use default Python on CS 8 This change should make the test environment similar to RHEL 8, where Python 3.6 is used as the default. --- .github/workflows/gating.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/gating.yaml b/.github/workflows/gating.yaml index 209c8a6b..16fb17af 100644 --- a/.github/workflows/gating.yaml +++ b/.github/workflows/gating.yaml @@ -32,11 +32,11 @@ jobs: image: quay.io/centos/centos:stream8 steps: - name: Install Deps - run: dnf install -y python38 python38-devel python38-setuptools rpm-build + run: dnf install -y python3 python3-devel python3-setuptools rpm-build - name: Checkout uses: actions/checkout@v3 - name: Build - run: python3.8 setup.py bdist_rpm --requires=python38-lxml,python38-jinja2 --build-requires=python38-devel,python38-setuptools --python=python3.8 + run: python3 setup.py bdist_rpm --requires=python3-lxml,python3-jinja2 --build-requires=python3-devel,python3-setuptools - name: RPM install run: dnf install -y ./dist/openscap-report-*.noarch.rpm - name: Test parameter -h
From 58efbfd20116d89d6fd7b1dd39592a07340b80be Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jan=20=C4=8Cern=C3=BD?= Date: Mon, 22 Apr 2024 13:43:23 +0200 Subject: [PATCH 4/9] Update RHEL 8 spec file Build with the default Python 3 (Python 3.6). Building with Python 3.8 is possible in EPEL, but not on CS 8, where the default module must be used. --- spec/rhel8/openscap-report.spec | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/spec/rhel8/openscap-report.spec b/spec/rhel8/openscap-report.spec index adf2eb46..c8ee3202 100644 --- a/spec/rhel8/openscap-report.spec +++ b/spec/rhel8/openscap-report.spec @@ -11,15 +11,15 @@ URL: https://github.com/OpenSCAP/%{name} Source0: https://github.com/OpenSCAP/%{name}/releases/download/v%{version}/%{name}-%{version}.tar.gz BuildArch: noarch -BuildRequires: python38-devel -BuildRequires: python38-rpm-macros +BuildRequires: python3-devel +BuildRequires: python3-rpm-macros BuildRequires: python3-sphinx BuildRequires: python3-sphinx_rtd_theme Provides: bundled(patternfly) = 4 -Requires: python38-lxml -Requires: python38-jinja2 +Requires: python3-lxml +Requires: python3-jinja2 Recommends: redhat-display-fonts Recommends: redhat-text-fonts
From 03db326c8d38cdf8ddbde78b504092dddd09b1ed Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jan=20=C4=8Cern=C3=BD?= Date: Mon, 22 Apr 2024 14:16:02 +0200 Subject: [PATCH 5/9] Use a separate directory with an init file This simplifies the import statements in individual files.
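A minimal sketch of the resulting compatibility shim, mirroring the __init__.py added below: the package re-exports either the standard-library dataclasses module or the bundled copy, so the individual modules no longer need their own try/except blocks.

    # openscap_report/dataclasses/__init__.py (sketch of the shim added below)
    try:
        # Newer Python versions provide dataclasses in the standard library.
        from dataclasses import asdict, dataclass, field, replace
    except ImportError:
        # Python 3.6 (the RHEL 8 default) falls back to the bundled copy.
        from .dataclasses import asdict, dataclass, field, replace

    # A consumer module then needs only a single unconditional import, e.g.:
    # from openscap_report.dataclasses import dataclass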
--- openscap_report/dataclasses/__init__.py | 4 ++++ openscap_report/{ => dataclasses}/dataclasses.py | 0 openscap_report/debug_settings.py | 5 +---- .../scap_results_parser/data_structures/cpe_logical_test.py | 5 +---- .../scap_results_parser/data_structures/cpe_platform.py | 5 +---- openscap_report/scap_results_parser/data_structures/group.py | 5 +---- .../scap_results_parser/data_structures/identifier.py | 5 +---- .../scap_results_parser/data_structures/oval_definition.py | 5 +---- .../scap_results_parser/data_structures/oval_node.py | 5 +---- .../scap_results_parser/data_structures/oval_object.py | 5 +---- .../scap_results_parser/data_structures/oval_reference.py | 5 +---- .../scap_results_parser/data_structures/oval_state.py | 5 +---- .../scap_results_parser/data_structures/oval_test.py | 5 +---- .../scap_results_parser/data_structures/oval_variable.py | 5 +---- .../scap_results_parser/data_structures/profile_info.py | 5 +---- .../scap_results_parser/data_structures/reference.py | 5 +---- .../scap_results_parser/data_structures/remediation.py | 5 +---- .../scap_results_parser/data_structures/report.py | 5 +---- .../scap_results_parser/data_structures/result_of_scan.py | 5 +---- openscap_report/scap_results_parser/data_structures/rule.py | 5 +---- .../scap_results_parser/data_structures/warning.py | 5 +---- .../scap_results_parser/parsers/oval_result_parser.py | 5 +---- openscap_report/scap_results_parser/parsers/rule_parser.py | 5 +---- tests/test_utils.py | 5 +---- 24 files changed, 26 insertions(+), 88 deletions(-) create mode 100644 openscap_report/dataclasses/__init__.py rename openscap_report/{ => dataclasses}/dataclasses.py (100%) diff --git a/openscap_report/dataclasses/__init__.py b/openscap_report/dataclasses/__init__.py new file mode 100644 index 00000000..7a171173 --- /dev/null +++ b/openscap_report/dataclasses/__init__.py @@ -0,0 +1,4 @@ +try: + from dataclasses import dataclass, asdict, field, replace +except ImportError: + from .dataclasses import dataclass, asdict, field, replace diff --git a/openscap_report/dataclasses.py b/openscap_report/dataclasses/dataclasses.py similarity index 100% rename from openscap_report/dataclasses.py rename to openscap_report/dataclasses/dataclasses.py diff --git a/openscap_report/debug_settings.py b/openscap_report/debug_settings.py index c9b80902..23e59bfb 100644 --- a/openscap_report/debug_settings.py +++ b/openscap_report/debug_settings.py @@ -1,10 +1,7 @@ # Copyright 2022, Red Hat, Inc. 
# SPDX-License-Identifier: LGPL-2.1-or-later -try: - from dataclasses import dataclass -except ImportError: - from openscap_report.dataclasses import dataclass +from openscap_report.dataclasses import dataclass @dataclass diff --git a/openscap_report/scap_results_parser/data_structures/cpe_logical_test.py b/openscap_report/scap_results_parser/data_structures/cpe_logical_test.py index 7826d7d7..8221dc91 100644 --- a/openscap_report/scap_results_parser/data_structures/cpe_logical_test.py +++ b/openscap_report/scap_results_parser/data_structures/cpe_logical_test.py @@ -2,10 +2,7 @@ # SPDX-License-Identifier: LGPL-2.1-or-later from collections import Counter -try: - from dataclasses import asdict, dataclass, field -except ImportError: - from openscap_report.dataclasses import asdict, dataclass, field +from openscap_report.dataclasses import asdict, dataclass, field from typing import List from .cpe_result_eval import EMPTY_RESULT, OVAL_RESULT_TO_CPE_RESULT, CpeResult diff --git a/openscap_report/scap_results_parser/data_structures/cpe_platform.py b/openscap_report/scap_results_parser/data_structures/cpe_platform.py index 04a75ea5..9dbd9a58 100644 --- a/openscap_report/scap_results_parser/data_structures/cpe_platform.py +++ b/openscap_report/scap_results_parser/data_structures/cpe_platform.py @@ -1,10 +1,7 @@ # Copyright 2022, Red Hat, Inc. # SPDX-License-Identifier: LGPL-2.1-or-later -try: - from dataclasses import asdict, dataclass -except ImportError: - from openscap_report.dataclasses import asdict, dataclass +from openscap_report.dataclasses import asdict, dataclass from .cpe_logical_test import LogicalTest diff --git a/openscap_report/scap_results_parser/data_structures/group.py b/openscap_report/scap_results_parser/data_structures/group.py index 99404ad3..21a98061 100644 --- a/openscap_report/scap_results_parser/data_structures/group.py +++ b/openscap_report/scap_results_parser/data_structures/group.py @@ -1,10 +1,7 @@ # Copyright 2022, Red Hat, Inc. # SPDX-License-Identifier: LGPL-2.1-or-later -try: - from dataclasses import asdict, dataclass, field -except ImportError: - from openscap_report.dataclasses import asdict, dataclass, field +from openscap_report.dataclasses import asdict, dataclass, field from typing import List GROUP_JSON_KEYS = [ diff --git a/openscap_report/scap_results_parser/data_structures/identifier.py b/openscap_report/scap_results_parser/data_structures/identifier.py index 8c134a8d..b19c3f2c 100644 --- a/openscap_report/scap_results_parser/data_structures/identifier.py +++ b/openscap_report/scap_results_parser/data_structures/identifier.py @@ -1,10 +1,7 @@ # Copyright 2022, Red Hat, Inc. # SPDX-License-Identifier: LGPL-2.1-or-later -try: - from dataclasses import asdict, dataclass -except ImportError: - from openscap_report.dataclasses import asdict, dataclass +from openscap_report.dataclasses import asdict, dataclass IDENTIFIER_JSON_KEYS = [ "system", diff --git a/openscap_report/scap_results_parser/data_structures/oval_definition.py b/openscap_report/scap_results_parser/data_structures/oval_definition.py index 45b4ff1a..693af105 100644 --- a/openscap_report/scap_results_parser/data_structures/oval_definition.py +++ b/openscap_report/scap_results_parser/data_structures/oval_definition.py @@ -1,10 +1,7 @@ # Copyright 2022, Red Hat, Inc. 
# SPDX-License-Identifier: LGPL-2.1-or-later -try: - from dataclasses import asdict, dataclass, field -except ImportError: - from openscap_report.dataclasses import asdict, dataclass, field +from openscap_report.dataclasses import asdict, dataclass, field from typing import List from .oval_node import OvalNode diff --git a/openscap_report/scap_results_parser/data_structures/oval_node.py b/openscap_report/scap_results_parser/data_structures/oval_node.py index 47970b9f..a924f5b3 100644 --- a/openscap_report/scap_results_parser/data_structures/oval_node.py +++ b/openscap_report/scap_results_parser/data_structures/oval_node.py @@ -4,10 +4,7 @@ import json import logging from collections import Counter -try: - from dataclasses import asdict, dataclass, field -except ImportError: - from openscap_report.dataclasses import asdict, dataclass, field +from openscap_report.dataclasses import asdict, dataclass, field from typing import List from .oval_result_eval import (EMPTY_RESULT, FULL_RESULT_TO_SHORT_RESULT, diff --git a/openscap_report/scap_results_parser/data_structures/oval_object.py b/openscap_report/scap_results_parser/data_structures/oval_object.py index fd8aebe6..f700ae07 100644 --- a/openscap_report/scap_results_parser/data_structures/oval_object.py +++ b/openscap_report/scap_results_parser/data_structures/oval_object.py @@ -1,10 +1,7 @@ # Copyright 2022, Red Hat, Inc. # SPDX-License-Identifier: LGPL-2.1-or-later -try: - from dataclasses import asdict, dataclass, field -except ImportError: - from openscap_report.dataclasses import asdict, dataclass, field +from openscap_report.dataclasses import asdict, dataclass, field from typing import Dict diff --git a/openscap_report/scap_results_parser/data_structures/oval_reference.py b/openscap_report/scap_results_parser/data_structures/oval_reference.py index fb539256..96030bc9 100644 --- a/openscap_report/scap_results_parser/data_structures/oval_reference.py +++ b/openscap_report/scap_results_parser/data_structures/oval_reference.py @@ -1,10 +1,7 @@ # Copyright 2022, Red Hat, Inc. # SPDX-License-Identifier: LGPL-2.1-or-later -try: - from dataclasses import asdict, dataclass -except ImportError: - from openscap_report.dataclasses import asdict, dataclass +from openscap_report.dataclasses import asdict, dataclass @dataclass diff --git a/openscap_report/scap_results_parser/data_structures/oval_state.py b/openscap_report/scap_results_parser/data_structures/oval_state.py index bbd5a500..e4ca387e 100644 --- a/openscap_report/scap_results_parser/data_structures/oval_state.py +++ b/openscap_report/scap_results_parser/data_structures/oval_state.py @@ -1,10 +1,7 @@ # Copyright 2022, Red Hat, Inc. # SPDX-License-Identifier: LGPL-2.1-or-later -try: - from dataclasses import asdict, dataclass, field -except ImportError: - from openscap_report.dataclasses import asdict, dataclass, field +from openscap_report.dataclasses import asdict, dataclass, field from typing import Dict diff --git a/openscap_report/scap_results_parser/data_structures/oval_test.py b/openscap_report/scap_results_parser/data_structures/oval_test.py index cd824d1d..d75a0298 100644 --- a/openscap_report/scap_results_parser/data_structures/oval_test.py +++ b/openscap_report/scap_results_parser/data_structures/oval_test.py @@ -1,10 +1,7 @@ # Copyright 2022, Red Hat, Inc. 
# SPDX-License-Identifier: LGPL-2.1-or-later -try: - from dataclasses import asdict, dataclass, field -except ImportError: - from openscap_report.dataclasses import asdict, dataclass, field +from openscap_report.dataclasses import asdict, dataclass, field from typing import Dict, List, Union from .oval_object import OvalObject diff --git a/openscap_report/scap_results_parser/data_structures/oval_variable.py b/openscap_report/scap_results_parser/data_structures/oval_variable.py index cacf6c0b..809e5b7f 100644 --- a/openscap_report/scap_results_parser/data_structures/oval_variable.py +++ b/openscap_report/scap_results_parser/data_structures/oval_variable.py @@ -1,10 +1,7 @@ # Copyright 2022, Red Hat, Inc. # SPDX-License-Identifier: LGPL-2.1-or-later -try: - from dataclasses import asdict, dataclass, field -except ImportError: - from openscap_report.dataclasses import asdict, dataclass, field +from openscap_report.dataclasses import asdict, dataclass, field from typing import Dict diff --git a/openscap_report/scap_results_parser/data_structures/profile_info.py b/openscap_report/scap_results_parser/data_structures/profile_info.py index b6c4d70f..9b29539a 100644 --- a/openscap_report/scap_results_parser/data_structures/profile_info.py +++ b/openscap_report/scap_results_parser/data_structures/profile_info.py @@ -1,10 +1,7 @@ # Copyright 2022, Red Hat, Inc. # SPDX-License-Identifier: LGPL-2.1-or-later -try: - from dataclasses import asdict, dataclass, field -except ImportError: - from openscap_report.dataclasses import asdict, dataclass, field +from openscap_report.dataclasses import asdict, dataclass, field from typing import Dict, List PROFILE_JSON_KEYS = [ diff --git a/openscap_report/scap_results_parser/data_structures/reference.py b/openscap_report/scap_results_parser/data_structures/reference.py index 65cc42df..c1020ecd 100644 --- a/openscap_report/scap_results_parser/data_structures/reference.py +++ b/openscap_report/scap_results_parser/data_structures/reference.py @@ -1,10 +1,7 @@ # Copyright 2022, Red Hat, Inc. # SPDX-License-Identifier: LGPL-2.1-or-later -try: - from dataclasses import asdict, dataclass -except ImportError: - from openscap_report.dataclasses import asdict, dataclass +from openscap_report.dataclasses import asdict, dataclass from typing import List REFERENCE_JSON_KEYS = [ diff --git a/openscap_report/scap_results_parser/data_structures/remediation.py b/openscap_report/scap_results_parser/data_structures/remediation.py index 42cd3dcd..d8a4f0a2 100644 --- a/openscap_report/scap_results_parser/data_structures/remediation.py +++ b/openscap_report/scap_results_parser/data_structures/remediation.py @@ -1,10 +1,7 @@ # Copyright 2022, Red Hat, Inc. 
# SPDX-License-Identifier: LGPL-2.1-or-later -try: - from dataclasses import asdict, dataclass -except ImportError: - from openscap_report.dataclasses import asdict, dataclass +from openscap_report.dataclasses import asdict, dataclass REMEDIATION_JSON_KEYS = [ "remediation_id", diff --git a/openscap_report/scap_results_parser/data_structures/report.py b/openscap_report/scap_results_parser/data_structures/report.py index 4236c9e7..962a54d3 100644 --- a/openscap_report/scap_results_parser/data_structures/report.py +++ b/openscap_report/scap_results_parser/data_structures/report.py @@ -2,10 +2,7 @@ # SPDX-License-Identifier: LGPL-2.1-or-later import logging -try: - from dataclasses import asdict, dataclass, field -except ImportError: - from openscap_report.dataclasses import asdict, dataclass, field +from openscap_report.dataclasses import asdict, dataclass, field from typing import Dict from ..exceptions import MissingProcessableRules diff --git a/openscap_report/scap_results_parser/data_structures/result_of_scan.py b/openscap_report/scap_results_parser/data_structures/result_of_scan.py index 38912d76..fa795659 100644 --- a/openscap_report/scap_results_parser/data_structures/result_of_scan.py +++ b/openscap_report/scap_results_parser/data_structures/result_of_scan.py @@ -1,10 +1,7 @@ # Copyright 2022, Red Hat, Inc. # SPDX-License-Identifier: LGPL-2.1-or-later -try: - from dataclasses import asdict, dataclass, field -except ImportError: - from openscap_report.dataclasses import asdict, dataclass, field +from openscap_report.dataclasses import asdict, dataclass, field from typing import Dict, List SCAN_JSON_KEYS = [ diff --git a/openscap_report/scap_results_parser/data_structures/rule.py b/openscap_report/scap_results_parser/data_structures/rule.py index 689d1932..91b72407 100644 --- a/openscap_report/scap_results_parser/data_structures/rule.py +++ b/openscap_report/scap_results_parser/data_structures/rule.py @@ -1,10 +1,7 @@ # Copyright 2022, Red Hat, Inc. # SPDX-License-Identifier: LGPL-2.1-or-later -try: - from dataclasses import asdict, dataclass, field -except ImportError: - from openscap_report.dataclasses import asdict, dataclass, field +from openscap_report.dataclasses import asdict, dataclass, field from typing import Dict, List from .cpe_platform import Platform diff --git a/openscap_report/scap_results_parser/data_structures/warning.py b/openscap_report/scap_results_parser/data_structures/warning.py index 36f7fa5a..7568c8c2 100644 --- a/openscap_report/scap_results_parser/data_structures/warning.py +++ b/openscap_report/scap_results_parser/data_structures/warning.py @@ -1,9 +1,6 @@ # Copyright 2022, Red Hat, Inc. 
# SPDX-License-Identifier: LGPL-2.1-or-later -try: - from dataclasses import asdict, dataclass -except ImportError: - from openscap_report.dataclasses import asdict, dataclass +from openscap_report.dataclasses import asdict, dataclass WARNING_JSON_KEYS = [ "text", diff --git a/openscap_report/scap_results_parser/parsers/oval_result_parser.py b/openscap_report/scap_results_parser/parsers/oval_result_parser.py index 92e1eb7e..889b6d45 100644 --- a/openscap_report/scap_results_parser/parsers/oval_result_parser.py +++ b/openscap_report/scap_results_parser/parsers/oval_result_parser.py @@ -3,10 +3,7 @@ import logging import uuid -try: - from dataclasses import dataclass -except ImportError: - from openscap_report.dataclasses import dataclass +from openscap_report.dataclasses import dataclass from lxml.etree import Element diff --git a/openscap_report/scap_results_parser/parsers/rule_parser.py b/openscap_report/scap_results_parser/parsers/rule_parser.py index b150d77c..f7ba94a0 100644 --- a/openscap_report/scap_results_parser/parsers/rule_parser.py +++ b/openscap_report/scap_results_parser/parsers/rule_parser.py @@ -2,10 +2,7 @@ # SPDX-License-Identifier: LGPL-2.1-or-later import collections -try: - from dataclasses import replace -except ImportError: - from openscap_report.dataclasses import replace +from openscap_report.dataclasses import replace from ..data_structures import Identifier, Reference, Rule, RuleWarning from ..namespaces import NAMESPACES diff --git a/tests/test_utils.py b/tests/test_utils.py index aaba827e..a1ec8a5c 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -3,10 +3,7 @@ import argparse import tempfile -try: - from dataclasses import replace -except ImportError: - from openscap_report.dataclasses import replace +from openscap_report.dataclasses import replace from pathlib import Path try: From caba005799bf14873bdcd27fb8b87a5fa1d65a60 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jan=20=C4=8Cern=C3=BD?= Date: Mon, 22 Apr 2024 15:45:55 +0200 Subject: [PATCH 6/9] Reformat dataclasses.py --- openscap_report/dataclasses/dataclasses.py | 594 ++++++++++++--------- 1 file changed, 340 insertions(+), 254 deletions(-) diff --git a/openscap_report/dataclasses/dataclasses.py b/openscap_report/dataclasses/dataclasses.py index c287c0c6..a98a7f51 100644 --- a/openscap_report/dataclasses/dataclasses.py +++ b/openscap_report/dataclasses/dataclasses.py @@ -5,21 +5,21 @@ import inspect import keyword -__all__ = ['dataclass', - 'field', - 'Field', - 'FrozenInstanceError', - 'InitVar', - 'MISSING', - - # Helper functions. - 'fields', - 'asdict', - 'astuple', - 'make_dataclass', - 'replace', - 'is_dataclass', - ] +__all__ = [ + "dataclass", + "field", + "Field", + "FrozenInstanceError", + "InitVar", + "MISSING", + # Helper functions. + "fields", + "asdict", + "astuple", + "make_dataclass", + "replace", + "is_dataclass", +] # Conditions for adding methods. The boxes indicate what action the # dataclass decorator takes. For all of these tables, when I talk @@ -148,56 +148,69 @@ # Raised when an attempt is made to modify a frozen class. -class FrozenInstanceError(AttributeError): pass +class FrozenInstanceError(AttributeError): + pass + # A sentinel object for default values to signal that a default # factory will be used. This is given a nice repr() which will appear # in the function signature of dataclasses' constructors. 
class _HAS_DEFAULT_FACTORY_CLASS: def __repr__(self): - return '' + return "" + + _HAS_DEFAULT_FACTORY = _HAS_DEFAULT_FACTORY_CLASS() + # A sentinel object to detect if a parameter is supplied or not. Use # a class to give it a better repr. class _MISSING_TYPE: pass + + MISSING = _MISSING_TYPE() # Since most per-field metadata will be unused, create an empty # read-only proxy that can be shared among all fields. _EMPTY_METADATA = types.MappingProxyType({}) + # Markers for the various kinds of fields and pseudo-fields. class _FIELD_BASE: def __init__(self, name): self.name = name + def __repr__(self): return self.name -_FIELD = _FIELD_BASE('_FIELD') -_FIELD_CLASSVAR = _FIELD_BASE('_FIELD_CLASSVAR') -_FIELD_INITVAR = _FIELD_BASE('_FIELD_INITVAR') + + +_FIELD = _FIELD_BASE("_FIELD") +_FIELD_CLASSVAR = _FIELD_BASE("_FIELD_CLASSVAR") +_FIELD_INITVAR = _FIELD_BASE("_FIELD_INITVAR") # The name of an attribute on the class where we store the Field # objects. Also used to check if a class is a Data Class. -_FIELDS = '__dataclass_fields__' +_FIELDS = "__dataclass_fields__" # The name of an attribute on the class that stores the parameters to # @dataclass. -_PARAMS = '__dataclass_params__' +_PARAMS = "__dataclass_params__" # The name of the function, that if it exists, is called at the end of # __init__. -_POST_INIT_NAME = '__post_init__' +_POST_INIT_NAME = "__post_init__" # String regex that string annotations for ClassVar or InitVar must match. # Allows "identifier.identifier[" or "identifier[". # https://bugs.python.org/issue33453 for details. -_MODULE_IDENTIFIER_RE = re.compile(r'^(?:\s*(\w+)\s*\.)?\s*(\w+)') +_MODULE_IDENTIFIER_RE = re.compile(r"^(?:\s*(\w+)\s*\.)?\s*(\w+)") + class _InitVarMeta(type): - def __getitem__(self, params): - return self + def __getitem__(cls, params): + return cls + class InitVar(metaclass=_InitVarMeta): pass @@ -214,20 +227,20 @@ class InitVar(metaclass=_InitVarMeta): # When cls._FIELDS is filled in with a list of Field objects, the name # and type fields will have been populated. class Field: - __slots__ = ('name', - 'type', - 'default', - 'default_factory', - 'repr', - 'hash', - 'init', - 'compare', - 'metadata', - '_field_type', # Private: not to be used by user code. - ) - - def __init__(self, default, default_factory, init, repr, hash, compare, - metadata): + __slots__ = ( + "name", + "type", + "default", + "default_factory", + "repr", + "hash", + "init", + "compare", + "metadata", + "_field_type", # Private: not to be used by user code. 
+ ) + + def __init__(self, default, default_factory, init, repr, hash, compare, metadata): self.name = None self.type = None self.default = default @@ -236,24 +249,28 @@ def __init__(self, default, default_factory, init, repr, hash, compare, self.repr = repr self.hash = hash self.compare = compare - self.metadata = (_EMPTY_METADATA - if metadata is None or len(metadata) == 0 else - types.MappingProxyType(metadata)) + self.metadata = ( + _EMPTY_METADATA + if metadata is None or len(metadata) == 0 + else types.MappingProxyType(metadata) + ) self._field_type = None def __repr__(self): - return ('Field(' - f'name={self.name!r},' - f'type={self.type!r},' - f'default={self.default!r},' - f'default_factory={self.default_factory!r},' - f'init={self.init!r},' - f'repr={self.repr!r},' - f'hash={self.hash!r},' - f'compare={self.compare!r},' - f'metadata={self.metadata!r},' - f'_field_type={self._field_type}' - ')') + return ( + "Field(" + f"name={self.name!r}," + f"type={self.type!r}," + f"default={self.default!r}," + f"default_factory={self.default_factory!r}," + f"init={self.init!r}," + f"repr={self.repr!r}," + f"hash={self.hash!r}," + f"compare={self.compare!r}," + f"metadata={self.metadata!r}," + f"_field_type={self._field_type}" + ")" + ) # This is used to support the PEP 487 __set_name__ protocol in the # case where we're using a field that contains a descriptor as a @@ -264,7 +281,7 @@ def __repr__(self): # with the default value, so the end result is a descriptor that # had __set_name__ called on it at the right time. def __set_name__(self, owner, name): - func = getattr(type(self.default), '__set_name__', None) + func = getattr(type(self.default), "__set_name__", None) if func: # There is a __set_name__ method on the descriptor, call # it. @@ -272,13 +289,14 @@ def __set_name__(self, owner, name): class _DataclassParams: - __slots__ = ('init', - 'repr', - 'eq', - 'order', - 'unsafe_hash', - 'frozen', - ) + __slots__ = ( + "init", + "repr", + "eq", + "order", + "unsafe_hash", + "frozen", + ) def __init__(self, init, repr, eq, order, unsafe_hash, frozen): self.init = init @@ -289,21 +307,31 @@ def __init__(self, init, repr, eq, order, unsafe_hash, frozen): self.frozen = frozen def __repr__(self): - return ('_DataclassParams(' - f'init={self.init!r},' - f'repr={self.repr!r},' - f'eq={self.eq!r},' - f'order={self.order!r},' - f'unsafe_hash={self.unsafe_hash!r},' - f'frozen={self.frozen!r}' - ')') + return ( + "_DataclassParams(" + f"init={self.init!r}," + f"repr={self.repr!r}," + f"eq={self.eq!r}," + f"order={self.order!r}," + f"unsafe_hash={self.unsafe_hash!r}," + f"frozen={self.frozen!r}" + ")" + ) # This function is used instead of exposing Field creation directly, # so that a type checker can be told (via overloads) that this is a # function whose type depends on its parameters. -def field(*, default=MISSING, default_factory=MISSING, init=True, repr=True, - hash=None, compare=True, metadata=None): +def field( + *, + default=MISSING, + default_factory=MISSING, + init=True, + repr=True, + hash=None, + compare=True, + metadata=None, +): """Return an object to identify dataclass fields. default is the default value of the field. 
default_factory is a @@ -319,9 +347,8 @@ def field(*, default=MISSING, default_factory=MISSING, init=True, repr=True, """ if default is not MISSING and default_factory is not MISSING: - raise ValueError('cannot specify both default and default_factory') - return Field(default, default_factory, init, repr, hash, compare, - metadata) + raise ValueError("cannot specify both default and default_factory") + return Field(default, default_factory, init, repr, hash, compare, metadata) def _tuple_str(obj_name, fields): @@ -331,27 +358,26 @@ def _tuple_str(obj_name, fields): # Special case for the 0-tuple. if not fields: - return '()' + return "()" # Note the trailing comma, needed if this turns out to be a 1-tuple. return f'({",".join([f"{obj_name}.{f.name}" for f in fields])},)' -def _create_fn(name, args, body, *, globals=None, locals=None, - return_type=MISSING): +def _create_fn(name, args, body, *, globals=None, locals=None, return_type=MISSING): # Note that we mutate locals when exec() is called. Caller # beware! The only callers are internal to this module, so no # worries about external callers. if locals is None: locals = {} - return_annotation = '' + return_annotation = "" if return_type is not MISSING: - locals['_return_type'] = return_type - return_annotation = '->_return_type' - args = ','.join(args) - body = '\n'.join(f' {b}' for b in body) + locals["_return_type"] = return_type + return_annotation = "->_return_type" + args = ",".join(args) + body = "\n".join(f" {b}" for b in body) # Compute the text of the entire function. - txt = f'def {name}({args}){return_annotation}:\n{body}' + txt = f"def {name}({args}){return_annotation}:\n{body}" exec(txt, globals, locals) return locals[name] @@ -365,23 +391,25 @@ def _field_assign(frozen, name, value, self_name): # self_name is what "self" is called in this function: don't # hard-code "self", since that might be a field name. if frozen: - return f'object.__setattr__({self_name},{name!r},{value})' - return f'{self_name}.{name}={value}' + return f"object.__setattr__({self_name},{name!r},{value})" + return f"{self_name}.{name}={value}" def _field_init(f, frozen, globals, self_name): # Return the text of the line in the body of __init__ that will # initialize this field. - default_name = f'_dflt_{f.name}' + default_name = f"_dflt_{f.name}" if f.default_factory is not MISSING: if f.init: # This field has a default factory. If a parameter is # given, use it. If not, call the factory. globals[default_name] = f.default_factory - value = (f'{default_name}() ' - f'if {f.name} is _HAS_DEFAULT_FACTORY ' - f'else {f.name}') + value = ( + f"{default_name}() " + f"if {f.name} is _HAS_DEFAULT_FACTORY " + f"else {f.name}" + ) else: # This is a field that's not in the __init__ params, but # has a default factory function. It needs to be @@ -398,7 +426,7 @@ def _field_init(f, frozen, globals, self_name): # (which, after all, is why we have a factory function!). globals[default_name] = f.default_factory - value = f'{default_name}()' + value = f"{default_name}()" else: # No default factory. if f.init: @@ -428,18 +456,19 @@ def _init_param(f): # example, the equivalent of 'x:int=3' (except instead of 'int', # reference a variable set to int, and instead of '3', reference a # variable set to 3). + default = None if f.default is MISSING and f.default_factory is MISSING: # There's no default, and no default_factory, just output the # variable name and type. 
- default = '' + default = "" elif f.default is not MISSING: # There's a default, this will be the name that's used to look # it up. - default = f'=_dflt_{f.name}' + default = f"=_dflt_{f.name}" elif f.default_factory is not MISSING: # There's a factory function. Set a marker. - default = '=_HAS_DEFAULT_FACTORY' - return f'{f.name}:_type_{f.name}{default}' + default = "=_HAS_DEFAULT_FACTORY" + return f"{f.name}:_type_{f.name}{default}" def _init_fn(fields, frozen, has_post_init, self_name): @@ -457,11 +486,11 @@ def _init_fn(fields, frozen, has_post_init, self_name): if not (f.default is MISSING and f.default_factory is MISSING): seen_default = True elif seen_default: - raise TypeError(f'non-default argument {f.name!r} ' - 'follows default argument') + raise TypeError( + f"non-default argument {f.name!r} " "follows default argument" + ) - globals = {'MISSING': MISSING, - '_HAS_DEFAULT_FACTORY': _HAS_DEFAULT_FACTORY} + globals = {"MISSING": MISSING, "_HAS_DEFAULT_FACTORY": _HAS_DEFAULT_FACTORY} body_lines = [] for f in fields: @@ -473,55 +502,67 @@ def _init_fn(fields, frozen, has_post_init, self_name): # Does this class have a post-init function? if has_post_init: - params_str = ','.join(f.name for f in fields - if f._field_type is _FIELD_INITVAR) - body_lines.append(f'{self_name}.{_POST_INIT_NAME}({params_str})') + params_str = ",".join(f.name for f in fields if f._field_type is _FIELD_INITVAR) + body_lines.append(f"{self_name}.{_POST_INIT_NAME}({params_str})") # If no body lines, use 'pass'. if not body_lines: - body_lines = ['pass'] + body_lines = ["pass"] - locals = {f'_type_{f.name}': f.type for f in fields} - return _create_fn('__init__', - [self_name] + [_init_param(f) for f in fields if f.init], - body_lines, - locals=locals, - globals=globals, - return_type=None) + locals = {f"_type_{f.name}": f.type for f in fields} + return _create_fn( + "__init__", + [self_name] + [_init_param(f) for f in fields if f.init], + body_lines, + locals=locals, + globals=globals, + return_type=None, + ) def _repr_fn(fields): - return _create_fn('__repr__', - ('self',), - ['return self.__class__.__qualname__ + f"(' + - ', '.join([f"{f.name}={{self.{f.name}!r}}" - for f in fields]) + - ')"']) + return _create_fn( + "__repr__", + ("self",), + [ + 'return self.__class__.__qualname__ + f"(' + + ", ".join([f"{f.name}={{self.{f.name}!r}}" for f in fields]) + + ')"' + ], + ) def _frozen_get_del_attr(cls, fields): # XXX: globals is modified on the first call to _create_fn, then # the modified version is used in the second call. Is this okay? - globals = {'cls': cls, - 'FrozenInstanceError': FrozenInstanceError} + globals = {"cls": cls, "FrozenInstanceError": FrozenInstanceError} if fields: - fields_str = '(' + ','.join(repr(f.name) for f in fields) + ',)' + fields_str = "(" + ",".join(repr(f.name) for f in fields) + ",)" else: # Special case for the zero-length tuple. 
- fields_str = '()' - return (_create_fn('__setattr__', - ('self', 'name', 'value'), - (f'if type(self) is cls or name in {fields_str}:', - ' raise FrozenInstanceError(f"cannot assign to field {name!r}")', - f'super(cls, self).__setattr__(name, value)'), - globals=globals), - _create_fn('__delattr__', - ('self', 'name'), - (f'if type(self) is cls or name in {fields_str}:', - ' raise FrozenInstanceError(f"cannot delete field {name!r}")', - f'super(cls, self).__delattr__(name)'), - globals=globals), - ) + fields_str = "()" + return ( + _create_fn( + "__setattr__", + ("self", "name", "value"), + ( + f"if type(self) is cls or name in {fields_str}:", + ' raise FrozenInstanceError(f"cannot assign to field {name!r}")', + f"super(cls, self).__setattr__(name, value)", + ), + globals=globals, + ), + _create_fn( + "__delattr__", + ("self", "name"), + ( + f"if type(self) is cls or name in {fields_str}:", + ' raise FrozenInstanceError(f"cannot delete field {name!r}")', + f"super(cls, self).__delattr__(name)", + ), + globals=globals, + ), + ) def _cmp_fn(name, op, self_tuple, other_tuple): @@ -530,18 +571,20 @@ def _cmp_fn(name, op, self_tuple, other_tuple): # '(self.x,self.y)' and other_tuple is the string # '(other.x,other.y)'. - return _create_fn(name, - ('self', 'other'), - [ 'if other.__class__ is self.__class__:', - f' return {self_tuple}{op}{other_tuple}', - 'return NotImplemented']) + return _create_fn( + name, + ("self", "other"), + [ + "if other.__class__ is self.__class__:", + f" return {self_tuple}{op}{other_tuple}", + "return NotImplemented", + ], + ) def _hash_fn(fields): - self_tuple = _tuple_str('self', fields) - return _create_fn('__hash__', - ('self',), - [f'return hash({self_tuple})']) + self_tuple = _tuple_str("self", fields) + return _create_fn("__hash__", ("self",), [f"return hash({self_tuple})"]) def _is_classvar(a_type, typing): @@ -654,12 +697,12 @@ def _get_field(cls, a_name, a_type): # annotation to be a ClassVar. So, only look for ClassVar if # typing has been imported by any module (not necessarily cls's # module). - typing = sys.modules.get('typing') + typing = sys.modules.get("typing") if typing: - if (_is_classvar(a_type, typing) - or (isinstance(f.type, str) - and _is_type(f.type, cls, typing, typing.ClassVar, - _is_classvar))): + if _is_classvar(a_type, typing) or ( + isinstance(f.type, str) + and _is_type(f.type, cls, typing, typing.ClassVar, _is_classvar) + ): f._field_type = _FIELD_CLASSVAR # If the type is InitVar, or if it's a matching string annotation, @@ -668,10 +711,10 @@ def _get_field(cls, a_name, a_type): # The module we're checking against is the module we're # currently in (dataclasses.py). dataclasses = sys.modules[__name__] - if (_is_initvar(a_type, dataclasses) - or (isinstance(f.type, str) - and _is_type(f.type, cls, dataclasses, dataclasses.InitVar, - _is_initvar))): + if _is_initvar(a_type, dataclasses) or ( + isinstance(f.type, str) + and _is_type(f.type, cls, dataclasses, dataclasses.InitVar, _is_initvar) + ): f._field_type = _FIELD_INITVAR # Validations for individual fields. This is delayed until now, @@ -681,8 +724,7 @@ def _get_field(cls, a_name, a_type): # Special restrictions for ClassVar and InitVar. if f._field_type in (_FIELD_CLASSVAR, _FIELD_INITVAR): if f.default_factory is not MISSING: - raise TypeError(f'field {f.name} cannot have a ' - 'default factory') + raise TypeError(f"field {f.name} cannot have a " "default factory") # Should I check for other field settings? default_factory # seems the most serious to check for. 
Maybe add others. For # example, how about init=False (or really, @@ -691,8 +733,10 @@ def _get_field(cls, a_name, a_type): # For real fields, disallow mutable defaults for known types. if f._field_type is _FIELD and isinstance(f.default, (list, dict, set)): - raise ValueError(f'mutable default {type(f.default)} for field ' - f'{f.name} is not allowed: use default_factory') + raise ValueError( + f"mutable default {type(f.default)} for field " + f"{f.name} is not allowed: use default_factory" + ) return f @@ -711,17 +755,20 @@ def _set_new_attribute(cls, name, value): # take. The common case is to do nothing, so instead of providing a # function that is a no-op, use None to signify that. + def _hash_set_none(cls, fields): return None + def _hash_add(cls, fields): flds = [f for f in fields if (f.compare if f.hash is None else f.hash)] return _hash_fn(flds) + def _hash_exception(cls, fields): # Raise an exception. - raise TypeError(f'Cannot overwrite attribute __hash__ ' - f'in class {cls.__name__}') + raise TypeError(f"Cannot overwrite attribute __hash__ " f"in class {cls.__name__}") + # # +-------------------------------------- unsafe_hash? @@ -732,23 +779,24 @@ def _hash_exception(cls, fields): # | | | | +------- action # | | | | | # v v v v v -_hash_action = {(False, False, False, False): None, - (False, False, False, True ): None, - (False, False, True, False): None, - (False, False, True, True ): None, - (False, True, False, False): _hash_set_none, - (False, True, False, True ): None, - (False, True, True, False): _hash_add, - (False, True, True, True ): None, - (True, False, False, False): _hash_add, - (True, False, False, True ): _hash_exception, - (True, False, True, False): _hash_add, - (True, False, True, True ): _hash_exception, - (True, True, False, False): _hash_add, - (True, True, False, True ): _hash_exception, - (True, True, True, False): _hash_add, - (True, True, True, True ): _hash_exception, - } +_hash_action = { + (False, False, False, False): None, + (False, False, False, True): None, + (False, False, True, False): None, + (False, False, True, True): None, + (False, True, False, False): _hash_set_none, + (False, True, False, True): None, + (False, True, True, False): _hash_add, + (False, True, True, True): None, + (True, False, False, False): _hash_add, + (True, False, False, True): _hash_exception, + (True, False, True, False): _hash_add, + (True, False, True, True): _hash_exception, + (True, True, False, False): _hash_add, + (True, True, False, True): _hash_exception, + (True, True, True, False): _hash_add, + (True, True, True, True): _hash_exception, +} # See https://bugs.python.org/issue32929#msg312829 for an if-statement # version of this table. @@ -760,8 +808,7 @@ def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen): # is defined by the base class, which is found first. fields = {} - setattr(cls, _PARAMS, _DataclassParams(init, repr, eq, order, - unsafe_hash, frozen)) + setattr(cls, _PARAMS, _DataclassParams(init, repr, eq, order, unsafe_hash, frozen)) # Find our base classes in reverse MRO order, and exclude # ourselves. In reversed order so that more derived classes @@ -792,13 +839,12 @@ def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen): # actual default value. Pseudo-fields ClassVars and InitVars are # included, despite the fact that they're not real fields. That's # dealt with later. 
- cls_annotations = cls.__dict__.get('__annotations__', {}) + cls_annotations = cls.__dict__.get("__annotations__", {}) # Now find fields in our class. While doing so, validate some # things, and set the default values (as class attributes) where # we can. - cls_fields = [_get_field(cls, name, type) - for name, type in cls_annotations.items()] + cls_fields = [_get_field(cls, name, type) for name, type in cls_annotations.items()] for f in cls_fields: fields[f.name] = f @@ -820,20 +866,18 @@ def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen): # Do we have any Field members that don't also have annotations? for name, value in cls.__dict__.items(): - if isinstance(value, Field) and not name in cls_annotations: - raise TypeError(f'{name!r} is a field but has no type annotation') + if isinstance(value, Field) and name not in cls_annotations: + raise TypeError(f"{name!r} is a field but has no type annotation") # Check rules that apply if we are derived from any dataclasses. if has_dataclass_bases: # Raise an exception if any of our bases are frozen, but we're not. if any_frozen_base and not frozen: - raise TypeError('cannot inherit non-frozen dataclass from a ' - 'frozen one') + raise TypeError("cannot inherit non-frozen dataclass from a " "frozen one") # Raise an exception if we're frozen, but none of our bases are. if not any_frozen_base and frozen: - raise TypeError('cannot inherit frozen dataclass from a ' - 'non-frozen one') + raise TypeError("cannot inherit frozen dataclass from a " "non-frozen one") # Remember all of the fields on our class (including bases). This # also marks this class as being a dataclass. @@ -844,32 +888,35 @@ def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen): # set __hash__ to None. This is a heuristic, as it's possible # that such a __hash__ == None was not auto-generated, but it # close enough. - class_hash = cls.__dict__.get('__hash__', MISSING) - has_explicit_hash = not (class_hash is MISSING or - (class_hash is None and '__eq__' in cls.__dict__)) + class_hash = cls.__dict__.get("__hash__", MISSING) + has_explicit_hash = not ( + class_hash is MISSING or (class_hash is None and "__eq__" in cls.__dict__) + ) # If we're generating ordering methods, we must be generating the # eq methods. if order and not eq: - raise ValueError('eq must be true if order is true') + raise ValueError("eq must be true if order is true") if init: # Does this class have a post-init function? has_post_init = hasattr(cls, _POST_INIT_NAME) # Include InitVars and regular fields (so, not ClassVars). - flds = [f for f in fields.values() - if f._field_type in (_FIELD, _FIELD_INITVAR)] - _set_new_attribute(cls, '__init__', - _init_fn(flds, - frozen, - has_post_init, - # The name to use for the "self" - # param in __init__. Use "self" - # if possible. - '__dataclass_self__' if 'self' in fields - else 'self', - )) + flds = [f for f in fields.values() if f._field_type in (_FIELD, _FIELD_INITVAR)] + _set_new_attribute( + cls, + "__init__", + _init_fn( + flds, + frozen, + has_post_init, + # The name to use for the "self" + # param in __init__. Use "self" + # if possible. + "__dataclass_self__" if "self" in fields else "self", + ), + ) # Get the fields as a list, and include only real fields. This is # used in all of the following methods. 
@@ -877,54 +924,58 @@ def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen): if repr: flds = [f for f in field_list if f.repr] - _set_new_attribute(cls, '__repr__', _repr_fn(flds)) + _set_new_attribute(cls, "__repr__", _repr_fn(flds)) if eq: # Create _eq__ method. There's no need for a __ne__ method, # since python will call __eq__ and negate it. flds = [f for f in field_list if f.compare] - self_tuple = _tuple_str('self', flds) - other_tuple = _tuple_str('other', flds) - _set_new_attribute(cls, '__eq__', - _cmp_fn('__eq__', '==', - self_tuple, other_tuple)) + self_tuple = _tuple_str("self", flds) + other_tuple = _tuple_str("other", flds) + _set_new_attribute( + cls, "__eq__", _cmp_fn("__eq__", "==", self_tuple, other_tuple) + ) if order: # Create and set the ordering methods. flds = [f for f in field_list if f.compare] - self_tuple = _tuple_str('self', flds) - other_tuple = _tuple_str('other', flds) - for name, op in [('__lt__', '<'), - ('__le__', '<='), - ('__gt__', '>'), - ('__ge__', '>='), - ]: - if _set_new_attribute(cls, name, - _cmp_fn(name, op, self_tuple, other_tuple)): - raise TypeError(f'Cannot overwrite attribute {name} ' - f'in class {cls.__name__}. Consider using ' - 'functools.total_ordering') + self_tuple = _tuple_str("self", flds) + other_tuple = _tuple_str("other", flds) + for name, op in [ + ("__lt__", "<"), + ("__le__", "<="), + ("__gt__", ">"), + ("__ge__", ">="), + ]: + if _set_new_attribute( + cls, name, _cmp_fn(name, op, self_tuple, other_tuple) + ): + raise TypeError( + f"Cannot overwrite attribute {name} " + f"in class {cls.__name__}. Consider using " + "functools.total_ordering" + ) if frozen: for fn in _frozen_get_del_attr(cls, field_list): if _set_new_attribute(cls, fn.__name__, fn): - raise TypeError(f'Cannot overwrite attribute {fn.__name__} ' - f'in class {cls.__name__}') + raise TypeError( + f"Cannot overwrite attribute {fn.__name__} " + f"in class {cls.__name__}" + ) # Decide if/how we're going to create a hash function. - hash_action = _hash_action[bool(unsafe_hash), - bool(eq), - bool(frozen), - has_explicit_hash] + hash_action = _hash_action[ + bool(unsafe_hash), bool(eq), bool(frozen), has_explicit_hash + ] if hash_action: # No need to call _set_new_attribute here, since by the time # we're here the overwriting is unconditional. cls.__hash__ = hash_action(cls, field_list) - if not getattr(cls, '__doc__'): + if not getattr(cls, "__doc__"): # Create a class doc-string. - cls.__doc__ = (cls.__name__ + - str(inspect.signature(cls)).replace(' -> None', '')) + cls.__doc__ = cls.__name__ + str(inspect.signature(cls)).replace(" -> None", "") return cls @@ -932,8 +983,16 @@ def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen): # _cls should never be specified by keyword, so start it with an # underscore. The presence of _cls is used to detect if this # decorator is being called with parameters or not. -def dataclass(_cls=None, *, init=True, repr=True, eq=True, order=False, - unsafe_hash=False, frozen=False): +def dataclass( + _cls=None, + *, + init=True, + repr=True, + eq=True, + order=False, + unsafe_hash=False, + frozen=False, +): """Returns the same class as was passed in, with dunder methods added based on the fields defined in the class. @@ -969,7 +1028,7 @@ def fields(class_or_instance): try: fields = getattr(class_or_instance, _FIELDS) except AttributeError: - raise TypeError('must be called with a dataclass type or instance') + raise TypeError("must be called with a dataclass type or instance") # Exclude pseudo-fields. 
Note that fields is sorted by insertion # order, so the order of the tuple is as the fields were defined. @@ -1021,8 +1080,10 @@ def _asdict_inner(obj, dict_factory): elif isinstance(obj, (list, tuple)): return type(obj)(_asdict_inner(v, dict_factory) for v in obj) elif isinstance(obj, dict): - return type(obj)((_asdict_inner(k, dict_factory), _asdict_inner(v, dict_factory)) - for k, v in obj.items()) + return type(obj)( + (_asdict_inner(k, dict_factory), _asdict_inner(v, dict_factory)) + for k, v in obj.items() + ) else: return copy.deepcopy(obj) @@ -1061,15 +1122,27 @@ def _astuple_inner(obj, tuple_factory): elif isinstance(obj, (list, tuple)): return type(obj)(_astuple_inner(v, tuple_factory) for v in obj) elif isinstance(obj, dict): - return type(obj)((_astuple_inner(k, tuple_factory), _astuple_inner(v, tuple_factory)) - for k, v in obj.items()) + return type(obj)( + (_astuple_inner(k, tuple_factory), _astuple_inner(v, tuple_factory)) + for k, v in obj.items() + ) else: return copy.deepcopy(obj) -def make_dataclass(cls_name, fields, *, bases=(), namespace=None, init=True, - repr=True, eq=True, order=False, unsafe_hash=False, - frozen=False): +def make_dataclass( + cls_name, + fields, + *, + bases=(), + namespace=None, + init=True, + repr=True, + eq=True, + order=False, + unsafe_hash=False, + frozen=False, +): """Return a new dynamically created dataclass. The dataclass name will be 'cls_name'. 'fields' is an iterable @@ -1106,31 +1179,41 @@ class C(Base): for item in fields: if isinstance(item, str): name = item - tp = 'typing.Any' + tp = "typing.Any" elif len(item) == 2: - name, tp, = item + ( + name, + tp, + ) = item elif len(item) == 3: name, tp, spec = item namespace[name] = spec else: - raise TypeError(f'Invalid field: {item!r}') + raise TypeError(f"Invalid field: {item!r}") if not isinstance(name, str) or not name.isidentifier(): - raise TypeError(f'Field names must be valid identifiers: {name!r}') + raise TypeError(f"Field names must be valid identifiers: {name!r}") if keyword.iskeyword(name): - raise TypeError(f'Field names must not be keywords: {name!r}') + raise TypeError(f"Field names must not be keywords: {name!r}") if name in seen: - raise TypeError(f'Field name duplicated: {name!r}') + raise TypeError(f"Field name duplicated: {name!r}") seen.add(name) anns[name] = tp - namespace['__annotations__'] = anns + namespace["__annotations__"] = anns # We use `types.new_class()` instead of simply `type()` to allow dynamic creation # of generic dataclassses. cls = types.new_class(cls_name, bases, {}, lambda ns: ns.update(namespace)) - return dataclass(cls, init=init, repr=repr, eq=eq, order=order, - unsafe_hash=unsafe_hash, frozen=frozen) + return dataclass( + cls, + init=init, + repr=repr, + eq=eq, + order=order, + unsafe_hash=unsafe_hash, + frozen=frozen, + ) def replace(obj, **changes): @@ -1146,7 +1229,7 @@ class C: c = C(1, 2) c1 = replace(c, x=3) assert c1.x == 3 and c1.y == 2 - """ + """ # We're going to mutate 'changes', but that's okay because it's a # new dict, even if called with 'replace(obj, **my_changes)'. @@ -1165,15 +1248,18 @@ class C: if not f.init: # Error if this field is specified in changes. 
if f.name in changes: - raise ValueError(f'field {f.name} is declared with ' - 'init=False, it cannot be specified with ' - 'replace()') + raise ValueError( + f"field {f.name} is declared with " + "init=False, it cannot be specified with " + "replace()" + ) continue if f.name not in changes: if f._field_type is _FIELD_INITVAR: - raise ValueError(f"InitVar {f.name!r} " - 'must be specified with replace()') + raise ValueError( + f"InitVar {f.name!r} " "must be specified with replace()" + ) changes[f.name] = getattr(obj, f.name) # Create the new object, which calls __init__() and From f62d9a03ebe7bf529fcc6fca497fd7947d280732 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jan=20=C4=8Cern=C3=BD?= Date: Mon, 22 Apr 2024 16:35:09 +0200 Subject: [PATCH 7/9] Sort imports --- openscap_report/dataclasses/__init__.py | 4 ++-- openscap_report/dataclasses/dataclasses.py | 6 +++--- .../scap_results_parser/data_structures/cpe_logical_test.py | 3 ++- .../scap_results_parser/data_structures/group.py | 3 ++- .../scap_results_parser/data_structures/oval_definition.py | 3 ++- .../scap_results_parser/data_structures/oval_node.py | 3 ++- .../scap_results_parser/data_structures/oval_object.py | 3 ++- .../scap_results_parser/data_structures/oval_state.py | 3 ++- .../scap_results_parser/data_structures/oval_test.py | 3 ++- .../scap_results_parser/data_structures/oval_variable.py | 3 ++- .../scap_results_parser/data_structures/profile_info.py | 3 ++- .../scap_results_parser/data_structures/reference.py | 3 ++- .../scap_results_parser/data_structures/report.py | 3 ++- .../scap_results_parser/data_structures/result_of_scan.py | 3 ++- openscap_report/scap_results_parser/data_structures/rule.py | 3 ++- .../scap_results_parser/parsers/oval_result_parser.py | 3 ++- openscap_report/scap_results_parser/parsers/rule_parser.py | 1 + tests/test_utils.py | 3 ++- 18 files changed, 36 insertions(+), 20 deletions(-) diff --git a/openscap_report/dataclasses/__init__.py b/openscap_report/dataclasses/__init__.py index 7a171173..94867c8e 100644 --- a/openscap_report/dataclasses/__init__.py +++ b/openscap_report/dataclasses/__init__.py @@ -1,4 +1,4 @@ try: - from dataclasses import dataclass, asdict, field, replace + from dataclasses import asdict, dataclass, field, replace except ImportError: - from .dataclasses import dataclass, asdict, field, replace + from .dataclasses import asdict, dataclass, field, replace diff --git a/openscap_report/dataclasses/dataclasses.py b/openscap_report/dataclasses/dataclasses.py index a98a7f51..92ff3d00 100644 --- a/openscap_report/dataclasses/dataclasses.py +++ b/openscap_report/dataclasses/dataclasses.py @@ -1,9 +1,9 @@ -import re -import sys import copy -import types import inspect import keyword +import re +import sys +import types __all__ = [ "dataclass", diff --git a/openscap_report/scap_results_parser/data_structures/cpe_logical_test.py b/openscap_report/scap_results_parser/data_structures/cpe_logical_test.py index 8221dc91..c0544a6e 100644 --- a/openscap_report/scap_results_parser/data_structures/cpe_logical_test.py +++ b/openscap_report/scap_results_parser/data_structures/cpe_logical_test.py @@ -2,9 +2,10 @@ # SPDX-License-Identifier: LGPL-2.1-or-later from collections import Counter -from openscap_report.dataclasses import asdict, dataclass, field from typing import List +from openscap_report.dataclasses import asdict, dataclass, field + from .cpe_result_eval import EMPTY_RESULT, OVAL_RESULT_TO_CPE_RESULT, CpeResult from .oval_node import OvalNode diff --git 
a/openscap_report/scap_results_parser/data_structures/group.py b/openscap_report/scap_results_parser/data_structures/group.py index 21a98061..dd968abe 100644 --- a/openscap_report/scap_results_parser/data_structures/group.py +++ b/openscap_report/scap_results_parser/data_structures/group.py @@ -1,9 +1,10 @@ # Copyright 2022, Red Hat, Inc. # SPDX-License-Identifier: LGPL-2.1-or-later -from openscap_report.dataclasses import asdict, dataclass, field from typing import List +from openscap_report.dataclasses import asdict, dataclass, field + GROUP_JSON_KEYS = [ "group_id", "title", diff --git a/openscap_report/scap_results_parser/data_structures/oval_definition.py b/openscap_report/scap_results_parser/data_structures/oval_definition.py index 693af105..7f459d6d 100644 --- a/openscap_report/scap_results_parser/data_structures/oval_definition.py +++ b/openscap_report/scap_results_parser/data_structures/oval_definition.py @@ -1,9 +1,10 @@ # Copyright 2022, Red Hat, Inc. # SPDX-License-Identifier: LGPL-2.1-or-later -from openscap_report.dataclasses import asdict, dataclass, field from typing import List +from openscap_report.dataclasses import asdict, dataclass, field + from .oval_node import OvalNode from .oval_reference import OvalReference diff --git a/openscap_report/scap_results_parser/data_structures/oval_node.py b/openscap_report/scap_results_parser/data_structures/oval_node.py index a924f5b3..6b0f71b2 100644 --- a/openscap_report/scap_results_parser/data_structures/oval_node.py +++ b/openscap_report/scap_results_parser/data_structures/oval_node.py @@ -4,9 +4,10 @@ import json import logging from collections import Counter -from openscap_report.dataclasses import asdict, dataclass, field from typing import List +from openscap_report.dataclasses import asdict, dataclass, field + from .oval_result_eval import (EMPTY_RESULT, FULL_RESULT_TO_SHORT_RESULT, SHORT_RESULT_TO_FULL_RESULT, OvalResult) from .oval_test import OvalTest diff --git a/openscap_report/scap_results_parser/data_structures/oval_object.py b/openscap_report/scap_results_parser/data_structures/oval_object.py index f700ae07..33a6cb7e 100644 --- a/openscap_report/scap_results_parser/data_structures/oval_object.py +++ b/openscap_report/scap_results_parser/data_structures/oval_object.py @@ -1,9 +1,10 @@ # Copyright 2022, Red Hat, Inc. # SPDX-License-Identifier: LGPL-2.1-or-later -from openscap_report.dataclasses import asdict, dataclass, field from typing import Dict +from openscap_report.dataclasses import asdict, dataclass, field + @dataclass class OvalObjectMessage: diff --git a/openscap_report/scap_results_parser/data_structures/oval_state.py b/openscap_report/scap_results_parser/data_structures/oval_state.py index e4ca387e..f39478f8 100644 --- a/openscap_report/scap_results_parser/data_structures/oval_state.py +++ b/openscap_report/scap_results_parser/data_structures/oval_state.py @@ -1,9 +1,10 @@ # Copyright 2022, Red Hat, Inc. # SPDX-License-Identifier: LGPL-2.1-or-later -from openscap_report.dataclasses import asdict, dataclass, field from typing import Dict +from openscap_report.dataclasses import asdict, dataclass, field + @dataclass class OvalState: diff --git a/openscap_report/scap_results_parser/data_structures/oval_test.py b/openscap_report/scap_results_parser/data_structures/oval_test.py index d75a0298..e657b8f2 100644 --- a/openscap_report/scap_results_parser/data_structures/oval_test.py +++ b/openscap_report/scap_results_parser/data_structures/oval_test.py @@ -1,9 +1,10 @@ # Copyright 2022, Red Hat, Inc. 
# SPDX-License-Identifier: LGPL-2.1-or-later -from openscap_report.dataclasses import asdict, dataclass, field from typing import Dict, List, Union +from openscap_report.dataclasses import asdict, dataclass, field + from .oval_object import OvalObject from .oval_state import OvalState from .oval_variable import OvalVariable diff --git a/openscap_report/scap_results_parser/data_structures/oval_variable.py b/openscap_report/scap_results_parser/data_structures/oval_variable.py index 809e5b7f..c325d853 100644 --- a/openscap_report/scap_results_parser/data_structures/oval_variable.py +++ b/openscap_report/scap_results_parser/data_structures/oval_variable.py @@ -1,9 +1,10 @@ # Copyright 2022, Red Hat, Inc. # SPDX-License-Identifier: LGPL-2.1-or-later -from openscap_report.dataclasses import asdict, dataclass, field from typing import Dict +from openscap_report.dataclasses import asdict, dataclass, field + @dataclass class OvalVariable: diff --git a/openscap_report/scap_results_parser/data_structures/profile_info.py b/openscap_report/scap_results_parser/data_structures/profile_info.py index 9b29539a..733827ec 100644 --- a/openscap_report/scap_results_parser/data_structures/profile_info.py +++ b/openscap_report/scap_results_parser/data_structures/profile_info.py @@ -1,9 +1,10 @@ # Copyright 2022, Red Hat, Inc. # SPDX-License-Identifier: LGPL-2.1-or-later -from openscap_report.dataclasses import asdict, dataclass, field from typing import Dict, List +from openscap_report.dataclasses import asdict, dataclass, field + PROFILE_JSON_KEYS = [ "profile_id", "description", diff --git a/openscap_report/scap_results_parser/data_structures/reference.py b/openscap_report/scap_results_parser/data_structures/reference.py index c1020ecd..8def5ca1 100644 --- a/openscap_report/scap_results_parser/data_structures/reference.py +++ b/openscap_report/scap_results_parser/data_structures/reference.py @@ -1,9 +1,10 @@ # Copyright 2022, Red Hat, Inc. # SPDX-License-Identifier: LGPL-2.1-or-later -from openscap_report.dataclasses import asdict, dataclass from typing import List +from openscap_report.dataclasses import asdict, dataclass + REFERENCE_JSON_KEYS = [ "name", "href", diff --git a/openscap_report/scap_results_parser/data_structures/report.py b/openscap_report/scap_results_parser/data_structures/report.py index 962a54d3..326c476e 100644 --- a/openscap_report/scap_results_parser/data_structures/report.py +++ b/openscap_report/scap_results_parser/data_structures/report.py @@ -2,9 +2,10 @@ # SPDX-License-Identifier: LGPL-2.1-or-later import logging -from openscap_report.dataclasses import asdict, dataclass, field from typing import Dict +from openscap_report.dataclasses import asdict, dataclass, field + from ..exceptions import MissingProcessableRules from .group import GROUP_JSON_KEYS, Group from .identifier import IDENTIFIER_JSON_KEYS diff --git a/openscap_report/scap_results_parser/data_structures/result_of_scan.py b/openscap_report/scap_results_parser/data_structures/result_of_scan.py index fa795659..fd2d4463 100644 --- a/openscap_report/scap_results_parser/data_structures/result_of_scan.py +++ b/openscap_report/scap_results_parser/data_structures/result_of_scan.py @@ -1,9 +1,10 @@ # Copyright 2022, Red Hat, Inc. 
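
Every data-structure module touched here imports dataclass, field, and asdict from openscap_report.dataclasses, the small shim whose __init__.py (reordered at the start of this patch) prefers the standard-library dataclasses module and falls back to the bundled copy only when that import fails. A minimal sketch of that consumer pattern follows; the module name bundled_dataclasses and the Finding class are hypothetical and used only for illustration.

try:
    from dataclasses import asdict, dataclass, field, replace
except ImportError:
    # Hypothetical stand-in for the bundled copy, used only when the
    # standard library does not provide the module.
    from bundled_dataclasses import asdict, dataclass, field, replace


@dataclass
class Finding:
    rule_id: str
    references: list = field(default_factory=list)
    result: str = "notchecked"


if __name__ == "__main__":
    finding = Finding(rule_id="xccdf_org.example_demo")
    print(asdict(replace(finding, result="pass")))
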
# SPDX-License-Identifier: LGPL-2.1-or-later -from openscap_report.dataclasses import asdict, dataclass, field from typing import Dict, List +from openscap_report.dataclasses import asdict, dataclass, field + SCAN_JSON_KEYS = [ "title", "identity", diff --git a/openscap_report/scap_results_parser/data_structures/rule.py b/openscap_report/scap_results_parser/data_structures/rule.py index 91b72407..acba4579 100644 --- a/openscap_report/scap_results_parser/data_structures/rule.py +++ b/openscap_report/scap_results_parser/data_structures/rule.py @@ -1,9 +1,10 @@ # Copyright 2022, Red Hat, Inc. # SPDX-License-Identifier: LGPL-2.1-or-later -from openscap_report.dataclasses import asdict, dataclass, field from typing import Dict, List +from openscap_report.dataclasses import asdict, dataclass, field + from .cpe_platform import Platform from .identifier import Identifier from .oval_definition import OvalDefinition diff --git a/openscap_report/scap_results_parser/parsers/oval_result_parser.py b/openscap_report/scap_results_parser/parsers/oval_result_parser.py index 889b6d45..d8276d1b 100644 --- a/openscap_report/scap_results_parser/parsers/oval_result_parser.py +++ b/openscap_report/scap_results_parser/parsers/oval_result_parser.py @@ -3,10 +3,11 @@ import logging import uuid -from openscap_report.dataclasses import dataclass from lxml.etree import Element +from openscap_report.dataclasses import dataclass + from ..data_structures import OvalNode from ..exceptions import MissingOVALResult from ..namespaces import NAMESPACES diff --git a/openscap_report/scap_results_parser/parsers/rule_parser.py b/openscap_report/scap_results_parser/parsers/rule_parser.py index f7ba94a0..318fbbed 100644 --- a/openscap_report/scap_results_parser/parsers/rule_parser.py +++ b/openscap_report/scap_results_parser/parsers/rule_parser.py @@ -2,6 +2,7 @@ # SPDX-License-Identifier: LGPL-2.1-or-later import collections + from openscap_report.dataclasses import replace from ..data_structures import Identifier, Reference, Rule, RuleWarning diff --git a/tests/test_utils.py b/tests/test_utils.py index a1ec8a5c..241d944e 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -3,9 +3,10 @@ import argparse import tempfile -from openscap_report.dataclasses import replace from pathlib import Path +from openscap_report.dataclasses import replace + try: from functools import cache except ImportError: From dfb8cfa828bcf9d2d5ea372f649cc68968bb52f5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jan=20=C4=8Cern=C3=BD?= Date: Mon, 22 Apr 2024 16:54:35 +0200 Subject: [PATCH 8/9] Address or waive linters issues --- openscap_report/dataclasses/dataclasses.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/openscap_report/dataclasses/dataclasses.py b/openscap_report/dataclasses/dataclasses.py index 92ff3d00..b2086afd 100644 --- a/openscap_report/dataclasses/dataclasses.py +++ b/openscap_report/dataclasses/dataclasses.py @@ -526,8 +526,8 @@ def _repr_fn(fields): ("self",), [ 'return self.__class__.__qualname__ + f"(' - + ", ".join([f"{f.name}={{self.{f.name}!r}}" for f in fields]) - + ')"' + + ", ".join([f"{f.name}={{self.{f.name}!r}}" for f in fields]) # noqa: W503 + + ')"' # noqa: W503 ], ) @@ -548,7 +548,7 @@ def _frozen_get_del_attr(cls, fields): ( f"if type(self) is cls or name in {fields_str}:", ' raise FrozenInstanceError(f"cannot assign to field {name!r}")', - f"super(cls, self).__setattr__(name, value)", + "super(cls, self).__setattr__(name, value)", ), globals=globals, ), @@ -558,7 +558,7 @@ def 
_frozen_get_del_attr(cls, fields): ( f"if type(self) is cls or name in {fields_str}:", ' raise FrozenInstanceError(f"cannot delete field {name!r}")', - f"super(cls, self).__delattr__(name)", + "super(cls, self).__delattr__(name)", ), globals=globals, ), @@ -701,7 +701,7 @@ def _get_field(cls, a_name, a_type): if typing: if _is_classvar(a_type, typing) or ( isinstance(f.type, str) - and _is_type(f.type, cls, typing, typing.ClassVar, _is_classvar) + and _is_type(f.type, cls, typing, typing.ClassVar, _is_classvar) # noqa: W503 ): f._field_type = _FIELD_CLASSVAR @@ -713,7 +713,7 @@ def _get_field(cls, a_name, a_type): dataclasses = sys.modules[__name__] if _is_initvar(a_type, dataclasses) or ( isinstance(f.type, str) - and _is_type(f.type, cls, dataclasses, dataclasses.InitVar, _is_initvar) + and _is_type(f.type, cls, dataclasses, dataclasses.InitVar, _is_initvar) # noqa: W503 ): f._field_type = _FIELD_INITVAR @@ -801,7 +801,7 @@ def _hash_exception(cls, fields): # version of this table. -def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen): +def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen): # noqa: C901 # Now that dicts retain insertion order, there's no reason to use # an ordered dict. I am leveraging that ordering here, because # derived class fields overwrite base class fields, but the order From 93f853edeed3a7e7923f79926ad0b8c8ed4aa3c5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jan=20=C4=8Cern=C3=BD?= Date: Mon, 22 Apr 2024 17:47:19 +0200 Subject: [PATCH 9/9] Fix or waive 'pylint' problems --- openscap_report/dataclasses/dataclasses.py | 55 ++++++++++++++++----- setup.py | 1 + tests/test_utils.py | 3 +- tests/unit_tests/test_scap_result_parser.py | 6 +-- 4 files changed, 48 insertions(+), 17 deletions(-) diff --git a/openscap_report/dataclasses/dataclasses.py b/openscap_report/dataclasses/dataclasses.py index b2086afd..b7942fe6 100644 --- a/openscap_report/dataclasses/dataclasses.py +++ b/openscap_report/dataclasses/dataclasses.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines import copy import inspect import keyword @@ -155,6 +156,7 @@ class FrozenInstanceError(AttributeError): # A sentinel object for default values to signal that a default # factory will be used. This is given a nice repr() which will appear # in the function signature of dataclasses' constructors. +# pylint: disable=invalid-name class _HAS_DEFAULT_FACTORY_CLASS: def __repr__(self): return "" @@ -165,6 +167,7 @@ def __repr__(self): # A sentinel object to detect if a parameter is supplied or not. Use # a class to give it a better repr. +# pylint: disable=invalid-name class _MISSING_TYPE: pass @@ -177,6 +180,7 @@ class _MISSING_TYPE: # Markers for the various kinds of fields and pseudo-fields. +# pylint: disable=invalid-name class _FIELD_BASE: def __init__(self, name): self.name = name @@ -226,6 +230,7 @@ class InitVar(metaclass=_InitVarMeta): # # When cls._FIELDS is filled in with a list of Field objects, the name # and type fields will have been populated. +# pylint: disable=too-many-instance-attributes class Field: __slots__ = ( "name", @@ -240,6 +245,8 @@ class Field: "_field_type", # Private: not to be used by user code. 
) + # pylint: disable=too-many-arguments + # pylint: disable=redefined-builtin def __init__(self, default, default_factory, init, repr, hash, compare, metadata): self.name = None self.type = None @@ -298,6 +305,8 @@ class _DataclassParams: "frozen", ) + # pylint: disable=too-many-arguments + # pylint: disable=redefined-builtin def __init__(self, init, repr, eq, order, unsafe_hash, frozen): self.init = init self.repr = repr @@ -322,6 +331,8 @@ def __repr__(self): # This function is used instead of exposing Field creation directly, # so that a type checker can be told (via overloads) that this is a # function whose type depends on its parameters. +# pylint: disable=too-many-arguments +# pylint: disable=redefined-builtin def field( *, default=MISSING, @@ -351,6 +362,7 @@ def field( return Field(default, default_factory, init, repr, hash, compare, metadata) +# pylint: disable=redefined-outer-name def _tuple_str(obj_name, fields): # Return a string representing each field of obj_name as a tuple # member. So, if fields is ['x', 'y'] and obj_name is "self", @@ -379,6 +391,7 @@ def _create_fn(name, args, body, *, globals=None, locals=None, return_type=MISSI # Compute the text of the entire function. txt = f"def {name}({args}){return_annotation}:\n{body}" + # pylint: disable=exec-used exec(txt, globals, locals) return locals[name] @@ -444,6 +457,7 @@ def _field_init(f, frozen, globals, self_name): # Only test this now, so that we can create variables for the # default. However, return None to signify that we're not going # to actually do the assignment statement for InitVars. + # pylint: disable=protected-access if f._field_type is _FIELD_INITVAR: return None @@ -502,6 +516,7 @@ def _init_fn(fields, frozen, has_post_init, self_name): # Does this class have a post-init function? if has_post_init: + # pylint: disable=protected-access params_str = ",".join(f.name for f in fields if f._field_type is _FIELD_INITVAR) body_lines.append(f"{self_name}.{_POST_INIT_NAME}({params_str})") @@ -533,6 +548,7 @@ def _repr_fn(fields): def _frozen_get_del_attr(cls, fields): + # pylint: disable=fixme # XXX: globals is modified on the first call to _create_fn, then # the modified version is used in the second call. Is this okay? globals = {"cls": cls, "FrozenInstanceError": FrozenInstanceError} @@ -590,6 +606,8 @@ def _hash_fn(fields): def _is_classvar(a_type, typing): # This test uses a typing internal class, but it's the best way to # test if this is a ClassVar. + # pylint: disable=protected-access + # pylint: disable=unidiomatic-typecheck return type(a_type) is typing._ClassVar @@ -681,6 +699,7 @@ def _get_field(cls, a_name, a_type): # Assume it's a normal field until proven otherwise. We're next # going to decide if it's a ClassVar or InitVar, everything else # is just a normal field. + # pylint: disable=protected-access f._field_type = _FIELD # In addition to checking for actual types here, also check for @@ -703,10 +722,12 @@ def _get_field(cls, a_name, a_type): isinstance(f.type, str) and _is_type(f.type, cls, typing, typing.ClassVar, _is_classvar) # noqa: W503 ): + # pylint: disable=protected-access f._field_type = _FIELD_CLASSVAR # If the type is InitVar, or if it's a matching string annotation, # then it's an InitVar. + # pylint: disable=protected-access if f._field_type is _FIELD: # The module we're checking against is the module we're # currently in (dataclasses.py). 
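
These hunks mix two suppression mechanisms: flake8's `# noqa: W503` markers kept from the previous patch and pylint's `# pylint: disable=...` pragmas added here. Their scopes differ, which is why some pragmas sit on the offending line and others on their own line just above a block. The example below is only an illustration of that scoping; the function and class names are invented.

def add_one(value):
    # flake8's "# noqa: <code>" applies only to the physical line it ends;
    # here it waives W503 ("line break before binary operator") once.
    return (
        value
        + 1  # noqa: W503
    )


class _Marker:  # pylint: disable=too-few-public-methods
    """A pragma on the class statement covers the whole class body."""

    label = "marker"


# pylint: disable=protected-access
# A pragma on its own line applies from here to the end of the enclosing
# block (here, the rest of the module); the disables placed just above the
# f._field_type accesses in this patch rely on the same block scoping.
print(add_one(41), _Marker.label)
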
@@ -715,6 +736,7 @@ def _get_field(cls, a_name, a_type): isinstance(f.type, str) and _is_type(f.type, cls, dataclasses, dataclasses.InitVar, _is_initvar) # noqa: W503 ): + # pylint: disable=protected-access f._field_type = _FIELD_INITVAR # Validations for individual fields. This is delayed until now, @@ -722,6 +744,7 @@ def _get_field(cls, a_name, a_type): # know the field name, which allows for better error reporting. # Special restrictions for ClassVar and InitVar. + # pylint: disable=protected-access if f._field_type in (_FIELD_CLASSVAR, _FIELD_INITVAR): if f.default_factory is not MISSING: raise TypeError(f"field {f.name} cannot have a " "default factory") @@ -732,6 +755,7 @@ def _get_field(cls, a_name, a_type): # ClassVar and InitVar to specify init=. # For real fields, disallow mutable defaults for known types. + # pylint: disable=protected-access if f._field_type is _FIELD and isinstance(f.default, (list, dict, set)): raise ValueError( f"mutable default {type(f.default)} for field " @@ -755,11 +779,12 @@ def _set_new_attribute(cls, name, value): # take. The common case is to do nothing, so instead of providing a # function that is a no-op, use None to signify that. - +# pylint: disable=unused-argument def _hash_set_none(cls, fields): return None +# pylint: disable=unused-argument def _hash_add(cls, fields): flds = [f for f in fields if (f.compare if f.hash is None else f.hash)] return _hash_fn(flds) @@ -801,6 +826,9 @@ def _hash_exception(cls, fields): # version of this table. +# pylint: disable=too-many-locals +# pylint: disable=too-many-branches +# pylint: disable=too-many-statements def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen): # noqa: C901 # Now that dicts retain insertion order, there's no reason to use # an ordered dict. I am leveraging that ordering here, because @@ -873,11 +901,11 @@ def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen): # noqa: C9 if has_dataclass_bases: # Raise an exception if any of our bases are frozen, but we're not. if any_frozen_base and not frozen: - raise TypeError("cannot inherit non-frozen dataclass from a " "frozen one") + raise TypeError("cannot inherit non-frozen dataclass from a frozen one") # Raise an exception if we're frozen, but none of our bases are. if not any_frozen_base and frozen: - raise TypeError("cannot inherit frozen dataclass from a " "non-frozen one") + raise TypeError("cannot inherit frozen dataclass from a non-frozen one") # Remember all of the fields on our class (including bases). This # also marks this class as being a dataclass. @@ -903,6 +931,7 @@ def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen): # noqa: C9 has_post_init = hasattr(cls, _POST_INIT_NAME) # Include InitVars and regular fields (so, not ClassVars). + # pylint: disable=protected-access flds = [f for f in fields.values() if f._field_type in (_FIELD, _FIELD_INITVAR)] _set_new_attribute( cls, @@ -920,6 +949,7 @@ def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen): # noqa: C9 # Get the fields as a list, and include only real fields. This is # used in all of the following methods. + # pylint: disable=protected-access field_list = [f for f in fields.values() if f._field_type is _FIELD] if repr: @@ -1028,10 +1058,12 @@ def fields(class_or_instance): try: fields = getattr(class_or_instance, _FIELDS) except AttributeError: + # pylint: disable=raise-missing-from raise TypeError("must be called with a dataclass type or instance") # Exclude pseudo-fields. 
Note that fields is sorted by insertion # order, so the order of the tuple is as the fields were defined. + # pylint: disable=protected-access return tuple(f for f in fields.values() if f._field_type is _FIELD) @@ -1077,15 +1109,14 @@ def _asdict_inner(obj, dict_factory): value = _asdict_inner(getattr(obj, f.name), dict_factory) result.append((f.name, value)) return dict_factory(result) - elif isinstance(obj, (list, tuple)): + if isinstance(obj, (list, tuple)): return type(obj)(_asdict_inner(v, dict_factory) for v in obj) - elif isinstance(obj, dict): + if isinstance(obj, dict): return type(obj)( (_asdict_inner(k, dict_factory), _asdict_inner(v, dict_factory)) for k, v in obj.items() ) - else: - return copy.deepcopy(obj) + return copy.deepcopy(obj) def astuple(obj, *, tuple_factory=tuple): @@ -1119,15 +1150,14 @@ def _astuple_inner(obj, tuple_factory): value = _astuple_inner(getattr(obj, f.name), tuple_factory) result.append(value) return tuple_factory(result) - elif isinstance(obj, (list, tuple)): + if isinstance(obj, (list, tuple)): return type(obj)(_astuple_inner(v, tuple_factory) for v in obj) - elif isinstance(obj, dict): + if isinstance(obj, dict): return type(obj)( (_astuple_inner(k, tuple_factory), _astuple_inner(v, tuple_factory)) for k, v in obj.items() ) - else: - return copy.deepcopy(obj) + return copy.deepcopy(obj) def make_dataclass( @@ -1239,9 +1269,9 @@ class C: # It's an error to have init=False fields in 'changes'. # If a field is not in 'changes', read its value from the provided obj. - for f in getattr(obj, _FIELDS).values(): # Only consider normal fields or InitVars. + # pylint: disable=protected-access if f._field_type is _FIELD_CLASSVAR: continue @@ -1256,6 +1286,7 @@ class C: continue if f.name not in changes: + # pylint: disable=protected-access if f._field_type is _FIELD_INITVAR: raise ValueError( f"InitVar {f.name!r} " "must be specified with replace()" diff --git a/setup.py b/setup.py index ba42f33a..a6001b18 100644 --- a/setup.py +++ b/setup.py @@ -1,6 +1,7 @@ # Copyright 2022, Red Hat, Inc. 
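
In the test changes that follow, the does_not_raise helper is renamed to DoesNotRaise, which addresses pylint's class-naming convention and drops the redundant object base without changing behaviour: it remains a no-op context manager used as the "no exception expected" counterpart to pytest.raises() in parametrized tests. For comparison only, the sketch below shows the same pattern with contextlib.nullcontext, available since Python 3.7; the test function and values are invented.

from contextlib import nullcontext

import pytest


@pytest.mark.parametrize("divisor, expectation", [
    (2, nullcontext()),
    (0, pytest.raises(ZeroDivisionError)),
])
def test_division(divisor, expectation):
    # The expectation column either expects no exception (nullcontext) or a
    # specific one (pytest.raises), exactly like DoesNotRaise in the suite.
    with expectation:
        assert 10 / divisor >= 0
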
# SPDX-License-Identifier: LGPL-2.1-or-later +# pylint: disable=import-error from setuptools import find_packages, setup diff --git a/tests/test_utils.py b/tests/test_utils.py index 241d944e..7662a6fd 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -5,8 +5,6 @@ import tempfile from pathlib import Path -from openscap_report.dataclasses import replace - try: from functools import cache except ImportError: @@ -16,6 +14,7 @@ from lxml import etree +from openscap_report.dataclasses import replace from openscap_report.scap_results_parser import SCAPResultsParser from openscap_report.scap_results_parser.data_structures import OvalDefinition from openscap_report.scap_results_parser.namespaces import NAMESPACES diff --git a/tests/unit_tests/test_scap_result_parser.py b/tests/unit_tests/test_scap_result_parser.py index 467f1861..6d77333f 100644 --- a/tests/unit_tests/test_scap_result_parser.py +++ b/tests/unit_tests/test_scap_result_parser.py @@ -55,7 +55,7 @@ def test_validation(file_path, result): assert parser.validate(ARF_SCHEMAS_PATH) == result -class does_not_raise(object): +class DoesNotRaise: def __init__(self, enter_result=None): self.enter_result = enter_result @@ -68,8 +68,8 @@ def __exit__(self, *excinfo): @pytest.mark.unit_test @pytest.mark.parametrize("file_path, expectation, e_msg", [ - (PATH_TO_ARF, does_not_raise(), ""), - (PATH_TO_XCCDF, does_not_raise(), "input is the XCCDF"), + (PATH_TO_ARF, DoesNotRaise(), ""), + (PATH_TO_XCCDF, DoesNotRaise(), "input is the XCCDF"), (PATH_TO_EMPTY_XML_FILE, pytest.raises(NotSupportedReportingFormat), "isn't a valid"), (PATH_TO_EMPTY_FILE, pytest.raises(XMLSyntaxError), "empty"), (PATH_TO_XML_FILE, pytest.raises(NotSupportedReportingFormat), "isn't a valid"),