Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

plugin metadata loader with pytest and bash example #136

Open
wants to merge 2 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
97 changes: 66 additions & 31 deletions fmf/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,15 +17,10 @@

import fmf.context
import fmf.utils as utils
from fmf.utils import dict_to_yaml, log

# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Constants
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

SUFFIX = ".fmf"
MAIN = "main" + SUFFIX
IGNORED_DIRECTORIES = ['/dev', '/proc', '/sys']
from fmf.constants import (CONFIG_FILE_NAME, CONFIG_PLUGIN,
IGNORED_DIRECTORIES, MAIN, SUFFIX)
from fmf.plugin_loader import get_plugin_for_file, get_suffixes
from fmf.utils import FileSorting, dict_to_yaml, log

# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# YAML
Expand Down Expand Up @@ -96,10 +91,11 @@ def __init__(self, data, name=None, parent=None):
self.original_data = dict()
self._commit = None
self._raw_data = dict()
self._plugin = None
self._config = dict()
# Track whether the data dictionary has been updated
# (needed to prevent removing nodes with an empty dict).
self._updated = False

# Special handling for top parent
if self.parent is None:
self.name = "/"
Expand All @@ -109,6 +105,7 @@ def __init__(self, data, name=None, parent=None):
# Handle child node creation
else:
self.root = self.parent.root
self._config = self.parent._config
self.name = os.path.join(self.parent.name, name)

# Update data from a dictionary (handle empty nodes)
Expand Down Expand Up @@ -180,6 +177,11 @@ def _initialize(self, path):
"Unable to detect format version: {0}".format(error))
except ValueError:
raise utils.FormatError("Invalid version format")
# try to read fmf config
config_file = os.path.join(self.root, ".fmf", CONFIG_FILE_NAME)
if os.path.exists(config_file):
with open(config_file) as fd:
self._config = yaml.safe_load(fd)

def _merge_plus(self, data, key, value):
""" Handle extending attributes using the '+' suffix """
Expand Down Expand Up @@ -418,7 +420,7 @@ def get(self, name=None, default=None):
return default
return data

def child(self, name, data, source=None):
def child(self, name, data, source=None, plugin=None):
""" Create or update child with given data """
try:
# Update data from a dictionary (handle empty nodes)
Expand All @@ -433,6 +435,7 @@ def child(self, name, data, source=None):
if source is not None:
self.children[name].sources.append(source)
self.children[name]._raw_data = copy.deepcopy(data)
self.children[name]._plugin = plugin

def grow(self, path):
"""
Expand All @@ -454,25 +457,35 @@ def grow(self, path):
except StopIteration:
log.debug("Skipping '{0}' (not accessible).".format(path))
return
# Investigate main.fmf as the first file (for correct inheritance)
filenames = sorted(
[filename for filename in filenames if filename.endswith(SUFFIX)])
try:
filenames.insert(0, filenames.pop(filenames.index(MAIN)))
except ValueError:
pass

filenames_sorted = sorted([FileSorting(filename) for filename in filenames if any(
filter(filename.endswith, get_suffixes(*self._config.get(CONFIG_PLUGIN, []))))])
# Check every metadata file and load data (ignore hidden)
for filename in filenames:
for filename in [filename.value for filename in filenames_sorted]:
if filename.startswith("."):
continue
fullpath = os.path.abspath(os.path.join(dirpath, filename))
log.info("Checking file {0}".format(fullpath))
try:
with open(fullpath, encoding='utf-8') as datafile:
data = yaml.load(datafile, Loader=YamlLoader)
except yaml.error.YAMLError as error:
raise(utils.FileError("Failed to parse '{0}'.\n{1}".format(
fullpath, error)))
if fullpath.endswith(SUFFIX):
plugin = None
try:
with open(fullpath, encoding='utf-8') as datafile:
data = yaml.load(datafile, Loader=YamlLoader)
except yaml.error.YAMLError as error:
raise (
utils.FileError(
"Failed to parse '{0}'.\n{1}".format(
fullpath, error)))
else:
data = None
plugin = get_plugin_for_file(
fullpath, *self._config.get(CONFIG_PLUGIN, []))
log.debug("Used plugin {}".format(plugin))
if plugin:
data = plugin().read(fullpath)
# ignore results of output if there is None
if data is None:
continue
log.data(pretty(data))
# Handle main.fmf as data for self
if filename == MAIN:
Expand All @@ -481,7 +494,11 @@ def grow(self, path):
self.update(data)
# Handle other *.fmf files as children
else:
self.child(os.path.splitext(filename)[0], data, fullpath)
self.child(
os.path.splitext(filename)[0],
data,
fullpath,
plugin=plugin)
# Explore every child directory (ignore hidden dirs and subtrees)
for dirname in sorted(dirnames):
if dirname.startswith("."):
Expand Down Expand Up @@ -673,7 +690,7 @@ def _locate_raw_data(self):
node_data = node_data[key]

# The full raw data were read from the last source
return node_data, full_data, node.sources[-1]
return node_data, full_data, node.sources[-1], hierarchy, node._plugin

def __enter__(self):
"""
Expand All @@ -698,13 +715,31 @@ def __enter__(self):
export to yaml does not preserve this information. The feature
is experimental and can be later modified, use at your own risk.
"""
return self._locate_raw_data()[0]
item = self._locate_raw_data()[0]
self._raw_data_before_modification = copy.deepcopy(item)
return item

def __exit__(self, exc_type, exc_val, exc_tb):
    """
    Experimental: Store modified metadata to disk.

    Compares the current raw node data with the snapshot taken in
    __enter__ and writes the result back either as a plain fmf file or
    through the plugin which originally provided the file.
    """
    node_data, full_data, source, hierarchy, plugin = \
        self._locate_raw_data()
    # Find the differences against the __enter__ snapshot so plugins
    # can work effectively with just the changed keys
    append = dict()
    modified = dict()
    for key, value in node_data.items():
        if key not in self._raw_data_before_modification:
            append[key] = value
        elif self._raw_data_before_modification[key] != value:
            modified[key] = value
    deleted = [
        key for key in self._raw_data_before_modification
        if key not in node_data]

    if plugin is None:
        # Plain fmf file: store the complete raw data back as yaml
        with open(source, "w", encoding='utf-8') as file:
            file.write(dict_to_yaml(full_data))
    else:
        # File owned by a plugin: delegate writing to the plugin
        plugin().write(
            source, hierarchy, node_data, append, modified, deleted)

def __getitem__(self, key):
"""
Expand Down
11 changes: 11 additions & 0 deletions fmf/constants.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Constants
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

SUFFIX = ".fmf"
MAIN = "main" + SUFFIX
IGNORED_DIRECTORIES = ['/dev', '/proc', '/sys']
# Environment variable holding a comma separated list of plugins.
# Prefixed with FMF_ to mitigate name conflicts with other tools
# (requested in review; a plain "PLUGINS" is too generic).
PLUGIN_ENV = "FMF_PLUGINS"
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

FMF_PLUGINS to mitigate name conflict

Copy link
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Yes, sounds reasonable.

# Name of the fmf configuration file (looked up under the .fmf directory)
CONFIG_FILE_NAME = "config"
# Key in the config file listing enabled plugins
CONFIG_PLUGIN = "plugins"
105 changes: 105 additions & 0 deletions fmf/plugin_loader.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,105 @@
import importlib
import importlib.machinery
import importlib.util
import inspect
import os
import re
from functools import lru_cache
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

fmf should also support Python 2.7, where `functools.lru_cache` is not available.


import yaml

from fmf.constants import PLUGIN_ENV, SUFFIX
from fmf.utils import log


class Plugin:
    """
    Main abstract class for FMF plugins.

    Subclasses have to define ``extensions`` (list of file suffixes the
    plugin handles, e.g. ``[".py"]``) and ``file_patters`` (list of
    regular expressions a file name has to match).
    """
    # File suffixes handled by this plugin, e.g. [".py"]
    extensions = list()
    # Regular expressions a file name has to match
    # (attribute name keeps the spelling used across the code base)
    file_patters = list()

    def read(self, filename):
        """
        Return python dictionary representation of metadata inside
        the given file (fmf structure).
        """
        raise NotImplementedError("Define own implementation")

    @staticmethod
    def __define_undefined(hierarchy, modified, append):
        """ Build nested dict along hierarchy filled with changed items """
        output = dict()
        current = output
        for key in hierarchy:
            if key not in current or current[key] is None:
                current[key] = dict()
            current = current[key]
        for k, v in modified.items():
            current[k] = v
        for k, v in append.items():
            current[k] = v
        return output

    def write(
            self, filename, hierarchy, data, append_dict, modified_dict,
            deleted_items):
        """
        Write data in dictionary representation back to file.

        If not overridden by the plugin, create a new fmf file with the
        same base name next to the original file. Nodes created this way
        will not use the plugin method any more.
        """
        path = os.path.dirname(filename)
        basename = os.path.basename(filename)
        # Pick the first matching extension; fail with a clear message
        # instead of an obscure IndexError when nothing matches
        matching = [
            extension for extension in self.extensions
            if basename.endswith(extension)]
        if not matching:
            raise ValueError(
                "File '{0}' does not match plugin extensions {1}".format(
                    filename, self.extensions))
        current_extension = matching[0]
        # current_extension is a plain string, no list() wrapper needed
        without_extension = basename[0:-len(current_extension)]
        fmf_file = os.path.join(path, without_extension + ".fmf")
        with open(fmf_file, "w") as fd:
            yaml.safe_dump(
                self.__define_undefined(
                    hierarchy,
                    modified_dict,
                    append_dict),
                stream=fd)


@lru_cache(maxsize=None)
def enabled_plugins(*plugins):
    """
    Return classes of all enabled plugins.

    Plugins are taken from the PLUGIN_ENV environment variable
    (comma separated list) when set, otherwise from the given
    arguments. Each item is either a path to a python file or an
    importable module name. Results are cached via lru_cache so each
    plugin module is loaded only once.
    """
    # Environment variable overrides plugins given as arguments
    env_plugins = os.getenv(PLUGIN_ENV)
    if env_plugins:
        plugins = env_plugins.split(",")
    plugin_list = list()
    for item in plugins:
        if os.path.exists(item):
            # Path to a file: load the module directly from the source
            loader = importlib.machinery.SourceFileLoader(
                os.path.basename(item), item)
            module = importlib.util.module_from_spec(
                importlib.util.spec_from_loader(loader.name, loader))
            loader.exec_module(module)
        else:
            # Module name: import it the regular way
            module = importlib.import_module(item)
        # Collect every Plugin subclass defined in the module
        for name, plugin in inspect.getmembers(module):
            if inspect.isclass(plugin) and plugin is not Plugin \
                    and issubclass(plugin, Plugin):
                plugin_list.append(plugin)
                log.info("Loaded plugin {}".format(plugin))
    return plugin_list


def get_suffixes(*plugins):
    """ Return all supported metadata file suffixes (fmf plus plugins) """
    plugin_suffixes = [
        extension
        for plugin in enabled_plugins(*plugins)
        for extension in plugin.extensions]
    return [SUFFIX] + plugin_suffixes


def get_plugin_for_file(filename, *plugins):
    """
    Return the plugin class responsible for the given file.

    A plugin matches when the file suffix is listed in its
    ``extensions`` and the file name matches at least one of its
    ``file_patters`` regular expressions. Return None when no enabled
    plugin matches.
    """
    # Files without an extension cannot be matched by suffix
    if "." not in filename:
        return None
    extension = "." + filename.rsplit(".", 1)[1]
    for item in enabled_plugins(*plugins):
        if extension in item.extensions and any(
                re.search(pattern, filename)
                for pattern in item.file_patters):
            # fixed doubled word in the original log message
            log.debug("File {} parsed by plugin {}".format(filename, item))
            return item
    return None
Empty file added fmf/plugins/__init__.py
Empty file.
27 changes: 27 additions & 0 deletions fmf/plugins/bash.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
import os
import re

from fmf.plugin_loader import Plugin
from fmf.utils import log


class Bash(Plugin):
    """ Metadata plugin for bash test scripts (``test*.sh`` files) """
    extensions = [".sh"]
    file_patters = ["test.*"]

    @staticmethod
    def update_data(filename, pattern="^#.*:FMF:"):
        """
        Extract metadata from '# ...:FMF: key: value' comment lines.

        Each matching line contributes one key/value pair; the 'test'
        key defaults to executing the script itself.
        """
        out = dict(test="./" + os.path.basename(filename))
        # Compile once: marker prefix + optional spaces + payload.
        # Matching this single regex is equivalent to the plain marker
        # check since the trailing '\s*(.*)' can match an empty string.
        item_regexp = re.compile(r"{}\s*(.*)".format(pattern))
        with open(filename) as fd:
            # Iterate the file lazily, no need for readlines()
            for line in fd:
                match = item_regexp.match(line)
                if match:
                    item = match.groups()[0]
                    identifier, value = item.split(":", 1)
                    out[identifier] = value.lstrip(" ")
        return out

    def read(self, file_name):
        """ Return fmf metadata parsed from the given bash script """
        log.info("Processing Item: {}".format(file_name))
        return self.update_data(file_name)
4 changes: 4 additions & 0 deletions fmf/plugins/pytest/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
from fmf.plugins.pytest.plugin import Pytest
from fmf.plugins.pytest.tmt_semantic import TMT

# Public API of the pytest plugin package
# (string literals are identical to Pytest.__name__ / TMT.__name__)
__all__ = ["Pytest", "TMT"]
13 changes: 13 additions & 0 deletions fmf/plugins/pytest/constants.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
# Config key with postprocessing callbacks for collected tests
# (the original file defined this constant twice; duplicate removed)
CONFIG_POSTPROCESSING_TEST = "test_postprocessing"

# Default pytest configuration: the 'test' value is a python snippet
# evaluated during postprocessing to build the test execution command
PYTEST_DEFAULT_CONF = {
    CONFIG_POSTPROCESSING_TEST: {
        "test": """
cls_str = ("::" + str(cls.name)) if cls.name else ""
escaped = shlex.quote(filename + cls_str + "::" + test.name)
f"python3 -m pytest -m '' -v {escaped}" """
    }
}

# Config keys controlling how generated metadata are merged
CONFIG_MERGE_PLUS = "merge_plus"
CONFIG_MERGE_MINUS = "merge_minus"
CONFIG_ADDITIONAL_KEY = "additional_keys"
Loading