Add support for groups to environments #21

Open · wants to merge 3 commits into master
4 changes: 4 additions & 0 deletions config_specs/environment.yaml
@@ -57,6 +57,10 @@ data:

### OPTIONAL FIELDS ###

# Add packages from a group to the environment
groups:
- Core

# Architecture-specific packages.
#
# (optional field)
4 changes: 2 additions & 2 deletions config_specs/workload.yaml
@@ -89,9 +89,9 @@ data:
modules_enable:
- module:stream

# Analyze a group
# Add packages from a group to the workload
groups:
- core
- Core

# Add packages to the workload that don't exist (yet) in the repositories.
package_placeholders:
41 changes: 32 additions & 9 deletions feedback_pipeline.py
@@ -237,6 +237,12 @@ def _load_config_env(document_id, document, settings):
for pkg in document["data"]["packages"]:
config["packages"].append(str(pkg))

# Comps groups
config["groups"] = []
if "groups" in document["data"]:
for group in document["data"]["groups"]:
config["groups"].append(group)

# Labels connect things together.
# Workloads get installed in environments with the same label.
# They also get included in views with the same label.
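
For reference, a minimal sketch (not part of the diff) of how the new optional "groups" key is read out of a parsed environment definition; the inline YAML document is a made-up example and PyYAML is assumed to be available:

import yaml  # PyYAML, assumed available

# Hypothetical environment definition using the new optional "groups" field
document = yaml.safe_load("""
data:
    packages:
        - bash
    groups:
        - Core
""")

config = {"packages": [str(pkg) for pkg in document["data"]["packages"]]}

# Same shape as the loader above: default to an empty list, then copy entries
config["groups"] = []
if "groups" in document["data"]:
    for group in document["data"]["groups"]:
        config["groups"].append(group)

print(config["groups"])  # ['Core']
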
@@ -1144,6 +1150,24 @@ def _analyze_env(tmp_dnf_cachedir, tmp_installroots, env_conf, repo, arch):
env["errors"]["non_existing_pkgs"].append(pkg)
continue

# Groups
log(" Adding groups...")
if env_conf["groups"]:
base.read_comps(arch_filter=True)
for grp_spec in env_conf["groups"]:
group = base.comps.group_by_pattern(grp_spec)
if not group:
env["errors"]["non_existing_pkgs"].append(grp_spec)
continue
base.group_install(group.id, ['mandatory', 'default'])

# Mark packages as required
pkgs_from_groups = []
for pkg in group.packages_iter():
if pkg.name not in env_conf["packages"]:
pkgs_from_groups.append(pkg.name)
env_conf["packages"].extend(pkgs_from_groups)

# Architecture-specific packages
for pkg in env_conf["arch_packages"][arch]:
try:
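
A self-contained sketch of how the DNF comps calls used above fit together, assuming the standard dnf Python API; the repository id and baseurl are placeholders, and error handling is omitted:

import dnf

base = dnf.Base()
# Placeholder repo; in feedback_pipeline.py the repos come from the configs
base.repos.add_new_repo("example-repo", base.conf,
                        baseurl=["https://example.com/repo/"])
base.fill_sack(load_system_repo=False)

# Load comps (group) metadata, filtered to the current architecture
base.read_comps(arch_filter=True)

group = base.comps.group_by_pattern("Core")
if not group:
    raise SystemExit("Group 'Core' not found in comps")

# Queue the group's mandatory and default packages for installation
base.group_install(group.id, ['mandatory', 'default'])
base.resolve()

# The same iterator the diff uses to mark group packages as required
for pkg in group.packages_iter():
    print(pkg.name)
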
@@ -1429,15 +1453,14 @@ def _analyze_workload(tmp_dnf_cachedir, tmp_installroots, workload_conf, env_con
workload["errors"]["non_existing_pkgs"].append(grp_spec)
continue
base.group_install(group.id, ['mandatory', 'default'])


# TODO: Mark group packages as required... the following code doesn't work
#for pkg in group.packages_iter():
# print(pkg.name)
# workload_conf["packages"].append(pkg.name)




# Mark group packages as required
pkgs_from_groups = []
for pkg in group.packages_iter():
if pkg.name not in workload_conf["packages"]:
pkgs_from_groups.append(pkg.name)
workload_conf["packages"].extend(pkgs_from_groups)

# Filter out the relevant package placeholders for this arch
package_placeholders = {}
for placeholder_name,placeholder_data in workload_conf["package_placeholders"].items():
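
The marking logic is now duplicated between _analyze_env and _analyze_workload; a small sketch of how it could be factored into a shared helper (the helper name is hypothetical, not part of this PR):

def _mark_group_packages_required(group, conf_packages):
    # Append the names of the comps group's packages to conf_packages,
    # skipping names that are already listed; mirrors the logic added above.
    pkgs_from_groups = [pkg.name for pkg in group.packages_iter()
                        if pkg.name not in conf_packages]
    conf_packages.extend(pkgs_from_groups)
    return pkgs_from_groups
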