Added support for pre-release containers (release-engineering#180)
* Added support for pre-release containers

Containers with the 'com.redhat.pre-release' label are released with
different tags and to a different index image (if they are also operators).
midnightercz authored Aug 15, 2023
1 parent 315b717 commit 61276cc
Showing 7 changed files with 571 additions and 109 deletions.
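
The pre-release destination tag is assembled from the OCP version, the value of the pre-release metadata, and the advisory number taken from the push item's origin (see _create_item_groups_for_version in the operator_pusher.py diff below). A minimal sketch of that tag construction, with made-up values:

version = "v4.13"
prerelease_label = "pre"                     # value of the 'com.redhat.prerelease' metadata key
origin = "RHBA-2023:1234"                    # advisory the push item comes from (illustrative)
dst_tag = "{0}-{1}-{2}".format(
    version,
    prerelease_label,
    origin.split("-")[1].replace(":", "-"),  # "2023:1234" -> "2023-1234"
)
# dst_tag == "v4.13-pre-2023-1234"
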
3 changes: 1 addition & 2 deletions pubtools/_quay/models.py
@@ -13,5 +13,4 @@ class BuildIndexImageParam:
target_settings: Dict[str, Any]
tag: str
signing_keys: List[str]
is_hotfix: bool
hotfix_tag: str
destination_tags: List[str]
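
The net effect of the models.py change: a build request no longer carries a single optional hotfix tag, but a list of destination tags that covers regular, hotfix, and pre-release pushes uniformly. A rough sketch of the resulting parameter object, assuming a plain dataclass and showing only the fields visible in this hunk:

from dataclasses import dataclass
from typing import Any, Dict, List

@dataclass
class BuildIndexImageParam:
    # Only the fields visible in this hunk; the real class may define more attributes.
    target_settings: Dict[str, Any]
    tag: str
    signing_keys: List[str]
    destination_tags: List[str]  # replaces the former is_hotfix / hotfix_tag pair
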
210 changes: 144 additions & 66 deletions pubtools/_quay/operator_pusher.py
@@ -446,35 +446,18 @@ def ensure_bundles_present(self):
LOG.info("Bundle {0} is present".format(bundle))
return True

@log_step("Build index images")
def build_index_images(self):
"""
Perform the 'build' part of the operator workflow.
This workflow is a part of push-docker operation.
The workflow can be summarized as:
- Use Pyxis to parse 'com.redhat.openshift.versions'
- Get deprecation list for a given version (list of bundles to be deprecated)
- Create mapping of which bundles should be pushed to which index image versions
- Contact IIB to add the bundles to the index images with multiple threads
def _get_fbc_opted_in_items(self):
"""Get items that are opted in for fbc.
Returns ({str:dict}):
Dictionary containing IIB results and signing keys for all OPM versions. Data will be
used in operator signing. Dictionary structure:
{
"version": {
"iib_result": (...) (object returned by iiblib)
"signing_keys": [...] (list of signing keys to be used for signing)
}
}
An item needs to be targeted for repos with fbc_opt_in set to True, and its
ocp versions need to be higher than 4.12. Inconsistencies in versions
(like support for both > 4.12 and <= 4.12) result in an item error.
"""
iib_results = {}
repos_opted_in = {}
items_opted_in = {}
failed_items = {}

# We need to load pyxis resolved versions at this point

versions_mapping = self.version_items_mapping # noqa: F841

for item in self.push_items:
@@ -522,6 +505,111 @@ def build_index_images(self):
)
)
failed_items[id(item)] = True
return items_opted_in, failed_items

def _get_non_fbc_items_for_version(self, items, version, items_opted_in):
"""Return non fbc items for given ocp version.
Args:
items: List[ContainerPushItem]
list of push items
version: str
ocp version for which items should be returned.
items_opted_in: Dict[int, bool]
mapping of push item id to fbc opt-in flag.
Returns List[ContainerPushItem]:
list of items not opted in fbc
"""
non_fbc_items = []
osev_tuple = tuple([int(x) for x in version.replace("v", "").split(".")])
for item in items:
if not items_opted_in[id(item)] or (items_opted_in[id(item)] and osev_tuple <= (4, 12)):
non_fbc_items.append(item)
elif items_opted_in[id(item)] and osev_tuple >= (4, 13):
LOG.warning(
"Skipping {i}".format(i=item)
+ "from iib build as it's opted in for FBC and targeting OCP version >=4.13"
)
return non_fbc_items

def _create_item_groups_for_version(
self, non_fbc_items, version, is_hotfix=False, is_prerelease=False
):
"""Iterate thought non fbc items and group those together based on destination tag.
Args:
non_fbc_items: List[ContainerPushItem]
list of items not opted in fbc
version: str
ocp version
is_hotfix: bool
flag indicating items are for hotfix push
is_prerelease: bool
flag indicating items are for prerelease push
Returns Dict[str, Dict[str, Any]]:
Dictionary of item groups keyed by ocp version (and by advisory origin for hotfix/prerelease pushes)
"""
item_groups = {
version: {
"items": [],
"overwrite": True,
"destination_tags": [version],
}
}
if is_hotfix or is_prerelease:
for item in non_fbc_items:
tag_part = (
item.metadata["com.redhat.hotfix"]
if is_hotfix
else item.metadata["com.redhat.prerelease"]
)
dst_tag = "{0}-{1}-{2}".format(
version,
tag_part,
item.origin.split("-")[1].replace(":", "-"),
)
item_groups.setdefault(
item.origin,
{
"items": [],
"overwrite": False,
"destination_tags": [dst_tag],
},
)
item_groups[item.origin]["items"].append(item)
else:
for item in non_fbc_items:
item_groups[version]["items"].append(item)
return item_groups
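# Illustrative example (not part of the diff): for a pre-release push item whose
# origin is the advisory "RHSA-2023:5678", targeting version "v4.13", with
# item.metadata["com.redhat.prerelease"] == "pre", the returned mapping looks roughly like:
#   {
#       "v4.13": {"items": [], "overwrite": True, "destination_tags": ["v4.13"]},
#       "RHSA-2023:5678": {
#           "items": [<the push item>],
#           "overwrite": False,
#           "destination_tags": ["v4.13-pre-2023-5678"],
#       },
#   }
# The empty "v4.13" group is skipped later in build_index_images() because it has no items.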

@log_step("Build index images")
def build_index_images(self):
"""
Perform the 'build' part of the operator workflow.
This workflow is a part of push-docker operation.
The workflow can be summarized as:
- Use Pyxis to parse 'com.redhat.openshift.versions'
- Filter out push items which opted in to FBC and shouldn't be pushed
- Set extra attributes if push items are for prerelease or for hotfix
- Get deprecation list for a given version (list of bundles to be deprecated)
- Create mapping of which bundles should be pushed to which index image versions
- Contact IIB to add the bundles to the index images with multiple threads
Returns ({str:dict}):
Dictionary containing IIB results and signing keys for all OPM versions. Data will be
used in operator signing. Dictionary structure:
{
<target_tag>: {
"iib_result": (...) (object returned by iiblib)
"signing_keys": [...] (list of signing keys to be used for signing)
"destination_tags": [...] (list of destination tags)
}
}
"""
iib_results = {}

items_opted_in, failed_items = self._get_fbc_opted_in_items()

# if any of push items failed due to fbc issues, return early and skip all iib operations
if failed_items:
@@ -530,56 +618,43 @@ def build_index_images(self):
build_index_image_params = []

for version, items in sorted(self.version_items_mapping.items()):
non_fbc_items = []
osev_tuple = tuple([int(x) for x in version.replace("v", "").split(".")])
for item in items:
if not items_opted_in[id(item)] or (
items_opted_in[id(item)] and osev_tuple <= (4, 12)
):
non_fbc_items.append(item)
elif items_opted_in[id(item)] and osev_tuple >= (4, 13):
LOG.warning(
"Skipping {i}".format(i=item)
+ "from iib build as it's opted in for FBC and targeting OCP version >=4.13"
)
non_fbc_items = self._get_non_fbc_items_for_version(items, version, items_opted_in)

is_hotfix = any([item.metadata.get("com.redhat.hotfix") for item in non_fbc_items])
is_prerelease = any(
[item.metadata.get("com.redhat.prerelease") for item in non_fbc_items]
)
is_advisory_source = all(
[re.match(r"^[A-Z0-9:\-]{4,40}$", item.origin) for item in non_fbc_items]
)
item_groups = {}
if is_hotfix and not is_advisory_source:
raise ValueError("Cannot push hotfixes without an advisory")
if is_hotfix:
for item in non_fbc_items:
item_groups.setdefault(item.origin, []).append(item)
else:
item_groups["default"] = non_fbc_items
if is_prerelease and not is_advisory_source:
raise ValueError("Cannot push pre release without an advisory")

item_groups = self._create_item_groups_for_version(
non_fbc_items, version, is_hotfix, is_prerelease
)

# Get deprecation list
deprecation_list = self.get_deprecation_list(version)
for group, g_items in item_groups.items():
if not g_items:
for group, group_info in item_groups.items():
if not group_info["items"]:
continue
tag = version
index_image = "{image_repo}:{tag}".format(
image_repo=self.target_settings["iib_index_image"], tag=tag
)
build_tags = []
build_tags.append("{0}-{1}".format(index_image.split(":")[1], self.task_id))

build_tags = ["{0}-{1}".format(index_image.split(":")[1], self.task_id)]
if is_hotfix:
hotfix_tag = "{0}-{1}-{2}".format(
version,
g_items[0].metadata["com.redhat.hotfix"],
g_items[0].origin.split("-")[1].replace(":", "-"),
)
build_tags.append(hotfix_tag)

bundles = [self.public_bundle_ref(i) for i in g_items]
signing_keys = sorted(list(set([item.claims_signing_key for item in g_items])))
bundles = [self.public_bundle_ref(i) for i in group_info["items"]]
signing_keys = sorted(
list(set([item.claims_signing_key for item in group_info["items"]]))
)

if is_hotfix:
target_settings = self.target_settings.copy()
target_settings = self.target_settings.copy()
if not group_info["overwrite"]:
target_settings["iib_overwrite_from_index"] = False
target_settings["iib_overwrite_from_index_token"] = ""
else:
@@ -595,8 +670,7 @@ def build_index_images(self):
target_settings=target_settings,
tag=tag,
signing_keys=signing_keys,
is_hotfix=is_hotfix,
hotfix_tag="" if not is_hotfix else hotfix_tag,
destination_tags=group_info["destination_tags"],
)
)

@@ -622,8 +696,7 @@ def build_index_images(self):
iib_results[param.tag] = {
"iib_result": build_details,
"signing_keys": param.signing_keys,
"is_hotfix": param.is_hotfix,
"hotfix_tag": param.hotfix_tag,
"destination_tags": param.destination_tags,
}

return iib_results
@@ -662,10 +735,11 @@ def push_index_images(self, iib_results, tag_suffix=None):
repo=iib_intermediate_repo,
tag=build_details.build_tags[0],
)
if not results["is_hotfix"]:
dest_image = "{0}:{1}".format(index_image_repo, tag)
else:
dest_image = "{0}:{1}".format(index_image_repo, results["hotfix_tag"])
dest_images = [
"{0}:{1}".format(index_image_repo, dst_tag)
for dst_tag in results["destination_tags"]
]

# We don't use permanent index image here because we always want to overwrite
# production tags with the latest index image (in case of parallel pushes)
index_image_ts = self.target_settings.copy()
Expand All @@ -677,10 +751,14 @@ def push_index_images(self, iib_results, tag_suffix=None):
)

ContainerImagePusher.run_tag_images(
build_details.index_image, [dest_image], True, index_image_ts
build_details.index_image, dest_images, True, index_image_ts
)
if tag_suffix:
dest_image = "{0}:{1}-{2}".format(index_image_repo, tag, tag_suffix)
_dest_images = []
for _dest_tag in results["destination_tags"]:
_dest_images.append(
"{0}:{1}-{2}".format(index_image_repo, _dest_tag, tag_suffix)
)
ContainerImagePusher.run_tag_images(
permanent_index_image, [dest_image], True, index_image_ts
permanent_index_image, _dest_images, True, index_image_ts
)
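
Taken together, build_index_images() now returns one entry per target tag, and the later steps simply fan out over its destination_tags. A minimal sketch of that structure, with illustrative values only:

iib_results = {
    "v4.13": {
        "iib_result": None,                    # iiblib build details object in real runs
        "signing_keys": ["some-signing-key"],
        # ["v4.13"] for a regular push; something like
        # ["v4.13-pre-2023-1234"] for a pre-release group
        "destination_tags": ["v4.13"],
    },
}

push_index_images() then tags the built index image onto "<index_image_repo>:<dst_tag>" for every entry in destination_tags, instead of choosing between a version tag and a single hotfix tag.
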
8 changes: 2 additions & 6 deletions pubtools/_quay/signature_handler.py
@@ -665,16 +665,12 @@ def sign_operator_images(self, iib_results, tag_suffix):
)
# Version acts as a tag of the index image
# use hotfix tag if it exists
if iib_details["is_hotfix"]:
for dest_tag in iib_details["destination_tags"]:
claim_messages += self.construct_index_image_claim_messages(
permanent_index_image,
[iib_details["hotfix_tag"], "%s-%s" % (version, tag_suffix)],
[dest_tag, "%s-%s" % (dest_tag, tag_suffix)],
signing_keys,
)
else:
claim_messages += self.construct_index_image_claim_messages(
permanent_index_image, [version, "%s-%s" % (version, tag_suffix)], signing_keys
)

if not claim_messages:
LOG.info("No new claim messages will be uploaded")