diff --git a/.binny.yaml b/.binny.yaml
index a9a37439333..cab909d2941 100644
--- a/.binny.yaml
+++ b/.binny.yaml
@@ -115,3 +115,19 @@ tools:
method: github-release
with:
repo: cli/cli
+
+  # used to upload and download the test fixture cache
+ - name: oras
+ version:
+ want: v1.2.0
+ method: github-release
+ with:
+ repo: oras-project/oras
+
+  # used to parse test fixture cache metadata
+ - name: yq
+ version:
+ want: v4.44.3
+ method: github-release
+ with:
+ repo: mikefarah/yq
\ No newline at end of file
diff --git a/.github/actions/bootstrap/action.yaml b/.github/actions/bootstrap/action.yaml
index bc771d3b508..6150113aee0 100644
--- a/.github/actions/bootstrap/action.yaml
+++ b/.github/actions/bootstrap/action.yaml
@@ -13,16 +13,15 @@ inputs:
cache-key-prefix:
description: "Prefix all cache keys with this value"
required: true
- default: "1ac8281053"
- compute-fingerprints:
- description: "Compute test fixture fingerprints"
+ default: "181053ac82"
+ download-test-fixture-cache:
+    description: "Download test fixture cache from OCI and GitHub Actions"
required: true
- default: "true"
+ default: "false"
bootstrap-apt-packages:
description: "Space delimited list of tools to install via apt"
default: "libxml2-utils"
-
runs:
using: "composite"
steps:
@@ -54,8 +53,14 @@ runs:
run: |
DEBIAN_FRONTEND=noninteractive sudo apt update && sudo -E apt install -y ${{ inputs.bootstrap-apt-packages }}
- - name: Create all cache fingerprints
- if: inputs.compute-fingerprints == 'true'
- shell: bash
- run: make fingerprints
+    - name: Restore ORAS cache from GitHub Actions
+ if: inputs.download-test-fixture-cache == 'true'
+ uses: actions/cache@704facf57e6136b1bc63b828d79edcd491f0ee84 # v3.3.2
+ with:
+ path: ${{ github.workspace }}/.tmp/oras-cache
+ key: ${{ inputs.cache-key-prefix }}-oras-cache
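+    # pulls the fixture cache published to ghcr.io (see the "Test fixture cache: publish" workflow)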
+ - name: Download test fixture cache
+ if: inputs.download-test-fixture-cache == 'true'
+ shell: bash
+ run: make download-test-fixture-cache
diff --git a/.github/scripts/ci-check.sh b/.github/scripts/ci-check.sh
deleted file mode 100755
index 0ab83a318ae..00000000000
--- a/.github/scripts/ci-check.sh
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/usr/bin/env bash
-
-red=$(tput setaf 1)
-bold=$(tput bold)
-normal=$(tput sgr0)
-
-# assert we are running in CI (or die!)
-if [[ -z "$CI" ]]; then
- echo "${bold}${red}This step should ONLY be run in CI. Exiting...${normal}"
- exit 1
-fi
diff --git a/.github/scripts/find_cache_paths.py b/.github/scripts/find_cache_paths.py
new file mode 100755
index 00000000000..cc2e4081af1
--- /dev/null
+++ b/.github/scripts/find_cache_paths.py
@@ -0,0 +1,135 @@
+#!/usr/bin/env python3
+from __future__ import annotations
+
+import os
+import glob
+import sys
+import json
+import hashlib
+
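+# scans the repo for .fingerprint files that sit next to test fixture paths, verifies that
+# those paths exist and have content, then emits a JSON summary (one combined digest plus
+# per-path digests) that is used to key the test fixture cache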
+
+IGNORED_PREFIXES = []
+
+
+def find_fingerprints_and_check_dirs(base_dir):
+ all_fingerprints = set(glob.glob(os.path.join(base_dir, '**', 'test*', '**', '*.fingerprint'), recursive=True))
+
+ all_fingerprints = {os.path.relpath(fp) for fp in all_fingerprints
+ if not any(fp.startswith(prefix) for prefix in IGNORED_PREFIXES)}
+
+ if not all_fingerprints:
+ show("No .fingerprint files or cache directories found.")
+ exit(1)
+
+ missing_content = []
+ valid_paths = set()
+ fingerprint_contents = []
+
+    for fingerprint in all_fingerprints:
+        path = fingerprint.replace('.fingerprint', '')
+
+        if not os.path.exists(path):
+            missing_content.append(path)
+            continue
+
+        # a directory must have content to count as valid cached material
+        if os.path.isdir(path) and not os.listdir(path):
+            missing_content.append(path)
+            continue
+
+        valid_paths.add(path)
+
+        # read the fingerprint for every valid path (plain files included) so the
+        # combined digest reflects all fixtures, not just directory-backed ones
+        with open(fingerprint, 'r') as f:
+            content = f.read().strip()
+        fingerprint_contents.append((fingerprint, content))
+
+ return sorted(valid_paths), missing_content, fingerprint_contents
+
+
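+# a .fingerprint file holds sha256sum-style lines: "<digest> <path>" pairs, one per input file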
+def parse_fingerprint_contents(fingerprint_content):
+ input_map = {}
+ for line in fingerprint_content.splitlines():
+ digest, path = line.split()
+ input_map[path] = digest
+ return input_map
+
+
+def calculate_sha256(fingerprint_contents):
+ sorted_fingerprint_contents = sorted(fingerprint_contents, key=lambda x: x[0])
+
+ concatenated_contents = ''.join(content for _, content in sorted_fingerprint_contents)
+
+ sha256_hash = hashlib.sha256(concatenated_contents.encode()).hexdigest()
+
+ return sha256_hash
+
+
+def calculate_file_sha256(file_path):
+ sha256_hash = hashlib.sha256()
+ with open(file_path, 'rb') as f:
+ for byte_block in iter(lambda: f.read(4096), b""):
+ sha256_hash.update(byte_block)
+ return sha256_hash.hexdigest()
+
+
+def show(*s: str):
+ print(*s, file=sys.stderr)
+
+
+def main(file_path: str | None):
+ base_dir = '.'
+ valid_paths, missing_content, fingerprint_contents = find_fingerprints_and_check_dirs(base_dir)
+
+ if missing_content:
+ show("The following paths are missing or have no content, but have corresponding .fingerprint files:")
+ for path in sorted(missing_content):
+ show(f"- {path}")
+ show("Please ensure these paths exist and have content if they are directories.")
+ exit(1)
+
+ sha256_hash = calculate_sha256(fingerprint_contents)
+
+ paths_with_digests = []
+ for path in sorted(valid_paths):
+ fingerprint_file = f"{path}.fingerprint"
+ try:
+ if os.path.exists(fingerprint_file):
+ file_digest = calculate_file_sha256(fingerprint_file)
+
+ # Parse the fingerprint file to get the digest/path tuples
+ with open(fingerprint_file, 'r') as f:
+ fingerprint_content = f.read().strip()
+ input_map = parse_fingerprint_contents(fingerprint_content)
+
+ paths_with_digests.append({
+ "path": path,
+ "digest": file_digest,
+ "input": input_map
+ })
+
+ except Exception as e:
+ show(f"Error processing {fingerprint_file}: {e}")
+            raise
+
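+    # illustrative output shape:
+    #   {"digest": "<combined sha256>", "paths": [{"path": "...", "digest": "...", "input": {"<path>": "<digest>"}}]}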
+ output = {
+ "digest": sha256_hash,
+ "paths": paths_with_digests
+ }
+
+ content = json.dumps(output, indent=2, sort_keys=True)
+
+ if file_path:
+ with open(file_path, 'w') as f:
+ f.write(content)
+
+ print(content)
+
+
+if __name__ == "__main__":
+ file_path = None
+ if len(sys.argv) > 1:
+ file_path = sys.argv[1]
+ main(file_path)
diff --git a/.github/scripts/fingerprint_docker_fixtures.py b/.github/scripts/fingerprint_docker_fixtures.py
new file mode 100755
index 00000000000..4a74420e010
--- /dev/null
+++ b/.github/scripts/fingerprint_docker_fixtures.py
@@ -0,0 +1,70 @@
+#!/usr/bin/env python3
+
+import os
+import subprocess
+import hashlib
+
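+# writes a cache.fingerprint file into each test-fixtures directory that contains image-*
+# fixtures; every line is "<sha256> <path relative to the fixture dir>", covering all files
+# that git tracks (or that are untracked but not ignored) in those image directories
+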
+BOLD = '\033[1m'
+YELLOW = '\033[0;33m'
+RESET = '\033[0m'
+
+
+def print_message(message):
+ print(f"{YELLOW}{message}{RESET}")
+
+
+def sha256sum(filepath):
+ h = hashlib.sha256()
+ with open(filepath, 'rb') as f:
+ for chunk in iter(lambda: f.read(4096), b""):
+ h.update(chunk)
+ return h.hexdigest()
+
+
+def git_tracked_or_untracked_files(directory):
+    """Return a sorted list of files in the directory that are tracked, or untracked but not ignored, by Git."""
+ result = subprocess.run(
+ ["git", "ls-files", "--cached", "--others", "--exclude-standard"],
+ cwd=directory,
+ stdout=subprocess.PIPE,
+ text=True
+ )
+ return sorted(result.stdout.strip().splitlines())
+
+
+def find_test_fixture_dirs_with_images(base_dir):
+ """Find directories that contain 'test-fixtures' and at least one 'image-*' directory."""
+ for root, dirs, files in os.walk(base_dir):
+ if 'test-fixtures' in root:
+ image_dirs = [d for d in dirs if d.startswith('image-')]
+ if image_dirs:
+ yield os.path.realpath(root)
+
+
+def generate_fingerprints():
+ print_message("creating fingerprint files for docker fixtures...")
+
+ for test_fixture_dir in find_test_fixture_dirs_with_images('.'):
+ cache_fingerprint_path = os.path.join(test_fixture_dir, 'cache.fingerprint')
+
+ with open(cache_fingerprint_path, 'w') as fingerprint_file:
+ for image_dir in find_image_dirs(test_fixture_dir):
+                for file in git_tracked_or_untracked_files(image_dir):
+ file_path = os.path.join(image_dir, file)
+ checksum = sha256sum(file_path)
+ path_from_fixture_dir = os.path.relpath(file_path, test_fixture_dir)
+ fingerprint_file.write(f"{checksum} {path_from_fixture_dir}\n")
+
+
+def find_image_dirs(test_fixture_dir):
+ """Find all 'image-*' directories inside a given test-fixture directory."""
+ result = []
+ for root, dirs, files in os.walk(test_fixture_dir):
+ for dir_name in dirs:
+ if dir_name.startswith('image-'):
+ result.append(os.path.join(root, dir_name))
+ return sorted(result)
+
+
+if __name__ == "__main__":
+ generate_fingerprints()
diff --git a/.github/scripts/labeler.py b/.github/scripts/labeler.py
old mode 100644
new mode 100755
index b33dd6df028..2efd33206c8
--- a/.github/scripts/labeler.py
+++ b/.github/scripts/labeler.py
@@ -1,3 +1,5 @@
+#!/usr/bin/env python3
+
from __future__ import annotations
import sys
diff --git a/.github/scripts/labeler_test.py b/.github/scripts/labeler_test.py
old mode 100644
new mode 100755
index 36eebd18c9f..d792929f106
--- a/.github/scripts/labeler_test.py
+++ b/.github/scripts/labeler_test.py
@@ -1,3 +1,5 @@
+#!/usr/bin/env python3
+
import unittest
from unittest.mock import patch
import subprocess
diff --git a/.github/workflows/release-version-file.yaml b/.github/workflows/release-version-file.yaml
index cd41a0c8e91..6635a053f12 100644
--- a/.github/workflows/release-version-file.yaml
+++ b/.github/workflows/release-version-file.yaml
@@ -1,4 +1,4 @@
-name: "Release"
+name: "Release: version file"
on:
diff --git a/.github/workflows/test-fixture-cache-publish.yaml b/.github/workflows/test-fixture-cache-publish.yaml
new file mode 100644
index 00000000000..3144a0b6bc5
--- /dev/null
+++ b/.github/workflows/test-fixture-cache-publish.yaml
@@ -0,0 +1,39 @@
+name: "Test fixture cache: publish"
+
+on:
+ workflow_dispatch:
+ schedule:
+ # run nightly at 4AM UTC
+ - cron: "0 4 * * *"
+
+permissions:
+ contents: read
+
+jobs:
+
+ Publish:
+ name: "Publish test fixture image cache"
+ # we use this runner to get enough storage space for docker images and fixture cache
+ runs-on: ubuntu-22.04-4core-16gb
+ permissions:
+ packages: write
+ steps:
+ - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 #v4.1.7
+
+ - name: Bootstrap environment
+ uses: ./.github/actions/bootstrap
+ with:
+ # we want to rebuild the cache with no previous state
+ download-test-fixture-cache: false
+
+ - name: Run all tests
+ run: make test
+ env:
+ # we want to rebuild the cache with no previous state
+ DOWNLOAD_TEST_FIXTURE_CACHE: "false"
+
+ - name: Login to GitHub Container Registry (ORAS)
+ run: echo "${{ secrets.GITHUB_TOKEN }}" | .tool/oras login ghcr.io -u ${{ github.actor }} --password-stdin
+
+ - name: Publish test fixture cache
+ run: make upload-test-fixture-cache
diff --git a/.github/workflows/update-bootstrap-tools.yml b/.github/workflows/update-bootstrap-tools.yml
index b07ad4580d7..3cdedf52af4 100644
--- a/.github/workflows/update-bootstrap-tools.yml
+++ b/.github/workflows/update-bootstrap-tools.yml
@@ -19,7 +19,6 @@ jobs:
uses: ./.github/actions/bootstrap
with:
bootstrap-apt-packages: ""
- compute-fingerprints: "false"
go-dependencies: false
- name: "Update tool versions"
diff --git a/.github/workflows/validations.yaml b/.github/workflows/validations.yaml
index 669d8b8c5c4..0ebca5c8235 100644
--- a/.github/workflows/validations.yaml
+++ b/.github/workflows/validations.yaml
@@ -35,48 +35,8 @@ jobs:
- name: Bootstrap environment
uses: ./.github/actions/bootstrap
-
- - name: Restore file executable test-fixture cache
- uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 #v4.0.2
- with:
- path: syft/file/cataloger/executable/test-fixtures/elf/bin
- key: ${{ runner.os }}-unit-file-executable-elf-cache-${{ hashFiles( 'syft/file/cataloger/executable/test-fixtures/elf/cache.fingerprint' ) }}
-
- - name: Restore file executable shared-info test-fixture cache
- uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 #v4.0.2
- with:
- path: syft/file/cataloger/executable/test-fixtures/shared-info/bin
- key: ${{ runner.os }}-unit-file-executable-shared-info-cache-${{ hashFiles( 'syft/file/cataloger/executable/test-fixtures/shared-info/cache.fingerprint' ) }}
-
- - name: Restore Java test-fixture cache
- uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 #v4.0.2
- with:
- path: syft/pkg/cataloger/java/test-fixtures/java-builds/packages
- key: ${{ runner.os }}-unit-java-cache-${{ hashFiles( 'syft/pkg/cataloger/java/test-fixtures/java-builds/cache.fingerprint' ) }}
-
- - name: Restore RPM test-fixture cache
- uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 #v4.0.2
with:
- path: syft/pkg/cataloger/redhat/test-fixtures/rpms
- key: ${{ runner.os }}-unit-rpm-cache-${{ hashFiles( 'syft/pkg/cataloger/redhat/test-fixtures/rpms.fingerprint' ) }}
-
- - name: Restore go binary test-fixture cache
- uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 #v4.0.2
- with:
- path: syft/pkg/cataloger/golang/test-fixtures/archs/binaries
- key: ${{ runner.os }}-unit-go-binaries-cache-${{ hashFiles( 'syft/pkg/cataloger/golang/test-fixtures/archs/binaries.fingerprint' ) }}
-
- - name: Restore binary cataloger test-fixture cache
- uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 #v4.0.2
- with:
- path: syft/pkg/cataloger/binary/test-fixtures/classifiers/bin
- key: ${{ runner.os }}-unit-binary-cataloger-cache-${{ hashFiles( 'syft/pkg/cataloger/binary/test-fixtures/cache.fingerprint' ) }}
-
- - name: Restore Kernel test-fixture cache
- uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 #v4.0.2
- with:
- path: syft/pkg/cataloger/kernel/test-fixtures/cache
- key: ${{ runner.os }}-unit-kernel-cache-${{ hashFiles( 'syft/pkg/cataloger/kernel/test-fixtures/cache.fingerprint' ) }}
+ download-test-fixture-cache: true
- name: Run unit tests
run: make unit
@@ -91,16 +51,12 @@ jobs:
- name: Bootstrap environment
uses: ./.github/actions/bootstrap
+ with:
+ download-test-fixture-cache: true
- name: Validate syft output against the CycloneDX schema
run: make validate-cyclonedx-schema
- - name: Restore integration test cache
- uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 #v4.0.2
- with:
- path: ${{ github.workspace }}/cmd/syft/internal/test/integration/test-fixtures/cache
- key: ${{ runner.os }}-integration-test-cache-${{ hashFiles('/cmd/syft/internal/test/integration/test-fixtures/cache.fingerprint') }}
-
- name: Run integration tests
run: make integration
@@ -143,6 +99,8 @@ jobs:
- name: Bootstrap environment
uses: ./.github/actions/bootstrap
+ with:
+ download-test-fixture-cache: true
- name: Download snapshot build
id: snapshot-cache
@@ -162,13 +120,6 @@ jobs:
- name: Run comparison tests (Linux)
run: make compare-linux
- - name: Restore install.sh test image cache
- id: install-test-image-cache
- uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 #v4.0.2
- with:
- path: ${{ github.workspace }}/test/install/cache
- key: ${{ runner.os }}-install-test-image-cache-${{ hashFiles('test/install/cache.fingerprint') }}
-
- name: Load test image cache
if: steps.install-test-image-cache.outputs.cache-hit == 'true'
run: make install-test-cache-load
@@ -196,8 +147,8 @@ jobs:
uses: ./.github/actions/bootstrap
with:
bootstrap-apt-packages: ""
- compute-fingerprints: "false"
go-dependencies: false
+ download-test-fixture-cache: true
- name: Download snapshot build
id: snapshot-cache
@@ -214,13 +165,6 @@ jobs:
if: steps.snapshot-cache.outputs.cache-hit != 'true'
run: echo "unable to download snapshots from previous job" && false
- - name: Restore docker image cache for compare testing
- id: mac-compare-testing-cache
- uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 #v4.0.2
- with:
- path: image.tar
- key: ${{ runner.os }}-${{ hashFiles('test/compare/mac.sh') }}
-
- name: Run comparison tests (Mac)
run: make compare-mac
@@ -238,12 +182,8 @@ jobs:
- name: Bootstrap environment
uses: ./.github/actions/bootstrap
-
- - name: Restore CLI test-fixture cache
- uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 #v4.0.2
with:
- path: ${{ github.workspace }}/test/cli/test-fixtures/cache
- key: ${{ runner.os }}-cli-test-cache-${{ hashFiles('test/cli/test-fixtures/cache.fingerprint') }}
+ download-test-fixture-cache: true
- name: Download snapshot build
id: snapshot-cache
@@ -262,3 +202,22 @@ jobs:
- name: Run CLI Tests (Linux)
run: make cli
+
+
+ Cleanup-Cache:
+ name: "Cleanup snapshot cache"
+ if: always()
+ runs-on: ubuntu-20.04
+ permissions:
+ actions: write
+ needs:
+ - Acceptance-Linux
+ - Acceptance-Mac
+ - Cli-Linux
+ steps:
+ - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 #v4.1.7
+
+ - name: Delete snapshot cache
+ run: gh cache delete "snapshot-build-${{ github.run_id }}"
+ env:
+ GH_TOKEN: ${{ github.token }}
diff --git a/Makefile b/Makefile
index 9089ee6192c..2f1ae1f8e5b 100644
--- a/Makefile
+++ b/Makefile
@@ -25,8 +25,8 @@ ci-bootstrap-go:
# this is a bootstrapping catch-all, where if the target doesn't exist, we'll ensure the tools are installed and then try again
%:
- make $(TASK)
- $(TASK) $@
+ @make --silent $(TASK)
+ @$(TASK) $@
## Shim targets #################################
diff --git a/Taskfile.yaml b/Taskfile.yaml
index c0a8bc33402..feb12f636ec 100644
--- a/Taskfile.yaml
+++ b/Taskfile.yaml
@@ -4,9 +4,19 @@ vars:
OWNER: anchore
PROJECT: syft
+ CACHE_IMAGE: ghcr.io/{{ .OWNER }}/{{ .PROJECT }}/test-fixture-cache:latest
+
# static file dirs
TOOL_DIR: .tool
TMP_DIR: .tmp
+ ORAS_CACHE: "{{ .TMP_DIR }}/oras-cache"
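+  # CACHE_PATHS_FILE holds the freshly computed fingerprint summary; LAST_CACHE_PULL_FILE is a
+  # copy taken at cache-pull time, so later runs can tell whether the local cache is stale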
+ CACHE_PATHS_FILE: "{{ .TMP_DIR }}/cache_paths.json"
+ LAST_CACHE_PULL_FILE: "{{ .TMP_DIR }}/last_cache_paths.json"
+
+ # TOOLS
+ ORAS: "{{ .TOOL_DIR }}/oras"
+ YQ: "{{ .TOOL_DIR }}/yq"
+ TASK: "{{ .TOOL_DIR }}/task"
# used for changelog generation
CHANGELOG: CHANGELOG.md
@@ -33,6 +43,9 @@ vars:
COMPARE_DIR: ./test/compare
COMPARE_TEST_IMAGE: centos:8.2.2004
+env:
+ GNUMAKEFLAGS: '--no-print-directory'
+
tasks:
## High-level tasks #################################
@@ -65,6 +78,7 @@ tasks:
- task: benchmark
- task: test-utils
- task: cli
+ - task: check-docker-cache
## Bootstrap tasks #################################
@@ -212,10 +226,6 @@ tasks:
# that the cache being restored with the correct binary will be rebuilt since the timestamps
# and local checksums will not line up.
deps: [tools, snapshot]
- sources:
- - "{{ .SNAPSHOT_BIN }}"
- - ./test/cli/**
- - ./**/*.go
cmds:
- cmd: "echo 'testing binary: {{ .SNAPSHOT_BIN }}'"
silent: true
@@ -229,18 +239,14 @@ tasks:
test-utils:
desc: Run tests for pipeline utils
- sources:
- - .github/scripts/labeler*.py
cmds:
- - cmd: python .github/scripts/labeler_test.py
+ - cmd: .github/scripts/labeler_test.py
## Benchmark test targets #################################
benchmark:
deps: [tmpdir]
- sources:
- - ./**/*.go
generates:
- "{{ .TMP_DIR }}/benchmark-main.txt"
cmds:
@@ -253,8 +259,6 @@ tasks:
show-benchstat:
deps: [benchmark, tmpdir]
- sources:
- - "{{ .TMP_DIR }}/benchstat.txt"
cmds:
- cmd: "cat {{ .TMP_DIR }}/benchstat.txt"
silent: true
@@ -263,56 +267,188 @@ tasks:
## Test-fixture-related targets #################################
fingerprints:
- desc: Generate test fixture fingerprints
+    desc: Generate fingerprints for all non-docker test fixtures
+ silent: true
+ # this will look for `test-fixtures/Makefile` and invoke the `fingerprint` target to calculate all cache input fingerprint files
generates:
- - cmd/syft/internal/test/integration/test-fixtures/cache.fingerprint
- - syft/file/cataloger/executable/test-fixtures/elf/cache.fingerprint
- - syft/file/cataloger/executable/test-fixtures/shared-info/cache.fingerprint
- - syft/pkg/cataloger/binary/test-fixtures/cache.fingerprint
- - syft/pkg/cataloger/java/test-fixtures/java-builds/cache.fingerprint
- - syft/pkg/cataloger/golang/test-fixtures/archs/binaries.fingerprint
- - syft/pkg/cataloger/redhat/test-fixtures/rpms.fingerprint
- - syft/pkg/cataloger/kernel/test-fixtures/cache.fingerprint
+ - '**/test-fixtures/**/*.fingerprint'
- test/install/cache.fingerprint
- - test/cli/test-fixtures/cache.fingerprint
- cmds:
- # for EXECUTABLE unit test fixtures
- - "cd syft/file/cataloger/executable/test-fixtures/elf && make cache.fingerprint"
- - "cd syft/file/cataloger/executable/test-fixtures/shared-info && make cache.fingerprint"
- # for IMAGE integration test fixtures
- - "cd cmd/syft/internal/test/integration/test-fixtures && make cache.fingerprint"
- # for BINARY unit test fixtures
- - "cd syft/pkg/cataloger/binary/test-fixtures && make cache.fingerprint"
- # for JAVA BUILD unit test fixtures
- - "cd syft/pkg/cataloger/java/test-fixtures/java-builds && make cache.fingerprint"
- # for GO BINARY unit test fixtures
- - "cd syft/pkg/cataloger/golang/test-fixtures/archs && make binaries.fingerprint"
- # for RPM unit test fixtures
- - "cd syft/pkg/cataloger/redhat/test-fixtures && make rpms.fingerprint"
- # for Kernel unit test fixtures
- - "cd syft/pkg/cataloger/kernel/test-fixtures && make cache.fingerprint"
- # for INSTALL test fixtures
- - "cd test/install && make cache.fingerprint"
- # for CLI test fixtures
- - "cd test/cli/test-fixtures && make cache.fingerprint"
-
- fixtures:
- desc: Generate test fixtures
- cmds:
- - "cd syft/file/cataloger/executable/test-fixtures/elf && make"
- - "cd syft/file/cataloger/executable/test-fixtures/shared-info && make"
- - "cd syft/pkg/cataloger/java/test-fixtures/java-builds && make"
- - "cd syft/pkg/cataloger/redhat/test-fixtures && make"
- - "cd syft/pkg/cataloger/binary/test-fixtures && make"
+ cmds:
+ - |
+ BOLD='\033[1m'
+ YELLOW='\033[0;33m'
+ RESET='\033[0m'
+
+ echo -e "${YELLOW}creating fingerprint files for non-docker fixtures...${RESET}"
+ for dir in $(find . -type d -name 'test-fixtures'); do
+ if [ -f "$dir/Makefile" ]; then
+ # for debugging...
+ #echo -e "${YELLOW}• calculating fingerprints in $dir... ${RESET}"
+
+ (make -C "$dir" fingerprint)
+ fi
+ done
+
+ # for debugging...
+ # echo -e "generated all fixture fingerprints"
+
+ - .github/scripts/fingerprint_docker_fixtures.py
+ - |
+ # if DOWNLOAD_TEST_FIXTURE_CACHE is set to 'false', then we don't need to calculate the fingerprint for the cache
+ if [ "$DOWNLOAD_TEST_FIXTURE_CACHE" = "false" ]; then
+ exit 0
+ fi
+ .github/scripts/find_cache_paths.py {{ .CACHE_PATHS_FILE }} > /dev/null
+
+
+ refresh-fixtures:
+ desc: Clear and fetch all test fixture cache
+ aliases:
+ - fixtures
+ silent: true
+ deps:
+ - tools
+ cmds:
+ - |
+ BOLD='\033[1m'
+ PURPLE='\033[0;35m'
+ RESET='\033[0m'
+
+ # if DOWNLOAD_TEST_FIXTURE_CACHE is set to 'false', then skip the cache download and always build
+ if [ "$DOWNLOAD_TEST_FIXTURE_CACHE" = "false" ]; then
+ echo -e "${BOLD}${PURPLE}skipping cache download, rebuilding cache...${RESET}"
+ {{ .TASK }} build-fixtures
+ exit 0
+ fi
+
+ LATEST_FINGERPRINT=$(docker manifest inspect {{ .CACHE_IMAGE }} | {{ .YQ }} -r '.annotations.fingerprint')
+
+ echo "latest cache: $LATEST_FINGERPRINT"
+
+ if [ -f {{ .LAST_CACHE_PULL_FILE }} ]; then
+ LAST_PULL_FINGERPRINT=$(cat {{ .LAST_CACHE_PULL_FILE }} | {{ .YQ }} -r '.digest')
+ else
+ echo -e "${BOLD}${PURPLE}empty cache, downloading cache...${RESET}"
+ {{ .TASK }} download-test-fixture-cache
+ exit 0
+ fi
+
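+        # three fingerprints are at play: the latest published cache (LATEST_FINGERPRINT), the
+        # cache we want based on local sources (WANT_FINGERPRINT), and what we last pulled (LAST_PULL_FINGERPRINT)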
+ {{ .TASK }} fingerprints
+
+ WANT_FINGERPRINT=$(cat {{ .CACHE_PATHS_FILE }} | {{ .YQ }} -r '.digest')
+
+ echo "desired cache: $WANT_FINGERPRINT"
+ echo "last pulled cache: $LAST_PULL_FINGERPRINT"
+
+ # if we already have the latest cache, skip the refresh
+ if [ "$LAST_PULL_FINGERPRINT" = "$WANT_FINGERPRINT" ]; then
+ echo -e "${BOLD}${PURPLE}already have the latest cache (skipping cache download)${RESET}"
+ exit 0
+ fi
+
+        # at this point we only refresh the cache if we want the same cache that is currently available.
+        # we don't refresh the cache by default when it is simply different from what we have,
+        # because we may be working on a code change that doesn't require a cache refresh (but could trigger one,
+        # which would be annoying to deal with in a development workflow).
+
+ if [ "$LATEST_FINGERPRINT" = "$WANT_FINGERPRINT" ]; then
+          echo -e "${BOLD}${PURPLE}remote cache matches what we need! downloading cache...${RESET}"
+ {{ .TASK }} download-test-fixture-cache
+ else
+          echo -e "${BOLD}${PURPLE}found a different cache, but it isn't clear if it's newer (skipping cache download and manually building)${RESET}"
+
+          {{ .YQ }} eval '.paths[] | "\(.digest) \(.path)"' {{ .LAST_CACHE_PULL_FILE }} > {{ .TMP_DIR }}/last_cache_lines
+          {{ .YQ }} eval '.paths[] | "\(.digest) \(.path)"' {{ .CACHE_PATHS_FILE }} > {{ .TMP_DIR }}/cache_lines
+          diff {{ .TMP_DIR }}/last_cache_lines {{ .TMP_DIR }}/cache_lines || true
+
+ echo -e "${BOLD}${PURPLE}diff with more context...${RESET}"
+
+ diff -U10000 {{ .LAST_CACHE_PULL_FILE }} {{ .CACHE_PATHS_FILE }} || true
+
+ echo -e "${BOLD}${PURPLE}detected changes to input material, manually building fixtures...${RESET}"
+
+ {{ .TASK }} build-fixtures
+ fi
+
+ build-fixtures:
+ desc: Generate all non-docker test fixtures
+ silent: true
+ # this will look for `test-fixtures/Makefile` and invoke the `fixtures` target to generate any and all test fixtures
+ cmds:
+ - |
+ BOLD='\033[1m'
+ YELLOW='\033[0;33m'
+ RESET='\033[0m'
+
+ # Use a for loop with command substitution to avoid subshell issues
+ for dir in $(find . -type d -name 'test-fixtures'); do
+ if [ -f "$dir/Makefile" ]; then
+ echo -e "${YELLOW}${BOLD}generating fixtures in $dir${RESET}"
+ (make -C "$dir" fixtures)
+ fi
+ done
+ echo -e "${BOLD}generated all fixtures${RESET}"
+
+ download-test-fixture-cache:
+ desc: Download test fixture cache from ghcr.io
+ deps: [tools, clean-cache]
+ vars:
+ CACHE_DIGEST:
+ sh: docker manifest inspect {{ .CACHE_IMAGE }} | {{ .YQ }} -r '.annotations.fingerprint'
+ cmds:
+ - silent: true
+ cmd: |
+ # if oras cache is > 4 GB, delete it
+ if [ -d {{ .ORAS_CACHE }} ]; then
+ total_size=$(du -c {{ .ORAS_CACHE }} | grep total | awk '{print $1}')
+ if [ "$total_size" -gt 4194304 ]; then
+ echo 'deleting oras cache'
+ rm -rf {{ .ORAS_CACHE }}
+ fi
+ fi
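+    # ORAS_CACHE points oras at the blob cache directory that CI restores via the bootstrap action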
+ - "ORAS_CACHE={{ .ORAS_CACHE }} {{ .ORAS }} pull {{ .CACHE_IMAGE }}"
+ - "cp {{ .CACHE_PATHS_FILE }} {{ .LAST_CACHE_PULL_FILE }}"
+
+ upload-test-fixture-cache:
+ desc: Upload the test fixture cache to ghcr.io
+ deps: [tools, fingerprints]
+ silent: true
+ cmd: |
+ set -eu
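+      # build a single oras push invocation: every cached fixture path, plus the paths summary
+      # file itself, annotated with the combined fingerprint digest for later comparison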
+ oras_command="{{ .ORAS }} push {{ .CACHE_IMAGE }}"
+
+ paths=$(cat {{ .CACHE_PATHS_FILE }} | {{ .YQ }} -r '.paths[].path')
+ for path in $paths; do
+ oras_command+=" $path"
+ done
+ oras_command+=" {{ .CACHE_PATHS_FILE }}"
+
+ oras_command+=" --annotation org.opencontainers.image.source=https://github.com/{{ .OWNER }}/{{ .PROJECT }}"
+ oras_command+=" --annotation fingerprint=$(cat {{ .CACHE_PATHS_FILE }} | {{ .YQ }} -r '.digest')"
+
+ echo "Executing: $oras_command"
+ eval $oras_command
show-test-image-cache:
silent: true
cmds:
- - "echo '\nDocker daemon cache:'"
+ - "echo 'Docker daemon cache:'"
- "docker images --format '{{`{{.ID}}`}} {{`{{.Repository}}`}}:{{`{{.Tag}}`}}' | grep stereoscope-fixture- | sort"
- "echo '\nTar cache:'"
- - 'find . -type f -wholename "**/test-fixtures/snapshot/*" | sort'
+ - 'find . -type f -wholename "**/test-fixtures/cache/stereoscope-fixture-*.tar" | sort'
+ check-docker-cache:
+ desc: Ensure docker caches aren't using too much disk space
+ silent: true
+ cmd: |
+ total_size=$(find . | grep cache | grep tar | xargs du -c | grep total | awk '{print $1}')
+ find . | grep cache | grep tar | xargs du
+ echo "total $total_size KB"
+
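+    # du reports 1 KiB blocks by default, so 1048576 blocks ≈ 1 GiB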
+ if [ "$total_size" -gt 1048576 ]; then
+ echo 'docker cache is larger than 1GB'
+ exit 1
+ fi
## install.sh testing targets #################################
@@ -457,7 +593,16 @@ tasks:
ci-check:
# desc: "[CI only] Are you in CI?"
cmds:
- - cmd: .github/scripts/ci-check.sh
+ - cmd: |
+ red=$(tput setaf 1)
+ bold=$(tput bold)
+ normal=$(tput sgr0)
+
+ # assert we are running in CI (or die!)
+ if [[ -z "$CI" ]]; then
+ echo "${bold}${red}This step should ONLY be run in CI. Exiting...${normal}"
+ exit 1
+ fi
silent: true
ci-release:
@@ -489,8 +634,31 @@ tasks:
- "rm -rf {{ .SNAPSHOT_DIR }}"
- "rm -rf {{ .TMP_DIR }}/goreleaser.yaml"
+ clean-docker-cache:
+ desc: Remove all docker cache tars and images from the daemon
+ cmds:
+ - find . -type d -wholename "**/test-fixtures/cache" | xargs rm -rf
+ - docker images --format '{{`{{.ID}}`}} {{`{{.Repository}}`}}' | grep stereoscope-fixture- | awk '{print $1}' | uniq | xargs -r docker rmi --force
+
+ clean-oras-cache:
+ desc: Remove all cache for oras commands
+ cmd: rm -rf {{ .ORAS_CACHE }}
+
clean-cache:
- desc: Remove all docker cache and local image tar cache
+    desc: Remove all docker image tar caches, images from the docker daemon, and ephemeral test fixtures
cmds:
- - 'find . -type f -wholename "**/test-fixtures/cache/stereoscope-fixture-*.tar" -delete'
- - "docker images --format '{{`{{.ID}}`}} {{`{{.Repository}}`}}' | grep stereoscope-fixture- | awk '{print $$1}' | uniq | xargs -r docker rmi --force"
+ - task: clean-docker-cache
+ - |
+ BOLD='\033[1m'
+ YELLOW='\033[0;33m'
+ RESET='\033[0m'
+
+ # Use a for loop with command substitution to avoid subshell issues
+ for dir in $(find . -type d -name 'test-fixtures'); do
+ if [ -f "$dir/Makefile" ]; then
+ echo -e "${YELLOW}${BOLD}deleting ephemeral test fixtures in $dir${RESET}"
+ (make -C "$dir" clean)
+ fi
+ done
+ echo -e "${BOLD}Deleted all ephemeral test fixtures${RESET}"
+ - rm -f {{ .LAST_CACHE_PULL_FILE }} {{ .CACHE_PATHS_FILE }}
diff --git a/cmd/syft/internal/test/integration/.gitignore b/cmd/syft/internal/test/integration/.gitignore
new file mode 100644
index 00000000000..872aa273a4e
--- /dev/null
+++ b/cmd/syft/internal/test/integration/.gitignore
@@ -0,0 +1 @@
+results
\ No newline at end of file
diff --git a/cmd/syft/internal/test/integration/all_layers_squashed_comparison_test.go b/cmd/syft/internal/test/integration/all_layers_squashed_comparison_test.go
index 4dedd9ef44e..41f8e35023d 100644
--- a/cmd/syft/internal/test/integration/all_layers_squashed_comparison_test.go
+++ b/cmd/syft/internal/test/integration/all_layers_squashed_comparison_test.go
@@ -7,7 +7,7 @@ import (
)
func Test_AllLayersIncludesSquashed(t *testing.T) {
- // This is a verification test for issue #894 (https://github.com/anchore/syft/issues/894)
+	// This is a verification test for issue anchore/grype#894 (https://github.com/anchore/grype/issues/894)
allLayers, _ := catalogFixtureImage(t, "image-suse-all-layers", source.AllLayersScope)
squashed, _ := catalogFixtureImage(t, "image-suse-all-layers", source.SquashedScope)
diff --git a/cmd/syft/internal/test/integration/encode_decode_cycle_test.go b/cmd/syft/internal/test/integration/encode_decode_cycle_test.go
index 56bd7b77260..dd3a99a8552 100644
--- a/cmd/syft/internal/test/integration/encode_decode_cycle_test.go
+++ b/cmd/syft/internal/test/integration/encode_decode_cycle_test.go
@@ -2,7 +2,9 @@ package integration
import (
"bytes"
- "regexp"
+ "os"
+ "path/filepath"
+ "strings"
"testing"
"github.com/google/go-cmp/cmp"
@@ -12,8 +14,6 @@ import (
"github.com/anchore/syft/cmd/syft/internal/options"
"github.com/anchore/syft/syft/format"
- "github.com/anchore/syft/syft/format/cyclonedxjson"
- "github.com/anchore/syft/syft/format/cyclonedxxml"
"github.com/anchore/syft/syft/format/syftjson"
"github.com/anchore/syft/syft/source"
)
@@ -43,26 +43,27 @@ func TestEncodeDecodeEncodeCycleComparison(t *testing.T) {
},
json: true,
},
- {
- name: cyclonedxjson.ID.String(),
- redactor: func(in []byte) []byte {
- // unstable values
- in = regexp.MustCompile(`"(timestamp|serialNumber|bom-ref|ref)":\s*"(\n|[^"])+"`).ReplaceAll(in, []byte(`"$1": "redacted"`))
- in = regexp.MustCompile(`"(dependsOn)":\s*\[(?:\s|[^]])+]`).ReplaceAll(in, []byte(`"$1": []`))
- return in
- },
- json: true,
- },
- {
- name: cyclonedxxml.ID.String(),
- redactor: func(in []byte) []byte {
- // unstable values
- in = regexp.MustCompile(`(serialNumber|bom-ref|ref)="[^"]+"`).ReplaceAll(in, []byte{})
- in = regexp.MustCompile(`[^<]+`).ReplaceAll(in, []byte{})
-
- return in
- },
- },
+	// TODO: ignoring the `ref` field does produce stable results to compare, but the resulting SBOM is fundamentally gutted and not worth comparing (find a better redaction or comparison method)
+ //{
+ // name: cyclonedxjson.ID.String(),
+ // redactor: func(in []byte) []byte {
+ // // unstable values
+ // in = regexp.MustCompile(`"(timestamp|serialNumber|bom-ref|ref)":\s*"(\n|[^"])+"`).ReplaceAll(in, []byte(`"$1": "redacted"`))
+ // in = regexp.MustCompile(`"(dependsOn)":\s*\[(?:\s|[^]])+]`).ReplaceAll(in, []byte(`"$1": []`))
+ // return in
+ // },
+ // json: true,
+ //},
+ //{
+ // name: cyclonedxxml.ID.String(),
+ // redactor: func(in []byte) []byte {
+ // // unstable values
+ // in = regexp.MustCompile(`(serialNumber|bom-ref|ref)="[^"]+"`).ReplaceAll(in, []byte{})
+ // in = regexp.MustCompile(`[^<]+`).ReplaceAll(in, []byte{})
+ //
+ // return in
+ // },
+ //},
}
opts := options.DefaultOutput()
@@ -112,6 +113,21 @@ func TestEncodeDecodeEncodeCycleComparison(t *testing.T) {
diffs := dmp.DiffMain(string(by1), string(by2), true)
t.Errorf("diff: %s", dmp.DiffPrettyText(diffs))
}
+
+ // write raw IMAGE@NAME-start and IMAGE@NAME-finish to files within the results dir
+ // ... this is helpful for debugging
+ require.NoError(t, os.MkdirAll("results", 0700))
+
+ suffix := "sbom"
+ switch {
+ case strings.Contains(test.name, "json"):
+ suffix = "json"
+ case strings.Contains(test.name, "xml"):
+ suffix = "xml"
+ }
+
+ require.NoError(t, os.WriteFile(filepath.Join("results", image+"@"+test.name+"-start."+suffix), by1, 0600))
+ require.NoError(t, os.WriteFile(filepath.Join("results", image+"@"+test.name+"-finish."+suffix), by2, 0600))
}
})
}
diff --git a/cmd/syft/internal/test/integration/go_compiler_detection_test.go b/cmd/syft/internal/test/integration/go_compiler_detection_test.go
index 8c440a0309e..e6a0d588872 100644
--- a/cmd/syft/internal/test/integration/go_compiler_detection_test.go
+++ b/cmd/syft/internal/test/integration/go_compiler_detection_test.go
@@ -35,8 +35,8 @@ func TestGolangCompilerDetection(t *testing.T) {
for _, pkg := range packages {
foundCompilerVersions[pkg.Version] = struct{}{}
foundPURL[pkg.PURL] = struct{}{}
- for _, cpe := range pkg.CPEs {
- foundCPE[cpe] = struct{}{}
+ for _, c := range pkg.CPEs {
+ foundCPE[c] = struct{}{}
}
}
diff --git a/cmd/syft/internal/test/integration/java_purl_test.go b/cmd/syft/internal/test/integration/java_purl_test.go
index 5dab545d686..5de8875ca84 100644
--- a/cmd/syft/internal/test/integration/java_purl_test.go
+++ b/cmd/syft/internal/test/integration/java_purl_test.go
@@ -1,10 +1,9 @@
package integration
import (
- "fmt"
"testing"
- "github.com/stretchr/testify/assert"
+ "github.com/google/go-cmp/cmp"
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/source"
@@ -26,13 +25,9 @@ func TestJavaPURLs(t *testing.T) {
found[metadata.VirtualPath] = p.PURL
}
}
- for key, expectedPURL := range expectedPURLs {
- purl := found[key]
- assert.Equal(t, expectedPURL, purl, fmt.Sprintf("found wrong or missing PURL for %s want %s, got %s", key, expectedPURL, purl))
- }
- for key, foundPURL := range found {
- expectedPURL := expectedPURLs[key]
- assert.Equal(t, expectedPURL, foundPURL, fmt.Sprintf("found extra purl for %s want %s, got %s", key, expectedPURL, foundPURL))
+
+ if d := cmp.Diff(expectedPURLs, found); d != "" {
+ t.Errorf("unexpected purl values:\n%s", d)
}
}
diff --git a/cmd/syft/internal/test/integration/package_deduplication_test.go b/cmd/syft/internal/test/integration/package_deduplication_test.go
index 12fa9dcf207..ab8e580f8ad 100644
--- a/cmd/syft/internal/test/integration/package_deduplication_test.go
+++ b/cmd/syft/internal/test/integration/package_deduplication_test.go
@@ -1,5 +1,3 @@
-//go:build !arm64
-
package integration
import (
@@ -7,7 +5,6 @@ import (
"testing"
"github.com/stretchr/testify/assert"
- "github.com/stretchr/testify/require"
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/source"
@@ -22,41 +19,39 @@ func TestPackageDeduplication(t *testing.T) {
}{
{
scope: source.AllLayersScope,
- packageCount: 172, // without deduplication this would be 618
+ packageCount: 178, // without deduplication this would be ~600
instanceCount: map[string]int{
- "basesystem": 1,
- "wget": 1,
- "curl": 2, // upgraded in the image
- "vsftpd": 1,
- "httpd": 1, // rpm, - we exclude binary
+ "basesystem": 1,
+ "wget": 1,
+ "curl-minimal": 2, // upgraded in the image
+ "vsftpd": 1,
+ "httpd": 1, // rpm, - we exclude binary
},
locationCount: map[string]int{
- "basesystem-10.0-7.el7.centos": 4,
- "curl-7.29.0-59.el7": 1, // from base image
- "curl-7.29.0-59.el7_9.1": 3, // upgrade
- "wget-1.14-18.el7_6.1": 3,
- "vsftpd-3.0.2-29.el7_9": 2,
- "httpd-2.4.6-97.el7.centos.5": 1,
- // "httpd-2.4.6": 1, // binary
+ "basesystem-11-13.el9": 5, // in all layers
+ "curl-minimal-7.76.1-26.el9_3.2.0.1": 2, // base + wget layer
+ "curl-minimal-7.76.1-29.el9_4.1": 3, // curl upgrade layer + all above layers
+ "wget-1.21.1-8.el9_4": 4, // wget + all above layers
+ "vsftpd-3.0.5-5.el9": 2, // vsftpd + all above layers
+ "httpd-2.4.57-11.el9_4.1": 1, // last layer
},
},
{
scope: source.SquashedScope,
- packageCount: 170,
+ packageCount: 172,
instanceCount: map[string]int{
- "basesystem": 1,
- "wget": 1,
- "curl": 1, // upgraded, but the most recent
- "vsftpd": 1,
- "httpd": 1, // rpm, binary is now excluded by overlap
+ "basesystem": 1,
+ "wget": 1,
+ "curl-minimal": 1, // upgraded, but the most recent
+ "vsftpd": 1,
+ "httpd": 1, // rpm, binary is now excluded by overlap
},
locationCount: map[string]int{
- "basesystem-10.0-7.el7.centos": 1,
- "curl-7.29.0-59.el7_9.1": 1, // upgrade
- "wget-1.14-18.el7_6.1": 1,
- "vsftpd-3.0.2-29.el7_9": 1,
- "httpd-2.4.6-97.el7.centos.5": 1,
- // "httpd-2.4.6": 1, // binary (excluded)
+ "basesystem-11-13.el9": 1,
+ "curl-minimal-7.76.1-29.el9_4.1": 1, // upgrade
+ "wget-1.21.1-8.el9_4": 1,
+ "vsftpd-3.0.5-5.el9": 1,
+ "httpd-2.4.57-11.el9_4.1": 1,
},
},
}
@@ -75,20 +70,21 @@ func TestPackageDeduplication(t *testing.T) {
pkgs := sbom.Artifacts.Packages.PackagesByName(name)
// with multiple packages with the same name, something is wrong (or this is the wrong fixture)
- require.Len(t, pkgs, expectedInstanceCount)
-
- for _, p := range pkgs {
- nameVersion := fmt.Sprintf("%s-%s", name, p.Version)
- expectedLocationCount, ok := tt.locationCount[nameVersion]
- if !ok {
- t.Fatalf("missing name-version: %s", nameVersion)
- }
+ if assert.Len(t, pkgs, expectedInstanceCount, "unexpected package count for %s", name) {
+ for _, p := range pkgs {
+ nameVersion := fmt.Sprintf("%s-%s", name, p.Version)
+ expectedLocationCount, ok := tt.locationCount[nameVersion]
+ if !ok {
+ t.Errorf("missing name-version: %s", nameVersion)
+ continue
+ }
- // we should see merged locations (assumption, there was 1 location for each package)
- assert.Len(t, p.Locations.ToSlice(), expectedLocationCount)
+ // we should see merged locations (assumption, there was 1 location for each package)
+ assert.Len(t, p.Locations.ToSlice(), expectedLocationCount, "unexpected location count for %s", nameVersion)
- // all paths should match
- assert.Len(t, p.Locations.CoordinateSet().Paths(), 1)
+ // all paths should match
+ assert.Len(t, p.Locations.CoordinateSet().Paths(), 1, "unexpected location count for %s", nameVersion)
+ }
}
}
diff --git a/cmd/syft/internal/test/integration/test-fixtures/Makefile b/cmd/syft/internal/test/integration/test-fixtures/Makefile
index 2a75aa43616..7cce0b0d8b4 100644
--- a/cmd/syft/internal/test/integration/test-fixtures/Makefile
+++ b/cmd/syft/internal/test/integration/test-fixtures/Makefile
@@ -1,6 +1,21 @@
-# change these if you want CI to not use previous stored cache
-INTEGRATION_CACHE_BUSTER := "894d8ca"
+FINGERPRINT_FILE := cache.fingerprint
-.PHONY: cache.fingerprint
-cache.fingerprint:
- find image-* -type f -exec md5sum {} + | awk '{print $1}' | sort | tee /dev/stderr | md5sum | tee cache.fingerprint && echo "$(INTEGRATION_CACHE_BUSTER)" >> cache.fingerprint
+.DEFAULT_GOAL := fixtures
+
+# requirement 1: 'fixtures' goal to generate any and all test fixtures
+fixtures:
+ @echo "nothing to do"
+
+# requirement 2: 'fingerprint' goal to capture the fixture input state that indicates whether any existing cache should be busted
+fingerprint: $(FINGERPRINT_FILE)
+
+# requirement 3: we always need to recalculate the fingerprint based on source regardless of any existing fingerprint
+.PHONY: $(FINGERPRINT_FILE)
+$(FINGERPRINT_FILE):
+ @find image-* -type f -exec sha256sum {} \; | sort -k2 > $(FINGERPRINT_FILE)
+ @#cat $(FINGERPRINT_FILE) | sha256sum | awk '{print $$1}'
+
+# requirement 4: 'clean' goal to remove all generated test fixtures
+.PHONY: clean
+clean:
+ rm -f $(FINGERPRINT_FILE)
diff --git a/cmd/syft/internal/test/integration/test-fixtures/image-golang-compiler/Dockerfile b/cmd/syft/internal/test/integration/test-fixtures/image-golang-compiler/Dockerfile
index 2d8e6bbdce5..e73f169b279 100644
--- a/cmd/syft/internal/test/integration/test-fixtures/image-golang-compiler/Dockerfile
+++ b/cmd/syft/internal/test/integration/test-fixtures/image-golang-compiler/Dockerfile
@@ -1 +1,6 @@
-FROM golang:1.18.10-alpine
\ No newline at end of file
+FROM --platform=linux/amd64 golang:1.18.10-alpine
+
+FROM scratch
+
+# we don't need the entire golang toolchain, just a single binary with the stdlib baked in
+COPY --from=0 /usr/local/go/bin/gofmt bin/gofmt
diff --git a/cmd/syft/internal/test/integration/test-fixtures/image-java-no-main-package/Dockerfile b/cmd/syft/internal/test/integration/test-fixtures/image-java-no-main-package/Dockerfile
index dce8deba3e2..3271f14cfd8 100644
--- a/cmd/syft/internal/test/integration/test-fixtures/image-java-no-main-package/Dockerfile
+++ b/cmd/syft/internal/test/integration/test-fixtures/image-java-no-main-package/Dockerfile
@@ -1,4 +1,4 @@
-FROM jenkins/jenkins:2.346.3-slim-jdk17@sha256:028fbbd9112c60ed086f5197fcba71992317864d27644e5949cf9c52ff4b65f0
+FROM jenkins/jenkins:2.346.3-slim-jdk17@sha256:028fbbd9112c60ed086f5197fcba71992317864d27644e5949cf9c52ff4b65f0 AS base
USER root
@@ -12,7 +12,7 @@ RUN apt-get update 2>&1 > /dev/null && apt-get install -y less zip 2>&1 > /dev/n
RUN unzip ../jenkins.war 2>&1 > /dev/null
-RUN rm -f ./META-INF/MANIFEST.MF
+RUN rm -rf ./META-INF/MANIFEST.MF ./WEB-INF ./jsbundles ./scripts ./css
WORKDIR /usr/share/jenkins
@@ -21,3 +21,7 @@ RUN rm -rf jenkins.war
RUN cd ./tmp && zip -r ../jenkins.war . && cd ..
RUN rm -rf ./tmp
+
+FROM scratch
+
+COPY --from=base /usr/share/jenkins/jenkins.war /jenkins.war
diff --git a/cmd/syft/internal/test/integration/test-fixtures/image-java-virtualpath-regression/Dockerfile b/cmd/syft/internal/test/integration/test-fixtures/image-java-virtualpath-regression/Dockerfile
index 63fc6c92aad..b7990d9d104 100644
--- a/cmd/syft/internal/test/integration/test-fixtures/image-java-virtualpath-regression/Dockerfile
+++ b/cmd/syft/internal/test/integration/test-fixtures/image-java-virtualpath-regression/Dockerfile
@@ -1,7 +1,15 @@
-FROM alpine:3.18.3@sha256:7144f7bab3d4c2648d7e59409f15ec52a18006a128c733fcff20d3a4a54ba44a
+FROM alpine:3.18.3@sha256:7144f7bab3d4c2648d7e59409f15ec52a18006a128c733fcff20d3a4a54ba44a AS base
RUN wget https://repo1.maven.org/maven2/org/jvnet/hudson/main/hudson-war/2.2.1/hudson-war-2.2.1.war
RUN mv hudson-war-2.2.1.war hudson.war
+# let's make this image a little smaller so as not to take up so much disk space
+# we'll only keep the jar metadata files (pom data + manifest) and throw away the rest
+RUN apk add --no-cache python3 py3-pip
+COPY extract.py /extract.py
+RUN python extract.py
+FROM scratch
+
+COPY --from=base /slim /
diff --git a/cmd/syft/internal/test/integration/test-fixtures/image-java-virtualpath-regression/extract.py b/cmd/syft/internal/test/integration/test-fixtures/image-java-virtualpath-regression/extract.py
new file mode 100644
index 00000000000..e0f005b4ce9
--- /dev/null
+++ b/cmd/syft/internal/test/integration/test-fixtures/image-java-virtualpath-regression/extract.py
@@ -0,0 +1,69 @@
+import os
+import zipfile
+import io
+
+ARCHIVE_EXTENSIONS = ('.jar', '.war', '.ear', '.hpi', '.sar', '.nar', '.par')
+METADATA_FILES = ('pom.xml', 'pom.properties', 'MANIFEST.MF')
+
+
+def slim_archive(archive, output_dir, base_path="", archive_name=""):
+ """
+ extracts metadata files from the archive and creates a slim JAR file
+ containing only these files. handles nested JARs by preserving them.
+ """
+ slim_buffer = io.BytesIO()
+ with zipfile.ZipFile(archive, 'r') as zip_file:
+ with zipfile.ZipFile(slim_buffer, 'w', zipfile.ZIP_DEFLATED) as slim_zip:
+ for file_name in zip_file.namelist():
+ # check for metadata files or nested JARs
+ if file_name.endswith(METADATA_FILES):
+ # add metadata files directly to the slimmed archive
+ file_data = zip_file.read(file_name)
+ slim_zip.writestr(file_name, file_data)
+ elif file_name.endswith(ARCHIVE_EXTENSIONS):
+ # if it's a nested archive, recursively slim it
+ nested_archive = io.BytesIO(zip_file.read(file_name))
+ nested_slim_buffer = io.BytesIO()
+ slim_archive(
+ nested_archive,
+ nested_slim_buffer,
+ base_path=os.path.join(base_path, os.path.dirname(file_name)),
+ archive_name=os.path.basename(file_name)
+ )
+ # add the slimmed nested archive back to the parent archive
+ nested_slim_buffer.seek(0)
+ slim_zip.writestr(file_name, nested_slim_buffer.read())
+
+ # write out the slimmed JAR to the output directory if output_dir is a directory
+ if isinstance(output_dir, str):
+ output_path = os.path.join(output_dir, base_path, archive_name)
+ os.makedirs(os.path.dirname(output_path), exist_ok=True)
+ with open(output_path, 'wb') as f:
+ slim_buffer.seek(0)
+ f.write(slim_buffer.read())
+ else:
+ # if output_dir is a BytesIO buffer (for nested archives), just write to it
+ output_dir.seek(0)
+ output_dir.write(slim_buffer.getvalue())
+
+
+def walk_directory_and_slim_jars(base_dir, output_dir):
+ """
+ recursively walks through a directory tree looking for .jar, .war, .ear,
+ .hpi files and slims them down by keeping only metadata files.
+ """
+ for dirpath, _, filenames in os.walk(base_dir):
+ for filename in filenames:
+ if filename.endswith(ARCHIVE_EXTENSIONS):
+ archive_path = os.path.join(dirpath, filename)
+ print(f"Processing {archive_path}")
+ slim_archive(archive_path, output_dir, os.path.relpath(dirpath, base_dir), filename)
+
+
+# a helper script for slimming down JAR files by keeping only metadata files but still keeping the jar packaging,
+# including nested JARs! Useful for testing purposes.
+if __name__ == "__main__":
+ BASE_DIR = "."
+ OUTPUT_DIR = "./slim"
+ os.makedirs(OUTPUT_DIR, exist_ok=True)
+ walk_directory_and_slim_jars(BASE_DIR, OUTPUT_DIR)
diff --git a/cmd/syft/internal/test/integration/test-fixtures/image-large-apk-data/Dockerfile b/cmd/syft/internal/test/integration/test-fixtures/image-large-apk-data/Dockerfile
index 8187870a863..a8eaca2364d 100644
--- a/cmd/syft/internal/test/integration/test-fixtures/image-large-apk-data/Dockerfile
+++ b/cmd/syft/internal/test/integration/test-fixtures/image-large-apk-data/Dockerfile
@@ -1,4 +1,4 @@
-FROM alpine@sha256:d9a7354e3845ea8466bb00b22224d9116b183e594527fb5b6c3d30bc01a20378
+FROM alpine@sha256:d9a7354e3845ea8466bb00b22224d9116b183e594527fb5b6c3d30bc01a20378 AS base
# we keep these unpinned so that if alpine
# changes our integration tests can adapt
@@ -6,3 +6,8 @@ RUN apk add --no-cache \
tzdata \
vim \
alpine-sdk
+
+# we don't need the installed bins for this test, only the APK installed metadata
+FROM scratch
+
+COPY --from=base /lib/apk/db/installed /lib/apk/db/installed
diff --git a/cmd/syft/internal/test/integration/test-fixtures/image-mariner-distroless/Dockerfile b/cmd/syft/internal/test/integration/test-fixtures/image-mariner-distroless/Dockerfile
index 6a6e08f61cc..12e7a416d7f 100644
--- a/cmd/syft/internal/test/integration/test-fixtures/image-mariner-distroless/Dockerfile
+++ b/cmd/syft/internal/test/integration/test-fixtures/image-mariner-distroless/Dockerfile
@@ -1 +1,8 @@
-FROM mcr.microsoft.com/cbl-mariner/distroless/base:2.0.202205275@sha256:f550c5428df17b145851ad75983aca6d613ad4b51ca7983b2a83e67d0ac91a5d
+FROM mcr.microsoft.com/cbl-mariner/distroless/base:2.0.202205275@sha256:f550c5428df17b145851ad75983aca6d613ad4b51ca7983b2a83e67d0ac91a5d AS base
+
+# let's shoot for smaller test fixtures
+FROM scratch
+
+COPY --from=base /var/lib/rpmmanifest/container-manifest-2 /var/lib/rpmmanifest/container-manifest-2
+COPY --from=base /usr/bin/gencat /usr/bin/gencat
+COPY --from=base /usr/bin/openssl /usr/bin/openssl
diff --git a/cmd/syft/internal/test/integration/test-fixtures/image-owning-package/Dockerfile b/cmd/syft/internal/test/integration/test-fixtures/image-owning-package/Dockerfile
index 192998626e9..931547acdc5 100644
--- a/cmd/syft/internal/test/integration/test-fixtures/image-owning-package/Dockerfile
+++ b/cmd/syft/internal/test/integration/test-fixtures/image-owning-package/Dockerfile
@@ -1,3 +1,8 @@
-FROM ubuntu:20.04@sha256:33a5cc25d22c45900796a1aca487ad7a7cb09f09ea00b779e3b2026b4fc2faba
+FROM ubuntu:20.04@sha256:33a5cc25d22c45900796a1aca487ad7a7cb09f09ea00b779e3b2026b4fc2faba AS base
# this covers rpm-python
RUN apt-get update && apt-get install -y python-pil=6.2.1-3
+
+# let's save some space...
+FROM scratch
+
+COPY --from=base /var/lib/dpkg/status /var/lib/dpkg/status
diff --git a/cmd/syft/internal/test/integration/test-fixtures/image-photon-all-layers/Dockerfile b/cmd/syft/internal/test/integration/test-fixtures/image-photon-all-layers/Dockerfile
index 17bb3691b4c..4910647366c 100644
--- a/cmd/syft/internal/test/integration/test-fixtures/image-photon-all-layers/Dockerfile
+++ b/cmd/syft/internal/test/integration/test-fixtures/image-photon-all-layers/Dockerfile
@@ -1 +1,5 @@
-FROM photon:5.0-20230729@sha256:4cf2a1ce0a3f4625f13a0becb6b9bccfdb014c565be6e9a2ec4c4aad1ff8a5d9
+FROM photon:5.0-20230729@sha256:4cf2a1ce0a3f4625f13a0becb6b9bccfdb014c565be6e9a2ec4c4aad1ff8a5d9 AS base
+
+FROM scratch
+
+COPY --from=base /usr/lib/sysimage/rpm /usr/lib/sysimage/rpm
diff --git a/cmd/syft/internal/test/integration/test-fixtures/image-sqlite-rpmdb/Dockerfile b/cmd/syft/internal/test/integration/test-fixtures/image-sqlite-rpmdb/Dockerfile
index 938b431d518..1bda58960be 100644
--- a/cmd/syft/internal/test/integration/test-fixtures/image-sqlite-rpmdb/Dockerfile
+++ b/cmd/syft/internal/test/integration/test-fixtures/image-sqlite-rpmdb/Dockerfile
@@ -1 +1,6 @@
-FROM fedora:35@sha256:36af84ba69e21c9ef86a0424a090674c433b2b80c2462e57503886f1d823abe8
+FROM fedora:35@sha256:36af84ba69e21c9ef86a0424a090674c433b2b80c2462e57503886f1d823abe8 AS base
+
+# let's save some space
+FROM scratch
+
+COPY --from=base /var/lib/rpm /var/lib/rpm
diff --git a/cmd/syft/internal/test/integration/test-fixtures/image-suse-all-layers/Dockerfile b/cmd/syft/internal/test/integration/test-fixtures/image-suse-all-layers/Dockerfile
index 339983d8800..0c4f1165606 100644
--- a/cmd/syft/internal/test/integration/test-fixtures/image-suse-all-layers/Dockerfile
+++ b/cmd/syft/internal/test/integration/test-fixtures/image-suse-all-layers/Dockerfile
@@ -1,2 +1,11 @@
-FROM registry.suse.com/suse/sle15:15.3.17.20.20@sha256:fd657ecbab5ca564d6933e887f6ae8542a9398e6a4b399f352ce10c3a24afc64
+FROM registry.suse.com/suse/sle15:15.3.17.20.20@sha256:fd657ecbab5ca564d6933e887f6ae8542a9398e6a4b399f352ce10c3a24afc64 AS base
RUN zypper in -y wget
+
+# let's save some space... we really just need an image that has an RPM DB that is linked across layers
+FROM --platform=linux/amd64 busybox:1.36.1
+
+# set up a link /var/lib/rpm -> ../../usr/lib/sysimage/rpm
+RUN mkdir -p /var/lib && ln -s ../../usr/lib/sysimage/rpm /var/lib/rpm
+
+# copy the RPM DB from the SUSE image
+COPY --from=base /usr/lib/sysimage/rpm/Packages.db /usr/lib/sysimage/rpm/Packages.db
diff --git a/cmd/syft/internal/test/integration/test-fixtures/image-test-java-purls/Dockerfile b/cmd/syft/internal/test/integration/test-fixtures/image-test-java-purls/Dockerfile
index 16f074b362f..4e4cb13885b 100644
--- a/cmd/syft/internal/test/integration/test-fixtures/image-test-java-purls/Dockerfile
+++ b/cmd/syft/internal/test/integration/test-fixtures/image-test-java-purls/Dockerfile
@@ -1,3 +1,18 @@
-FROM docker.io/anchore/test_images:java-88948cc@sha256:dea0e6c24636937f53bdc997d9960c2a18966d1e38bcd8ebd0c395d4e169b806
+FROM docker.io/anchore/test_images:java-88948cc@sha256:dea0e6c24636937f53bdc997d9960c2a18966d1e38bcd8ebd0c395d4e169b806 AS base
-RUN rm /packages/gradle-7.1.1-bin.zip
\ No newline at end of file
+# not covered in testing...
+RUN rm /packages/gradle-7.1.1-bin.zip
+
+RUN apk add --no-cache python3 py3-pip
+
+COPY extract.py /extract.py
+
+WORKDIR /
+
+# let's make this image a little smaller so as not to take up so much disk space
+# we'll only keep the jar metadata files (pom data + manifest) and throw away the rest
+RUN python extract.py
+
+FROM scratch
+
+COPY --from=base /slim/packages /packages
diff --git a/cmd/syft/internal/test/integration/test-fixtures/image-test-java-purls/extract.py b/cmd/syft/internal/test/integration/test-fixtures/image-test-java-purls/extract.py
new file mode 100644
index 00000000000..e0f005b4ce9
--- /dev/null
+++ b/cmd/syft/internal/test/integration/test-fixtures/image-test-java-purls/extract.py
@@ -0,0 +1,69 @@
+import os
+import zipfile
+import io
+
+ARCHIVE_EXTENSIONS = ('.jar', '.war', '.ear', '.hpi', '.sar', '.nar', '.par')
+METADATA_FILES = ('pom.xml', 'pom.properties', 'MANIFEST.MF')
+
+
+def slim_archive(archive, output_dir, base_path="", archive_name=""):
+ """
+ extracts metadata files from the archive and creates a slim JAR file
+ containing only these files. handles nested JARs by preserving them.
+ """
+ slim_buffer = io.BytesIO()
+ with zipfile.ZipFile(archive, 'r') as zip_file:
+ with zipfile.ZipFile(slim_buffer, 'w', zipfile.ZIP_DEFLATED) as slim_zip:
+ for file_name in zip_file.namelist():
+ # check for metadata files or nested JARs
+ if file_name.endswith(METADATA_FILES):
+ # add metadata files directly to the slimmed archive
+ file_data = zip_file.read(file_name)
+ slim_zip.writestr(file_name, file_data)
+ elif file_name.endswith(ARCHIVE_EXTENSIONS):
+ # if it's a nested archive, recursively slim it
+ nested_archive = io.BytesIO(zip_file.read(file_name))
+ nested_slim_buffer = io.BytesIO()
+ slim_archive(
+ nested_archive,
+ nested_slim_buffer,
+ base_path=os.path.join(base_path, os.path.dirname(file_name)),
+ archive_name=os.path.basename(file_name)
+ )
+ # add the slimmed nested archive back to the parent archive
+ nested_slim_buffer.seek(0)
+ slim_zip.writestr(file_name, nested_slim_buffer.read())
+
+ # write out the slimmed JAR to the output directory if output_dir is a directory
+ if isinstance(output_dir, str):
+ output_path = os.path.join(output_dir, base_path, archive_name)
+ os.makedirs(os.path.dirname(output_path), exist_ok=True)
+ with open(output_path, 'wb') as f:
+ slim_buffer.seek(0)
+ f.write(slim_buffer.read())
+ else:
+ # if output_dir is a BytesIO buffer (for nested archives), just write to it
+ output_dir.seek(0)
+ output_dir.write(slim_buffer.getvalue())
+
+
+def walk_directory_and_slim_jars(base_dir, output_dir):
+ """
+ recursively walks through a directory tree looking for .jar, .war, .ear,
+ .hpi files and slims them down by keeping only metadata files.
+ """
+ for dirpath, _, filenames in os.walk(base_dir):
+ for filename in filenames:
+ if filename.endswith(ARCHIVE_EXTENSIONS):
+ archive_path = os.path.join(dirpath, filename)
+ print(f"Processing {archive_path}")
+ slim_archive(archive_path, output_dir, os.path.relpath(dirpath, base_dir), filename)
+
+
+# a helper script for slimming down JAR files by keeping only metadata files but still keeping the jar packaging,
+# including nested JARs! Useful for testing purposes.
+if __name__ == "__main__":
+ BASE_DIR = "."
+ OUTPUT_DIR = "./slim"
+ os.makedirs(OUTPUT_DIR, exist_ok=True)
+ walk_directory_and_slim_jars(BASE_DIR, OUTPUT_DIR)
diff --git a/cmd/syft/internal/test/integration/test-fixtures/image-vertical-package-dups/Dockerfile b/cmd/syft/internal/test/integration/test-fixtures/image-vertical-package-dups/Dockerfile
index cd0e69b5de8..28f95ba5949 100644
--- a/cmd/syft/internal/test/integration/test-fixtures/image-vertical-package-dups/Dockerfile
+++ b/cmd/syft/internal/test/integration/test-fixtures/image-vertical-package-dups/Dockerfile
@@ -1,6 +1,27 @@
-FROM centos:7.9.2009@sha256:be65f488b7764ad3638f236b7b515b3678369a5124c47b8d32916d6487418ea4
+FROM --platform=linux/amd64 rockylinux:9.3.20231119@sha256:d644d203142cd5b54ad2a83a203e1dee68af2229f8fe32f52a30c6e1d3c3a9e0 AS base
+
# modifying the RPM DB multiple times will result in duplicate packages when using all-layers (if there was no de-dup logic)
# curl is tricky, it already exists in the image and is being upgraded
-RUN yum install -y wget-1.14-18.el7_6.1 curl-7.29.0-59.el7_9.1
-RUN yum install -y vsftpd-3.0.2-29.el7_9
-RUN yum install -y httpd-2.4.6-97.el7.centos.5
+
+# but... we want to make the test image as small as possible, so we are making the changes in stages and then
+# copying the RPM DB from each stage to a final stage in separate layers. This will result in a much smaller image.
+
+FROM base AS stage1
+RUN dnf install -y wget
+
+FROM stage1 AS stage2
+RUN dnf update -y curl-minimal
+
+FROM stage2 AS stage3
+RUN dnf install -y vsftpd
+
+FROM stage3 AS stage4
+RUN dnf install -y httpd
+
+FROM scratch
+
+COPY --from=base /var/lib/rpm /var/lib/rpm
+COPY --from=stage1 /var/lib/rpm /var/lib/rpm
+COPY --from=stage2 /var/lib/rpm /var/lib/rpm
+COPY --from=stage3 /var/lib/rpm /var/lib/rpm
+COPY --from=stage4 /var/lib/rpm /var/lib/rpm
diff --git a/go.mod b/go.mod
index 7cb63cf530d..f6fc5241121 100644
--- a/go.mod
+++ b/go.mod
@@ -88,6 +88,7 @@ require google.golang.org/genproto v0.0.0-20231106174013-bbf56f31fb17 // indirec
require (
github.com/BurntSushi/toml v1.4.0
+ github.com/OneOfOne/xxhash v1.2.8
github.com/adrg/xdg v0.5.0
github.com/magiconair/properties v1.8.7
golang.org/x/exp v0.0.0-20231108232855-2478ac86f678
diff --git a/go.sum b/go.sum
index d3dc4b7d916..61e20d6dd3f 100644
--- a/go.sum
+++ b/go.sum
@@ -79,6 +79,8 @@ github.com/Microsoft/go-winio v0.6.1/go.mod h1:LRdKpFKfdobln8UmuiYcKPot9D2v6svN5
github.com/Microsoft/hcsshim v0.11.4 h1:68vKo2VN8DE9AdN4tnkWnmdhqdbpUFM8OF3Airm7fz8=
github.com/Microsoft/hcsshim v0.11.4/go.mod h1:smjE4dvqPX9Zldna+t5FG3rnoHhaB7QYxPRqGcpAD9w=
github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU=
+github.com/OneOfOne/xxhash v1.2.8 h1:31czK/TI9sNkxIKfaUfGlU47BAxQ0ztGgd9vPyqimf8=
+github.com/OneOfOne/xxhash v1.2.8/go.mod h1:eZbhyaAYD41SGSSsnmcpxVoRiQ/MPUTjUdIIOT9Um7Q=
github.com/ProtonMail/go-crypto v1.0.0 h1:LRuvITjQWX+WIfr930YHG2HNfjR1uOfyf5vE0kC2U78=
github.com/ProtonMail/go-crypto v1.0.0/go.mod h1:EjAoLdwvbIOoOQr3ihjnSoLZRtE8azugULFRteWMNc0=
github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d h1:licZJFw2RwpHMqeKTCYkitsPqHNxTmd4SNR5r94FGM8=
diff --git a/internal/task/executor.go b/internal/task/executor.go
index 2935f61b12c..899796424be 100644
--- a/internal/task/executor.go
+++ b/internal/task/executor.go
@@ -7,9 +7,9 @@ import (
"sync"
"time"
- "github.com/anchore/syft/internal/log"
"github.com/hashicorp/go-multierror"
+ "github.com/anchore/syft/internal/log"
"github.com/anchore/syft/internal/sbomsync"
"github.com/anchore/syft/syft/event/monitor"
"github.com/anchore/syft/syft/file"
diff --git a/syft/file/cataloger/executable/test-fixtures/Makefile b/syft/file/cataloger/executable/test-fixtures/Makefile
new file mode 100644
index 00000000000..da3e730e131
--- /dev/null
+++ b/syft/file/cataloger/executable/test-fixtures/Makefile
@@ -0,0 +1,15 @@
+.DEFAULT_GOAL := default
+
+default:
+ @for dir in $(shell find . -mindepth 1 -maxdepth 1 -type d); do \
+ if [ -f "$$dir/Makefile" ]; then \
+ $(MAKE) -C $$dir; \
+ fi; \
+ done
+
+%:
+ @for dir in $(shell find . -mindepth 1 -maxdepth 1 -type d); do \
+ if [ -f "$$dir/Makefile" ]; then \
+ $(MAKE) -C $$dir $@; \
+ fi; \
+ done
diff --git a/syft/file/cataloger/executable/test-fixtures/elf/Makefile b/syft/file/cataloger/executable/test-fixtures/elf/Makefile
index 1cff6183e1e..5130c8faccb 100644
--- a/syft/file/cataloger/executable/test-fixtures/elf/Makefile
+++ b/syft/file/cataloger/executable/test-fixtures/elf/Makefile
@@ -1,8 +1,19 @@
BIN=./bin
TOOL_IMAGE=localhost/syft-bin-build-tools:latest
VERIFY_FILE=actual_verify
+FINGERPRINT_FILE=$(BIN).fingerprint
-all: build verify
+ifndef BIN
+ $(error BIN is not set)
+endif
+
+.DEFAULT_GOAL := fixtures
+
+# requirement 1: 'fixtures' goal to generate any and all test fixtures
+fixtures: build verify
+
+# requirement 2: 'fingerprint' goal to determine if the fixture inputs have changed (indicating that any existing cache should be busted)
+fingerprint: $(FINGERPRINT_FILE)
tools-check:
@sha256sum -c Dockerfile.sha256 || (echo "Tools Dockerfile has changed" && exit 1)
@@ -25,10 +36,14 @@ verify: tools
debug:
docker run -i --rm -v $(shell pwd):/mount -w /mount/project $(TOOL_IMAGE) bash
-cache.fingerprint:
- @find project Dockerfile Makefile -type f -exec md5sum {} + | awk '{print $1}' | sort | tee cache.fingerprint
+# requirement 3: we always need to recalculate the fingerprint based on source regardless of any existing fingerprint
+.PHONY: $(FINGERPRINT_FILE)
+$(FINGERPRINT_FILE):
+ @find project Dockerfile Makefile -type f -exec sha256sum {} \; | sort -k2 > $(FINGERPRINT_FILE)
+ @#cat $(FINGERPRINT_FILE) | sha256sum | awk '{print $$1}'
+# requirement 4: 'clean' goal to remove all generated test fixtures
clean:
- rm -f $(BIN)/*
+ rm -rf $(BIN) Dockerfile.sha256 $(VERIFY_FILE) $(FINGERPRINT_FILE)
-.PHONY: build verify debug build-image build-bins clean dockerfile-check cache.fingerprint
+.PHONY: tools tools-check build verify debug clean
\ No newline at end of file
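
Each per-fixture 'fingerprint' goal like the one above leaves a *.fingerprint file next to the fixture it describes, and CI folds those files into a cache key. A minimal sketch of that folding step (illustrative only, assuming sha256 over the sorted fingerprint files; the real aggregation lives in the repository's CI scripting):

    // fold_fingerprints.go: minimal sketch, not the repo's actual CI logic.
    package main

    import (
    	"crypto/sha256"
    	"fmt"
    	"io/fs"
    	"os"
    	"path/filepath"
    	"sort"
    	"strings"
    )

    func main() {
    	var files []string
    	_ = filepath.WalkDir(".", func(path string, d fs.DirEntry, err error) error {
    		if err == nil && !d.IsDir() && strings.HasSuffix(path, ".fingerprint") {
    			files = append(files, path)
    		}
    		return nil
    	})
    	sort.Strings(files) // stable order so the key is reproducible

    	h := sha256.New()
    	for _, f := range files {
    		by, err := os.ReadFile(f)
    		if err != nil {
    			continue
    		}
    		h.Write(by)
    	}
    	fmt.Printf("%x\n", h.Sum(nil)[:8]) // short, human-manageable cache key
    }

Run from the repository root after 'make fingerprint', this prints one short key that changes whenever any fixture input changes.
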
diff --git a/syft/file/cataloger/executable/test-fixtures/shared-info/Makefile b/syft/file/cataloger/executable/test-fixtures/shared-info/Makefile
index a3d5959c358..8321e0ae09e 100644
--- a/syft/file/cataloger/executable/test-fixtures/shared-info/Makefile
+++ b/syft/file/cataloger/executable/test-fixtures/shared-info/Makefile
@@ -1,8 +1,20 @@
BIN=./bin
TOOL_IMAGE=localhost/syft-shared-info-build-tools:latest
VERIFY_FILE=actual_verify
+FINGERPRINT_FILE=$(BIN).fingerprint
+
+ifndef BIN
+ $(error BIN is not set)
+endif
+
+.DEFAULT_GOAL := fixtures
+
+# requirement 1: 'fixtures' goal to generate any and all test fixtures
+fixtures: build
+
+# requirement 2: 'fingerprint' goal to determine if the fixture inputs have changed (indicating that any existing cache should be busted)
+fingerprint: $(FINGERPRINT_FILE)
-all: build
tools-check:
@sha256sum -c Dockerfile.sha256 || (echo "Tools Dockerfile has changed" && exit 1)
@@ -10,16 +22,20 @@ tools:
@(docker inspect $(TOOL_IMAGE) > /dev/null && make tools-check) || (docker build -t $(TOOL_IMAGE) . && sha256sum Dockerfile > Dockerfile.sha256)
build: tools
- mkdir -p $(BIN)
+ @mkdir -p $(BIN)
docker run --platform linux/amd64 -i -v $(shell pwd):/mount -w /mount/project $(TOOL_IMAGE) make
debug:
docker run --platform linux/amd64 -i --rm -v $(shell pwd):/mount -w /mount/project $(TOOL_IMAGE) bash
-cache.fingerprint:
- @find project Dockerfile Makefile -type f -exec md5sum {} + | awk '{print $1}' | sort | tee cache.fingerprint
+# requirement 3: we always need to recalculate the fingerprint based on source regardless of any existing fingerprint
+.PHONY: $(FINGERPRINT_FILE)
+$(FINGERPRINT_FILE):
+ @find project Dockerfile Makefile -type f -exec sha256sum {} \; | sort -k2 > $(FINGERPRINT_FILE)
+ @#cat $(FINGERPRINT_FILE) | sha256sum | awk '{print $$1}'
+# requirement 4: 'clean' goal to remove all generated test fixtures
clean:
- rm -f $(BIN)/*
+ rm -rf $(BIN) Dockerfile.sha256 $(VERIFY_FILE) $(FINGERPRINT_FILE)
-.PHONY: build verify debug build-image build-bins clean dockerfile-check cache.fingerprint
+.PHONY: tools tools-check build debug clean
diff --git a/syft/format/text/test-fixtures/image-simple/Dockerfile b/syft/format/text/test-fixtures/image-simple/Dockerfile
deleted file mode 100644
index 79cfa759e35..00000000000
--- a/syft/format/text/test-fixtures/image-simple/Dockerfile
+++ /dev/null
@@ -1,4 +0,0 @@
-# Note: changes to this file will result in updating several test values. Consider making a new image fixture instead of editing this one.
-FROM scratch
-ADD file-1.txt /somefile-1.txt
-ADD file-2.txt /somefile-2.txt
diff --git a/syft/format/text/test-fixtures/image-simple/file-1.txt b/syft/format/text/test-fixtures/image-simple/file-1.txt
deleted file mode 100644
index 985d3408e98..00000000000
--- a/syft/format/text/test-fixtures/image-simple/file-1.txt
+++ /dev/null
@@ -1 +0,0 @@
-this file has contents
\ No newline at end of file
diff --git a/syft/format/text/test-fixtures/image-simple/file-2.txt b/syft/format/text/test-fixtures/image-simple/file-2.txt
deleted file mode 100644
index 396d08bbc72..00000000000
--- a/syft/format/text/test-fixtures/image-simple/file-2.txt
+++ /dev/null
@@ -1 +0,0 @@
-file-2 contents!
\ No newline at end of file
diff --git a/syft/format/text/test-fixtures/snapshot/TestTextImageEncoder.golden b/syft/format/text/test-fixtures/snapshot/TestTextImageEncoder.golden
index 4ab3a446e0c..0c49cecc049 100644
--- a/syft/format/text/test-fixtures/snapshot/TestTextImageEncoder.golden
+++ b/syft/format/text/test-fixtures/snapshot/TestTextImageEncoder.golden
@@ -1,11 +1,11 @@
[Image]
Layer: 0
- Digest: sha256:fb6beecb75b39f4bb813dbf177e501edd5ddb3e69bb45cedeb78c676ee1b7a59
+ Digest: sha256:100d5a55f9032faead28b7427fa3e650e4f0158f86ea89d06e1489df00cb8c6f
Size: 22
MediaType: application/vnd.docker.image.rootfs.diff.tar.gzip
Layer: 1
- Digest: sha256:319b588ce64253a87b533c8ed01cf0025e0eac98e7b516e12532957e1244fdec
+ Digest: sha256:000fb9200890d3a19138478b20023023c0dce1c54352007c2863716780f049eb
Size: 16
MediaType: application/vnd.docker.image.rootfs.diff.tar.gzip
diff --git a/syft/pkg/cataloger/binary/test-fixtures/.gitignore b/syft/pkg/cataloger/binary/test-fixtures/.gitignore
index e1d59c126c8..4d4d11ec993 100644
--- a/syft/pkg/cataloger/binary/test-fixtures/.gitignore
+++ b/syft/pkg/cataloger/binary/test-fixtures/.gitignore
@@ -1,6 +1,5 @@
classifiers/dynamic
classifiers/bin
-cache.fingerprint
# allow for lib patterns (rust, python, php and more)
!lib*.so
diff --git a/syft/pkg/cataloger/binary/test-fixtures/Makefile b/syft/pkg/cataloger/binary/test-fixtures/Makefile
index 3e8efed9468..fa37d43c16b 100644
--- a/syft/pkg/cataloger/binary/test-fixtures/Makefile
+++ b/syft/pkg/cataloger/binary/test-fixtures/Makefile
@@ -1,8 +1,14 @@
-.PHONY: default list download download-all cache.fingerprint
+BIN=classifiers/bin
+FINGERPRINT_FILE=$(BIN).fingerprint
-.DEFAULT_GOAL := default
-default: download
+.DEFAULT_GOAL := fixtures
+
+# requirement 1: 'fixtures' goal to generate any and all test fixtures
+fixtures: download
+
+# requirement 2: 'fingerprint' goal to determine if the fixture inputs have changed (indicating that any existing cache should be busted)
+fingerprint: clean-fingerprint $(FINGERPRINT_FILE)
list: ## list all managed binaries and snippets
go run ./manager list
@@ -16,14 +22,23 @@ download-all: ## download all managed binaries
add-snippet: ## add a new snippet from an existing binary
go run ./manager add-snippet
-cache.fingerprint: ## prints the sha256sum of the any input to the download command (to determine if there is a cache miss)
- @cat ./config.yaml | sha256sum | awk '{print $$1}' | tee cache.fingerprint
+# requirement 3: we always need to recalculate the fingerprint based on source regardless of any existing fingerprint
+.PHONY: $(FINGERPRINT_FILE)
+$(FINGERPRINT_FILE): ## prints the sha256sum of any input to the download command (to determine if there is a cache miss)
+ @sha256sum ./config.yaml > $(FINGERPRINT_FILE)
+
+# requirement 4: 'clean' goal to remove all generated test fixtures
+clean: ## clean up all downloaded binaries
+ rm -rf $(BIN)
+
+clean-fingerprint: ## clean up all legacy fingerprint files
+ @find $(BIN) -name '*.fingerprint' -delete
-clean: ## clean up all downloaded binaries
- rm -rf ./classifiers/bin
## Halp! #################################
.PHONY: help
help:
- @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "$(BOLD)$(CYAN)%-25s$(RESET)%s\n", $$1, $$2}'
\ No newline at end of file
+ @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "$(BOLD)$(CYAN)%-25s$(RESET)%s\n", $$1, $$2}'
+
+.PHONY: default list download download-all clean clean-fingerprint add-snippet fingerprint
\ No newline at end of file
diff --git a/syft/pkg/cataloger/binary/test-fixtures/classifiers/snippets/traefik/3.0.4/linux-riscv64/traefik b/syft/pkg/cataloger/binary/test-fixtures/classifiers/snippets/traefik/3.0.4/linux-riscv64/traefik
new file mode 100644
index 00000000000..f361c692988
Binary files /dev/null and b/syft/pkg/cataloger/binary/test-fixtures/classifiers/snippets/traefik/3.0.4/linux-riscv64/traefik differ
diff --git a/syft/pkg/cataloger/binary/test-fixtures/config.yaml b/syft/pkg/cataloger/binary/test-fixtures/config.yaml
index 78d8ba4b8cf..ac433555c22 100644
--- a/syft/pkg/cataloger/binary/test-fixtures/config.yaml
+++ b/syft/pkg/cataloger/binary/test-fixtures/config.yaml
@@ -85,6 +85,7 @@ from-images:
paths:
- /usr/local/go/bin/go
+ # TODO: this is no longer available from dockerhub! (the snippet is vital)
- version: 1.5.14
images:
- ref: haproxy:1.5.14@sha256:3d57e3921cc84e860f764e863ce729dd0765e3d28d444775127bc42d68f98e10
diff --git a/syft/pkg/cataloger/binary/test-fixtures/elf-test-fixtures/Dockerfile b/syft/pkg/cataloger/binary/test-fixtures/elf-test-fixtures/Dockerfile
index a5efa56eb3f..42a74837c5a 100644
--- a/syft/pkg/cataloger/binary/test-fixtures/elf-test-fixtures/Dockerfile
+++ b/syft/pkg/cataloger/binary/test-fixtures/elf-test-fixtures/Dockerfile
@@ -1,14 +1,31 @@
-FROM rockylinux:8
+FROM rockylinux:8 AS base
+
RUN dnf update -y; \
dnf install make automake gcc gcc-c++ kernel-devel -y; \
dnf clean all
RUN mkdir -p /usr/local/bin/elftests/elfbinwithnestedlib
RUN mkdir -p /usr/local/bin/elftests/elfbinwithsisterlib
+
COPY ./elfbinwithnestedlib /usr/local/bin/elftests/elfbinwithnestedlib
COPY ./elfbinwithsisterlib /usr/local/bin/elftests/elfbinwithsisterlib
+
ENV LD_LIBRARY_PATH=/usr/local/bin/elftests/elfbinwithnestedlib/bin/lib
+
WORKDIR /usr/local/bin/elftests/elfbinwithnestedlib/
RUN make
+
WORKDIR /usr/local/bin/elftests/elfbinwithsisterlib
RUN make
+# let's make the test image smaller, since we only require the built binaries and supporting libraries
+FROM busybox:1.36.1-musl
+
+COPY --from=base /usr/local/bin/elftests /usr/local/bin/elftests
+COPY --from=base /var/lib/rpm /var/lib/rpm
+COPY --from=base '/usr/lib64/libstdc++.so.6.0.25' '/usr/lib64/libstdc++.so.6.0.25'
+COPY --from=base '/usr/lib64/libstdc++.so.6' '/usr/lib64/libstdc++.so.6'
+COPY --from=base '/usr/lib64/libc.so.6' '/usr/lib64/libc.so.6'
+COPY --from=base '/usr/lib64/libc.so' '/usr/lib64/libc.so'
+
+# prove we can operate over symlinks (/lib64 -> usr/lib64)
+RUN ln -s /usr/lib64 /lib64
diff --git a/syft/pkg/cataloger/binary/test-fixtures/image-fedora-32bit/Dockerfile b/syft/pkg/cataloger/binary/test-fixtures/image-fedora-32bit/Dockerfile
index e89c76124bb..0df726644a7 100644
--- a/syft/pkg/cataloger/binary/test-fixtures/image-fedora-32bit/Dockerfile
+++ b/syft/pkg/cataloger/binary/test-fixtures/image-fedora-32bit/Dockerfile
@@ -1,4 +1,4 @@
-FROM --platform=linux/arm arm32v7/fedora:36 as build
+FROM --platform=linux/arm arm32v7/fedora:36 AS build
FROM scratch
COPY --from=build /bin/sha256sum /sha256sum
diff --git a/syft/pkg/cataloger/binary/test-fixtures/image-fedora-64bit/Dockerfile b/syft/pkg/cataloger/binary/test-fixtures/image-fedora-64bit/Dockerfile
index 0d65e734110..bd9694091d8 100644
--- a/syft/pkg/cataloger/binary/test-fixtures/image-fedora-64bit/Dockerfile
+++ b/syft/pkg/cataloger/binary/test-fixtures/image-fedora-64bit/Dockerfile
@@ -1,4 +1,4 @@
-FROM --platform=linux/amd64 fedora:41@sha256:c05bf79137835bf5c521c58f8252d6031780ae865a0379ab57f412e0ac6b42aa as build
+FROM --platform=linux/amd64 fedora:41@sha256:c05bf79137835bf5c521c58f8252d6031780ae865a0379ab57f412e0ac6b42aa AS build
FROM scratch
diff --git a/syft/pkg/cataloger/binary/test-fixtures/manager/internal/config/binary_from_image.go b/syft/pkg/cataloger/binary/test-fixtures/manager/internal/config/binary_from_image.go
index f26ac3ae40a..dc558250186 100644
--- a/syft/pkg/cataloger/binary/test-fixtures/manager/internal/config/binary_from_image.go
+++ b/syft/pkg/cataloger/binary/test-fixtures/manager/internal/config/binary_from_image.go
@@ -1,11 +1,11 @@
package config
import (
- "crypto/sha256"
"fmt"
"path/filepath"
"strings"
+ "github.com/OneOfOne/xxhash"
"gopkg.in/yaml.v3"
)
@@ -68,13 +68,13 @@ func PlatformAsValue(platform string) string {
return strings.ReplaceAll(platform, "/", "-")
}
-func (c BinaryFromImage) Fingerprint() string {
+func (c BinaryFromImage) Digest() string {
by, err := yaml.Marshal(c)
if err != nil {
panic(err)
}
- hasher := sha256.New()
- hasher.Write(by)
+ hasher := xxhash.New64()
+ _, _ = hasher.Write(by)
return fmt.Sprintf("%x", hasher.Sum(nil))
}
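
Since Digest() now hashes with xxh64 instead of sha256, the expected values in the tests below shrink from 64 hex characters to 16. A standalone sketch of the difference (the input bytes are a made-up stand-in for the YAML-marshaled config):

    package main

    import (
    	"crypto/sha256"
    	"fmt"

    	"github.com/OneOfOne/xxhash"
    )

    func main() {
    	by := []byte("name: test\nversion: 1.0\n") // stand-in for the marshaled config

    	x := xxhash.New64()
    	_, _ = x.Write(by)
    	fmt.Printf("xxh64:  %x (8 bytes -> 16 hex chars)\n", x.Sum(nil))

    	s := sha256.Sum256(by)
    	fmt.Printf("sha256: %x (32 bytes -> 64 hex chars)\n", s)
    }
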
diff --git a/syft/pkg/cataloger/binary/test-fixtures/manager/internal/config/binary_from_image_test.go b/syft/pkg/cataloger/binary/test-fixtures/manager/internal/config/binary_from_image_test.go
index 8d76d5a2b29..55bb3ee40c2 100644
--- a/syft/pkg/cataloger/binary/test-fixtures/manager/internal/config/binary_from_image_test.go
+++ b/syft/pkg/cataloger/binary/test-fixtures/manager/internal/config/binary_from_image_test.go
@@ -158,7 +158,7 @@ func TestPlatformAsValue(t *testing.T) {
}
}
-func TestFingerprint(t *testing.T) {
+func TestDigest(t *testing.T) {
tests := []struct {
name string
binary BinaryFromImage
@@ -179,13 +179,13 @@ func TestFingerprint(t *testing.T) {
"path/to/test",
},
},
- expected: "54ed081c07e4eba031afed4c04315cf96047822196473971be98d0769a0e3645",
+ expected: "fc25c48e3d2f01e3",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
- assert.Equal(t, tt.expected, tt.binary.Fingerprint())
+ assert.Equal(t, tt.expected, tt.binary.Digest())
})
}
}
diff --git a/syft/pkg/cataloger/binary/test-fixtures/manager/internal/download_from_image.go b/syft/pkg/cataloger/binary/test-fixtures/manager/internal/download_from_image.go
index 32b9c83d6dd..1d5a667eabf 100644
--- a/syft/pkg/cataloger/binary/test-fixtures/manager/internal/download_from_image.go
+++ b/syft/pkg/cataloger/binary/test-fixtures/manager/internal/download_from_image.go
@@ -2,6 +2,7 @@ package internal
import (
"encoding/json"
+ "errors"
"fmt"
"os"
"os/exec"
@@ -14,6 +15,8 @@ import (
"github.com/anchore/syft/syft/pkg/cataloger/binary/test-fixtures/manager/internal/ui"
)
+const digestFileSuffix = ".xxh64"
+
func DownloadFromImage(dest string, config config.BinaryFromImage) error {
t := ui.Title{Name: config.Name(), Version: config.Version}
t.Start()
@@ -39,22 +42,22 @@ func DownloadFromImage(dest string, config config.BinaryFromImage) error {
}
func isDownloadStale(config config.BinaryFromImage, binaryPaths []string) bool {
- currentFingerprint := config.Fingerprint()
+ currentDigest := config.Digest()
for _, path := range binaryPaths {
- fingerprintPath := path + ".fingerprint"
- if _, err := os.Stat(fingerprintPath); err != nil {
+ digestPath := path + digestFileSuffix
+ if _, err := os.Stat(digestPath); err != nil {
// missing a fingerprint file means the download is stale
return true
}
- writtenFingerprint, err := os.ReadFile(fingerprintPath)
+ writtenDigest, err := os.ReadFile(digestPath)
if err != nil {
// missing a fingerprint file means the download is stale
return true
}
- if string(writtenFingerprint) != currentFingerprint {
+ if string(writtenDigest) != currentDigest {
// the fingerprint file does not match the current fingerprint, so the download is stale
return true
}
@@ -103,6 +106,12 @@ func pullDockerImage(imageReference, platform string) error {
cmd := exec.Command("docker", "pull", "--platform", platform, imageReference)
err := cmd.Run()
if err != nil {
+ // attach stderr to output message
+ var exitErr *exec.ExitError
+ if errors.As(err, &exitErr) && len(exitErr.Stderr) > 0 {
+ err = fmt.Errorf("pull failed: %w:\n%s", err, exitErr.Stderr)
+ }
+
a.Done(err)
return err
}
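
A caveat on the pattern above: exec.ExitError.Stderr is only populated when the command is started via cmd.Output(); after a bare cmd.Run() it is typically empty, so the guarded branch only fires if stderr was collected some other way. A minimal sketch that captures stderr explicitly (an illustrative variant, not what the diff does):

    package main

    import (
    	"bytes"
    	"errors"
    	"fmt"
    	"os/exec"
    )

    // runWithStderr collects stderr itself so the returned error can include it,
    // since ExitError.Stderr is filled in by cmd.Output() but not by cmd.Run().
    func runWithStderr(name string, args ...string) error {
    	cmd := exec.Command(name, args...)
    	var stderr bytes.Buffer
    	cmd.Stderr = &stderr

    	err := cmd.Run()
    	var exitErr *exec.ExitError
    	if errors.As(err, &exitErr) && stderr.Len() > 0 {
    		return fmt.Errorf("%w:\n%s", err, stderr.String())
    	}
    	return err
    }

    func main() {
    	// example invocation mirroring the docker pull in the diff
    	if err := runWithStderr("docker", "pull", "--platform", "linux/amd64", "busybox:no-such-tag"); err != nil {
    		fmt.Println(err)
    	}
    }
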
@@ -152,6 +161,12 @@ func copyBinariesFromDockerImage(config config.BinaryFromImage, destination stri
cmd := exec.Command("docker", "create", "--name", containerName, image.Reference)
if err = cmd.Run(); err != nil {
+ // attach stderr to output message
+ var exitErr *exec.ExitError
+ if errors.As(err, &exitErr) && len(exitErr.Stderr) > 0 {
+ err = fmt.Errorf("%w:\n%s", err, exitErr.Stderr)
+ }
+
return err
}
@@ -162,7 +177,7 @@ func copyBinariesFromDockerImage(config config.BinaryFromImage, destination stri
for i, destinationPath := range config.AllStorePathsForImage(image, destination) {
path := config.PathsInImage[i]
- if err := copyBinaryFromContainer(containerName, path, destinationPath, config.Fingerprint()); err != nil {
+ if err := copyBinaryFromContainer(containerName, path, destinationPath, config.Digest()); err != nil {
return err
}
}
@@ -170,7 +185,7 @@ func copyBinariesFromDockerImage(config config.BinaryFromImage, destination stri
return nil
}
-func copyBinaryFromContainer(containerName, containerPath, destinationPath, fingerprint string) (err error) {
+func copyBinaryFromContainer(containerName, containerPath, destinationPath, digest string) (err error) {
a := ui.Action{Msg: fmt.Sprintf("extract %s", containerPath)}
a.Start()
@@ -185,13 +200,24 @@ func copyBinaryFromContainer(containerName, containerPath, destinationPath, fing
cmd := exec.Command("docker", "cp", fmt.Sprintf("%s:%s", containerName, containerPath), destinationPath) //nolint:gosec
// reason for gosec exception: this is for processing test fixtures only, not used in production
if err := cmd.Run(); err != nil {
+ // attach stderr to output message
+ var exitErr *exec.ExitError
+ if errors.As(err, &exitErr) && len(exitErr.Stderr) > 0 {
+ err = fmt.Errorf("%w:\n%s", err, exitErr.Stderr)
+ }
+
return err
}
- // capture fingerprint file
- fingerprintPath := destinationPath + ".fingerprint"
- if err := os.WriteFile(fingerprintPath, []byte(fingerprint), 0600); err != nil {
- return fmt.Errorf("unable to write fingerprint file: %w", err)
+ // ensure permissions are 600 for destination
+ if err := os.Chmod(destinationPath, 0600); err != nil {
+ return fmt.Errorf("unable to set permissions on file %q: %w", destinationPath, err)
+ }
+
+ // capture digest file
+ digestPath := destinationPath + digestFileSuffix
+ if err := os.WriteFile(digestPath, []byte(digest), 0600); err != nil {
+ return fmt.Errorf("unable to write digest file: %w", err)
}
return nil
diff --git a/syft/pkg/cataloger/binary/test-fixtures/manager/internal/download_from_image_test.go b/syft/pkg/cataloger/binary/test-fixtures/manager/internal/download_from_image_test.go
index ca62ea5476a..fc097a7dbf6 100644
--- a/syft/pkg/cataloger/binary/test-fixtures/manager/internal/download_from_image_test.go
+++ b/syft/pkg/cataloger/binary/test-fixtures/manager/internal/download_from_image_test.go
@@ -14,35 +14,35 @@ import (
func TestIsDownloadStale(t *testing.T) {
cases := []struct {
- name string
- fingerprint string
- expected bool
+ name string
+ digest string
+ expected bool
}{
{
- name: "no fingerprint",
- fingerprint: "",
- expected: true,
+ name: "no digest",
+ digest: "",
+ expected: true,
},
{
- name: "fingerprint matches",
- // this is the fingerprint for config in the loop body
- fingerprint: "5177d458eaca031ea16fa707841043df2e31b89be6bae7ea41290aa32f0251a6",
- expected: false,
+ name: "digest matches",
+ // this is the digest for config in the loop body
+ digest: "c9c8007f9c55c2f1",
+ expected: false,
},
{
- name: "fingerprint does not match",
- fingerprint: "fingerprint",
- expected: true,
+ name: "digest does not match",
+ digest: "bogus",
+ expected: true,
},
}
for _, tt := range cases {
t.Run(tt.name, func(t *testing.T) {
binaryPath := filepath.Join(t.TempDir(), "binary")
- fh, err := os.Create(binaryPath + ".fingerprint")
+ fh, err := os.Create(binaryPath + digestFileSuffix)
require.NoError(t, err)
- fh.Write([]byte(tt.fingerprint))
+ fh.Write([]byte(tt.digest))
require.NoError(t, fh.Close())
cfg := config.BinaryFromImage{
diff --git a/syft/pkg/cataloger/binary/test-fixtures/manager/internal/list_entries.go b/syft/pkg/cataloger/binary/test-fixtures/manager/internal/list_entries.go
index 7d6c2063045..9ecf254401e 100644
--- a/syft/pkg/cataloger/binary/test-fixtures/manager/internal/list_entries.go
+++ b/syft/pkg/cataloger/binary/test-fixtures/manager/internal/list_entries.go
@@ -170,7 +170,7 @@ func getLogicalKey(managedBinaryPath string) (*LogicalEntryKey, error) {
func allFilePaths(root string) ([]string, error) {
var paths []string
err := filepath.Walk(root, func(path string, info os.FileInfo, _ error) error {
- if info != nil && !info.IsDir() && !strings.HasSuffix(path, ".fingerprint") {
+ if info != nil && !info.IsDir() && !strings.HasSuffix(path, digestFileSuffix) {
paths = append(paths, path)
}
return nil
diff --git a/syft/pkg/cataloger/gentoo/cataloger_test.go b/syft/pkg/cataloger/gentoo/cataloger_test.go
index f2deeb199b4..6fb80e78078 100644
--- a/syft/pkg/cataloger/gentoo/cataloger_test.go
+++ b/syft/pkg/cataloger/gentoo/cataloger_test.go
@@ -64,7 +64,7 @@ func TestPortageCataloger(t *testing.T) {
var expectedRelationships []artifact.Relationship
pkgtest.NewCatalogTester().
- FromDirectory(t, "test-fixtures/image-portage").
+ FromDirectory(t, "test-fixtures/layout").
Expects(expectedPkgs, expectedRelationships).
TestCataloger(t, NewPortageCataloger())
diff --git a/syft/pkg/cataloger/gentoo/test-fixtures/image-portage/var/db/pkg/app-containers/skopeo-1.5.1/CONTENTS b/syft/pkg/cataloger/gentoo/test-fixtures/layout/var/db/pkg/app-containers/skopeo-1.5.1/CONTENTS
similarity index 100%
rename from syft/pkg/cataloger/gentoo/test-fixtures/image-portage/var/db/pkg/app-containers/skopeo-1.5.1/CONTENTS
rename to syft/pkg/cataloger/gentoo/test-fixtures/layout/var/db/pkg/app-containers/skopeo-1.5.1/CONTENTS
diff --git a/syft/pkg/cataloger/gentoo/test-fixtures/image-portage/var/db/pkg/app-containers/skopeo-1.5.1/LICENSE b/syft/pkg/cataloger/gentoo/test-fixtures/layout/var/db/pkg/app-containers/skopeo-1.5.1/LICENSE
similarity index 100%
rename from syft/pkg/cataloger/gentoo/test-fixtures/image-portage/var/db/pkg/app-containers/skopeo-1.5.1/LICENSE
rename to syft/pkg/cataloger/gentoo/test-fixtures/layout/var/db/pkg/app-containers/skopeo-1.5.1/LICENSE
diff --git a/syft/pkg/cataloger/gentoo/test-fixtures/image-portage/var/db/pkg/app-containers/skopeo-1.5.1/SIZE b/syft/pkg/cataloger/gentoo/test-fixtures/layout/var/db/pkg/app-containers/skopeo-1.5.1/SIZE
similarity index 100%
rename from syft/pkg/cataloger/gentoo/test-fixtures/image-portage/var/db/pkg/app-containers/skopeo-1.5.1/SIZE
rename to syft/pkg/cataloger/gentoo/test-fixtures/layout/var/db/pkg/app-containers/skopeo-1.5.1/SIZE
diff --git a/syft/pkg/cataloger/golang/test-fixtures/Makefile b/syft/pkg/cataloger/golang/test-fixtures/Makefile
new file mode 100644
index 00000000000..da3e730e131
--- /dev/null
+++ b/syft/pkg/cataloger/golang/test-fixtures/Makefile
@@ -0,0 +1,15 @@
+.DEFAULT_GOAL := default
+
+default:
+ @for dir in $(shell find . -mindepth 1 -maxdepth 1 -type d); do \
+ if [ -f "$$dir/Makefile" ]; then \
+ $(MAKE) -C $$dir; \
+ fi; \
+ done
+
+%:
+ @for dir in $(shell find . -mindepth 1 -maxdepth 1 -type d); do \
+ if [ -f "$$dir/Makefile" ]; then \
+ $(MAKE) -C $$dir $@; \
+ fi; \
+ done
diff --git a/syft/pkg/cataloger/golang/test-fixtures/archs/Makefile b/syft/pkg/cataloger/golang/test-fixtures/archs/Makefile
index 60eee7ff96a..872f2be9176 100644
--- a/syft/pkg/cataloger/golang/test-fixtures/archs/Makefile
+++ b/syft/pkg/cataloger/golang/test-fixtures/archs/Makefile
@@ -1,29 +1,39 @@
DESTINATION=binaries
+FINGERPRINT_FILE=$(DESTINATION).fingerprint
-all: $(DESTINATION)/hello-mach-o-arm64 $(DESTINATION)/hello-linux-arm $(DESTINATION)/hello-linux-ppc64le $(DESTINATION)/hello-win-amd64
+ifndef DESTINATION
+ $(error DESTINATION is not set)
+endif
+
+.DEFAULT_GOAL := fixtures
+
+# requirement 1: 'fixtures' goal to generate any and all test fixtures
+fixtures: $(DESTINATION)
+
+# requirement 2: 'fingerprint' goal to determine if the fixture inputs have changed (indicating that any existing cache should be busted)
+fingerprint: $(FINGERPRINT_FILE)
+
+$(DESTINATION): $(DESTINATION)/hello-mach-o-arm64 $(DESTINATION)/hello-linux-arm $(DESTINATION)/hello-linux-ppc64le $(DESTINATION)/hello-win-amd64
$(DESTINATION)/hello-mach-o-arm64:
- mkdir -p $(DESTINATION)
GOARCH=arm64 GOOS=darwin ./src/build.sh $(DESTINATION)/hello-mach-o-arm64
$(DESTINATION)/hello-linux-arm:
- mkdir -p $(DESTINATION)
GOARCH=arm GOOS=linux ./src/build.sh $(DESTINATION)/hello-linux-arm
$(DESTINATION)/hello-linux-ppc64le:
- mkdir -p $(DESTINATION)
GOARCH=ppc64le GOOS=linux ./src/build.sh $(DESTINATION)/hello-linux-ppc64le
$(DESTINATION)/hello-win-amd64:
- mkdir -p $(DESTINATION)
GOARCH=amd64 GOOS=windows ./src/build.sh $(DESTINATION)/hello-win-amd64
-# we need a way to determine if CI should bust the test cache based on the source material
-$(DESTINATION).fingerprint: clean
- mkdir -p $(DESTINATION)
- find src -type f -exec sha256sum {} \; | sort | tee /dev/stderr | tee $(DESTINATION).fingerprint
- sha256sum $(DESTINATION).fingerprint
+# requirement 3: we always need to recalculate the fingerprint based on source regardless of any existing fingerprint
+.PHONY: $(FINGERPRINT_FILE)
+$(FINGERPRINT_FILE):
+ @find src -type f -exec sha256sum {} \; | sort -k2 > $(FINGERPRINT_FILE)
+ @#cat $(FINGERPRINT_FILE) | sha256sum | awk '{print $$1}'
+# requirement 4: 'clean' goal to remove all generated test fixtures
.PHONY: clean
clean:
- rm -f $(DESTINATION)/*
+ rm -rf $(DESTINATION)
diff --git a/syft/pkg/cataloger/golang/test-fixtures/archs/src/build.sh b/syft/pkg/cataloger/golang/test-fixtures/archs/src/build.sh
index 8a3919470b3..a740b7dba02 100755
--- a/syft/pkg/cataloger/golang/test-fixtures/archs/src/build.sh
+++ b/syft/pkg/cataloger/golang/test-fixtures/archs/src/build.sh
@@ -1,10 +1,13 @@
#!/usr/bin/env bash
-set -uxe
+set -ue
# note: this can be easily done in a 1-liner, however circle CI does NOT allow volume mounts from the host in docker executors (since they are on remote hosts, where the host files are inaccessible)
# note: gocache override is so we can run docker build not as root in a container without permission issues
BINARY=$1
+
+mkdir -p "$(dirname "$BINARY")"
+
CTRID=$(docker create -e GOOS="${GOOS}" -e GOARCH="${GOARCH}" -u "$(id -u):$(id -g)" -e GOCACHE=/tmp -w /src golang:1.17 go build -o main main.go)
function cleanup() {
diff --git a/syft/pkg/cataloger/java/test-fixtures/Makefile b/syft/pkg/cataloger/java/test-fixtures/Makefile
new file mode 100644
index 00000000000..da3e730e131
--- /dev/null
+++ b/syft/pkg/cataloger/java/test-fixtures/Makefile
@@ -0,0 +1,15 @@
+.DEFAULT_GOAL := default
+
+default:
+ @for dir in $(shell find . -mindepth 1 -maxdepth 1 -type d); do \
+ if [ -f "$$dir/Makefile" ]; then \
+ $(MAKE) -C $$dir; \
+ fi; \
+ done
+
+%:
+ @for dir in $(shell find . -mindepth 1 -maxdepth 1 -type d); do \
+ if [ -f "$$dir/Makefile" ]; then \
+ $(MAKE) -C $$dir $@; \
+ fi; \
+ done
diff --git a/syft/pkg/cataloger/java/test-fixtures/jar-metadata/Makefile b/syft/pkg/cataloger/java/test-fixtures/jar-metadata/Makefile
index 98083904234..cf0d21a8672 100644
--- a/syft/pkg/cataloger/java/test-fixtures/jar-metadata/Makefile
+++ b/syft/pkg/cataloger/java/test-fixtures/jar-metadata/Makefile
@@ -1,5 +1,10 @@
CACHE_DIR = cache
CACHE_PATH = $(shell pwd)/cache
+FINGERPRINT_FILE=$(CACHE_DIR).fingerprint
+
+ifndef CACHE_DIR
+ $(error CACHE_DIR is not set)
+endif
JACKSON_CORE = jackson-core-2.15.2
SBT_JACKSON_CORE = com.fasterxml.jackson.core.jackson-core-2.15.2
@@ -8,28 +13,53 @@ API_ALL_SOURCES = api-all-2.0.0-sources
SPRING_INSTRUMENTATION = spring-instrumentation-4.3.0-1.0
MULTIPLE_MATCHING = multiple-matching-2.11.5
-$(CACHE_DIR):
- mkdir -p $(CACHE_DIR)
-$(CACHE_DIR)/$(JACKSON_CORE).jar: $(CACHE_DIR)
+.DEFAULT_GOAL := fixtures
+
+# requirement 1: 'fixtures' goal to generate any and all test fixtures
+fixtures: $(CACHE_DIR)
+
+# requirement 2: 'fingerprint' goal to determine if the fixture inputs have changed (indicating that any existing cache should be busted)
+fingerprint: $(FINGERPRINT_FILE)
+
+$(CACHE_DIR): $(CACHE_DIR)/$(JACKSON_CORE).jar $(CACHE_DIR)/$(SBT_JACKSON_CORE).jar $(CACHE_DIR)/$(OPENSAML_CORE).jar $(CACHE_DIR)/$(API_ALL_SOURCES).jar $(CACHE_DIR)/$(SPRING_INSTRUMENTATION).jar $(CACHE_DIR)/$(MULTIPLE_MATCHING).jar
+
+$(CACHE_DIR)/$(JACKSON_CORE).jar:
+ mkdir -p $(CACHE_DIR)
cd $(JACKSON_CORE) && zip -r $(CACHE_PATH)/$(JACKSON_CORE).jar .
-$(CACHE_DIR)/$(SBT_JACKSON_CORE).jar: $(CACHE_DIR)
+$(CACHE_DIR)/$(SBT_JACKSON_CORE).jar:
+ mkdir -p $(CACHE_DIR)
cd $(SBT_JACKSON_CORE) && zip -r $(CACHE_PATH)/$(SBT_JACKSON_CORE).jar .
-$(CACHE_DIR)/$(OPENSAML_CORE).jar: $(CACHE_DIR)
+$(CACHE_DIR)/$(OPENSAML_CORE).jar:
+ mkdir -p $(CACHE_DIR)
cd $(OPENSAML_CORE) && zip -r $(CACHE_PATH)/$(OPENSAML_CORE).jar .
-$(CACHE_DIR)/$(API_ALL_SOURCES).jar: $(CACHE_DIR)
+$(CACHE_DIR)/$(API_ALL_SOURCES).jar:
+ mkdir -p $(CACHE_DIR)
cd $(API_ALL_SOURCES) && zip -r $(CACHE_PATH)/$(API_ALL_SOURCES).jar .
-$(CACHE_DIR)/$(SPRING_INSTRUMENTATION).jar: $(CACHE_DIR)
+$(CACHE_DIR)/$(SPRING_INSTRUMENTATION).jar:
+ mkdir -p $(CACHE_DIR)
cd $(SPRING_INSTRUMENTATION) && zip -r $(CACHE_PATH)/$(SPRING_INSTRUMENTATION).jar .
-$(CACHE_DIR)/$(MULTIPLE_MATCHING).jar: $(CACHE_DIR)
+$(CACHE_DIR)/$(MULTIPLE_MATCHING).jar:
+ mkdir -p $(CACHE_DIR)
cd $(MULTIPLE_MATCHING) && zip -r $(CACHE_PATH)/$(MULTIPLE_MATCHING).jar .
# Jenkins plugins typically do not have the version included in the archive name,
# so it is important to not include it in the generated test fixture
-$(CACHE_DIR)/gradle.hpi: $(CACHE_DIR)
- cd jenkins-plugins/gradle/2.11 && zip -r $(CACHE_PATH)/gradle.hpi .
\ No newline at end of file
+$(CACHE_DIR)/gradle.hpi:
+ mkdir -p $(CACHE_DIR)
+ cd jenkins-plugins/gradle/2.11 && zip -r $(CACHE_PATH)/gradle.hpi .
+
+# requirement 3: we always need to recalculate the fingerprint based on source regardless of any existing fingerprint
+.PHONY: $(FINGERPRINT_FILE)
+$(FINGERPRINT_FILE):
+ @find . ! -path '*/cache*' -type f -exec sha256sum {} \; | sort -k2 > $(FINGERPRINT_FILE)
+ @#cat $(FINGERPRINT_FILE) | sha256sum | awk '{print $$1}'
+
+# requirement 4: 'clean' goal to remove all generated test fixtures
+clean:
+ rm -rf $(CACHE_DIR)/* $(FINGERPRINT_FILE)
diff --git a/syft/pkg/cataloger/java/test-fixtures/java-builds/Makefile b/syft/pkg/cataloger/java/test-fixtures/java-builds/Makefile
index 1970b42f805..b3aae020a24 100644
--- a/syft/pkg/cataloger/java/test-fixtures/java-builds/Makefile
+++ b/syft/pkg/cataloger/java/test-fixtures/java-builds/Makefile
@@ -1,17 +1,18 @@
PKGSDIR=packages
+FINGERPRINT_FILE=$(PKGSDIR).fingerprint
ifndef PKGSDIR
$(error PKGSDIR is not set)
endif
-all: jars archives native-image
-clean: clean-examples
- rm -f $(PKGSDIR)/*
+.DEFAULT_GOAL := fixtures
-clean-examples: clean-gradle clean-maven clean-jenkins clean-nestedjar
+# requirement 1: 'fixtures' goal to generate any and all test fixtures
+fixtures: jars archives native-image
-.PHONY: maven gradle clean clean-gradle clean-maven clean-jenkins clean-examples clean-nestedjar jars archives
+# requirement 2: 'fingerprint' goal to determine if the fixture inputs have changed (indicating that any existing cache should be busted)
+fingerprint: $(FINGERPRINT_FILE)
jars: $(PKGSDIR)/example-java-app-maven-0.1.0.jar $(PKGSDIR)/example-java-app-gradle-0.1.0.jar $(PKGSDIR)/example-jenkins-plugin.hpi $(PKGSDIR)/spring-boot-0.0.1-SNAPSHOT.jar
@@ -71,8 +72,16 @@ $(PKGSDIR)/example-java-app: $(PKGSDIR)/example-java-app-maven-0.1.0.jar
$(PKGSDIR)/gcc-amd64-darwin-exec-debug:
./build-example-macho-binary.sh $(PKGSDIR)
-# we need a way to determine if CI should bust the test cache based on the source material
-.PHONY: cache.fingerprint
-cache.fingerprint:
- find example* build* gradle* Makefile -type f -exec sha256sum {} \; | sort | tee /dev/stderr | tee cache.fingerprint
- sha256sum cache.fingerprint
+# requirement 3: we always need to recalculate the fingerprint based on source regardless of any existing fingerprint
+.PHONY: $(FINGERPRINT_FILE)
+$(FINGERPRINT_FILE):
+ @find example-* build-* Makefile -type f -exec sha256sum {} \; | sort -k2 > $(FINGERPRINT_FILE)
+ @#cat $(FINGERPRINT_FILE) | sha256sum | awk '{print $$1}'
+
+# requirement 4: 'clean' goal to remove all generated test fixtures
+clean: clean-examples
+ rm -rf $(PKGSDIR) $(FINGERPRINT_FILE)
+
+clean-examples: clean-gradle clean-maven clean-jenkins clean-nestedjar
+
+.PHONY: maven gradle clean clean-gradle clean-maven clean-jenkins clean-examples clean-nestedjar jars archives
diff --git a/syft/pkg/cataloger/kernel/test-fixtures/Makefile b/syft/pkg/cataloger/kernel/test-fixtures/Makefile
index 4a2849919c3..a2c19cf9f4e 100644
--- a/syft/pkg/cataloger/kernel/test-fixtures/Makefile
+++ b/syft/pkg/cataloger/kernel/test-fixtures/Makefile
@@ -1,7 +1,21 @@
-all:
+FINGERPRINT_FILE=cache.fingerprint
-# we need a way to determine if CI should bust the test cache based on the source material
-.PHONY: cache.fingerprint
-cache.fingerprint:
- find Makefile **/Dockerfile -type f -exec sha256sum {} \; | sort | tee /dev/stderr | tee cache.fingerprint
- sha256sum cache.fingerprint
+
+.DEFAULT_GOAL := fixtures
+
+# requirement 1: 'fixtures' goal to generate any and all test fixtures
+fixtures:
+ @echo "nothing to do"
+
+# requirement 2: 'fingerprint' goal to determine if the fixture inputs have changed (indicating that any existing cache should be busted)
+fingerprint: $(FINGERPRINT_FILE)
+
+# requirement 3: we always need to recalculate the fingerprint based on source regardless of any existing fingerprint
+.PHONY: $(FINGERPRINT_FILE)
+$(FINGERPRINT_FILE):
+ @find Makefile **/Dockerfile -type f -exec sha256sum {} \; | sort -k2 > $(FINGERPRINT_FILE)
+ @#cat $(FINGERPRINT_FILE) | sha256sum | awk '{print $$1}'
+
+# requirement 4: 'clean' goal to remove all generated test fixtures
+clean:
+ rm -f $(FINGERPRINT_FILE)
diff --git a/syft/pkg/cataloger/python/test-fixtures/image-multi-site-package/Dockerfile b/syft/pkg/cataloger/python/test-fixtures/image-multi-site-package/Dockerfile
index 3895cebbd96..7a8b39c984f 100644
--- a/syft/pkg/cataloger/python/test-fixtures/image-multi-site-package/Dockerfile
+++ b/syft/pkg/cataloger/python/test-fixtures/image-multi-site-package/Dockerfile
@@ -1,9 +1,8 @@
# digest is for linux/amd64
-FROM ubuntu:20.04@sha256:cc9cc8169c9517ae035cf293b15f06922cb8c6c864d625a72b7b18667f264b70
+FROM ubuntu:20.04@sha256:cc9cc8169c9517ae035cf293b15f06922cb8c6c864d625a72b7b18667f264b70 AS base
# install Python 3.8 and Python 3.9
-ENV DEBIAN_FRONTEND=noninteractive
-RUN apt-get update && apt-get install -y python3.8 python3.9 python3-pip python3-venv python3.9-venv python3.8-venv
+RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y python3.8 python3.9 python3-pip python3-venv python3.9-venv python3.8-venv
# install pip and virtualenv for both Python versions
RUN python3.8 -m pip install --upgrade pip virtualenv
@@ -35,3 +34,17 @@ RUN /app/project2/venv/bin/pip install click==8.0.3 pyyaml==6.0
RUN /app/project2/venv/bin/pip install inquirer==3.2.4 runs==1.2.2 xmod==1.8.1 six==1.16.0 wcwidth==0.2.13 blessed==1.20.0 editor==1.6.6 readchar==4.1.0
WORKDIR /app
+
+# let's not waste disk space... we only need the state we've set up above, not all of the OS-level packages
+RUN rm -rf /app/project1/venv/share
+RUN rm -rf /app/project2/venv/share
+RUN find /app/project1/venv/lib/python3.9/site-packages/* -type d ! -name '*.dist-info' -exec rm -rf {} +
+RUN find /app/project2/venv/lib/python3.8/site-packages/* -type d ! -name '*.dist-info' -exec rm -rf {} +
+RUN find /usr/local/lib/python3.8/dist-packages/* -type d ! -name '*.dist-info' -exec rm -rf {} +
+RUN find /usr/local/lib/python3.9/dist-packages/* -type d ! -name '*.dist-info' -exec rm -rf {} +
+
+FROM scratch
+
+COPY --from=base /app/ /app/
+COPY --from=base /usr/local/lib/python3.8/ /usr/local/lib/python3.8/
+COPY --from=base /usr/local/lib/python3.9/ /usr/local/lib/python3.9/
diff --git a/syft/pkg/cataloger/redhat/test-fixtures/Makefile b/syft/pkg/cataloger/redhat/test-fixtures/Makefile
index e280d5e60e7..2495210ab2d 100644
--- a/syft/pkg/cataloger/redhat/test-fixtures/Makefile
+++ b/syft/pkg/cataloger/redhat/test-fixtures/Makefile
@@ -1,21 +1,38 @@
RPMSDIR=rpms
+FINGERPRINT_FILE=$(RPMSDIR).fingerprint
ifndef RPMSDIR
$(error RPMSDIR is not set)
endif
-all: rpms
-clean:
- rm -rf $(RPMSDIR)
+.DEFAULT_GOAL := fixtures
+
+# requirement 1: 'fixtures' goal to generate any and all test fixtures
+fixtures: rpms
+
+# requirement 2: 'fingerprint' goal to determine if the fixture inputs have changed (indicating that any existing cache should be busted)
+fingerprint: $(FINGERPRINT_FILE)
rpms:
mkdir -p $(RPMSDIR)
- cd $(RPMSDIR) && curl https://dl.fedoraproject.org/pub/epel/7/x86_64/Packages/a/abc-1.01-9.hg20160905.el7.x86_64.rpm -O
- cd $(RPMSDIR) && curl https://dl.fedoraproject.org/pub/epel/7/x86_64/Packages/z/zork-1.0.3-1.el7.x86_64.rpm -O
-
-# we need a way to determine if CI should bust the test cache based on the source material
-.PHONY: $(RPMSDIR).fingerprint
-$(RPMSDIR).fingerprint:
- find Makefile -type f -exec sha256sum {} \; | sort | tee /dev/stderr | tee $(RPMSDIR).fingerprint
- sha256sum $(RPMSDIR).fingerprint
+ @# see note from https://dl.fedoraproject.org/pub/epel/7/README
+ @# ATTENTION
+ @# ======================================
+ @# The contents of this directory have been moved to our archives available at:
+ @#
+ @# http://archives.fedoraproject.org/pub/archive/epel/
+
+ cd $(RPMSDIR) && curl -LO https://archives.fedoraproject.org/pub/archive/epel/7/x86_64/Packages/a/abc-1.01-9.hg20160905.el7.x86_64.rpm
+ cd $(RPMSDIR) && curl -LO https://archives.fedoraproject.org/pub/archive/epel/7/x86_64/Packages/z/zork-1.0.3-1.el7.x86_64.rpm
+
+# requirement 3: we always need to recalculate the fingerprint based on source regardless of any existing fingerprint
+.PHONY: $(FINGERPRINT_FILE)
+$(FINGERPRINT_FILE):
+ @find Makefile -type f -exec sha256sum {} \; | sort -k2 > $(FINGERPRINT_FILE)
+ @#cat $(FINGERPRINT_FILE) | sha256sum | awk '{print $$1}'
+
+# requirement 4: 'clean' goal to remove all generated test fixtures
+.PHONY: clean
+clean:
+ rm -rf $(RPMSDIR) $(FINGERPRINT_FILE)
diff --git a/syft/pkg/cataloger/redhat/test-fixtures/image-minimal/Dockerfile b/syft/pkg/cataloger/redhat/test-fixtures/image-minimal/Dockerfile
index cda80c34964..6e01032f301 100644
--- a/syft/pkg/cataloger/redhat/test-fixtures/image-minimal/Dockerfile
+++ b/syft/pkg/cataloger/redhat/test-fixtures/image-minimal/Dockerfile
@@ -2,4 +2,8 @@
FROM rockylinux:9.3.20231119@sha256:45cc42828cc5ceeffa3a9b4f6363fb582fac3ab91f77bf403daa067f8f049f96
ADD remove.sh /remove.sh
-RUN /remove.sh
\ No newline at end of file
+RUN /remove.sh
+
+# let's only keep what we need for testing (not the intermediate layers)
+FROM scratch
+COPY --from=0 / /
diff --git a/test/cli/.gitignore b/test/cli/.gitignore
new file mode 100644
index 00000000000..872aa273a4e
--- /dev/null
+++ b/test/cli/.gitignore
@@ -0,0 +1 @@
+results
\ No newline at end of file
diff --git a/test/cli/cyclonedx_valid_test.go b/test/cli/cyclonedx_valid_test.go
index 49755f0bc5e..4f2ce11ed98 100644
--- a/test/cli/cyclonedx_valid_test.go
+++ b/test/cli/cyclonedx_valid_test.go
@@ -33,7 +33,7 @@ func TestValidCycloneDX(t *testing.T) {
{
name: "validate cyclonedx output",
subcommand: "scan",
- args: []string{"-o", "cyclonedx-json"},
+ args: []string{"-o", "cyclonedx-json", "-o", "cyclonedx-json=results/sbom.cdx.json"},
fixture: imageFixture,
assertions: []traitAssertion{
assertSuccessfulReturnCode,
diff --git a/test/cli/scan_cmd_test.go b/test/cli/scan_cmd_test.go
index 555f0856b61..ed84933ffad 100644
--- a/test/cli/scan_cmd_test.go
+++ b/test/cli/scan_cmd_test.go
@@ -143,8 +143,22 @@ func TestPackagesCmdFlags(t *testing.T) {
name: "squashed-scope-flag-hidden-packages",
args: []string{"scan", "-o", "json", "-s", "squashed", hiddenPackagesImage},
assertions: []traitAssertion{
- assertPackageCount(162),
- assertNotInOutput("vsftpd"), // hidden package
+ assertPackageCount(14),
+ // package 1: alpine-baselayout-data@3.6.5-r0 (apk)
+ // package 2: alpine-baselayout@3.6.5-r0 (apk)
+ // package 3: alpine-keys@2.4-r1 (apk)
+ // package 4: apk-tools@2.14.4-r0 (apk)
+ // package 5: busybox-binsh@1.36.1-r29 (apk)
+ // package 6: busybox@1.36.1-r29 (apk)
+ // package 7: ca-certificates-bundle@20240705-r0 (apk)
+ // package 8: libcrypto3@3.3.1-r3 (apk)
+ // package 9: libssl3@3.3.1-r3 (apk)
+ // package 10: musl-utils@1.2.5-r0 (apk)
+ // package 11: musl@1.2.5-r0 (apk)
+ // package 12: scanelf@1.3.7-r2 (apk)
+ // package 13: ssl_client@1.36.1-r29 (apk)
+ // package 14: zlib@1.3.1-r1 (apk)
+ assertNotInOutput(`"name":"curl"`), // hidden package
assertSuccessfulReturnCode,
},
},
@@ -152,9 +166,33 @@ func TestPackagesCmdFlags(t *testing.T) {
name: "all-layers-scope-flag",
args: []string{"scan", "-o", "json", "-s", "all-layers", hiddenPackagesImage},
assertions: []traitAssertion{
- assertPackageCount(163), // packages are now deduplicated for this case
+ assertPackageCount(24),
+ // package 1: alpine-baselayout-data@3.6.5-r0 (apk)
+ // package 2: alpine-baselayout@3.6.5-r0 (apk)
+ // package 3: alpine-keys@2.4-r1 (apk)
+ // package 4: apk-tools@2.14.4-r0 (apk)
+ // package 5: brotli-libs@1.1.0-r2 (apk)
+ // package 6: busybox-binsh@1.36.1-r29 (apk)
+ // package 7: busybox@1.36.1-r29 (apk)
+ // package 8: c-ares@1.28.1-r0 (apk)
+ // package 9: ca-certificates-bundle@20240705-r0 (apk)
+ // package 10: ca-certificates@20240705-r0 (apk)
+ // package 11: curl@8.9.1-r1 (apk)
+ // package 12: libcrypto3@3.3.1-r3 (apk)
+ // package 13: libcurl@8.9.1-r1 (apk)
+ // package 14: libidn2@2.3.7-r0 (apk)
+ // package 15: libpsl@0.21.5-r1 (apk)
+ // package 16: libssl3@3.3.1-r3 (apk)
+ // package 17: libunistring@1.2-r0 (apk)
+ // package 18: musl-utils@1.2.5-r0 (apk)
+ // package 19: musl@1.2.5-r0 (apk)
+ // package 20: nghttp2-libs@1.62.1-r0 (apk)
+ // package 21: scanelf@1.3.7-r2 (apk)
+ // package 22: ssl_client@1.36.1-r29 (apk)
+ // package 23: zlib@1.3.1-r1 (apk)
+ // package 24: zstd-libs@1.5.6-r0 (apk)
assertInOutput("all-layers"),
- assertInOutput("vsftpd"), // hidden package
+ assertInOutput(`"name":"curl"`), // hidden package
assertSuccessfulReturnCode,
},
},
@@ -165,9 +203,9 @@ func TestPackagesCmdFlags(t *testing.T) {
"SYFT_SCOPE": "all-layers",
},
assertions: []traitAssertion{
- assertPackageCount(163), // packages are now deduplicated for this case
+ assertPackageCount(24), // packages are now deduplicated for this case
assertInOutput("all-layers"),
- assertInOutput("vsftpd"), // hidden package
+ assertInOutput(`"name":"curl"`), // hidden package
assertSuccessfulReturnCode,
},
},
diff --git a/test/cli/test-fixtures/Makefile b/test/cli/test-fixtures/Makefile
index 5042a5aad65..ff1de637eb7 100644
--- a/test/cli/test-fixtures/Makefile
+++ b/test/cli/test-fixtures/Makefile
@@ -1,6 +1,22 @@
-# change these if you want CI to not use previous stored cache
-CLI_CACHE_BUSTER := "e5cdfd8"
+FINGERPRINT_FILE=cache.fingerprint
+
+.DEFAULT_GOAL := fixtures
+
+# requirement 1: 'fixtures' goal to generate any and all test fixtures
+fixtures:
+ @echo "nothing to do"
+
+# requirement 2: 'fingerprint' goal to determine if the fixture inputs have changed (indicating that any existing cache should be busted)
+fingerprint: $(FINGERPRINT_FILE)
+
+# requirement 3: we always need to recalculate the fingerprint based on source regardless of any existing fingerprint
+.PHONY: $(FINGERPRINT_FILE)
+$(FINGERPRINT_FILE):
+ @find image-* -type f -exec sha256sum {} \; | sort -k2 > $(FINGERPRINT_FILE)
+ @#cat $(FINGERPRINT_FILE) | sha256sum | awk '{print $$1}'
+
+# requirement 4: 'clean' goal to remove all generated test fixtures
+.PHONY: clean
+clean:
+ rm -f $(FINGERPRINT_FILE)
-.PHONY: cache.fingerprint
-cache.fingerprint:
- find image-* -type f -exec md5sum {} + | awk '{print $1}' | sort | md5sum | tee cache.fingerprint && echo "$(CLI_CACHE_BUSTER)" >> cache.fingerprint
diff --git a/test/cli/test-fixtures/image-hidden-packages/Dockerfile b/test/cli/test-fixtures/image-hidden-packages/Dockerfile
index 1150209e8b2..07b7f32754e 100644
--- a/test/cli/test-fixtures/image-hidden-packages/Dockerfile
+++ b/test/cli/test-fixtures/image-hidden-packages/Dockerfile
@@ -1,4 +1,4 @@
-FROM centos:7.9.2009@sha256:dead07b4d8ed7e29e98de0f4504d87e8880d4347859d839686a31da35a3b532f
-# all-layers scope should pickup on vsftpd
-RUN yum install -y vsftpd
-RUN yum remove -y vsftpd
+FROM --platform=linux/amd64 alpine:3.20.2@sha256:eddacbc7e24bf8799a4ed3cdcfa50d4b88a323695ad80f317b6629883b2c2a78
+
+RUN apk add --no-cache curl
+RUN apk del curl
diff --git a/test/install/Makefile b/test/install/Makefile
index 2a632cd2825..9d26ebc6364 100644
--- a/test/install/Makefile
+++ b/test/install/Makefile
@@ -1,5 +1,7 @@
NAME=syft
+FINGERPRINT_FILE := cache.fingerprint
+
# for local testing (not testing within containers) use the binny-managed version of cosign.
# this also means that the user does not need to install cosign on their system to run tests.
COSIGN_BINARY=../../.tool/cosign
@@ -21,8 +23,6 @@ ACCEPTANCE_CMD=sh -c '../../install.sh -v -b /usr/local/bin && syft version && r
PREVIOUS_RELEASE=v0.33.0
ACCEPTANCE_PREVIOUS_RELEASE_CMD=sh -c "../../install.sh -b /usr/local/bin $(PREVIOUS_RELEASE) && syft version"
-# CI cache busting values; change these if you want CI to not use previous stored cache
-INSTALL_TEST_CACHE_BUSTER=894d8ca
define title
@printf '\n≡≡≡[ $(1) ]≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡≡\n'
@@ -130,7 +130,8 @@ busybox-1.36:
## For CI ########################################################
-.PHONY: cache.fingerprint
-cache.fingerprint:
- $(call title,Install test fixture fingerprint)
- @find ./environments/* -type f -exec md5sum {} + | awk '{print $1}' | sort | tee /dev/stderr | md5sum | tee cache.fingerprint && echo "$(INSTALL_TEST_CACHE_BUSTER)" >> cache.fingerprint
+# requirement 3: we always need to recalculate the fingerprint based on source regardless of any existing fingerprint
+.PHONY: $(FINGERPRINT_FILE)
+$(FINGERPRINT_FILE):
+ @find ./environments/* -type f -exec sha256sum {} \; | sort -k2 > $(FINGERPRINT_FILE)
+ @#cat $(FINGERPRINT_FILE) | sha256sum | awk '{print $$1}'