diff --git a/go.mod b/go.mod index ee06d499..e0f5936e 100644 --- a/go.mod +++ b/go.mod @@ -6,6 +6,7 @@ require ( github.com/aws/aws-sdk-go-v2 v1.32.1 github.com/aws/aws-sdk-go-v2/config v1.27.42 github.com/aws/aws-sdk-go-v2/service/s3 v1.65.1 + github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc github.com/docker/cli v27.3.1+incompatible github.com/google/uuid v1.6.0 github.com/mholt/archiver/v3 v3.5.1 @@ -58,7 +59,6 @@ require ( github.com/containerd/log v0.1.0 // indirect github.com/containerd/stargz-snapshotter/estargz v0.15.1 // indirect github.com/cpuguy83/go-md2man/v2 v2.0.4 // indirect - github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect github.com/docker/distribution v2.8.3+incompatible // indirect github.com/docker/docker v26.1.1+incompatible // indirect github.com/docker/docker-credential-helpers v0.8.1 // indirect diff --git a/pkg/dag/image.go b/pkg/dag/image.go index cdd53eba..b1fe215d 100644 --- a/pkg/dag/image.go +++ b/pkg/dag/image.go @@ -17,6 +17,8 @@ type Image struct { ExtraTags []string `yaml:"extra_tags,flow,omitempty"` Dockerfile *dockerfile.Dockerfile `yaml:"dockerfile,omitempty"` IgnorePatterns []string `yaml:"ignore_patterns,flow,omitempty"` + ContextFiles []string `yaml:"-"` + SkipBuild bool `yaml:"-"` NeedsRebuild bool `yaml:"-"` NeedsTests bool `yaml:"-"` RetagDone bool `yaml:"-"` diff --git a/pkg/dag/node.go b/pkg/dag/node.go index 278b703b..99e10458 100644 --- a/pkg/dag/node.go +++ b/pkg/dag/node.go @@ -15,7 +15,6 @@ type NodeVisitorFuncErr func(*Node) error // Node represents a node of a graph. type Node struct { Image *Image - Files []string waitCond *sync.Cond done bool @@ -96,7 +95,3 @@ func (n *Node) walkInDepth(visitor NodeVisitorFunc) { } visitor(n) } - -func (n *Node) AddFile(file string) { - n.Files = append(n.Files, file) -} diff --git a/pkg/dib/generate_dag.go b/pkg/dib/generate_dag.go index c2644ca3..ed2be23d 100644 --- a/pkg/dib/generate_dag.go +++ b/pkg/dib/generate_dag.go @@ -33,234 +33,243 @@ func GenerateDAG(buildPath, registryPrefix, customHashListPath string, buildArgs return nil, err } - return computeHashes(graph, customHashListPath, buildArgs) + var customHashList []string + if customHashListPath != "" { + customHashList, err = loadCustomHashList(customHashListPath) + if err != nil { + return nil, fmt.Errorf("could not load custom humanized hash list: %w", err) + } + } + + if err := computeGraphHashes(graph, customHashList, buildArgs); err != nil { + return nil, fmt.Errorf("could not compute graph hashes: %w", err) + } + + return graph, nil } func buildGraph(buildPath, registryPrefix string) (*dag.DAG, error) { - var allFiles []string - cache := make(map[string]*dag.Node) - allParents := make(map[string][]dockerfile.ImageRef) - err := filepath.Walk(buildPath, func(filePath string, info os.FileInfo, err error) error { - if err != nil { + nodes := make(map[string]*dag.Node) + if err := filepath.WalkDir(buildPath, func(name string, dir os.DirEntry, err error) error { + switch { + case err != nil: return err - } - if !info.IsDir() { - allFiles = append(allFiles, filePath) - } - - if dockerfile.IsDockerfile(filePath) { - dckfile, err := dockerfile.ParseDockerfile(filePath) + case dir.IsDir(): + case dockerfile.IsDockerfile(name): + img, err := newImageFromDockerfile(name, registryPrefix) if err != nil { return err } - skipBuild, hasSkipLabel := dckfile.Labels["skipbuild"] - if hasSkipLabel && skipBuild == "true" { - return nil - } - imageShortName, hasNameLabel := dckfile.Labels["name"] - if !hasNameLabel { 
- return fmt.Errorf("missing label \"name\" in Dockerfile at path \"%s\"", filePath) - } - img := &dag.Image{ - Name: fmt.Sprintf("%s/%s", registryPrefix, imageShortName), - ShortName: imageShortName, - Dockerfile: dckfile, - } - - extraTagsLabel, hasLabel := img.Dockerfile.Labels["dib.extra-tags"] - if hasLabel { - img.ExtraTags = append(img.ExtraTags, strings.Split(extraTagsLabel, ",")...) - } - - useCustomHashList, hasLabel := img.Dockerfile.Labels["dib.use-custom-hash-list"] - if hasLabel && useCustomHashList == "true" { - img.UseCustomHashList = true - } - - ignorePatterns, err := build.ReadDockerignore(path.Dir(filePath)) - if err != nil { - return fmt.Errorf("could not read ignore patterns: %w", err) + for _, node := range nodes { + if node.Image != nil && node.Image.Name == img.Name { + return fmt.Errorf("duplicate image name %q found while reading file %q: previous file was %q", + img.Name, name, path.Join(node.Image.Dockerfile.ContextPath, node.Image.Dockerfile.Filename)) + } } - img.IgnorePatterns = ignorePatterns - if n, ok := cache[img.Name]; ok { - return fmt.Errorf("duplicate image name %q found while reading file %q: previous file was %q", - img.Name, filePath, path.Join(n.Image.Dockerfile.ContextPath, n.Image.Dockerfile.Filename)) + // Don't create the node if the image has the skipbuild label. + if img.SkipBuild { + return nil } - allParents[img.Name] = dckfile.From - cache[img.Name] = dag.NewNode(img) + nodes[path.Dir(name)] = dag.NewNode(img) } return nil - }) + }); err != nil { + return nil, err + } + + return newGraphFromNodes(nodes), nil +} + +func newImageFromDockerfile(filePath, registryPrefix string) (*dag.Image, error) { + dckfile, err := dockerfile.ParseDockerfile(filePath) if err != nil { return nil, err } - // Fill parents for each image, for simplicity of use in other functions - for name, parents := range allParents { - for _, parent := range parents { - node, ok := cache[parent.Name] - if !ok { - continue - } + skipBuild := false + skipBuildString, hasSkipLabel := dckfile.Labels["skipbuild"] + if hasSkipLabel && skipBuildString == "true" { + skipBuild = true + } - // Check that children does not already exist to avoid duplicates. - childAlreadyExists := false - for _, child := range node.Children() { - if child.Image.Name == name { - childAlreadyExists = true - } - } + shortName, hasNameLabel := dckfile.Labels["name"] + if !skipBuild && !hasNameLabel { + return nil, fmt.Errorf("missing label \"name\" in Dockerfile at path %q", filePath) + } - if childAlreadyExists { - continue - } + imageName := fmt.Sprintf("%s/%s", registryPrefix, shortName) - node.AddChild(cache[name]) - } + var extraTags []string + value, hasLabel := dckfile.Labels["dib.extra-tags"] + if hasLabel { + extraTags = strings.Split(value, ",") } - graph := &dag.DAG{} - // If an image has no parents in the DAG, we consider it a root image - for name, img := range cache { - if len(img.Parents()) == 0 { - graph.AddNode(cache[name]) - } + useCustomHashList := false + value, hasLabel = dckfile.Labels["dib.use-custom-hash-list"] + if hasLabel && value == "true" { + useCustomHashList = true } - fileBelongsTo := map[string]*dag.Node{} - for _, file := range allFiles { - fileBelongsTo[file] = nil + ignorePatterns, err := build.ReadDockerignore(dckfile.ContextPath) + if err != nil { + return nil, fmt.Errorf("could not read dockerignore: %w", err) } - // First, we do a depth-first search in the image graph to map every file the image they belong to. 
- // We start from the most specific image paths (children of children of children...), and we get back up - // to parent images, to avoid false-positive and false-negative matches. - // Files matching any pattern in the .dockerignore file are ignored. - graph.WalkInDepth(func(node *dag.Node) { - for _, file := range allFiles { - if !strings.HasPrefix(file, node.Image.Dockerfile.ContextPath+"/") { - // The current file is not lying in the current image build context, nor in a subdirectory. - continue + contextFiles, err := getDockerContextFiles(dckfile.ContextPath, ignorePatterns) + if err != nil { + return nil, fmt.Errorf("could not get docker context files: %w", err) + } + + return &dag.Image{ + Name: imageName, + ShortName: shortName, + ExtraTags: extraTags, + Dockerfile: dckfile, + IgnorePatterns: ignorePatterns, + ContextFiles: contextFiles, + SkipBuild: skipBuild, + UseCustomHashList: useCustomHashList, + }, nil +} + +func getDockerContextFiles(contextPath string, ignorePatterns []string) ([]string, error) { + contextFiles := []string{} + if err := filepath.WalkDir(contextPath, func(name string, dir os.DirEntry, err error) error { + switch { + case err != nil: + return err + case dir.IsDir(): + default: + // Don't add ignored files/folders and .dockerignore from the root folder of the context path. + // We ignore .dockerignore files for simplicity + // In the real world, this file should not be ignored, but it + // helps us in managing refactoring. + prefix := strings.TrimPrefix(strings.TrimPrefix(name, contextPath), "/") + if prefix == dockerignore { + return nil } - if fileBelongsTo[file] != nil { - // The current file has already been assigned to an image, most likely to a child image. - continue + if len(ignorePatterns) == 0 { + contextFiles = append(contextFiles, name) + return nil } - if path.Base(file) == dockerignore { - // We ignore dockerignore file itself for simplicity - // In the real world, this file should not be ignored but it - // helps us in managing refactoring - continue + ignorePatternMatcher, err := patternmatcher.New(ignorePatterns) + if err != nil { + return fmt.Errorf("could not create pattern matcher: %w", err) } - if isFileIgnored(node, file) { - // The current file matches a pattern in the dockerignore file - continue + ignored, err := ignorePatternMatcher.MatchesOrParentMatches(prefix) + if err != nil { + return fmt.Errorf("could not match pattern: %w", err) } - // If we reach here, the file is part of the current image's context, we mark it as so. 
- fileBelongsTo[file] = node - node.AddFile(file) + if !ignored { + contextFiles = append(contextFiles, name) + } } - }) + return nil + }); err != nil { + return nil, err + } - return graph, nil + return contextFiles, nil } -func computeHashes(graph *dag.DAG, customHashListPath string, buildArgs map[string]string) (*dag.DAG, error) { - customHumanizedHashList, err := LoadCustomHashList(customHashListPath) - if err != nil { - return nil, fmt.Errorf("could not load custom humanized hash list: %w", err) - } - - for { - needRepass := false - err := graph.WalkErr(func(node *dag.Node) error { - var parentHashes []string - for _, parent := range node.Parents() { - if parent.Image.Hash == "" { - // At least one of the parent image has not been processed yet, we'll need to do an other pass - needRepass = true +func newGraphFromNodes(nodes map[string]*dag.Node) *dag.DAG { + for _, node := range nodes { + if node.Image == nil { + continue + } + for _, parent := range node.Image.Dockerfile.From { + for _, parentNode := range nodes { + if parentNode.Image == nil { + continue } - parentHashes = append(parentHashes, parent.Image.Hash) - } - - var humanizedKeywords []string - if node.Image.UseCustomHashList { - humanizedKeywords = customHumanizedHashList - } - - filename := path.Join(node.Image.Dockerfile.ContextPath, node.Image.Dockerfile.Filename) - - argInstructionsToReplace := make(map[string]string) - for key, newArg := range buildArgs { - prevArgInstruction, ok := node.Image.Dockerfile.Args[key] - if ok { - argInstructionsToReplace[prevArgInstruction] = fmt.Sprintf("ARG %s=%s", key, newArg) - logger.Debugf("Overriding ARG instruction %q in %q [%q -> %q]", - key, filename, prevArgInstruction, fmt.Sprintf("ARG %s=%s", key, newArg)) + if parentNode.Image.Name == parent.Name { + parentNode.AddChild(node) } } + } + } - if err := dockerfile.ReplaceInFile( - filename, argInstructionsToReplace); err != nil { - return fmt.Errorf("failed to replace ARG instructions in file %s: %w", filename, err) - } - defer func() { - if err := dockerfile.ResetFile( - filename, argInstructionsToReplace); err != nil { - logger.Warnf("failed to reset ARG instructions in file %q: %v", filename, err) - } - }() + graph := &dag.DAG{} + // If an image has no parents in the DAG, we can consider it root + for name, img := range nodes { + if len(img.Parents()) == 0 { + graph.AddNode(nodes[name]) + } + } + + return graph +} - hash, err := HashFiles(node.Image.Dockerfile.ContextPath, node.Files, parentHashes, humanizedKeywords) +func computeGraphHashes(graph *dag.DAG, customHashList []string, buildArgs map[string]string) error { + currNodes := graph.Nodes() + for len(currNodes) > 0 { + for _, node := range currNodes { + var err error + node.Image.Hash, err = computeNodeHash(node, customHashList, buildArgs) if err != nil { - return fmt.Errorf("could not hash files for node %s: %w", node.Image.Name, err) + return fmt.Errorf("could not compute hash for image %q: %w", node.Image.Name, err) } - node.Image.Hash = hash - return nil - }) - if err != nil { - return nil, err } - if !needRepass { - return graph, nil + + nextNodes := []*dag.Node{} + for _, currNode := range currNodes { + nextNodes = append(nextNodes, currNode.Children()...) } + currNodes = nextNodes } + + return nil } -// isFileIgnored checks whether a file matches the images ignore patterns. -// It returns true if the file matches at least one pattern (meaning it should be ignored). 
-func isFileIgnored(node *dag.Node, file string) bool { - if len(node.Image.IgnorePatterns) == 0 { - return false +func computeNodeHash(node *dag.Node, customHashList []string, buildArgs map[string]string) (string, error) { + var parentHashes []string + for _, parent := range node.Parents() { + parentHashes = append(parentHashes, parent.Image.Hash) } - ignorePatternMatcher, err := patternmatcher.New(node.Image.IgnorePatterns) - if err != nil { - logger.Errorf("Could not create pattern matcher for %s, ignoring", node.Image.ShortName) - return false + var hashList []string + if node.Image.UseCustomHashList { + hashList = customHashList } - prefix := strings.TrimPrefix(strings.TrimPrefix(file, node.Image.Dockerfile.ContextPath), "/") - match, err := ignorePatternMatcher.MatchesOrParentMatches(prefix) - if err != nil { - logger.Errorf("Could not match pattern for %s, ignoring", node.Image.ShortName) - return false + filename := path.Join(node.Image.Dockerfile.ContextPath, node.Image.Dockerfile.Filename) + + argInstructionsToReplace := make(map[string]string) + for key, newArg := range buildArgs { + prevArgInstruction, ok := node.Image.Dockerfile.Args[key] + if ok { + argInstructionsToReplace[prevArgInstruction] = fmt.Sprintf("ARG %s=%s", key, newArg) + logger.Debugf("Overriding ARG instruction %q in %q [%q -> %q]", + key, filename, prevArgInstruction, fmt.Sprintf("ARG %s=%s", key, newArg)) + } } - return match + if err := dockerfile.ReplaceInFile( + filename, argInstructionsToReplace); err != nil { + return "", fmt.Errorf("failed to replace ARG instructions in file %s: %w", filename, err) + } + defer func() { + if err := dockerfile.ResetFile( + filename, argInstructionsToReplace); err != nil { + logger.Warnf("failed to reset ARG instructions in file %q: %v", filename, err) + } + }() + + return hashFiles(node.Image.Dockerfile.ContextPath, node.Image.ContextFiles, parentHashes, hashList) } -// HashFiles computes the sha256 from the contents of the files passed as argument. +// hashFiles computes the sha256 from the contents of the files passed as argument. // The files are alphabetically sorted so the returned hash is always the same. // This also means the hash will change if the file names change but the contents don't. -func HashFiles(baseDir string, files, parentHashes, customHumanizedHashWordList []string) (string, error) { +func hashFiles(baseDir string, files, parentHashes, hashList []string) (string, error) { hash := sha256.New() slices.Sort(files) for _, filename := range files { @@ -285,18 +294,16 @@ func HashFiles(baseDir string, files, parentHashes, customHumanizedHashWordList } } - parentHashes = append([]string(nil), parentHashes...) slices.Sort(parentHashes) for _, parentHash := range parentHashes { hash.Write([]byte(parentHash)) } - worldListToUse := humanhash.DefaultWordList - if customHumanizedHashWordList != nil { - worldListToUse = customHumanizedHashWordList + if len(hashList) == 0 { + hashList = humanhash.DefaultWordList } - humanReadableHash, err := humanhash.HumanizeUsing(hash.Sum(nil), humanizedHashWordLength, worldListToUse, "-") + humanReadableHash, err := humanhash.HumanizeUsing(hash.Sum(nil), humanizedHashWordLength, hashList, "-") if err != nil { return "", fmt.Errorf("could not humanize hash: %w", err) } @@ -304,12 +311,8 @@ func HashFiles(baseDir string, files, parentHashes, customHumanizedHashWordList return humanReadableHash, nil } -// LoadCustomHashList try to load & parse a list of custom humanized hash to use. 
-func LoadCustomHashList(filepath string) ([]string, error) { - if filepath == "" { - return nil, nil - } - +// loadCustomHashList try to load & parse a list of custom humanized hash to use. +func loadCustomHashList(filepath string) ([]string, error) { file, err := os.Open(filepath) if err != nil { return nil, err diff --git a/pkg/dib/generate_dag_internal_test.go b/pkg/dib/generate_dag_internal_test.go index 205d8052..d14d5933 100644 --- a/pkg/dib/generate_dag_internal_test.go +++ b/pkg/dib/generate_dag_internal_test.go @@ -1,9 +1,359 @@ +//nolint:paralleltest,tparallel package dib -import "testing" +import ( + "fmt" + "os" + "os/exec" + "path" + "strings" + "testing" + + "github.com/davecgh/go-spew/spew" + "github.com/radiofrance/dib/pkg/dag" + "github.com/radiofrance/dib/pkg/dockerfile" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +const ( + basePath = "../../test/fixtures/docker" + registryPrefix = "eu.gcr.io/my-test-repository" +) + +func TestGenerateDAG(t *testing.T) { + dirRoot1 := basePath + "/root1" + hashRoot1, err := hashFiles(dirRoot1, + []string{ + dirRoot1 + "/Dockerfile", + dirRoot1 + "/custom-hash-list/Dockerfile", + dirRoot1 + "/dockerignore/.dockerignore", + dirRoot1 + "/dockerignore/Dockerfile", + dirRoot1 + "/dockerignore/ignored.txt", + dirRoot1 + "/multistage/Dockerfile", + dirRoot1 + "/skipbuild/Dockerfile", + dirRoot1 + "/sub1/Dockerfile", + dirRoot1 + "/sub1/sub2/Dockerfile", + dirRoot1 + "/with-a-file/Dockerfile", + dirRoot1 + "/with-a-file/included.txt", + }, nil, nil) + require.NoError(t, err) + + hashCHL, err := hashFiles(dirRoot1+"/custom-hash-list", + []string{dirRoot1 + "/custom-hash-list/Dockerfile"}, + []string{hashRoot1}, nil) + require.NoError(t, err) + + hashDockerignore, err := hashFiles(dirRoot1+"/dockerignore", + []string{dirRoot1 + "/dockerignore/Dockerfile"}, + []string{hashRoot1}, nil) + require.NoError(t, err) + + hashMultistage, err := hashFiles(dirRoot1+"/multistage", + []string{dirRoot1 + "/multistage/Dockerfile"}, + []string{hashRoot1}, nil) + require.NoError(t, err) + + hashSub1, err := hashFiles(dirRoot1+"/sub1", + []string{ + dirRoot1 + "/sub1/Dockerfile", + dirRoot1 + "/sub1/sub2/Dockerfile", + }, + []string{hashRoot1}, nil) + require.NoError(t, err) + + hashSub2, err := hashFiles(dirRoot1+"/sub1/sub2", + []string{ + dirRoot1 + "/sub1/sub2/Dockerfile", + }, + []string{hashSub1}, nil) + require.NoError(t, err) + + hashWithAFile, err := hashFiles(dirRoot1+"/with-a-file", + []string{ + dirRoot1 + "/with-a-file/Dockerfile", + dirRoot1 + "/with-a-file/included.txt", + }, + []string{hashRoot1}, nil) + require.NoError(t, err) + + dirRoot2 := basePath + "/root2" + hashRoot2, err := hashFiles(dirRoot2, + []string{ + dirRoot2 + "/Dockerfile", + dirRoot2 + "/root3/Dockerfile", + }, nil, nil) + require.NoError(t, err) + + dirRoot3 := dirRoot2 + "/root3" + hashRoot3, err := hashFiles(dirRoot3, + []string{ + dirRoot3 + "/Dockerfile", + }, nil, nil) + require.NoError(t, err) + + hashTwoParents, err := hashFiles(basePath+"/two-parents", + []string{ + basePath + "/two-parents/Dockerfile", + }, + []string{hashRoot1, hashRoot2}, nil) + require.NoError(t, err) + + graph, err := GenerateDAG(basePath, registryPrefix, "", nil) + require.NoError(t, err) + + nominalGraph := graph.Sprint(path.Base(basePath)) + assert.Equal(t, fmt.Sprintf(`docker +├──┬root1 [%s] +│ ├───custom-hash-list [%s] +│ ├───dockerignore [%s] +│ ├───multistage [%s] +│ ├──┬sub1 [%s] +│ │ └───sub2 [%s] +│ ├───two-parents [%s] +│ └───with-a-file [%s] 
+├──┬root2 [%s] +│ └───two-parents [%s] +└───root3 [%s] +`, hashRoot1, hashCHL, hashDockerignore, hashMultistage, + hashSub1, hashSub2, hashTwoParents, hashWithAFile, hashRoot2, hashTwoParents, hashRoot3), + nominalGraph) + nominalLines := strings.Split(nominalGraph, "\n") + + t.Run("adding a file to the root1 directory", func(t *testing.T) { + copiedDir := copyFixtures(t) + + baseDir := copiedDir + "/root1" + + // When I add a new file in root1 + newFilePath := baseDir + "/newfile" + require.NoError(t, os.WriteFile(newFilePath, []byte("any content"), 0o600)) + + graph, err := GenerateDAG(copiedDir, registryPrefix, "", nil) + require.NoError(t, err) + + have := graph.Sprint(path.Base(copiedDir)) + newLines := strings.Split(have, "\n") + assert.Len(t, newLines, len(nominalLines)) + for i := range nominalLines { + switch i { + case 0, 9, 11, 12: + assert.Equal(t, nominalLines[i], newLines[i]) + default: + assert.NotEqual(t, nominalLines[i], newLines[i]) + } + } + }) + + t.Run("adding a file to the multistage directory", func(t *testing.T) { + copiedDir := copyFixtures(t) + + baseDir := copiedDir + "/root1" + + // When I add a new file in root1/multistage + newFilePath := baseDir + "/multistage/newfile" + require.NoError(t, os.WriteFile(newFilePath, []byte("any content"), 0o600)) + + graph, err := GenerateDAG(copiedDir, registryPrefix, "", nil) + require.NoError(t, err) + + have := graph.Sprint(path.Base(copiedDir)) + newLines := strings.Split(have, "\n") + assert.Len(t, newLines, len(nominalLines)) + for i := range nominalLines { + switch i { + case 0, 9, 11, 12: + assert.Equal(t, nominalLines[i], newLines[i]) + default: + assert.NotEqual(t, nominalLines[i], newLines[i]) + } + } + }) + + t.Run("using custom hash list", func(t *testing.T) { + copiedDir := copyFixtures(t) + + // Recompute hash of custom-hash-list, which is the only node that has the label 'dib.use-custom-hash-list' + customHashListPath := "../../test/fixtures/dib/valid_wordlist.txt" + customHashList, err := loadCustomHashList(customHashListPath) + require.NoError(t, err) + + hashCHL, err := hashFiles(copiedDir+"/root1/custom-hash-list", []string{ + copiedDir + "/root1/custom-hash-list/Dockerfile", + }, []string{hashRoot1}, customHashList) + require.NoError(t, err) + + graph, err := GenerateDAG(copiedDir, registryPrefix, customHashListPath, nil) + require.NoError(t, err) + + // Only the custom-hash-list node, which has the label 'dib.use-custom-hash-list', should change + assert.Equal(t, fmt.Sprintf(`docker +├──┬root1 [%s] +│ ├───custom-hash-list [%s] +│ ├───dockerignore [%s] +│ ├───multistage [%s] +│ ├──┬sub1 [%s] +│ │ └───sub2 [%s] +│ ├───two-parents [%s] +│ └───with-a-file [%s] +├──┬root2 [%s] +│ └───two-parents [%s] +└───root3 [%s] +`, hashRoot1, hashCHL, hashDockerignore, hashMultistage, + hashSub1, hashSub2, hashTwoParents, hashWithAFile, hashRoot2, hashTwoParents, hashRoot3), + graph.Sprint(path.Base(basePath))) + }) + + t.Run("using build args", func(t *testing.T) { + copiedDir := copyFixtures(t) + + baseDir := copiedDir + "/root1" + + dckfile, err := dockerfile.ParseDockerfile(baseDir + "/Dockerfile") + require.NoError(t, err) + + buildArgs := map[string]string{ + "HELLO": "world", + } + argInstructionsToReplace := make(map[string]string) + for key, newArg := range buildArgs { + prevArgInstruction, ok := dckfile.Args[key] + if ok { + argInstructionsToReplace[prevArgInstruction] = fmt.Sprintf("ARG %s=%s", key, newArg) + } + } + require.NoError(t, dockerfile.ReplaceInFile(baseDir+"/Dockerfile", argInstructionsToReplace)) 
+ + graph, err := GenerateDAG(copiedDir, registryPrefix, "", buildArgs) + require.NoError(t, err) + + // Only root1 node has the 'HELLO' argument, so its hash and all of its children should change + have := graph.Sprint(path.Base(copiedDir)) + newLines := strings.Split(have, "\n") + assert.Len(t, newLines, len(nominalLines)) + for i := range nominalLines { + switch i { + case 0, 9, 11, 12: + assert.Equal(t, nominalLines[i], newLines[i]) + default: + assert.NotEqual(t, nominalLines[i], newLines[i]) + } + } + }) + + t.Run("duplicates image names", func(t *testing.T) { + dupDir := "../../test/fixtures/docker-duplicates" + _, err := GenerateDAG(dupDir, registryPrefix, "", nil) + require.EqualError(t, err, + fmt.Sprintf(`duplicate image name "%s/duplicate" found while reading file `+ + `"%s/root/duplicate2/Dockerfile": previous file was "%s/root/duplicate1/Dockerfile"`, + registryPrefix, dupDir, dupDir)) + }) +} + +// copyFixtures copies the buildPath directory into a temporary one to be free to edit files. +func copyFixtures(t *testing.T) string { + t.Helper() + cwd, err := os.Getwd() + require.NoError(t, err) + src := path.Join(cwd, basePath) + dest := t.TempDir() + cmd := exec.Command("cp", "-r", src, dest) + require.NoError(t, cmd.Run()) + return path.Join(dest, path.Base(basePath)) +} func Test_buildGraph(t *testing.T) { - t.Parallel() + graph, err := buildGraph(basePath, registryPrefix) + require.NoError(t, err) + graph.WalkInDepth(func(node *dag.Node) { + files := node.Image.ContextFiles + switch node.Image.ShortName { + case "root1": + require.Len(t, files, 11, spew.Sdump(files)) + assert.Contains(t, files, basePath+"/root1/Dockerfile") + assert.Contains(t, files, basePath+"/root1/custom-hash-list/Dockerfile") + assert.Contains(t, files, basePath+"/root1/dockerignore/.dockerignore") + assert.Contains(t, files, basePath+"/root1/dockerignore/Dockerfile") + assert.Contains(t, files, basePath+"/root1/dockerignore/ignored.txt") + assert.Contains(t, files, basePath+"/root1/multistage/Dockerfile") + assert.Contains(t, files, basePath+"/root1/skipbuild/Dockerfile") + assert.Contains(t, files, basePath+"/root1/sub1/Dockerfile") + assert.Contains(t, files, basePath+"/root1/sub1/sub2/Dockerfile") + assert.Contains(t, files, basePath+"/root1/with-a-file/Dockerfile") + assert.Contains(t, files, basePath+"/root1/with-a-file/included.txt") + case "custom-hash-list": + require.Len(t, files, 1, spew.Sdump(files)) + assert.Contains(t, files, basePath+"/root1/custom-hash-list/Dockerfile") + case "dockerignore": + require.Len(t, files, 1, spew.Sdump(files)) + assert.Contains(t, files, basePath+"/root1/dockerignore/Dockerfile") + case "multistage": + require.Len(t, files, 1, spew.Sdump(files)) + assert.Contains(t, files, basePath+"/root1/multistage/Dockerfile") + case "sub1": + require.Len(t, files, 2, spew.Sdump(files)) + assert.Contains(t, files, basePath+"/root1/sub1/Dockerfile") + assert.Contains(t, files, basePath+"/root1/sub1/sub2/Dockerfile") + case "sub2": + require.Len(t, files, 1, spew.Sdump(files)) + assert.Contains(t, files, basePath+"/root1/sub1/sub2/Dockerfile") + case "with-a-file": + require.Len(t, files, 2, spew.Sdump(files)) + assert.Contains(t, files, basePath+"/root1/with-a-file/Dockerfile") + assert.Contains(t, files, basePath+"/root1/with-a-file/included.txt") + case "root2": + require.Len(t, files, 2, spew.Sdump(files)) + assert.Contains(t, files, basePath+"/root2/Dockerfile") + assert.Contains(t, files, basePath+"/root2/root3/Dockerfile") + case "root3": + require.Len(t, files, 1, 
spew.Sdump(files)) + assert.Contains(t, files, basePath+"/root2/root3/Dockerfile") + case "two-parents": + require.Len(t, files, 1, spew.Sdump(files)) + assert.Contains(t, files, basePath+"/two-parents/Dockerfile") + default: + t.Errorf("unexpected image: %s", node.Image.ShortName) + } + }) +} + +func Test_loadCustomHashList(t *testing.T) { + testCases := []struct { + name string + input string + expected []string + expectedErr error + }{ + { + name: "custom wordlist txt", + input: "../../test/fixtures/dib/wordlist.txt", + expected: []string{"a", "b", "c"}, + }, + { + name: "custom wordlist yml", + input: "../../test/fixtures/dib/wordlist.yml", + expected: []string{"e", "f", "g"}, + }, + { + name: "wordlist file not exist", + input: "../../test/fixtures/dib/lorem.txt", + expected: nil, + expectedErr: os.ErrNotExist, + }, + } + + for _, test := range testCases { + t.Run(test.name, func(t *testing.T) { + t.Parallel() - // Implement me. + actual, err := loadCustomHashList(test.input) + if test.expectedErr == nil { + require.NoError(t, err) + assert.Equal(t, test.expected, actual) + } else { + require.ErrorIs(t, err, test.expectedErr) + } + }) + } } diff --git a/pkg/dib/generate_dag_test.go b/pkg/dib/generate_dag_test.go deleted file mode 100644 index 00462a71..00000000 --- a/pkg/dib/generate_dag_test.go +++ /dev/null @@ -1,226 +0,0 @@ -package dib_test - -import ( - "fmt" - "os" - "os/exec" - "path" - "testing" - - "github.com/radiofrance/dib/pkg/dag" - "github.com/radiofrance/dib/pkg/dib" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -const ( - buildPath1 = "../../test/fixtures/docker" - buildPath2 = "../../test/fixtures/docker-duplicates" - registryPrefix = "eu.gcr.io/my-test-repository" -) - -//nolint:paralleltest,dupl -func TestGenerateDAG(t *testing.T) { - t.Run("basic tests", func(t *testing.T) { - graph, err := dib.GenerateDAG(buildPath1, registryPrefix, "", nil) - require.NoError(t, err) - - nodes := flattenNodes(graph) - rootNode := nodes["bullseye"] - subNode := nodes["sub-image"] - multistageNode := nodes["multistage"] - - rootImage := rootNode.Image - assert.Equal(t, path.Join(registryPrefix, "bullseye"), rootImage.Name) - assert.Equal(t, "bullseye", rootImage.ShortName) - assert.Empty(t, rootNode.Parents()) - assert.Len(t, rootNode.Children(), 3) - assert.Len(t, subNode.Parents(), 1) - assert.Len(t, multistageNode.Parents(), 1) - assert.Equal(t, []string{"latest"}, multistageNode.Image.ExtraTags) - }) - - t.Run("modifying the root node should change all hashes", func(t *testing.T) { - buildPath := copyFixtures(t, buildPath1) - - graph0, err := dib.GenerateDAG(buildPath, registryPrefix, "", nil) - require.NoError(t, err) - - nodes0 := flattenNodes(graph0) - rootNode0 := nodes0["bullseye"] - subNode0 := nodes0["sub-image"] - multistageNode0 := nodes0["multistage"] - - // When I add a new file in bullseye/ (root node) - //nolint:gosec - require.NoError(t, os.WriteFile( - path.Join(buildPath, "bullseye/newfile"), - []byte("any content"), - os.ModePerm)) - - // Then ONLY the hash of the child node bullseye/multistage should have changed - graph1, err := dib.GenerateDAG(buildPath, registryPrefix, "", nil) - require.NoError(t, err) - - nodes1 := flattenNodes(graph1) - rootNode1 := nodes1["bullseye"] - subNode1 := nodes1["sub-image"] - multistageNode1 := nodes1["multistage"] - - assert.NotEqual(t, rootNode0.Image.Hash, rootNode1.Image.Hash) - assert.NotEqual(t, subNode0.Image.Hash, subNode1.Image.Hash) - assert.NotEqual(t, multistageNode0.Image.Hash, 
multistageNode1.Image.Hash) - }) - - t.Run("modifying a child node should change only its hash", func(t *testing.T) { - buildPath := copyFixtures(t, buildPath1) - - graph0, err := dib.GenerateDAG(buildPath, registryPrefix, "", nil) - require.NoError(t, err) - - nodes0 := flattenNodes(graph0) - rootNode0 := nodes0["bullseye"] - subNode0 := nodes0["sub-image"] - multistageNode0 := nodes0["multistage"] - - // When I add a new file in bullseye/multistage/ (child node) - //nolint:gosec - require.NoError(t, os.WriteFile( - path.Join(buildPath, "bullseye/multistage/newfile"), - []byte("file contents"), - os.ModePerm)) - - // Then ONLY the hash of the child node bullseye/multistage should have changed - graph1, err := dib.GenerateDAG(buildPath, registryPrefix, "", nil) - require.NoError(t, err) - - nodes1 := flattenNodes(graph1) - rootNode1 := nodes1["bullseye"] - subNode1 := nodes1["sub-image"] - multistageNode1 := nodes1["multistage"] - - assert.Equal(t, rootNode0.Image.Hash, rootNode1.Image.Hash) - assert.Equal(t, subNode0.Image.Hash, subNode1.Image.Hash) - assert.NotEqual(t, multistageNode0.Image.Hash, multistageNode1.Image.Hash) - }) - - t.Run("using custom hash list should change only hashes of nodes with custom label", func(t *testing.T) { - graph0, err := dib.GenerateDAG(buildPath1, registryPrefix, "", nil) - require.NoError(t, err) - - graph1, err := dib.GenerateDAG(buildPath1, registryPrefix, - "../../test/fixtures/dib/valid_wordlist.txt", nil) - require.NoError(t, err) - - nodes0 := flattenNodes(graph0) - rootNode0 := nodes0["bullseye"] - subNode0 := nodes0["sub-image"] - nodes1 := flattenNodes(graph1) - rootNode1 := nodes1["bullseye"] - subNode1 := nodes1["sub-image"] - - assert.Equal(t, rootNode1.Image.Hash, rootNode0.Image.Hash) - assert.Equal(t, "violet-minnesota-alabama-alpha", subNode0.Image.Hash) - assert.Equal(t, "golduck-dialga-abra-aegislash", subNode1.Image.Hash) - }) - - t.Run("using arg used in root node should change all hashes", func(t *testing.T) { - graph0, err := dib.GenerateDAG(buildPath1, registryPrefix, "", nil) - require.NoError(t, err) - - graph1, err := dib.GenerateDAG(buildPath1, registryPrefix, "", - map[string]string{ - "HELLO": "world", - }) - require.NoError(t, err) - - nodes0 := flattenNodes(graph0) - rootNode0 := nodes0["bullseye"] - nodes1 := flattenNodes(graph1) - rootNode1 := nodes1["bullseye"] - - assert.NotEqual(t, rootNode1.Image.Hash, rootNode0.Image.Hash) - }) - - t.Run("duplicates", func(t *testing.T) { - graph, err := dib.GenerateDAG(buildPath2, registryPrefix, "", nil) - require.Error(t, err) - require.Nil(t, graph) - require.EqualError(t, err, - fmt.Sprintf( - "duplicate image name \"%s/duplicate\" found while reading file \"%s/bullseye/duplicate2/Dockerfile\": previous file was \"%s/bullseye/duplicate1/Dockerfile\"", //nolint:lll - registryPrefix, buildPath2, buildPath2)) - }) -} - -// copyFixtures copies the buildPath directory into a temporary one to be free to edit files. 
-func copyFixtures(t *testing.T, buildPath string) string { - t.Helper() - cwd, err := os.Getwd() - require.NoError(t, err) - src := path.Join(cwd, buildPath) - dest := t.TempDir() - cmd := exec.Command("cp", "-r", src, dest) - require.NoError(t, cmd.Run()) - return dest + "/docker" -} - -func flattenNodes(graph *dag.DAG) map[string]*dag.Node { - flatNodes := map[string]*dag.Node{} - - graph.Walk(func(node *dag.Node) { - flatNodes[node.Image.ShortName] = node - }) - - return flatNodes -} - -func TestLoadCustomHashList(t *testing.T) { - t.Parallel() - - testCases := []struct { - name string - input string - expected []string - expectedErr string - }{ - { - name: "standard wordlist", - input: "", - expected: nil, - expectedErr: "", - }, - { - name: "custom wordlist txt", - input: "../../test/fixtures/dib/wordlist.txt", - expected: []string{"a", "b", "c"}, - expectedErr: "", - }, - { - name: "custom wordlist yml", - input: "../../test/fixtures/dib/wordlist.yml", - expected: []string{"e", "f", "g"}, - expectedErr: "", - }, - { - name: "wordlist file not exist", - input: "../../test/fixtures/dib/lorem.txt", - expected: nil, - expectedErr: "open ../../test/fixtures/dib/lorem.txt: no such file or directory", - }, - } - - for _, test := range testCases { - t.Run(test.name, func(t *testing.T) { - t.Parallel() - - actual, err := dib.LoadCustomHashList(test.input) - if test.expectedErr == "" { - require.NoError(t, err) - } else { - require.EqualError(t, err, test.expectedErr) - } - assert.Equal(t, test.expected, actual) - }) - } -} diff --git a/pkg/graphviz/graphviz_test.go b/pkg/graphviz/graphviz_test.go index 2c2d729b..274ebdd1 100644 --- a/pkg/graphviz/graphviz_test.go +++ b/pkg/graphviz/graphviz_test.go @@ -35,12 +35,23 @@ func Test_GenerateDotviz(t *testing.T) { content, err := os.ReadFile(dotFile) require.NoError(t, err) f := string(content) - assert.Len(t, f, 647) - assert.Contains(t, f, `"eu.gcr.io/my-test-repository/bullseye" [fillcolor=white style=filled];`) - assert.Contains(t, f, `"eu.gcr.io/my-test-repository/bullseye" -> "eu.gcr.io/my-test-repository/kaniko";`) - assert.Contains(t, f, `"eu.gcr.io/my-test-repository/bullseye" -> "eu.gcr.io/my-test-repository/multistage";`) - assert.Contains(t, f, `"eu.gcr.io/my-test-repository/bullseye" -> "eu.gcr.io/my-test-repository/sub-image";`) - assert.Contains(t, f, `"eu.gcr.io/my-test-repository/kaniko" [fillcolor=white style=filled];`) + assert.Len(t, f, 1490) + assert.Contains(t, f, `"eu.gcr.io/my-test-repository/root1" [fillcolor=white style=filled];`) + assert.Contains(t, f, `"eu.gcr.io/my-test-repository/root1" -> "eu.gcr.io/my-test-repository/custom-hash-list";`) + assert.Contains(t, f, `"eu.gcr.io/my-test-repository/root1" -> "eu.gcr.io/my-test-repository/dockerignore";`) + assert.Contains(t, f, `"eu.gcr.io/my-test-repository/root1" -> "eu.gcr.io/my-test-repository/multistage";`) + assert.Contains(t, f, `"eu.gcr.io/my-test-repository/root1" -> "eu.gcr.io/my-test-repository/sub1";`) + assert.Contains(t, f, `"eu.gcr.io/my-test-repository/sub1" -> "eu.gcr.io/my-test-repository/sub2";`) + assert.Contains(t, f, `"eu.gcr.io/my-test-repository/root1" -> "eu.gcr.io/my-test-repository/with-a-file";`) + assert.Contains(t, f, `"eu.gcr.io/my-test-repository/dockerignore" [fillcolor=white style=filled];`) assert.Contains(t, f, `"eu.gcr.io/my-test-repository/multistage" [fillcolor=white style=filled];`) - assert.Contains(t, f, `"eu.gcr.io/my-test-repository/sub-image" [fillcolor=white style=filled];`) + assert.Contains(t, f, 
`"eu.gcr.io/my-test-repository/with-a-file" [fillcolor=white style=filled];`) + assert.Contains(t, f, `"eu.gcr.io/my-test-repository/custom-hash-list" [fillcolor=white style=filled];`) + assert.Contains(t, f, `"eu.gcr.io/my-test-repository/sub1" [fillcolor=white style=filled];`) + assert.Contains(t, f, `"eu.gcr.io/my-test-repository/sub2" [fillcolor=white style=filled];`) + assert.Contains(t, f, `"eu.gcr.io/my-test-repository/root2" [fillcolor=white style=filled];`) + assert.Contains(t, f, `"eu.gcr.io/my-test-repository/root3" [fillcolor=white style=filled];`) + assert.Contains(t, f, `"eu.gcr.io/my-test-repository/two-parents" [fillcolor=white style=filled];`) + assert.Contains(t, f, `"eu.gcr.io/my-test-repository/root1" -> "eu.gcr.io/my-test-repository/two-parents";`) + assert.Contains(t, f, `"eu.gcr.io/my-test-repository/root2" -> "eu.gcr.io/my-test-repository/two-parents";`) } diff --git a/test/fixtures/docker-duplicates/bullseye/Dockerfile b/test/fixtures/docker-duplicates/bullseye/Dockerfile deleted file mode 100644 index 03dfbc9a..00000000 --- a/test/fixtures/docker-duplicates/bullseye/Dockerfile +++ /dev/null @@ -1,8 +0,0 @@ -FROM debian:bullseye - -LABEL name="bullseye" -LABEL version="v1" - -ARG HELLO="there" - -RUN echo "Hello $HELLO" diff --git a/test/fixtures/docker-duplicates/bullseye/duplicate1/Dockerfile b/test/fixtures/docker-duplicates/bullseye/duplicate1/Dockerfile deleted file mode 100644 index 67c1255a..00000000 --- a/test/fixtures/docker-duplicates/bullseye/duplicate1/Dockerfile +++ /dev/null @@ -1,5 +0,0 @@ -FROM eu.gcr.io/my-test-repository/bullseye:v1 - -LABEL name="duplicate" -LABEL version="v1" -LABEL dib.use-custom-hash-list="true" diff --git a/test/fixtures/docker-duplicates/bullseye/duplicate2/Dockerfile b/test/fixtures/docker-duplicates/bullseye/duplicate2/Dockerfile deleted file mode 100644 index 4965728c..00000000 --- a/test/fixtures/docker-duplicates/bullseye/duplicate2/Dockerfile +++ /dev/null @@ -1,5 +0,0 @@ -FROM eu.gcr.io/my-test-repository/bullseye:v1 - -LABEL name="duplicate" -LABEL version="v2" -LABEL dib.use-custom-hash-list="true" diff --git a/test/fixtures/docker-duplicates/root/Dockerfile b/test/fixtures/docker-duplicates/root/Dockerfile new file mode 100644 index 00000000..18a9f234 --- /dev/null +++ b/test/fixtures/docker-duplicates/root/Dockerfile @@ -0,0 +1,3 @@ +FROM debian:bullseye + +LABEL name="root" diff --git a/test/fixtures/docker-duplicates/root/duplicate1/Dockerfile b/test/fixtures/docker-duplicates/root/duplicate1/Dockerfile new file mode 100644 index 00000000..ba49e241 --- /dev/null +++ b/test/fixtures/docker-duplicates/root/duplicate1/Dockerfile @@ -0,0 +1,3 @@ +FROM eu.gcr.io/my-test-repository/root + +LABEL name="duplicate" diff --git a/test/fixtures/docker-duplicates/root/duplicate2/Dockerfile b/test/fixtures/docker-duplicates/root/duplicate2/Dockerfile new file mode 100644 index 00000000..ba49e241 --- /dev/null +++ b/test/fixtures/docker-duplicates/root/duplicate2/Dockerfile @@ -0,0 +1,3 @@ +FROM eu.gcr.io/my-test-repository/root + +LABEL name="duplicate" diff --git a/test/fixtures/docker/bullseye/external-parent/Dockerfile b/test/fixtures/docker/bullseye/external-parent/Dockerfile deleted file mode 100644 index 023e5e5d..00000000 --- a/test/fixtures/docker/bullseye/external-parent/Dockerfile +++ /dev/null @@ -1,9 +0,0 @@ -# Check for new release: https://github.com/GoogleContainerTools/kaniko/tags -ARG KANIKO_VERSION=v1.6.0 -# We are using official Docker image as base -FROM gcr.io/kaniko-project/executor:${KANIKO_VERSION} as 
kaniko_artifacts - -FROM eu.gcr.io/my-test-repository/bullseye:v1 -LABEL name="kaniko" -LABEL version="16" - diff --git a/test/fixtures/docker/bullseye/multistage/Dockerfile b/test/fixtures/docker/bullseye/multistage/Dockerfile deleted file mode 100644 index fa8fbc52..00000000 --- a/test/fixtures/docker/bullseye/multistage/Dockerfile +++ /dev/null @@ -1,6 +0,0 @@ -FROM eu.gcr.io/my-test-repository/bullseye:v1 as builder -FROM eu.gcr.io/my-test-repository/node:v1 - -LABEL name="multistage" -LABEL version="v1" -LABEL dib.extra-tags="latest" diff --git a/test/fixtures/docker/bullseye/skipbuild/Dockerfile b/test/fixtures/docker/bullseye/skipbuild/Dockerfile deleted file mode 100644 index a3ce09d3..00000000 --- a/test/fixtures/docker/bullseye/skipbuild/Dockerfile +++ /dev/null @@ -1,4 +0,0 @@ -FROM eu.gcr.io/my-test-repository/bullseye:v1 - -LABEL name="skipbuild" -LABEL skipbuild="true" diff --git a/test/fixtures/docker/bullseye/sub-image/Dockerfile b/test/fixtures/docker/bullseye/sub-image/Dockerfile deleted file mode 100644 index 1cfd9d66..00000000 --- a/test/fixtures/docker/bullseye/sub-image/Dockerfile +++ /dev/null @@ -1,5 +0,0 @@ -FROM eu.gcr.io/my-test-repository/bullseye:v1 - -LABEL name="sub-image" -LABEL version="v1" -LABEL dib.use-custom-hash-list="true" diff --git a/test/fixtures/docker/bullseye/Dockerfile b/test/fixtures/docker/root1/Dockerfile similarity index 79% rename from test/fixtures/docker/bullseye/Dockerfile rename to test/fixtures/docker/root1/Dockerfile index 03dfbc9a..016906ae 100644 --- a/test/fixtures/docker/bullseye/Dockerfile +++ b/test/fixtures/docker/root1/Dockerfile @@ -1,6 +1,6 @@ FROM debian:bullseye -LABEL name="bullseye" +LABEL name="root1" LABEL version="v1" ARG HELLO="there" diff --git a/test/fixtures/docker/root1/custom-hash-list/Dockerfile b/test/fixtures/docker/root1/custom-hash-list/Dockerfile new file mode 100644 index 00000000..c23c978a --- /dev/null +++ b/test/fixtures/docker/root1/custom-hash-list/Dockerfile @@ -0,0 +1,5 @@ +FROM eu.gcr.io/my-test-repository/root1:v1 + +LABEL name="custom-hash-list" + +LABEL dib.use-custom-hash-list="true" diff --git a/test/fixtures/docker/root1/dockerignore/.dockerignore b/test/fixtures/docker/root1/dockerignore/.dockerignore new file mode 100644 index 00000000..f89d64da --- /dev/null +++ b/test/fixtures/docker/root1/dockerignore/.dockerignore @@ -0,0 +1 @@ +ignored.txt diff --git a/test/fixtures/docker/root1/dockerignore/Dockerfile b/test/fixtures/docker/root1/dockerignore/Dockerfile new file mode 100644 index 00000000..5ebad592 --- /dev/null +++ b/test/fixtures/docker/root1/dockerignore/Dockerfile @@ -0,0 +1,3 @@ +FROM eu.gcr.io/my-test-repository/root1:v1 + +LABEL name="dockerignore" diff --git a/test/fixtures/docker/root1/dockerignore/ignored.txt b/test/fixtures/docker/root1/dockerignore/ignored.txt new file mode 100644 index 00000000..e69de29b diff --git a/test/fixtures/docker/root1/multistage/Dockerfile b/test/fixtures/docker/root1/multistage/Dockerfile new file mode 100644 index 00000000..18559eea --- /dev/null +++ b/test/fixtures/docker/root1/multistage/Dockerfile @@ -0,0 +1,4 @@ +FROM eu.gcr.io/my-test-repository/root1:v1 as builder +FROM vault + +LABEL name="multistage" diff --git a/test/fixtures/docker/root1/skipbuild/Dockerfile b/test/fixtures/docker/root1/skipbuild/Dockerfile new file mode 100644 index 00000000..a6c98d11 --- /dev/null +++ b/test/fixtures/docker/root1/skipbuild/Dockerfile @@ -0,0 +1,3 @@ +FROM eu.gcr.io/my-test-repository/root1:v1 + +LABEL skipbuild="true" diff --git 
a/test/fixtures/docker/root1/sub1/Dockerfile b/test/fixtures/docker/root1/sub1/Dockerfile new file mode 100644 index 00000000..eb238e46 --- /dev/null +++ b/test/fixtures/docker/root1/sub1/Dockerfile @@ -0,0 +1,3 @@ +FROM eu.gcr.io/my-test-repository/root1:v1 + +LABEL name="sub1" diff --git a/test/fixtures/docker/root1/sub1/sub2/Dockerfile b/test/fixtures/docker/root1/sub1/sub2/Dockerfile new file mode 100644 index 00000000..5a159dc6 --- /dev/null +++ b/test/fixtures/docker/root1/sub1/sub2/Dockerfile @@ -0,0 +1,3 @@ +FROM eu.gcr.io/my-test-repository/sub1 + +LABEL name="sub2" diff --git a/test/fixtures/docker/root1/with-a-file/Dockerfile b/test/fixtures/docker/root1/with-a-file/Dockerfile new file mode 100644 index 00000000..7fc29674 --- /dev/null +++ b/test/fixtures/docker/root1/with-a-file/Dockerfile @@ -0,0 +1,3 @@ +FROM eu.gcr.io/my-test-repository/root1:v1 + +LABEL name="with-a-file" diff --git a/test/fixtures/docker/root1/with-a-file/included.txt b/test/fixtures/docker/root1/with-a-file/included.txt new file mode 100644 index 00000000..e69de29b diff --git a/test/fixtures/docker/root2/Dockerfile b/test/fixtures/docker/root2/Dockerfile new file mode 100644 index 00000000..dedc38ce --- /dev/null +++ b/test/fixtures/docker/root2/Dockerfile @@ -0,0 +1,3 @@ +FROM apache/superset + +LABEL name="root2" diff --git a/test/fixtures/docker/root2/root3/Dockerfile b/test/fixtures/docker/root2/root3/Dockerfile new file mode 100644 index 00000000..3676f7f7 --- /dev/null +++ b/test/fixtures/docker/root2/root3/Dockerfile @@ -0,0 +1,3 @@ +FROM bitnami/elasticsearch + +LABEL name="root3" diff --git a/test/fixtures/docker/two-parents/Dockerfile b/test/fixtures/docker/two-parents/Dockerfile new file mode 100644 index 00000000..87ff0ba0 --- /dev/null +++ b/test/fixtures/docker/two-parents/Dockerfile @@ -0,0 +1,4 @@ +FROM eu.gcr.io/my-test-repository/root1:v1 as builder +FROM eu.gcr.io/my-test-repository/root2:v1 + +LABEL name="two-parents"
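
For readers skimming the diff, the key structural change in `pkg/dib/generate_dag.go` is that `computeGraphHashes` replaces the old `needRepass` retry loop with a level-order walk over the DAG: root nodes are hashed first, then each wave of children, so every parent hash is already available when a child is processed. The standalone sketch below illustrates only that traversal order and the parent-hash mixing idea; the `node` type, the toy inputs, and the name-based hashing are hypothetical stand-ins, not the real `dag.Node` API (the actual `hashFiles` also hashes file contents, applies build-arg overrides, and humanizes the digest with a word list).

```go
package main

import (
	"crypto/sha256"
	"encoding/hex"
	"fmt"
	"sort"
)

// node is a stand-in for dag.Node: a name, the files of its build context,
// its parents/children in the graph, and the computed hash.
type node struct {
	name     string
	files    []string
	parents  []*node
	children []*node
	hash     string
}

// hashNode mixes the node's own (sorted) file names with its parents'
// (sorted) hashes, mirroring the idea behind computeNodeHash/hashFiles.
// File names replace file contents here to keep the sketch short.
func hashNode(n *node) string {
	h := sha256.New()
	files := append([]string(nil), n.files...)
	sort.Strings(files)
	for _, f := range files {
		h.Write([]byte(f))
	}
	var parentHashes []string
	for _, p := range n.parents {
		parentHashes = append(parentHashes, p.hash)
	}
	sort.Strings(parentHashes)
	for _, ph := range parentHashes {
		h.Write([]byte(ph))
	}
	return hex.EncodeToString(h.Sum(nil))[:12]
}

func main() {
	// Tiny example DAG: root1 -> sub1 (hypothetical data, not the fixtures).
	root := &node{name: "root1", files: []string{"root1/Dockerfile"}}
	child := &node{name: "sub1", files: []string{"root1/sub1/Dockerfile"}, parents: []*node{root}}
	root.children = []*node{child}

	// Level-order traversal: hash the current wave, then move to its children,
	// exactly the shape of the new computeGraphHashes loop.
	curr := []*node{root}
	for len(curr) > 0 {
		var next []*node
		for _, n := range curr {
			n.hash = hashNode(n)
			fmt.Printf("%s -> %s\n", n.name, n.hash)
			next = append(next, n.children...)
		}
		curr = next
	}
}
```

A consequence of this ordering, visible in the tests above, is that touching any file in a root context changes the hash of that root and of every descendant, while sibling roots (root2, root3) keep their hashes.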