diff --git a/README.md b/README.md index bc661702e3..0dbdab90a4 100644 --- a/README.md +++ b/README.md @@ -380,6 +380,30 @@ Use this command to install the package in Kibana. The command uses Kibana API to install the package in Kibana. The package must be exposed via the Package Registry or built locally in zip format so they can be installed using --zip parameter. Zip packages can be installed directly in Kibana >= 8.7.0. More details in this [HOWTO guide](https://github.com/elastic/elastic-package/blob/main/docs/howto/install_package.md).
+### `elastic-package links`
+
+_Context: global_
+
+Use this command to manage linked files in the repository.
+
+### `elastic-package links check`
+
+_Context: global_
+
+Use this command to check whether the linked file references inside the current directory are up to date.
+
+### `elastic-package links list`
+
+_Context: global_
+
+Use this command to list all packages that have linked file references pointing to files inside the current directory.
+
+### `elastic-package links update`
+
+_Context: global_
+
+Use this command to update all linked file references inside the current directory.
+
 ### `elastic-package lint`
 
 _Context: package_
diff --git a/cmd/links.go b/cmd/links.go new file mode 100644 index 0000000000..ff365c1f72 --- /dev/null +++ b/cmd/links.go @@ -0,0 +1,144 @@
+// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+// or more contributor license agreements. Licensed under the Elastic License;
+// you may not use this file except in compliance with the Elastic License.
+
+package cmd
+
+import (
+	"fmt"
+	"os"
+	"path/filepath"
+
+	"github.com/spf13/cobra"
+
+	"github.com/elastic/elastic-package/internal/cobraext"
+	"github.com/elastic/elastic-package/internal/files"
+)
+
+const (
+	linksLongDescription       = `Use this command to manage linked files in the repository.`
+	linksCheckLongDescription  = `Use this command to check whether the linked file references inside the current directory are up to date.`
+	linksUpdateLongDescription = `Use this command to update all linked file references inside the current directory.`
+	linksListLongDescription   = `Use this command to list all packages that have linked file references pointing to files inside the current directory.`
+)
+
+func setupLinksCommand() *cobraext.Command {
+	cmd := &cobra.Command{
+		Use:   "links",
+		Short: "Manage linked files",
+		Long:  linksLongDescription,
+		RunE: func(parent *cobra.Command, args []string) error {
+			return cobraext.ComposeCommandsParentContext(parent, args, parent.Commands()...)
+ }, + } + + cmd.AddCommand(getLinksCheckCommand()) + cmd.AddCommand(getLinksUpdateCommand()) + cmd.AddCommand(getLinksListCommand()) + + return cobraext.NewCommand(cmd, cobraext.ContextGlobal) +} + +func getLinksCheckCommand() *cobra.Command { + cmd := &cobra.Command{ + Use: "check", + Short: "Check for linked files changes", + Long: linksCheckLongDescription, + Args: cobra.NoArgs, + RunE: linksCheckCommandAction, + } + return cmd +} + +func linksCheckCommandAction(cmd *cobra.Command, args []string) error { + cmd.Printf("Check for linked files changes\n") + pwd, err := os.Getwd() + if err != nil { + return fmt.Errorf("reading current working directory failed: %w", err) + } + + linkedFiles, err := files.CheckLinkedFiles(pwd) + if err != nil { + return fmt.Errorf("checking linked files are up-to-date failed: %w", err) + } + for _, f := range linkedFiles { + if !f.UpToDate { + cmd.Printf("%s is outdated.\n", filepath.Join(f.WorkDir, f.LinkFilePath)) + } + } + if len(linkedFiles) > 0 { + return fmt.Errorf("linked files are outdated") + } + return nil +} + +func getLinksUpdateCommand() *cobra.Command { + cmd := &cobra.Command{ + Use: "update", + Short: "Update linked files checksums if needed.", + Long: linksUpdateLongDescription, + Args: cobra.NoArgs, + RunE: linksUpdateCommandAction, + } + return cmd +} + +func linksUpdateCommandAction(cmd *cobra.Command, args []string) error { + cmd.Printf("Update linked files checksums if needed.\n") + pwd, err := os.Getwd() + if err != nil { + return fmt.Errorf("reading current working directory failed: %w", err) + } + + updatedLinks, err := files.UpdateLinkedFiles(pwd) + if err != nil { + return fmt.Errorf("updating linked files checksums failed: %w", err) + } + + for _, l := range updatedLinks { + cmd.Printf("%s was updated.\n", l.LinkFilePath) + } + + return nil +} + +func getLinksListCommand() *cobra.Command { + cmd := &cobra.Command{ + Use: "list", + Short: "List packages linking files from this path", + Long: linksListLongDescription, + Args: cobra.NoArgs, + RunE: linksListCommandAction, + } + cmd.Flags().BoolP(cobraext.PackagesFlagName, "", false, cobraext.PackagesFlagDescription) + return cmd +} + +func linksListCommandAction(cmd *cobra.Command, args []string) error { + onlyPackages, err := cmd.Flags().GetBool(cobraext.PackagesFlagName) + if err != nil { + return cobraext.FlagParsingError(err, cobraext.PackagesFlagName) + } + + pwd, err := os.Getwd() + if err != nil { + return fmt.Errorf("reading current working directory failed: %w", err) + } + + byPackage, err := files.ListLinkedFilesByPackage(pwd) + if err != nil { + return fmt.Errorf("listing linked packages failed: %w", err) + } + + for _, pkg := range byPackage { + if onlyPackages { + cmd.Println(pkg.PackageName) + continue + } + for _, link := range pkg.Links { + cmd.Println(link) + } + } + + return nil +} diff --git a/cmd/lint.go b/cmd/lint.go index a2a26025fe..bba2188f9d 100644 --- a/cmd/lint.go +++ b/cmd/lint.go @@ -45,7 +45,6 @@ func setupLintCommand() *cobraext.Command { func lintCommandAction(cmd *cobra.Command, args []string) error { cmd.Println("Lint the package") - readmeFiles, err := docs.AreReadmesUpToDate() if err != nil { for _, f := range readmeFiles { diff --git a/cmd/root.go b/cmd/root.go index 52478b8b2b..e449ff5169 100644 --- a/cmd/root.go +++ b/cmd/root.go @@ -28,6 +28,7 @@ var commands = []*cobraext.Command{ setupExportCommand(), setupFormatCommand(), setupInstallCommand(), + setupLinksCommand(), setupLintCommand(), setupProfilesCommand(), setupReportsCommand(), diff 
--git a/docs/howto/links.md b/docs/howto/links.md new file mode 100644 index 0000000000..f288485069 --- /dev/null +++ b/docs/howto/links.md @@ -0,0 +1,110 @@
+# HOWTO: Use links to reuse common files
+
+## Introduction
+
+Many packages contain files that are identical to files in other packages. This is most common for ingest pipelines,
+input configurations, and field definitions.
+
+To avoid this duplication, developers can define links: a reusable file only needs to be defined once, and can then be referenced from any other package.
+
+## Links
+
+Currently, there are some specific places where links can be defined:
+
+- `elasticsearch/ingest_pipeline`
+- `data_stream/**/elasticsearch/ingest_pipeline`
+- `agent/input`
+- `data_stream/**/agent/stream`
+- `data_stream/**/fields`
+
+A link is a file with a `.link` extension containing a path, relative to its own location, to the file it will be replaced with, plus a checksum used to validate that the linked file is up to date with the package's expectations.
+
+`data_stream/foo/elasticsearch/ingest_pipeline/default.yml.link`
+
+```
+../../../../../testpackage/data_stream/test/elasticsearch/ingest_pipeline/default.yml f7c5f0c03aca8ef68c379a62447bdafbf0dcf32b1ff2de143fd6878ee01a91ad
+```
+
+During validation, tests, and package builds, the contents of the linked file are used in place of the link, so functionally nothing changes from the package's point of view.
+
+## The `_dev/shared` folder
+
+As a convenience, shared files can be placed under `_dev/shared` if they are going to be
+reused from several places. They can even live outside of any package, anywhere in the repository.
+
+## Managing Links with elastic-package
+
+The `elastic-package` tool provides several subcommands to help manage linked files:
+
+### `elastic-package links check`
+
+Check whether all linked files in the current directory and its subdirectories are up to date. This command verifies that the checksums in link files match the actual content of the included files.
+
+```bash
+elastic-package links check
+```
+
+This command will:
+- Scan for all `.link` files in the current directory tree
+- Validate that each linked file's checksum matches the included file's current content
+- Report any outdated link files that need updating
+- Exit with an error if any link files are outdated
+
+### `elastic-package links update`
+
+Update the checksums of all outdated linked files in the current directory and its subdirectories.
+
+```bash
+elastic-package links update
+```
+
+This command will:
+- Find all `.link` files that have outdated checksums
+- Calculate new checksums for the included files
+- Update the `.link` files with the new checksums
+- Report which link files were updated
+
+### `elastic-package links list`
+
+List all packages that have linked files referencing content from the current directory.
+
+```bash
+# List all linked file paths
+elastic-package links list
+
+# List only package names (without individual file paths)
+elastic-package links list --packages
+```
+
+This command will:
+- Find all `.link` files in the repository that reference files in the current directory
+- Group the results by package name
+- Display either the full file paths or just the package names (with the `--packages` flag)
+
+## Workflow
+
+A typical workflow for managing linked files:
+
+1. **Create a shared file** in a central location (e.g., `_dev/shared/` or in a reference package)
+
+2. **Create link files** in packages that need to reference the shared file:
+   ```bash
+   echo "../../_dev/shared/common-pipeline.yml" > data_stream/logs/elasticsearch/ingest_pipeline/default.yml.link
+   ```
+
+3. **Update checksums** to make the link valid:
+   ```bash
+   elastic-package links update
+   ```
+
+4. **Check links regularly** to ensure they stay up to date:
+   ```bash
+   elastic-package links check
+   ```
+
+5. **When modifying shared files**, update all dependent links:
+   ```bash
+   # After editing a shared file, update all links that reference it
+   elastic-package links update
+   ```
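+A complete session, end to end, might look like this (a sketch; the link path
+and the checksum value shown are illustrative):
+
+```bash
+# From the package root: point a link at the shared pipeline.
+echo "../../_dev/shared/common-pipeline.yml" > data_stream/logs/elasticsearch/ingest_pipeline/default.yml.link
+
+# Record the checksum of the shared file in the link.
+elastic-package links update
+
+# The link file now carries the relative path plus a checksum.
+cat data_stream/logs/elasticsearch/ingest_pipeline/default.yml.link
+# ../../_dev/shared/common-pipeline.yml 3f1a2b... (illustrative)
+
+# Exits non-zero if any link is stale.
+elastic-package links check
+```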
diff --git a/internal/builder/packages.go b/internal/builder/packages.go index f71273c2d2..13c8881908 100644 --- a/internal/builder/packages.go +++ b/internal/builder/packages.go @@ -185,6 +185,15 @@ func BuildPackage(ctx context.Context, options BuildOptions) (string, error) {
 		return "", fmt.Errorf("adding dynamic mappings: %w", err)
 	}
 
+	logger.Debug("Include linked files")
+	links, err := files.IncludeLinkedFilesFromPath(options.PackageRoot, destinationDir)
+	if err != nil {
+		return "", fmt.Errorf("including linked files failed: %w", err)
+	}
+	for _, l := range links {
+		logger.Debugf("Linked file included (path: %s)", l.TargetFilePath(destinationDir))
+	}
+
 	if options.CreateZip {
 		return buildZippedPackage(ctx, options, destinationDir)
 	}
diff --git a/internal/cobraext/flags.go b/internal/cobraext/flags.go index 6307c553e0..1838404fd6 100644 --- a/internal/cobraext/flags.go +++ b/internal/cobraext/flags.go @@ -138,6 +138,9 @@ const (
 	GenerateTestResultFlagName        = "generate"
 	GenerateTestResultFlagDescription = "generate test result file"
 
+	PackagesFlagName        = "packages"
+	PackagesFlagDescription = "whether to return package names or complete paths for the linked files found"
+
 	IngestPipelineIDsFlagName        = "id"
 	IngestPipelineIDsFlagDescription = "Elasticsearch ingest pipeline IDs (comma-separated values)"
 
diff --git a/internal/elasticsearch/ingest/datastream.go b/internal/elasticsearch/ingest/datastream.go index 6d4848446c..17e2b9289b 100644 --- a/internal/elasticsearch/ingest/datastream.go +++ b/internal/elasticsearch/ingest/datastream.go @@ -19,13 +19,16 @@ import (
 	"gopkg.in/yaml.v3"
 
 	"github.com/elastic/elastic-package/internal/elasticsearch"
+	"github.com/elastic/elastic-package/internal/files"
 	"github.com/elastic/elastic-package/internal/packages"
 )
 
 var (
-	ingestPipelineTag   = regexp.MustCompile(`{{\s*IngestPipeline.+}}`)
-	defaultPipelineJSON = "default.json"
-	defaultPipelineYML  = "default.yml"
+	ingestPipelineTag       = regexp.MustCompile(`{{\s*IngestPipeline.+}}`)
+	defaultPipelineJSON     = "default.json"
+	defaultPipelineJSONLink = "default.json.link"
+	defaultPipelineYML      = "default.yml"
+	defaultPipelineYMLLink  = "default.yml.link"
 )
 
 type Rule struct {
@@ -71,7 +74,7 @@ func loadIngestPipelineFiles(dataStreamPath string, nonce int64) ([]Pipeline, er
 	elasticsearchPath := filepath.Join(dataStreamPath, "elasticsearch", "ingest_pipeline")
 
 	var pipelineFiles []string
-	for _, pattern := range []string{"*.json", "*.yml"} {
+	for _, pattern := range []string{"*.json", "*.yml", "*.link"} {
 		files, err := filepath.Glob(filepath.Join(elasticsearchPath, pattern))
 		if err != nil {
 			return nil, fmt.Errorf("listing '%s' in '%s': %w", pattern, elasticsearchPath, err)
@@ -79,9 +82,13 @@ func loadIngestPipelineFiles(dataStreamPath string, nonce int64) ([]Pipeline, er
 		pipelineFiles = append(pipelineFiles, files...)
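+		// Note: *.link files are collected by the glob above together with the
+		// real pipeline sources; the LinksFS created below resolves each link to
+		// its target file's contents when the pipeline is read.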
} + linksFS, err := files.CreateLinksFSFromPath(elasticsearchPath) + if err != nil { + return nil, fmt.Errorf("creating links filesystem failed: %w", err) + } var pipelines []Pipeline for _, path := range pipelineFiles { - c, err := os.ReadFile(path) + c, err := linksFS.ReadFile(path) if err != nil { return nil, fmt.Errorf("reading ingest pipeline failed (path: %s): %w", path, err) } @@ -108,7 +115,7 @@ func loadIngestPipelineFiles(dataStreamPath string, nonce int64) ([]Pipeline, er pipelines = append(pipelines, Pipeline{ Path: path, Name: getPipelineNameWithNonce(name[:strings.Index(name, ".")], nonce), - Format: filepath.Ext(path)[1:], + Format: filepath.Ext(strings.TrimSuffix(path, ".link"))[1:], Content: cWithRerouteProcessors, ContentOriginal: c, }) @@ -119,7 +126,8 @@ func loadIngestPipelineFiles(dataStreamPath string, nonce int64) ([]Pipeline, er func addRerouteProcessors(pipeline []byte, dataStreamPath, path string) ([]byte, error) { // Only attach routing_rules.yml reroute processors after the default pipeline filename := filepath.Base(path) - if filename != defaultPipelineJSON && filename != defaultPipelineYML { + if filename != defaultPipelineJSON && filename != defaultPipelineYML && + filename != defaultPipelineJSONLink && filename != defaultPipelineYMLLink { return pipeline, nil } diff --git a/internal/files/copy.go b/internal/files/copy.go index 2220cf2a63..9e0733f0c4 100644 --- a/internal/files/copy.go +++ b/internal/files/copy.go @@ -13,7 +13,7 @@ import ( var ( defaultFoldersToSkip = []string{"_dev", "build", ".git"} - defaultFileGlobsToSkip = []string{".DS_Store", ".*.swp"} + defaultFileGlobsToSkip = []string{".DS_Store", ".*.swp", "*.link"} ) // CopyAll method copies files from the source to the destination skipping empty directories. diff --git a/internal/files/linkedfiles.go b/internal/files/linkedfiles.go new file mode 100644 index 0000000000..4acc03b0d4 --- /dev/null +++ b/internal/files/linkedfiles.go @@ -0,0 +1,596 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. + +package files + +import ( + "bufio" + "crypto/sha256" + "encoding/hex" + "fmt" + "io" + "io/fs" + "os" + "path/filepath" + "slices" + "strings" + + "github.com/elastic/elastic-package/internal/logger" + "github.com/elastic/elastic-package/internal/packages" +) + +const linkExtension = ".link" + +// PackageLinks represents linked files grouped by package. +type PackageLinks struct { + PackageName string + Links []string +} + +// CheckLinkedFiles checks if all linked files in the given directory are up-to-date. +// Returns a list of outdated links that need updating. +func CheckLinkedFiles(fromDir string) ([]Link, error) { + repoRoot, err := FindRepositoryRootDirectory() + if err != nil { + return nil, fmt.Errorf("finding repository root: %w", err) + } + + root, err := os.OpenRoot(repoRoot) + if err != nil { + return nil, fmt.Errorf("opening repository root: %w", err) + } + + return AreLinkedFilesUpToDate(root, fromDir) +} + +// UpdateLinkedFiles updates the checksums of all outdated linked files in the given directory. +// Returns a list of links that were updated. 
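+//
+// A minimal usage sketch (an illustration, not part of the API surface;
+// error handling elided):
+//
+//	updated, _ := files.UpdateLinkedFiles(".")
+//	for _, l := range updated {
+//		fmt.Printf("%s -> %s\n", l.LinkFilePath, l.LinkChecksum)
+//	}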
+func UpdateLinkedFiles(fromDir string) ([]Link, error) { + repoRoot, err := FindRepositoryRootDirectory() + if err != nil { + return nil, fmt.Errorf("finding repository root: %w", err) + } + + root, err := os.OpenRoot(repoRoot) + if err != nil { + return nil, fmt.Errorf("opening repository root: %w", err) + } + + return UpdateLinkedFilesChecksums(root, fromDir) +} + +// IncludeLinkedFilesFromPath copies all linked files from the source directory to the target directory. +// This is used during package building to include linked files in the build output. +func IncludeLinkedFilesFromPath(fromDir, toDir string) ([]Link, error) { + repoRoot, err := FindRepositoryRootDirectory() + if err != nil { + return nil, fmt.Errorf("finding repository root: %w", err) + } + + root, err := os.OpenRoot(repoRoot) + if err != nil { + return nil, fmt.Errorf("opening repository root: %w", err) + } + + return IncludeLinkedFiles(root, fromDir, toDir) +} + +// ListLinkedFilesByPackage returns a mapping of packages to their linked files that reference +// files from the given directory. +func ListLinkedFilesByPackage(fromDir string) ([]PackageLinks, error) { + repoRoot, err := FindRepositoryRootDirectory() + if err != nil { + return nil, fmt.Errorf("finding repository root: %w", err) + } + + root, err := os.OpenRoot(repoRoot) + if err != nil { + return nil, fmt.Errorf("opening repository root: %w", err) + } + + return LinkedFilesByPackageFrom(root, fromDir) +} + +// CreateLinksFSFromPath creates a LinksFS for the given directory within the repository. +func CreateLinksFSFromPath(workDir string) (*LinksFS, error) { + repoRoot, err := FindRepositoryRootDirectory() + if err != nil { + return nil, fmt.Errorf("finding repository root: %w", err) + } + + root, err := os.OpenRoot(repoRoot) + if err != nil { + return nil, fmt.Errorf("opening repository root: %w", err) + } + + return NewLinksFS(root, workDir) +} + +var _ fs.FS = (*LinksFS)(nil) + +// LinksFS is a filesystem that handles linked files. +// It wraps another filesystem and checks for linked files with the ".link" extension. +// If a linked file is found, it reads the link file to determine the included file +// and its checksum. If the included file is up to date, it returns the included file. +// Otherwise, it returns an error. +type LinksFS struct { + repoRoot *os.Root // The root of the repository, used to check if paths are within the repository. + workDir string + inner fs.FS +} + +// NewLinksFS creates a new LinksFS. +func NewLinksFS(repoRoot *os.Root, workDir string) (*LinksFS, error) { + // Ensure workDir is absolute for os.DirFS + var absWorkDir string + if filepath.IsAbs(workDir) { + absWorkDir = workDir + } else { + absWorkDir = filepath.Join(repoRoot.Name(), workDir) + } + + // Validate that workDir is within the repository root + inRoot, err := pathIsInRepositoryRoot(repoRoot, absWorkDir) + if err != nil { + return nil, fmt.Errorf("could not validate workDir %s: %w", absWorkDir, err) + } + if !inRoot { + return nil, fmt.Errorf("workDir %s is outside the repository root %s", absWorkDir, repoRoot.Name()) + } + + return &LinksFS{repoRoot: repoRoot, workDir: absWorkDir, inner: os.DirFS(absWorkDir)}, nil +} + +// Open opens a file in the filesystem. 
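+// For a name ending in ".link", the sequence below is: parse the link file,
+// require its recorded checksum to match the target's current contents, and
+// open the target through the repository root so the read cannot escape it.
+// Any other name is served directly from the inner filesystem.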
+func (lfs *LinksFS) Open(name string) (fs.File, error) { + // Ensure name is relative for os.DirFS compatibility + var relativeName string + if filepath.IsAbs(name) { + var err error + relativeName, err = filepath.Rel(lfs.workDir, name) + if err != nil { + return nil, fmt.Errorf("could not make name relative to workDir: %w", err) + } + } else { + relativeName = name + } + + // For non-link files, use the inner filesystem + if filepath.Ext(relativeName) != linkExtension { + return lfs.inner.Open(relativeName) + } + + // For link files, construct the absolute path to the link file + // Since workDir is expected to be absolute, we can directly join + linkFilePath := filepath.Join(lfs.workDir, relativeName) + + l, err := NewLinkedFile(lfs.repoRoot, linkFilePath) + if err != nil { + return nil, err + } + if !l.UpToDate { + return nil, fmt.Errorf("linked file %s is not up to date", relativeName) + } + + // Calculate the included file path relative to the link file's directory + linkDir := filepath.Dir(linkFilePath) + includedPath := filepath.Join(linkDir, l.IncludedFilePath) + + // Convert to relative path from repository root for secure access of target file + relativePath, err := filepath.Rel(lfs.repoRoot.Name(), includedPath) + if err != nil { + return nil, fmt.Errorf("could not get relative path: %w", err) + } + + return lfs.repoRoot.Open(relativePath) +} + +// ReadFile reads a file from the filesystem. +func (lfs *LinksFS) ReadFile(name string) ([]byte, error) { + f, err := lfs.Open(name) + if err != nil { + return nil, err + } + defer f.Close() + return io.ReadAll(f) +} + +// A Link represents a linked file. +// It contains the path to the link file, the checksum of the link file, +// the path to the included file, and the checksum of the included file contents. +// It also contains a boolean indicating whether the link is up to date. +type Link struct { + WorkDir string + + LinkFilePath string + LinkChecksum string + LinkPackageName string + + IncludedFilePath string + IncludedFileContentsChecksum string + IncludedPackageName string + + UpToDate bool +} + +// NewLinkedFile creates a new Link from the given link file path. 
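+//
+// The first line of a link file holds one or two whitespace-separated fields:
+// the relative path to the included file and, optionally, its checksum, e.g.
+// (values illustrative):
+//
+//	../../shared/pipeline.yml 3b6fcf...
+//
+// A missing checksum simply marks the link as outdated until it is updated.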
+func NewLinkedFile(root *os.Root, linkFilePath string) (Link, error) { + var l Link + l.WorkDir = filepath.Dir(linkFilePath) + if linkPackageRoot, _, _ := packages.FindPackageRootFrom(l.WorkDir); linkPackageRoot != "" { + l.LinkPackageName = filepath.Base(linkPackageRoot) + } + + firstLine, err := readFirstLine(linkFilePath) + if err != nil { + return Link{}, err + } + l.LinkFilePath, err = filepath.Rel(l.WorkDir, linkFilePath) + if err != nil { + return Link{}, fmt.Errorf("could not get relative path: %w", err) + } + + fields := strings.Fields(firstLine) + if len(fields) == 0 { + return Link{}, fmt.Errorf("link file %s is empty or has no valid content", linkFilePath) + } + if len(fields) > 2 { + return Link{}, fmt.Errorf("link file %s has invalid format: expected 1 or 2 fields, got %d", linkFilePath, len(fields)) + } + l.IncludedFilePath = fields[0] + if len(fields) == 2 { + l.LinkChecksum = fields[1] + } + + pathName := filepath.Clean(filepath.Join(l.WorkDir, filepath.FromSlash(l.IncludedFilePath))) + + inRoot, err := pathIsInRepositoryRoot(root, pathName) + if err != nil { + return Link{}, fmt.Errorf("could not check if path %s is in repository root: %w", pathName, err) + } + if !inRoot { + return Link{}, fmt.Errorf("path %s escapes the repository root", pathName) + } + + // Store the original absolute path for package root detection + originalAbsPath := pathName + + // Convert to relative path for secure access of target file + if filepath.IsAbs(pathName) { + pathName, err = filepath.Rel(root.Name(), pathName) + if err != nil { + return Link{}, fmt.Errorf("could not get relative path: %w", err) + } + } + + if _, err := root.Stat(pathName); err != nil { + return Link{}, err + } + + cs, err := getLinkedFileChecksumFromRoot(root, pathName) + if err != nil { + return Link{}, fmt.Errorf("could not collect file %s: %w", l.IncludedFilePath, err) + } + if l.LinkChecksum == cs { + l.UpToDate = true + } + l.IncludedFileContentsChecksum = cs + + if includedPackageRoot, _, _ := packages.FindPackageRootFrom(filepath.Dir(originalAbsPath)); includedPackageRoot != "" { + l.IncludedPackageName = filepath.Base(includedPackageRoot) + } + + return l, nil +} + +// UpdateChecksum function updates the checksum of the linked file. +// It returns true if the checksum was updated, false if it was already up-to-date. +func (l *Link) UpdateChecksum() (bool, error) { + if l.UpToDate { + return false, nil + } + if l.IncludedFilePath == "" { + return false, fmt.Errorf("included file path is empty for link file %s", l.LinkFilePath) + } + if l.IncludedFileContentsChecksum == "" { + return false, fmt.Errorf("checksum is empty for included file %s", l.IncludedFilePath) + } + newContent := fmt.Sprintf("%v %v", filepath.ToSlash(l.IncludedFilePath), l.IncludedFileContentsChecksum) + if err := writeFile(filepath.Join(l.WorkDir, l.LinkFilePath), []byte(newContent)); err != nil { + return false, fmt.Errorf("could not update checksum for link file %s: %w", l.LinkFilePath, err) + } + l.LinkChecksum = l.IncludedFileContentsChecksum + l.UpToDate = true + return true, nil +} + +// TargetFilePath returns the path where the linked file should be written. +// If workDir is provided, it uses that as the base directory, otherwise uses the link's WorkDir. 
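+// For example, a link file "default.yml.link" whose WorkDir is
+// "data_stream/foo/elasticsearch/ingest_pipeline" resolves to
+// "data_stream/foo/elasticsearch/ingest_pipeline/default.yml", or to
+// "<toDir>/default.yml" when an override directory is passed in.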
+func (l *Link) TargetFilePath(workDir ...string) string { + targetFilePath := filepath.FromSlash(strings.TrimSuffix(l.LinkFilePath, linkExtension)) + wd := l.WorkDir + if len(workDir) > 0 { + wd = workDir[0] + } + return filepath.Join(wd, targetFilePath) +} + +// IncludeLinkedFiles function includes linked files from the source +// directory to the target directory. +// It returns a slice of Link structs representing the included files. +// It also updates the checksum of the linked files. +// Both directories must be relative to the root. +func IncludeLinkedFiles(root *os.Root, fromDir, toDir string) ([]Link, error) { + links, err := ListLinkedFiles(root, fromDir) + if err != nil { + return nil, fmt.Errorf("including linked files failed: %w", err) + } + for _, l := range links { + if _, err := l.UpdateChecksum(); err != nil { + return nil, fmt.Errorf("could not update checksum for file %s: %w", l.LinkFilePath, err) + } + targetFilePath := l.TargetFilePath(toDir) + if err := copyFromRoot( + root, + filepath.Join(l.WorkDir, filepath.FromSlash(l.IncludedFilePath)), + targetFilePath, + ); err != nil { + return nil, fmt.Errorf("could not write file %s: %w", targetFilePath, err) + } + } + + return links, nil +} + +// ListLinkedFiles function returns a slice of Link structs representing linked files. +func ListLinkedFiles(root *os.Root, fromDir string) ([]Link, error) { + var linkFiles []string + if err := filepath.Walk( + filepath.FromSlash(fromDir), + func(path string, info os.FileInfo, err error) error { + if err != nil { + return err + } + if !info.IsDir() && strings.HasSuffix(info.Name(), linkExtension) { + linkFiles = append(linkFiles, path) + } + return nil + }); err != nil { + return nil, err + } + + links := make([]Link, len(linkFiles)) + + for i, f := range linkFiles { + l, err := NewLinkedFile(root, filepath.FromSlash(f)) + if err != nil { + return nil, fmt.Errorf("could not initialize linked file %s: %w", f, err) + } + links[i] = l + } + + return links, nil +} + +// createDirInRoot function creates a directory and all its parents within the root. +func createDirInRoot(root *os.Root, dir string) error { + dir = filepath.Clean(dir) + if dir == "." || dir == "/" { + return nil + } + + // Check if the directory already exists + if _, err := root.Stat(dir); err == nil { + return nil + } + + // Create parent directory first + parent := filepath.Dir(dir) + if parent != dir { // Avoid infinite recursion + if err := createDirInRoot(root, parent); err != nil { + return err + } + } + + // Create the directory + return root.Mkdir(dir, 0700) +} + +// copyFromRoot function copies a file from to to inside the root. 
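+// That is, it copies the file at `from` to the path `to`, both interpreted
+// inside the repository root: absolute arguments are first rebased to be
+// root-relative, and missing parent directories of `to` are created within
+// the root before the destination file is written.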
+func copyFromRoot(root *os.Root, from, to string) error { + var err error + if filepath.IsAbs(from) { + from, err = filepath.Rel(root.Name(), filepath.FromSlash(from)) + if err != nil { + return fmt.Errorf("could not get relative path: %w", err) + } + } + source, err := root.Open(from) + if err != nil { + return err + } + defer source.Close() + + if filepath.IsAbs(to) { + to, err = filepath.Rel(root.Name(), filepath.FromSlash(to)) + if err != nil { + return fmt.Errorf("could not get relative path: %w", err) + } + } + dir := filepath.Dir(to) + if _, err := root.Stat(dir); os.IsNotExist(err) { + if err := createDirInRoot(root, dir); err != nil { + return err + } + } + destination, err := root.Create(to) + if err != nil { + return err + } + defer destination.Close() + + _, err = io.Copy(destination, source) + return err +} + +// writeFile function writes a byte slice to a file inside the root. +func writeFile(to string, b []byte) error { + to = filepath.FromSlash(to) + if _, err := os.Stat(filepath.Dir(to)); os.IsNotExist(err) { + if err := os.MkdirAll(filepath.Dir(to), 0700); err != nil { + return err + } + } + return os.WriteFile(to, b, 0644) +} + +// AreLinkedFilesUpToDate function checks if all the linked files are up-to-date. +func AreLinkedFilesUpToDate(root *os.Root, fromDir string) ([]Link, error) { + links, err := ListLinkedFiles(root, fromDir) + if err != nil { + return nil, fmt.Errorf("checking linked files failed: %w", err) + } + + var outdated []Link + for _, l := range links { + logger.Debugf("Check if %s is up-to-date", l.LinkFilePath) + if !l.UpToDate { + outdated = append(outdated, l) + } + } + + return outdated, nil +} + +// UpdateLinkedFilesChecksums function updates the checksums of the linked files. +// It returns a slice of updated links. +// If no links were updated, it returns an empty slice. +func UpdateLinkedFilesChecksums(root *os.Root, fromDir string) ([]Link, error) { + links, err := ListLinkedFiles(root, fromDir) + if err != nil { + return nil, fmt.Errorf("updating linked files checksums failed: %w", err) + } + + var updatedLinks []Link + for _, l := range links { + updated, err := l.UpdateChecksum() + if err != nil { + return nil, fmt.Errorf("updating linked files checksums failed: %w", err) + } + if updated { + updatedLinks = append(updatedLinks, l) + } + } + + return updatedLinks, nil +} + +// LinkedFilesByPackageFrom function returns a slice of PackageLinks containing linked files grouped by package. +// Each PackageLinks contains the package name and a slice of linked file paths. 
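+//
+// A hypothetical result for a directory whose files are linked by two other
+// packages (paths abbreviated):
+//
+//	[]PackageLinks{
+//		{PackageName: "apache", Links: []string{"<repo>/packages/apache/.../default.yml.link"}},
+//		{PackageName: "nginx", Links: []string{"<repo>/packages/nginx/.../default.yml.link"}},
+//	}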
+func LinkedFilesByPackageFrom(root *os.Root, fromDir string) ([]PackageLinks, error) { + // we list linked files from all the root directory + // to check which ones are linked to the 'fromDir' package + links, err := ListLinkedFiles(root, root.Name()) + if err != nil { + return nil, fmt.Errorf("listing linked files failed: %w", err) + } + + var packageName string + if packageRoot, _, _ := packages.FindPackageRootFrom(fromDir); packageRoot != "" { + packageName = filepath.Base(packageRoot) + } + byPackageMap := map[string][]string{} + for _, l := range links { + if l.LinkPackageName == l.IncludedPackageName || + packageName != l.IncludedPackageName { + continue + } + byPackageMap[l.LinkPackageName] = append(byPackageMap[l.LinkPackageName], filepath.Join(l.WorkDir, l.LinkFilePath)) + } + + var packages []string + for p := range byPackageMap { + packages = append(packages, p) + } + slices.Sort(packages) + + var result []PackageLinks + for _, p := range packages { + result = append(result, PackageLinks{ + PackageName: p, + Links: byPackageMap[p], + }) + } + return result, nil +} + +// getLinkedFileChecksumFromRoot calculates the SHA256 checksum of a file using root-relative access. +func getLinkedFileChecksumFromRoot(root *os.Root, relativePath string) (string, error) { + file, err := root.Open(filepath.FromSlash(relativePath)) + if err != nil { + return "", err + } + defer file.Close() + + b, err := io.ReadAll(file) + if err != nil { + return "", err + } + cs, err := checksum(b) + if err != nil { + return "", err + } + return cs, nil +} + +// readFirstLine reads and returns the first line of a file. +func readFirstLine(filePath string) (string, error) { + file, err := os.Open(filepath.FromSlash(filePath)) + if err != nil { + return "", err + } + defer file.Close() + + scanner := bufio.NewScanner(file) + if scanner.Scan() { + return scanner.Text(), nil + } + + if err := scanner.Err(); err != nil { + return "", err + } + + return "", fmt.Errorf("file is empty or first line is missing") +} + +// checksum calculates the SHA256 checksum of a byte slice. +func checksum(b []byte) (string, error) { + hash := sha256.Sum256(b) + return hex.EncodeToString(hash[:]), nil +} + +// pathIsInRepositoryRoot checks if a path is within the repository root and doesn't escape it. +func pathIsInRepositoryRoot(root *os.Root, path string) (bool, error) { + path = filepath.FromSlash(path) + var err error + if filepath.IsAbs(path) { + path, err = filepath.Rel(root.Name(), path) + if err != nil { + return false, fmt.Errorf("could not get relative path: %w", err) + } + } + + // Clean the path to resolve any ".." components + cleanPath := filepath.Clean(path) + + // Check if the cleaned path tries to escape the root + if strings.HasPrefix(cleanPath, "..") { + return false, nil + } + + if _, err := root.Stat(cleanPath); err != nil { + return false, nil + } + return true, nil +} diff --git a/internal/files/linkedfiles_test.go b/internal/files/linkedfiles_test.go new file mode 100644 index 0000000000..03d327e5c6 --- /dev/null +++ b/internal/files/linkedfiles_test.go @@ -0,0 +1,861 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. 
+ +package files + +import ( + "bytes" + "crypto/sha256" + "encoding/hex" + "fmt" + "io" + "os" + "path/filepath" + "strings" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +// TestLinkUpdateChecksum tests the UpdateChecksum method of the Link struct. +// This test verifies that: +// 1. An outdated link file (without checksum) can be updated correctly +// 2. An up-to-date link file (with correct checksum) doesn't need updating +// 3. The checksum calculation and file writing works properly +func TestLinkUpdateChecksum(t *testing.T) { + // Create a temporary directory and copy test data to avoid modifying originals + tempDir := t.TempDir() + wd, err := os.Getwd() + require.NoError(t, err) + testDataSrc := filepath.Join(wd, "testdata") + require.NoError(t, copyDir(testDataSrc, filepath.Join(tempDir, "testdata"))) + + // Set up paths within the temporary directory + basePath := filepath.Join(tempDir, "testdata/links") + + // Create an os.Root for secure file operations within tempDir + root, err := os.OpenRoot(tempDir) + require.NoError(t, err) + t.Cleanup(func() { _ = root.Close() }) + + // Test Case 1: Outdated link file (missing checksum) + // Load a link file that points to an included file but has no checksum + outdatedFile, err := NewLinkedFile(root, filepath.Join(basePath, "outdated.yml.link")) + require.NoError(t, err) + + // Verify initial state: file should not be up-to-date and have no checksum + assert.False(t, outdatedFile.UpToDate) + assert.Empty(t, outdatedFile.LinkChecksum) + + // Update the checksum and verify it was actually updated + updated, err := outdatedFile.UpdateChecksum() + assert.NoError(t, err) + assert.True(t, updated) // Should return true indicating an update occurred + + // Verify the checksum was calculated correctly (this is the SHA256 of the included file) + assert.Equal(t, "d709feed45b708c9548a18ca48f3ad4f41be8d3f691f83d7417ca902a20e6c1e", outdatedFile.LinkChecksum) + assert.True(t, outdatedFile.UpToDate) + + // Test Case 2: Up-to-date link file (already has the correct checksum) + // Load a link file that already has the correct checksum + uptodateFile, err := NewLinkedFile(root, filepath.Join(basePath, "uptodate.yml.link")) + assert.NoError(t, err) + + // Verify it's already up-to-date + assert.True(t, uptodateFile.UpToDate) + + // Attempt to update - should return false since no update is needed + updated, err = uptodateFile.UpdateChecksum() + assert.NoError(t, err) + assert.False(t, updated) // Should return false indicating no update was needed +} + +// TestListLinkedFiles tests the ListLinkedFiles function that discovers and parses all link files in a directory. +// This test verifies that: +// 1. All .link files in the test directory are discovered (expects 2 files) +// 2. Each link file is correctly parsed with proper paths, checksums, and status +// 3. Outdated link files (without checksums) are identified correctly +// 4. 
Up-to-date link files (with matching checksums) are identified correctly +func TestListLinkedFiles(t *testing.T) { + // Get current working directory to locate test data + wd, err := os.Getwd() + assert.NoError(t, err) + basePath := filepath.Join(wd, filepath.FromSlash("testdata/links")) + + // Find the repository root to create a secure os.Root context + root, err := FindRepositoryRoot() + require.NoError(t, err) + t.Cleanup(func() { _ = root.Close() }) + + // List all linked files in the test directory + linkedFiles, err := ListLinkedFiles(root, basePath) + require.NoError(t, err) + require.NotEmpty(t, linkedFiles) + require.Len(t, linkedFiles, 2) // Expect exactly 2 link files in testdata + + // Verify first file (outdated.yml.link) - should be outdated (no checksum) + assert.Equal(t, "outdated.yml.link", linkedFiles[0].LinkFilePath) + assert.Empty(t, linkedFiles[0].LinkChecksum) // No checksum = outdated + assert.Equal(t, "outdated.yml", linkedFiles[0].TargetFilePath("")) + assert.Equal(t, "./included.yml", linkedFiles[0].IncludedFilePath) + assert.Equal(t, "d709feed45b708c9548a18ca48f3ad4f41be8d3f691f83d7417ca902a20e6c1e", linkedFiles[0].IncludedFileContentsChecksum) + assert.False(t, linkedFiles[0].UpToDate) + + // Verify second file (uptodate.yml.link) - should be up-to-date (has matching checksum) + assert.Equal(t, "uptodate.yml.link", linkedFiles[1].LinkFilePath) + assert.Equal(t, "d709feed45b708c9548a18ca48f3ad4f41be8d3f691f83d7417ca902a20e6c1e", linkedFiles[1].LinkChecksum) + assert.Equal(t, "uptodate.yml", linkedFiles[1].TargetFilePath("")) + assert.Equal(t, "./included.yml", linkedFiles[1].IncludedFilePath) + assert.Equal(t, "d709feed45b708c9548a18ca48f3ad4f41be8d3f691f83d7417ca902a20e6c1e", linkedFiles[1].IncludedFileContentsChecksum) + assert.True(t, linkedFiles[1].UpToDate) +} + +// TestCopyFile tests the copyFromRoot helper function that securely copies files within the repository root. +// This test verifies that: +// 1. Files can be copied correctly within the repository boundaries using os.Root +// 2. The copied file has identical contents to the original +// 3. The copy operation works with the security abstraction (os.Root) +func TestCopyFile(t *testing.T) { + fileA := "fileA.txt" + fileB := "fileB.txt" + tempDir := t.TempDir() + + // Setup cleanup to remove test files after the test + t.Cleanup(func() { _ = os.Remove(filepath.Join(tempDir, fileA)) }) + t.Cleanup(func() { _ = os.Remove(filepath.Join(tempDir, fileB)) }) + + // Create a source file with test content + createDummyFile(t, filepath.Join(tempDir, fileA), "This is the content of the file.") + + // Create an os.Root for secure file operations within tempDir + root, err := os.OpenRoot(tempDir) + require.NoError(t, err) + t.Cleanup(func() { _ = root.Close() }) + + // Copy the file using the secure copyFromRoot function + assert.NoError(t, copyFromRoot(root, fileA, fileB)) + + // Verify that the copied file has identical content to the original + equal, err := filesEqual(filepath.Join(tempDir, fileA), filepath.Join(tempDir, fileB)) + require.NoError(t, err) + assert.True(t, equal, "files should be equal after copying") +} + +// TestAreLinkedFilesUpToDate tests the AreLinkedFilesUpToDate function that identifies outdated link files. +// This test verifies that: +// 1. The function correctly identifies which link files are outdated (missing or incorrect checksums) +// 2. Only outdated files are returned (up-to-date files are left unchanged) +// 3. 
The returned outdated file has correct metadata and status information +func TestAreLinkedFilesUpToDate(t *testing.T) { + // Get current working directory to locate test data + wd, err := os.Getwd() + assert.NoError(t, err) + basePath := filepath.Join(wd, filepath.FromSlash("testdata/links")) + + // Find the repository root to create a secure os.Root context + root, err := FindRepositoryRoot() + require.NoError(t, err) + t.Cleanup(func() { _ = root.Close() }) + + // Get all outdated linked files from the test directory + linkedFiles, err := AreLinkedFilesUpToDate(root, basePath) + assert.NoError(t, err) + assert.NotEmpty(t, linkedFiles) + assert.Len(t, linkedFiles, 1) // Expect exactly 1 outdated file (outdated.yml.link) + + // Verify the outdated file details + assert.Equal(t, "outdated.yml.link", linkedFiles[0].LinkFilePath) + assert.Empty(t, linkedFiles[0].LinkChecksum) // No checksum indicates outdated + assert.Equal(t, "outdated.yml", linkedFiles[0].TargetFilePath("")) + assert.Equal(t, "./included.yml", linkedFiles[0].IncludedFilePath) + assert.Equal(t, "d709feed45b708c9548a18ca48f3ad4f41be8d3f691f83d7417ca902a20e6c1e", linkedFiles[0].IncludedFileContentsChecksum) + assert.False(t, linkedFiles[0].UpToDate) +} + +// TestUpdateLinkedFilesChecksums tests the UpdateLinkedFilesChecksums function that updates outdated link files. +// This test verifies that: +// 1. The function correctly identifies and updates outdated link files with proper checksums +// 2. Only outdated files are updated (up-to-date files are left unchanged) +// 3. After updating, the previously outdated file becomes up-to-date with correct checksum +// 4. The function returns details about which files were updated +func TestUpdateLinkedFilesChecksums(t *testing.T) { + // Create a temporary directory and copy test data to avoid modifying originals + tempDir := t.TempDir() + wd, err := os.Getwd() + require.NoError(t, err) + testDataSrc := filepath.Join(wd, "testdata") + require.NoError(t, copyDir(testDataSrc, filepath.Join(tempDir, "testdata"))) + + // Set up paths within the temporary directory + basePath := filepath.Join(tempDir, "testdata/links") + + // Create an os.Root for secure file operations within tempDir + root, err := os.OpenRoot(tempDir) + require.NoError(t, err) + t.Cleanup(func() { _ = root.Close() }) + + // Update checksums for all outdated linked files + updated, err := UpdateLinkedFilesChecksums(root, basePath) + + // Verify the update operation succeeded + assert.NoError(t, err) + assert.NotEmpty(t, updated) + assert.Len(t, updated, 1) // Expect exactly 1 file was updated (outdated.yml.link) + + // Verify the updated file is now up-to-date with correct checksum + assert.True(t, updated[0].UpToDate) + assert.Equal(t, "d709feed45b708c9548a18ca48f3ad4f41be8d3f691f83d7417ca902a20e6c1e", updated[0].LinkChecksum) +} + +// TestLinkedFilesByPackageFrom tests the LinkedFilesByPackageFrom function that organizes linked files by package. +// This test verifies that: +// 1. The function correctly discovers and groups linked files by their source packages +// 2. The returned structure properly maps package names to their linked files +// 3. File paths are correctly constructed and resolved relative to the package directories +// 4. 
The specific test package "testpackage" is found with its expected linked file +func TestLinkedFilesByPackageFrom(t *testing.T) { + // Get current working directory to locate test data + wd, err := os.Getwd() + assert.NoError(t, err) + basePath := filepath.Join(wd, filepath.FromSlash("testdata/links")) + + // Find the repository root to create a secure os.Root context + root, err := FindRepositoryRoot() + require.NoError(t, err) + t.Cleanup(func() { _ = root.Close() }) + + // Get linked files organized by package + packageLinks, err := LinkedFilesByPackageFrom(root, basePath) + assert.NoError(t, err) + assert.NotEmpty(t, packageLinks) + assert.Len(t, packageLinks, 1) // Expect 1 package group + + // Verify the package structure + pkg := packageLinks[0] + assert.Equal(t, "testpackage", pkg.PackageName) + assert.NotEmpty(t, pkg.Links) + assert.Len(t, pkg.Links, 1) // Expect 1 linked file in testpackage + + // Verify the linked file path ends with the expected relative path + match := strings.HasSuffix( + filepath.ToSlash(pkg.Links[0]), + "/testdata/testpackage/included.yml.link", + ) + assert.True(t, match) +} + +// TestIncludeLinkedFiles tests the IncludeLinkedFiles function that copies linked files to a destination directory. +// This test verifies that: +// 1. Linked files are correctly discovered from a source package directory +// 2. The included files are copied to the specified destination directory +// 3. The copied files have identical content to their original included files +// 4. The target file paths are correctly constructed in the destination +// 5. The function works with a temporary directory setup to avoid affecting real files +func TestIncludeLinkedFiles(t *testing.T) { + // Get current working directory to locate test data + wd, err := os.Getwd() + assert.NoError(t, err) + testPkg := filepath.Join(wd, filepath.FromSlash("testdata")) + + // Create a temporary directory and copy test data to avoid modifying originals + tempDir := t.TempDir() + require.NoError(t, copyDir(testPkg, filepath.Join(tempDir, "testdata"))) + + // Set up source and destination directories + fromDir := filepath.Join(tempDir, "testdata/testpackage") + toDir := filepath.Join(tempDir, "dest") + + // Create an os.Root for secure file operations within tempDir + root, err := os.OpenRoot(tempDir) + require.NoError(t, err) + t.Cleanup(func() { _ = root.Close() }) + + // Include (copy) all linked files from source to destination + linkedFiles, err := IncludeLinkedFiles(root, fromDir, toDir) + assert.NoError(t, err) + require.Equal(t, 1, len(linkedFiles)) // Expect 1 linked file to be processed + + // Verify the target file was created in the destination directory + assert.FileExists(t, linkedFiles[0].TargetFilePath(toDir)) + + // Verify the copied file has identical content to the original included file + equal, err := filesEqual( + filepath.Join(linkedFiles[0].WorkDir, filepath.FromSlash(linkedFiles[0].IncludedFilePath)), + linkedFiles[0].TargetFilePath(toDir), + ) + assert.NoError(t, err) + assert.True(t, equal, "files should be equal after copying") +} + +// createDummyFile is a test helper that creates a file with specified content. +// This helper ensures the file is created successfully and writes the provided content to it. 
+func createDummyFile(t *testing.T, filename, content string) { + file, err := os.Create(filename) + assert.NoError(t, err) + defer file.Close() + _, err = file.WriteString(content) + assert.NoError(t, err) +} + +// filesEqual is a test helper that compares the contents of two files for equality. +// Returns true if both files exist and have identical content, false otherwise. +// Any error reading the files is returned to the caller. +func filesEqual(file1, file2 string) (bool, error) { + f1, err := os.ReadFile(file1) + if err != nil { + return false, err + } + + f2, err := os.ReadFile(file2) + if err != nil { + return false, err + } + + return bytes.Equal(f1, f2), nil +} + +// copyDir recursively copies a directory from src to dst. +// This helper function is used in tests to create isolated copies of test data +// to avoid modifying the original test files during test execution. +func copyDir(src, dst string) error { + // Get properties of source. + srcInfo, err := os.Stat(src) + if err != nil { + return err + } + + // Create the destination directory. + err = os.MkdirAll(dst, srcInfo.Mode()) + if err != nil { + return err + } + + // Read the source directory. + dir, err := os.ReadDir(src) + if err != nil { + return err + } + + // Copy all entries. + for _, entry := range dir { + srcPath := filepath.Join(src, entry.Name()) + dstPath := filepath.Join(dst, entry.Name()) + + if entry.IsDir() { + // If it's a directory, recurse. + err = copyDir(srcPath, dstPath) + if err != nil { + return err + } + } else { + // It's a file, so copy it. + err = copyFile(srcPath, dstPath) + if err != nil { + return err + } + } + } + return nil +} + +// copyFile copies a single file from src to dst. +// This helper function is used by copyDir to copy individual files while preserving +// their content and permissions. It efficiently copies large files using io.Copy. +func copyFile(src, dst string) error { + // Open the source file for reading. + in, err := os.Open(src) + if err != nil { + return err + } + defer in.Close() + + // Create the destination file, overwriting it if it already exists. + out, err := os.Create(dst) + if err != nil { + return err + } + defer out.Close() + + // Use io.Copy to efficiently copy the contents from source to destination. + _, err = io.Copy(out, in) + if err != nil { + return err + } + + // Get the file information (metadata) from the source file. + info, err := os.Stat(src) + if err != nil { + return err + } + + // Set the permissions (mode) of the destination file to match the source file. 
+ return os.Chmod(dst, info.Mode()) +} + +func TestNewLinkedFileRejectsPathTraversal(t *testing.T) { + tempDir := t.TempDir() + + // Create a repository root + repoDir := filepath.Join(tempDir, "repo") + err := os.MkdirAll(repoDir, 0755) + require.NoError(t, err) + + // Create a file outside the repository that we'll try to link to + outsideDir := filepath.Join(tempDir, "outside") + err = os.MkdirAll(outsideDir, 0755) + require.NoError(t, err) + outsideFile := filepath.Join(outsideDir, "secret.txt") + err = os.WriteFile(outsideFile, []byte("secret content"), 0644) + require.NoError(t, err) + + // Create a subdirectory in the repo for our link file + linkDir := filepath.Join(repoDir, "links") + err = os.MkdirAll(linkDir, 0755) + require.NoError(t, err) + + // Create a valid file within the repository for testing + validFile := filepath.Join(linkDir, "valid.txt") + err = os.WriteFile(validFile, []byte("valid content"), 0644) + require.NoError(t, err) + + // Test cases with different path traversal attempts + testCases := []struct { + name string + linkContent string + expectError bool + errorMessage string + }{ + { + name: "simple parent directory escape", + linkContent: "../../../outside/secret.txt", + expectError: true, + errorMessage: "escapes the repository root", + }, + { + name: "absolute path escape", + linkContent: outsideFile, + expectError: true, + errorMessage: "escapes the repository root", + }, + { + name: "complex path traversal", + linkContent: "../../repo/../outside/secret.txt", + expectError: true, + errorMessage: "escapes the repository root", + }, + { + name: "valid relative path", + linkContent: "./valid.txt", + expectError: false, + errorMessage: "", + }, + } + + root, err := os.OpenRoot(repoDir) + require.NoError(t, err) + t.Cleanup(func() { _ = root.Close() }) + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + // Create the malicious link file + linkFile := filepath.Join(linkDir, "malicious.link") + err := os.WriteFile(linkFile, []byte(tc.linkContent), 0644) + require.NoError(t, err) + + // Attempt to create a NewLinkedFile + _, err = NewLinkedFile(root, linkFile) + + if tc.expectError { + assert.Error(t, err) + assert.Contains(t, err.Error(), tc.errorMessage) + } else { + assert.NoError(t, err) + } + + // Clean up the link file for next iteration + os.Remove(linkFile) + }) + } +} + +func TestLinksFSSecurityIsolation(t *testing.T) { + tempDir := t.TempDir() + + // Create a repository root + repoDir := filepath.Join(tempDir, "repo") + err := os.MkdirAll(repoDir, 0755) + require.NoError(t, err) + + // Create a working directory inside repo + workDir := filepath.Join(repoDir, "work") + err = os.MkdirAll(workDir, 0755) + require.NoError(t, err) + + // Create a valid included file in the repo + includedFile := filepath.Join(workDir, "included.txt") + err = os.WriteFile(includedFile, []byte("included content"), 0644) + require.NoError(t, err) + + // Create a link file that points to the included file with proper checksum + linkFile := filepath.Join(workDir, "test.txt.link") + // Calculate the checksum of the included file + hash := sha256.Sum256([]byte("included content")) + checksum := hex.EncodeToString(hash[:]) + linkContent := fmt.Sprintf("./included.txt %s", checksum) + err = os.WriteFile(linkFile, []byte(linkContent), 0644) + require.NoError(t, err) + + // Create LinksFS + root, err := os.OpenRoot(repoDir) + require.NoError(t, err) + + // Get the relative path from repo root to work directory + relWorkDir, err := filepath.Rel(repoDir, workDir) + 
require.NoError(t, err) + + lfs, err := NewLinksFS(root, relWorkDir) + require.NoError(t, err) + + // Test opening the linked file - this should work and use the repository root + file, err := lfs.Open("test.txt.link") + require.NoError(t, err) + + // Use t.Cleanup to ensure file is closed before test cleanup on Windows + t.Cleanup(func() { + if root != nil { + _ = root.Close() + } + if file != nil { + file.Close() + } + }) + + // Read the content to ensure it's correct + content, err := io.ReadAll(file) + require.NoError(t, err) + assert.Equal(t, "included content", string(content)) +} + +// TestLinksFS_Open tests the LinksFS Open method with various path scenarios. +// This test ensures proper handling of absolute/relative paths and both link and non-link files. +func TestLinksFS_Open(t *testing.T) { + tempDir := t.TempDir() + + // Create repository structure + repoDir := filepath.Join(tempDir, "repo") + err := os.MkdirAll(repoDir, 0755) + require.NoError(t, err) + + workDir := filepath.Join(repoDir, "work") + err = os.MkdirAll(workDir, 0755) + require.NoError(t, err) + + // Create test files + regularFile := filepath.Join(workDir, "regular.txt") + err = os.WriteFile(regularFile, []byte("regular content"), 0644) + require.NoError(t, err) + + includedFile := filepath.Join(workDir, "included.txt") + includedContent := "included content" + err = os.WriteFile(includedFile, []byte(includedContent), 0644) + require.NoError(t, err) + + // Create link file with correct checksum + linkFile := filepath.Join(workDir, "linked.txt.link") + hash := sha256.Sum256([]byte(includedContent)) + checksum := hex.EncodeToString(hash[:]) + linkContent := fmt.Sprintf("./included.txt %s", checksum) + err = os.WriteFile(linkFile, []byte(linkContent), 0644) + require.NoError(t, err) + + // Create outdated link file (no checksum) + outdatedLinkFile := filepath.Join(workDir, "outdated.txt.link") + err = os.WriteFile(outdatedLinkFile, []byte("./included.txt"), 0644) + require.NoError(t, err) + + // Setup LinksFS with absolute workDir + root, err := os.OpenRoot(repoDir) + require.NoError(t, err) + t.Cleanup(func() { _ = root.Close() }) + + lfs, err := NewLinksFS(root, workDir) + require.NoError(t, err) + + tests := []struct { + name string + fileName string + expectError bool + errorMsg string + expectFile bool + }{ + { + name: "open regular file with relative path", + fileName: "regular.txt", + expectFile: true, + }, + { + name: "open regular file with absolute path", + fileName: filepath.Join(workDir, "regular.txt"), + expectFile: true, + }, + { + name: "open up-to-date link file", + fileName: "linked.txt.link", + expectFile: true, + }, + { + name: "open up-to-date link file with absolute path", + fileName: filepath.Join(workDir, "linked.txt.link"), + expectFile: true, + }, + { + name: "open outdated link file should fail", + fileName: "outdated.txt.link", + expectError: true, + errorMsg: "not up to date", + }, + { + name: "open non-existent file should fail", + fileName: "nonexistent.txt", + expectError: true, + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + file, err := lfs.Open(tc.fileName) + + if tc.expectError { + assert.Error(t, err) + if tc.errorMsg != "" { + assert.Contains(t, err.Error(), tc.errorMsg) + } + assert.Nil(t, file) + } else { + assert.NoError(t, err) + assert.NotNil(t, file) + + if file != nil { + // Verify we can read from the file + content, err := io.ReadAll(file) + assert.NoError(t, err) + + // For link files, content should be from the included file + if 
strings.HasSuffix(tc.fileName, ".link") { + assert.Equal(t, includedContent, string(content)) + } else { + assert.Equal(t, "regular content", string(content)) + } + + file.Close() + } + } + }) + } +} + +// TestLinksFS_RelativeWorkDir tests LinksFS with relative workDir paths. +func TestLinksFS_RelativeWorkDir(t *testing.T) { + tempDir := t.TempDir() + + // Create repository structure + repoDir := filepath.Join(tempDir, "repo") + err := os.MkdirAll(repoDir, 0755) + require.NoError(t, err) + + workDir := filepath.Join(repoDir, "work") + err = os.MkdirAll(workDir, 0755) + require.NoError(t, err) + + // Create test files + regularFile := filepath.Join(workDir, "regular.txt") + regularContent := "regular file content" + err = os.WriteFile(regularFile, []byte(regularContent), 0644) + require.NoError(t, err) + + includedFile := filepath.Join(workDir, "included.txt") + includedContent := "included file content" + err = os.WriteFile(includedFile, []byte(includedContent), 0644) + require.NoError(t, err) + + // Create link file with correct checksum + linkFile := filepath.Join(workDir, "linked.txt.link") + hash := sha256.Sum256([]byte(includedContent)) + checksum := hex.EncodeToString(hash[:]) + linkContent := fmt.Sprintf("./included.txt %s", checksum) + err = os.WriteFile(linkFile, []byte(linkContent), 0644) + require.NoError(t, err) + + // Setup LinksFS with relative workDir + root, err := os.OpenRoot(repoDir) + require.NoError(t, err) + t.Cleanup(func() { _ = root.Close() }) + + // Use relative path "work" instead of absolute path + lfs, err := NewLinksFS(root, "work") + require.NoError(t, err) + + tests := []struct { + name string + fileName string + expectedContent string + expectError bool + }{ + { + name: "read regular file", + fileName: "regular.txt", + expectedContent: regularContent, + }, + { + name: "read linked file returns included content", + fileName: "linked.txt.link", + expectedContent: includedContent, + }, + { + name: "read non-existent file should fail", + fileName: "nonexistent.txt", + expectError: true, + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + content, err := lfs.ReadFile(tc.fileName) + + if tc.expectError { + assert.Error(t, err) + assert.Nil(t, content) + } else { + assert.NoError(t, err) + assert.Equal(t, tc.expectedContent, string(content)) + } + }) + } +} + +// TestLinksFS_ErrorConditions tests various error conditions in LinksFS. 
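+// It covers two failure modes: a link whose target does not exist (reported
+// as a repository-root escape, because the missing path fails the root.Stat
+// check inside pathIsInRepositoryRoot) and an empty link file (invalid format).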
diff --git a/internal/files/repository.go b/internal/files/repository.go
index 9519e08757..d7ae0bf68d 100644
--- a/internal/files/repository.go
+++ b/internal/files/repository.go
@@ -13,6 +13,21 @@ import (
 	"gopkg.in/yaml.v3"
 )
 
+func FindRepositoryRoot() (*os.Root, error) {
+	rootPath, err := FindRepositoryRootDirectory()
+	if err != nil {
+		return nil, fmt.Errorf("root not found: %w", err)
+	}
+
+	// scope any possible operation to the repository folder
+	dirRoot, err := os.OpenRoot(rootPath)
+	if err != nil {
+		return nil, fmt.Errorf("could not open root: %w", err)
+	}
+
+	return dirRoot, nil
+}
+
 func FindRepositoryRootDirectory() (string, error) {
 	workDir, err := os.Getwd()
 	if err != nil {
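
`FindRepositoryRoot` wraps the existing directory lookup in an `os.Root`, so every later file operation is confined to the repository. Below is a sketch of how it is meant to compose with the `LinksFS` API exercised by the tests above (`NewLinksFS` accepting an `*os.Root` plus an absolute or repo-relative work dir, and `ReadFile` resolving `.link` targets); the package path in the example is hypothetical, and `internal/...` packages are importable only from within this module:

```go
package main

import (
	"fmt"
	"log"

	"github.com/elastic/elastic-package/internal/files"
)

func main() {
	// Open an os.Root scoped to the repository, as FindRepositoryRoot does above.
	root, err := files.FindRepositoryRoot()
	if err != nil {
		log.Fatal(err)
	}
	defer root.Close()

	// Hypothetical package path; LinksFS accepts absolute or repo-relative dirs.
	lfs, err := files.NewLinksFS(root, "packages/example/data_stream/first")
	if err != nil {
		log.Fatal(err)
	}

	// Reading a .link file yields the linked target's content,
	// provided the recorded checksum is up to date.
	content, err := lfs.ReadFile("agent/stream/stream.yml.hbs.link")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Print(string(content))
}
```
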
diff --git a/internal/files/testdata/links/included.yml b/internal/files/testdata/links/included.yml
new file mode 100644
index 0000000000..923ec99b96
--- /dev/null
+++ b/internal/files/testdata/links/included.yml
@@ -0,0 +1,3 @@
+processors:
+  - test:
+      foo: bar
\ No newline at end of file
diff --git a/internal/files/testdata/links/outdated.yml.link b/internal/files/testdata/links/outdated.yml.link
new file mode 100644
index 0000000000..76781e5392
--- /dev/null
+++ b/internal/files/testdata/links/outdated.yml.link
@@ -0,0 +1 @@
+./included.yml
\ No newline at end of file
diff --git a/internal/files/testdata/links/uptodate.yml.link b/internal/files/testdata/links/uptodate.yml.link
new file mode 100644
index 0000000000..d0a9c517ea
--- /dev/null
+++ b/internal/files/testdata/links/uptodate.yml.link
@@ -0,0 +1 @@
+./included.yml d709feed45b708c9548a18ca48f3ad4f41be8d3f691f83d7417ca902a20e6c1e
\ No newline at end of file
diff --git a/internal/files/testdata/testpackage/included.yml.link b/internal/files/testdata/testpackage/included.yml.link
new file mode 100644
index 0000000000..61dbe8caee
--- /dev/null
+++ b/internal/files/testdata/testpackage/included.yml.link
@@ -0,0 +1 @@
+../links/included.yml d709feed45b708c9548a18ca48f3ad4f41be8d3f691f83d7417ca902a20e6c1e
\ No newline at end of file
diff --git a/internal/files/testdata/testpackage/manifest.yml b/internal/files/testdata/testpackage/manifest.yml
new file mode 100644
index 0000000000..cef06e0e0f
--- /dev/null
+++ b/internal/files/testdata/testpackage/manifest.yml
@@ -0,0 +1,20 @@
+format_version: 2.3.0
+name: testpackage
+title: "With Includes Tests"
+version: 0.0.1
+description: "These are tests of field validation with includes."
+type: integration
+categories:
+  - custom
+conditions:
+  kibana.version: "^8.0.0"
+policy_templates:
+  - name: sample
+    title: Sample logs
+    description: Collect sample logs
+    inputs:
+      - type: logfile
+        title: Collect sample logs from instances
+        description: Collecting sample logs
+owner:
+  github: elastic/integrations
diff --git a/internal/packages/packages.go b/internal/packages/packages.go
index 30d505f949..7000d4382b 100644
--- a/internal/packages/packages.go
+++ b/internal/packages/packages.go
@@ -239,18 +239,22 @@ func MustFindPackageRoot() (string, error) {
 	return root, nil
 }
 
-// FindPackageRoot finds and returns the path to the root folder of a package.
+// FindPackageRoot finds and returns the path to the root folder of a package from the working directory.
 func FindPackageRoot() (string, bool, error) {
 	workDir, err := os.Getwd()
 	if err != nil {
 		return "", false, fmt.Errorf("locating working directory failed: %w", err)
 	}
+	return FindPackageRootFrom(workDir)
+}
 
+// FindPackageRootFrom finds and returns the path to the root folder of a package from a given directory.
+func FindPackageRootFrom(fromDir string) (string, bool, error) {
 	// VolumeName() will return something like "C:" in Windows, and "" in other OSs
 	// rootDir will be something like "C:\" in Windows, and "/" everywhere else.
-	rootDir := filepath.VolumeName(workDir) + string(filepath.Separator)
+	rootDir := filepath.VolumeName(fromDir) + string(filepath.Separator)
 
-	dir := workDir
+	dir := fromDir
 	for dir != "." {
 		path := filepath.Join(dir, PackageManifestFile)
 		fileInfo, err := os.Stat(path)
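
This refactor splits the old working-directory-only lookup into `FindPackageRoot` (behavior unchanged) and `FindPackageRootFrom`, which can start the upward walk from any directory. A condensed, self-contained sketch of that walk, assuming `PackageManifestFile` is `manifest.yml`; `findRootFrom` is illustrative only, and the real function may perform additional validation on the manifest it finds:

```go
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// findRootFrom climbs parent directories from fromDir until a manifest.yml
// is found or the volume root is reached, mirroring the loop in the hunk above.
func findRootFrom(fromDir string) (string, bool) {
	// "C:\" on Windows, "/" elsewhere.
	rootDir := filepath.VolumeName(fromDir) + string(filepath.Separator)
	for dir := fromDir; dir != "."; dir = filepath.Dir(dir) {
		if fi, err := os.Stat(filepath.Join(dir, "manifest.yml")); err == nil && fi.Mode().IsRegular() {
			return dir, true
		}
		if dir == rootDir {
			break
		}
	}
	return "", false
}

func main() {
	// Hypothetical repository layout.
	dir, found := findRootFrom("/repo/packages/nginx/data_stream/access")
	fmt.Println(dir, found)
}
```
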
diff --git a/test/packages/other/with_links/_dev/build/build.yml b/test/packages/other/with_links/_dev/build/build.yml
new file mode 100644
index 0000000000..8a08f65dea
--- /dev/null
+++ b/test/packages/other/with_links/_dev/build/build.yml
@@ -0,0 +1,4 @@
+dependencies:
+  ecs:
+    reference: git@v8.5.2
+    import_mappings: true
diff --git a/test/packages/other/with_links/_dev/build/docs/README.md b/test/packages/other/with_links/_dev/build/docs/README.md
new file mode 100644
index 0000000000..591d5fa57c
--- /dev/null
+++ b/test/packages/other/with_links/_dev/build/docs/README.md
@@ -0,0 +1,9 @@
+# Imported Mappings Tests
+
+{{event "first"}}
+
+{{fields "first"}}
+
+{{event "second"}}
+
+{{fields "second"}}
\ No newline at end of file
diff --git a/test/packages/other/with_links/_dev/shared/stream.yml.hbs b/test/packages/other/with_links/_dev/shared/stream.yml.hbs
new file mode 100644
index 0000000000..5845510de8
--- /dev/null
+++ b/test/packages/other/with_links/_dev/shared/stream.yml.hbs
@@ -0,0 +1,7 @@
+paths:
+{{#each paths as |path i|}}
+  - {{path}}
+{{/each}}
+exclude_files: [".gz$"]
+processors:
+  - add_locale: ~
diff --git a/test/packages/other/with_links/changelog.yml b/test/packages/other/with_links/changelog.yml
new file mode 100644
index 0000000000..bb0320a524
--- /dev/null
+++ b/test/packages/other/with_links/changelog.yml
@@ -0,0 +1,6 @@
+# newer versions go on top
+- version: "0.0.1"
+  changes:
+    - description: Initial draft of the package
+      type: enhancement
+      link: https://github.com/elastic/integrations/pull/1 # FIXME Replace with the real PR link
diff --git a/test/packages/other/with_links/data_stream/first/_dev/test/pipeline/test-access-raw.log b/test/packages/other/with_links/data_stream/first/_dev/test/pipeline/test-access-raw.log
new file mode 100644
index 0000000000..c8c9ffe960
--- /dev/null
+++ b/test/packages/other/with_links/data_stream/first/_dev/test/pipeline/test-access-raw.log
@@ -0,0 +1 @@
+1.2.3.4 - - [25/Oct/2016:14:49:34 +0200] "GET /favicon.ico HTTP/1.1" 404 571 "http://localhost:8080/" "skip-this-one/5.0 (Macintosh; Intel Mac OS X 10_12_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2840.59 Safari/537.36"
\ No newline at end of file
diff --git a/test/packages/other/with_links/data_stream/first/_dev/test/pipeline/test-access-raw.log-config.yml b/test/packages/other/with_links/data_stream/first/_dev/test/pipeline/test-access-raw.log-config.yml
new file mode 100644
index 0000000000..958d74a23e
--- /dev/null
+++ b/test/packages/other/with_links/data_stream/first/_dev/test/pipeline/test-access-raw.log-config.yml
@@ -0,0 +1,4 @@
+multiline:
+  first_line_pattern: "^(?:[0-9]{1,3}\\.){3}[0-9]{1,3}"
+fields:
+  "@timestamp": "2020-04-28T11:07:58.223Z"
diff --git a/test/packages/other/with_links/data_stream/first/_dev/test/pipeline/test-access-raw.log-expected.json b/test/packages/other/with_links/data_stream/first/_dev/test/pipeline/test-access-raw.log-expected.json
new file mode 100644
index 0000000000..1c2f884a44
--- /dev/null
+++ b/test/packages/other/with_links/data_stream/first/_dev/test/pipeline/test-access-raw.log-expected.json
@@ -0,0 +1,5 @@
+{
+    "expected": [
+        null
+    ]
+}
\ No newline at end of file
diff --git a/test/packages/other/with_links/data_stream/first/agent/stream/stream.yml.hbs.link b/test/packages/other/with_links/data_stream/first/agent/stream/stream.yml.hbs.link
new file mode 100644
index 0000000000..099114d0e3
--- /dev/null
+++ b/test/packages/other/with_links/data_stream/first/agent/stream/stream.yml.hbs.link
@@ -0,0 +1 @@
+../../../../_dev/shared/stream.yml.hbs 069381d45bffbd532a4af8953766a053e75a2aceebdafdffc2264e800fcd1363
\ No newline at end of file
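
The fixture above shows the on-disk link format used throughout this test package: a path that climbs from the link's directory to the shared template, followed by the SHA-256 of the template's content. A sketch of producing such a line, consistent with how the unit tests construct their fixtures (`linkLine` is a hypothetical helper):

```go
package main

import (
	"crypto/sha256"
	"encoding/hex"
	"fmt"
	"os"
	"path/filepath"
)

// linkLine builds the single "<relative path> <sha256>" line stored in a
// .link file; linkDir is the directory that will contain the .link file,
// and targetRel is the target path relative to that directory.
func linkLine(linkDir, targetRel string) (string, error) {
	data, err := os.ReadFile(filepath.Join(linkDir, targetRel))
	if err != nil {
		return "", err
	}
	sum := sha256.Sum256(data)
	return fmt.Sprintf("%s %s", targetRel, hex.EncodeToString(sum[:])), nil
}

func main() {
	line, err := linkLine("data_stream/first/agent/stream", "../../../../_dev/shared/stream.yml.hbs")
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	// As in the fixtures above, the line would be written without a trailing newline.
	fmt.Println(line)
}
```
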
diff --git a/test/packages/other/with_links/data_stream/first/elasticsearch/ingest_pipeline/default.yml.link b/test/packages/other/with_links/data_stream/first/elasticsearch/ingest_pipeline/default.yml.link
new file mode 100644
index 0000000000..c8e0005e25
--- /dev/null
+++ b/test/packages/other/with_links/data_stream/first/elasticsearch/ingest_pipeline/default.yml.link
@@ -0,0 +1 @@
+../../../../../pipeline_tests/data_stream/test/elasticsearch/ingest_pipeline/default.yml f7c5f0c03aca8ef68c379a62447bdafbf0dcf32b1ff2de143fd6878ee01a91ad
\ No newline at end of file
diff --git a/test/packages/other/with_links/data_stream/first/fields/base-fields.yml b/test/packages/other/with_links/data_stream/first/fields/base-fields.yml
new file mode 100644
index 0000000000..7c798f4534
--- /dev/null
+++ b/test/packages/other/with_links/data_stream/first/fields/base-fields.yml
@@ -0,0 +1,12 @@
+- name: data_stream.type
+  type: constant_keyword
+  description: Data stream type.
+- name: data_stream.dataset
+  type: constant_keyword
+  description: Data stream dataset.
+- name: data_stream.namespace
+  type: constant_keyword
+  description: Data stream namespace.
+- name: '@timestamp'
+  type: date
+  description: Event timestamp.
diff --git a/test/packages/other/with_links/data_stream/first/fields/histogram-fields.yml b/test/packages/other/with_links/data_stream/first/fields/histogram-fields.yml
new file mode 100644
index 0000000000..128a0cb1ca
--- /dev/null
+++ b/test/packages/other/with_links/data_stream/first/fields/histogram-fields.yml
@@ -0,0 +1,3 @@
+- name: service.status.*.histogram
+  type: object
+  object_type: histogram
diff --git a/test/packages/other/with_links/data_stream/first/manifest.yml b/test/packages/other/with_links/data_stream/first/manifest.yml
new file mode 100644
index 0000000000..979ef29d64
--- /dev/null
+++ b/test/packages/other/with_links/data_stream/first/manifest.yml
@@ -0,0 +1,13 @@
+title: "First"
+type: logs
+streams:
+  - input: logfile
+    title: Sample logs
+    description: Collect sample logs
+    vars:
+      - name: paths
+        type: text
+        title: Paths
+        multi: true
+        default:
+          - /var/log/*.log
diff --git a/test/packages/other/with_links/data_stream/first/sample_event.json b/test/packages/other/with_links/data_stream/first/sample_event.json
new file mode 100644
index 0000000000..a242024f51
--- /dev/null
+++ b/test/packages/other/with_links/data_stream/first/sample_event.json
@@ -0,0 +1,26 @@
+{
+    "source.geo.location": {
+        "lat": 1.0,
+        "lon": "2.0"
+    },
+    "destination.geo.location.lat": 3.0,
+    "destination.geo.location.lon": 4.0,
+    "service.status.duration.histogram": {
+        "counts": [
+            8,
+            17,
+            8,
+            7,
+            6,
+            2
+        ],
+        "values": [
+            0.1,
+            0.25,
+            0.35,
+            0.4,
+            0.45,
+            0.5
+        ]
+    }
+}
\ No newline at end of file
diff --git a/test/packages/other/with_links/data_stream/second/agent/stream/stream.yml.hbs.link b/test/packages/other/with_links/data_stream/second/agent/stream/stream.yml.hbs.link
new file mode 100644
index 0000000000..099114d0e3
--- /dev/null
+++ b/test/packages/other/with_links/data_stream/second/agent/stream/stream.yml.hbs.link
@@ -0,0 +1 @@
+../../../../_dev/shared/stream.yml.hbs 069381d45bffbd532a4af8953766a053e75a2aceebdafdffc2264e800fcd1363
\ No newline at end of file
diff --git a/test/packages/other/with_links/data_stream/second/elasticsearch/ingest_pipeline/default.yml b/test/packages/other/with_links/data_stream/second/elasticsearch/ingest_pipeline/default.yml
new file mode 100644
index 0000000000..81221adf3f
--- /dev/null
+++ b/test/packages/other/with_links/data_stream/second/elasticsearch/ingest_pipeline/default.yml
@@ -0,0 +1,10 @@
+---
+description: Pipeline for processing sample logs
+processors:
+- set:
+    field: sample_field
+    value: "1"
+on_failure:
+- set:
+    field: error.message
+    value: '{{ _ingest.on_failure_message }}'
\ No newline at end of file
diff --git a/test/packages/other/with_links/data_stream/second/fields/base-fields.yml b/test/packages/other/with_links/data_stream/second/fields/base-fields.yml
new file mode 100644
index 0000000000..7c798f4534
--- /dev/null
+++ b/test/packages/other/with_links/data_stream/second/fields/base-fields.yml
@@ -0,0 +1,12 @@
+- name: data_stream.type
+  type: constant_keyword
+  description: Data stream type.
+- name: data_stream.dataset
+  type: constant_keyword
+  description: Data stream dataset.
+- name: data_stream.namespace
+  type: constant_keyword
+  description: Data stream namespace.
+- name: '@timestamp'
+  type: date
+  description: Event timestamp.
diff --git a/test/packages/other/with_links/data_stream/second/fields/geo-fields.yml b/test/packages/other/with_links/data_stream/second/fields/geo-fields.yml
new file mode 100644
index 0000000000..a618607d34
--- /dev/null
+++ b/test/packages/other/with_links/data_stream/second/fields/geo-fields.yml
@@ -0,0 +1,2 @@
+- name: destination.geo.location
+  external: ecs
diff --git a/test/packages/other/with_links/data_stream/second/fields/histogram-fields.yml b/test/packages/other/with_links/data_stream/second/fields/histogram-fields.yml
new file mode 100644
index 0000000000..128a0cb1ca
--- /dev/null
+++ b/test/packages/other/with_links/data_stream/second/fields/histogram-fields.yml
@@ -0,0 +1,3 @@
+- name: service.status.*.histogram
+  type: object
+  object_type: histogram
diff --git a/test/packages/other/with_links/data_stream/second/manifest.yml b/test/packages/other/with_links/data_stream/second/manifest.yml
new file mode 100644
index 0000000000..979ef29d64
--- /dev/null
+++ b/test/packages/other/with_links/data_stream/second/manifest.yml
@@ -0,0 +1,13 @@
+title: "Second"
+type: logs
+streams:
+  - input: logfile
+    title: Sample logs
+    description: Collect sample logs
+    vars:
+      - name: paths
+        type: text
+        title: Paths
+        multi: true
+        default:
+          - /var/log/*.log
diff --git a/test/packages/other/with_links/data_stream/second/sample_event.json b/test/packages/other/with_links/data_stream/second/sample_event.json
new file mode 100644
index 0000000000..a242024f51
--- /dev/null
+++ b/test/packages/other/with_links/data_stream/second/sample_event.json
@@ -0,0 +1,26 @@
+{
+    "source.geo.location": {
+        "lat": 1.0,
+        "lon": "2.0"
+    },
+    "destination.geo.location.lat": 3.0,
+    "destination.geo.location.lon": 4.0,
+    "service.status.duration.histogram": {
+        "counts": [
+            8,
+            17,
+            8,
+            7,
+            6,
+            2
+        ],
+        "values": [
+            0.1,
+            0.25,
+            0.35,
+            0.4,
+            0.45,
+            0.5
+        ]
+    }
+}
\ No newline at end of file
diff --git a/test/packages/other/with_links/docs/README.md b/test/packages/other/with_links/docs/README.md
new file mode 100644
index 0000000000..6f99a892f2
--- /dev/null
+++ b/test/packages/other/with_links/docs/README.md
@@ -0,0 +1,85 @@
+# Imported Mappings Tests
+
+An example event for `first` looks as following:
+
+```json
+{
+    "source.geo.location": {
+        "lat": 1.0,
+        "lon": "2.0"
+    },
+    "destination.geo.location.lat": 3.0,
+    "destination.geo.location.lon": 4.0,
+    "service.status.duration.histogram": {
+        "counts": [
+            8,
+            17,
+            8,
+            7,
+            6,
+            2
+        ],
+        "values": [
+            0.1,
+            0.25,
+            0.35,
+            0.4,
+            0.45,
+            0.5
+        ]
+    }
+}
+```
+
+**Exported fields**
+
+| Field | Description | Type |
+|---|---|---|
+| @timestamp | Event timestamp. | date |
+| data_stream.dataset | Data stream dataset. | constant_keyword |
+| data_stream.namespace | Data stream namespace. | constant_keyword |
+| data_stream.type | Data stream type. | constant_keyword |
+| service.status.\*.histogram |  | object |
+
+
+An example event for `second` looks as following:
+
+```json
+{
+    "source.geo.location": {
+        "lat": 1.0,
+        "lon": "2.0"
+    },
+    "destination.geo.location.lat": 3.0,
+    "destination.geo.location.lon": 4.0,
+    "service.status.duration.histogram": {
+        "counts": [
+            8,
+            17,
+            8,
+            7,
+            6,
+            2
+        ],
+        "values": [
+            0.1,
+            0.25,
+            0.35,
+            0.4,
+            0.45,
+            0.5
+        ]
+    }
+}
+```
+
+**Exported fields**
+
+| Field | Description | Type |
+|---|---|---|
+| @timestamp | Event timestamp. | date |
+| data_stream.dataset | Data stream dataset. | constant_keyword |
+| data_stream.namespace | Data stream namespace. | constant_keyword |
+| data_stream.type | Data stream type. | constant_keyword |
+| destination.geo.location | Longitude and latitude. | geo_point |
+| service.status.\*.histogram |  | object |
diff --git a/test/packages/other/with_links/manifest.yml b/test/packages/other/with_links/manifest.yml
new file mode 100644
index 0000000000..233ea0ae8d
--- /dev/null
+++ b/test/packages/other/with_links/manifest.yml
@@ -0,0 +1,23 @@
+format_version: 3.4.0
+name: with_links
+title: "With Links Tests"
+version: 0.0.1
+description: "These are tests of field validation with links."
+type: integration
+categories:
+  - custom
+conditions:
+  kibana:
+    version: "^8.0.0"
+policy_templates:
+  - name: sample
+    title: Sample logs
+    description: Collect sample logs
+    inputs:
+      - type: logfile
+        title: Collect sample logs from instances
+        description: Collecting sample logs
+owner:
+  github: elastic/integrations
+  type: elastic
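
Taken together, these fixtures rely on the path discipline asserted back in `TestLinksFS_ErrorConditions`: a link target is resolved relative to the directory of its `.link` file and must never leave the repository root. A sketch of that resolution rule (`resolveTarget` is hypothetical; the actual enforcement goes through `os.Root`):

```go
package main

import (
	"fmt"
	"path/filepath"
	"strings"
)

// resolveTarget resolves a link target relative to the directory of its .link
// file (linkDir, given repo-relative) and rejects any path that leaves the
// repository root, mirroring the "escapes the repository root" failure
// asserted in the tests above.
func resolveTarget(repoRoot, linkDir, target string) (string, error) {
	abs := filepath.Clean(filepath.Join(repoRoot, linkDir, target))
	rel, err := filepath.Rel(repoRoot, abs)
	if err != nil || rel == ".." || strings.HasPrefix(rel, ".."+string(filepath.Separator)) {
		return "", fmt.Errorf("path %q escapes the repository root", target)
	}
	return abs, nil
}

func main() {
	// Inside the repo: resolves to <repo>/packages/p/_dev/shared/stream.yml.hbs.
	p, err := resolveTarget("/repo", "packages/p/data_stream/first/agent/stream",
		"../../../../_dev/shared/stream.yml.hbs")
	fmt.Println(p, err)

	// Climbing past the root: rejected.
	_, err = resolveTarget("/repo", "packages/p", "../../../outside.yml")
	fmt.Println(err)
}
```
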