diff --git a/.golangci.yml b/.golangci.yml index 719177c636..7ea0dd54da 100644 --- a/.golangci.yml +++ b/.golangci.yml @@ -16,7 +16,6 @@ linters: - gocognit # Check cognitive complexity - gocritic # Enable additional checks for code issues - godot # Enforce comment formatting - - gofmt # Enforce gofmt style - gofumpt # Enforce consistent formatting - goimports # Enforce import formatting - gosec # Inspect source code for security problems @@ -51,14 +50,6 @@ linters-settings: excludes: - G101 # Look for hard coded credentials - gofmt: - # Simplify code: gofmt -s - simplify: true - # Define indentation - rewrite-rules: - - pattern: "interface{}" - replacement: "any" - cyclop: # Maximum function complexity max-complexity: 15 @@ -104,7 +95,7 @@ linters-settings: - name: add-constant arguments: - maxLitCount: "3" - allowStrs: '""' + allowStrs: '"","error"' allowInts: "0,1,2,3,4" allowFloats: "0.0,0.,1.0,1.,2.0,2." - name: argument-limit @@ -169,6 +160,7 @@ issues: - funlen - revive - gci + - gocognit output: formats: diff --git a/cmd/cmd_utils.go b/cmd/cmd_utils.go index a712f7f991..d55978e4d7 100644 --- a/cmd/cmd_utils.go +++ b/cmd/cmd_utils.go @@ -46,12 +46,6 @@ func processCustomCommands( var command *cobra.Command existingTopLevelCommands := make(map[string]*cobra.Command) - // Build commands and their hierarchy from the alias map - for alias, fullCmd := range atmosConfig.CommandAliases { - parts := strings.Fields(fullCmd) - addCommandWithAlias(RootCmd, alias, parts) - } - if topLevel { existingTopLevelCommands = getTopLevelCommands() } diff --git a/cmd/list.go b/cmd/list.go index 3e95c57919..0a547f4150 100644 --- a/cmd/list.go +++ b/cmd/list.go @@ -4,11 +4,12 @@ import ( "github.com/spf13/cobra" ) -// listCmd commands list stacks and components +// listCmd represents the base list command that provides subcommands for listing +// various Atmos resources like stacks, components, settings, metadata, etc. 
var listCmd = &cobra.Command{ - Use: "list", - Short: "List available stacks and components", - Long: `Display a list of all available stacks and components defined in your project.`, + Use: "list [command]", + Short: "List Atmos resources and configurations", + Long: "List and display Atmos resources and configurations", FParseErrWhitelist: struct{ UnknownFlags bool }{UnknownFlags: false}, Args: cobra.NoArgs, } diff --git a/cmd/list_metadata.go b/cmd/list_metadata.go new file mode 100644 index 0000000000..b1b4db9448 --- /dev/null +++ b/cmd/list_metadata.go @@ -0,0 +1,97 @@ +package cmd + +import ( + log "github.com/charmbracelet/log" + "github.com/spf13/cobra" + + e "github.com/cloudposse/atmos/internal/exec" + "github.com/cloudposse/atmos/pkg/config" + l "github.com/cloudposse/atmos/pkg/list" + "github.com/cloudposse/atmos/pkg/list/errors" + fl "github.com/cloudposse/atmos/pkg/list/flags" + f "github.com/cloudposse/atmos/pkg/list/format" + u "github.com/cloudposse/atmos/pkg/list/utils" + "github.com/cloudposse/atmos/pkg/schema" + utils "github.com/cloudposse/atmos/pkg/utils" +) + +// listMetadataCmd lists metadata across stacks. 
+var listMetadataCmd = &cobra.Command{ + Use: "metadata", + Short: "List metadata across stacks", + Long: "List metadata information across all stacks", + Example: "atmos list metadata\n" + + "atmos list metadata --query .component\n" + + "atmos list metadata --format json\n" + + "atmos list metadata --stack '*-{dev,staging}-*'\n" + + "atmos list metadata --stack 'prod-*'", + Run: func(cmd *cobra.Command, args []string) { + // Check Atmos configuration + checkAtmosConfig() + output, err := listMetadata(cmd) + if err != nil { + log.Error("failed to list metadata", "error", err) + return + } + + utils.PrintMessage(output) + }, +} + +func init() { + fl.AddCommonListFlags(listMetadataCmd) + + AddStackCompletion(listMetadataCmd) + + listCmd.AddCommand(listMetadataCmd) +} + +func listMetadata(cmd *cobra.Command) (string, error) { + commonFlags, err := fl.GetCommonListFlags(cmd) + if err != nil { + return "", &errors.QueryError{ + Query: "common flags", + Cause: err, + } + } + + if f.Format(commonFlags.Format) == f.FormatCSV && commonFlags.Delimiter == f.DefaultTSVDelimiter { + commonFlags.Delimiter = f.DefaultCSVDelimiter + } + + // Initialize CLI config + configAndStacksInfo := schema.ConfigAndStacksInfo{} + atmosConfig, err := config.InitCliConfig(configAndStacksInfo, true) + if err != nil { + return "", &errors.InitConfigError{Cause: err} + } + + // Get all stacks + stacksMap, err := e.ExecuteDescribeStacks(atmosConfig, "", nil, nil, nil, false, false, false, false, nil) + if err != nil { + return "", &errors.DescribeStacksError{Cause: err} + } + + // Use .metadata as the default query if none provided + if commonFlags.Query == "" { + commonFlags.Query = ".metadata" + } + + output, err := l.FilterAndListValues(stacksMap, &l.FilterOptions{ + Component: "", + Query: commonFlags.Query, + IncludeAbstract: false, + MaxColumns: commonFlags.MaxColumns, + FormatStr: commonFlags.Format, + Delimiter: commonFlags.Delimiter, + StackPattern: commonFlags.Stack, + }) + if err != nil 
{ + if u.IsNoValuesFoundError(err) { + return "", &errors.NoMetadataFoundError{Query: commonFlags.Query} + } + return "", &errors.MetadataFilteringError{Cause: err} + } + + return output, nil +} diff --git a/cmd/list_settings.go b/cmd/list_settings.go new file mode 100644 index 0000000000..ea585a2b4f --- /dev/null +++ b/cmd/list_settings.go @@ -0,0 +1,91 @@ +package cmd + +import ( + log "github.com/charmbracelet/log" + "github.com/spf13/cobra" + + e "github.com/cloudposse/atmos/internal/exec" + "github.com/cloudposse/atmos/pkg/config" + l "github.com/cloudposse/atmos/pkg/list" + "github.com/cloudposse/atmos/pkg/list/errors" + fl "github.com/cloudposse/atmos/pkg/list/flags" + f "github.com/cloudposse/atmos/pkg/list/format" + u "github.com/cloudposse/atmos/pkg/list/utils" + "github.com/cloudposse/atmos/pkg/schema" + utils "github.com/cloudposse/atmos/pkg/utils" +) + +// listSettingsCmd lists settings across stacks. +var listSettingsCmd = &cobra.Command{ + Use: "settings", + Short: "List settings across stacks", + Long: "List settings configuration across all stacks", + Example: "atmos list settings\n" + + "atmos list settings --query .terraform\n" + + "atmos list settings --format json\n" + + "atmos list settings --stack '*-dev-*'\n" + + "atmos list settings --stack 'prod-*'", + Run: func(cmd *cobra.Command, args []string) { + // Check Atmos configuration + checkAtmosConfig() + output, err := listSettings(cmd) + if err != nil { + log.Error("failed to list settings", "error", err) + return + } + + utils.PrintMessage(output) + }, +} + +func init() { + fl.AddCommonListFlags(listSettingsCmd) + + AddStackCompletion(listSettingsCmd) + + listCmd.AddCommand(listSettingsCmd) +} + +func listSettings(cmd *cobra.Command) (string, error) { + // Get common flags + commonFlags, err := fl.GetCommonListFlags(cmd) + if err != nil { + return "", &errors.CommonFlagsError{Cause: err} + } + + if f.Format(commonFlags.Format) == f.FormatCSV && commonFlags.Delimiter == f.DefaultTSVDelimiter 
{ + commonFlags.Delimiter = f.DefaultCSVDelimiter + } + + // Initialize CLI config + configAndStacksInfo := schema.ConfigAndStacksInfo{} + atmosConfig, err := config.InitCliConfig(configAndStacksInfo, true) + if err != nil { + return "", &errors.InitConfigError{Cause: err} + } + + // Get all stacks + stacksMap, err := e.ExecuteDescribeStacks(atmosConfig, "", nil, nil, nil, false, false, false, false, nil) + if err != nil { + return "", &errors.DescribeStacksError{Cause: err} + } + + // Use empty query to avoid further processing since handleComponentProperties will extract the settings + output, err := l.FilterAndListValues(stacksMap, &l.FilterOptions{ + Component: "settings", + Query: commonFlags.Query, + IncludeAbstract: false, + MaxColumns: commonFlags.MaxColumns, + FormatStr: commonFlags.Format, + Delimiter: commonFlags.Delimiter, + StackPattern: commonFlags.Stack, + }) + if err != nil { + if u.IsNoValuesFoundError(err) { + return "", &errors.NoSettingsFoundError{Query: commonFlags.Query} + } + return "", &errors.SettingsFilteringError{Cause: err} + } + + return output, nil +} diff --git a/cmd/list_stacks.go b/cmd/list_stacks.go index 979da4ca91..9959903a01 100644 --- a/cmd/list_stacks.go +++ b/cmd/list_stacks.go @@ -1,17 +1,16 @@ package cmd import ( - "fmt" "strings" + log "github.com/charmbracelet/log" "github.com/spf13/cobra" e "github.com/cloudposse/atmos/internal/exec" "github.com/cloudposse/atmos/pkg/config" l "github.com/cloudposse/atmos/pkg/list" "github.com/cloudposse/atmos/pkg/schema" - "github.com/cloudposse/atmos/pkg/ui/theme" - u "github.com/cloudposse/atmos/pkg/utils" + "github.com/cloudposse/atmos/pkg/utils" ) // listStacksCmd lists atmos stacks @@ -28,10 +27,10 @@ var listStacksCmd = &cobra.Command{ checkAtmosConfig() output, err := listStacks(cmd) if err != nil { - u.PrintErrorMarkdownAndExit("Error filtering stacks", err, "") + log.Error("error filtering stacks", "error", err) return } - u.PrintMessageInColor(strings.Join(output, "\n")+"\n", 
theme.Colors.Success) + utils.PrintMessage(strings.Join(output, "\n")) }, } @@ -46,11 +45,13 @@ func listStacks(cmd *cobra.Command) ([]string, error) { configAndStacksInfo := schema.ConfigAndStacksInfo{} atmosConfig, err := config.InitCliConfig(configAndStacksInfo, true) if err != nil { - return nil, fmt.Errorf("Error initializing CLI config: %v", err) + log.Error("failed to initialize CLI config", "error", err) + return nil, err } stacksMap, err := e.ExecuteDescribeStacks(atmosConfig, "", nil, nil, nil, false, false, false, false, nil) if err != nil { - return nil, fmt.Errorf("Error describing stacks: %v", err) + log.Error("failed to describe stacks", "error", err) + return nil, err } output, err := l.FilterAndListStacks(stacksMap, componentFlag) diff --git a/cmd/list_values.go b/cmd/list_values.go new file mode 100644 index 0000000000..e49cec05ae --- /dev/null +++ b/cmd/list_values.go @@ -0,0 +1,159 @@ +package cmd + +import ( + "fmt" + + log "github.com/charmbracelet/log" + "github.com/pkg/errors" + "github.com/spf13/cobra" + + e "github.com/cloudposse/atmos/internal/exec" + "github.com/cloudposse/atmos/pkg/config" + l "github.com/cloudposse/atmos/pkg/list" + fl "github.com/cloudposse/atmos/pkg/list/flags" + f "github.com/cloudposse/atmos/pkg/list/format" + "github.com/cloudposse/atmos/pkg/schema" + u "github.com/cloudposse/atmos/pkg/utils" +) + +var ( + ErrGettingCommonFlags = errors.New("error getting common flags") + ErrGettingAbstractFlag = errors.New("error getting abstract flag") + ErrGettingVarsFlag = errors.New("error getting vars flag") + ErrInitializingCLIConfig = errors.New("error initializing CLI config") + ErrDescribingStacks = errors.New("error describing stacks") +) + +// Error format strings. +const ( + ErrFmtWrapErr = "%w: %v" // Format for wrapping errors. +) + +// listValuesCmd lists component values across stacks. 
+var listValuesCmd = &cobra.Command{ + Use: "values [component]", + Short: "List component values across stacks", + Long: "List values for a component across all stacks where it is used", + Example: "atmos list values vpc\n" + + "atmos list values vpc --abstract\n" + + "atmos list values vpc --query .vars\n" + + "atmos list values vpc --format json\n" + + "atmos list values vpc --format yaml\n" + + "atmos list values vpc --format csv", + Args: cobra.ExactArgs(1), + Run: func(cmd *cobra.Command, args []string) { + if len(args) != 1 { + log.Error("invalid arguments. The command requires one argument 'component'") + return + } + + // Check Atmos configuration + checkAtmosConfig() + output, err := listValues(cmd, args) + if err != nil { + log.Error(err.Error()) + return + } + + u.PrintMessage(output) + }, +} + +// listVarsCmd is an alias for 'list values --query .vars'. +var listVarsCmd = &cobra.Command{ + Use: "vars [component]", + Short: "List component vars across stacks (alias for 'list values --query .vars')", + Long: "List vars for a component across all stacks where it is used", + Example: "atmos list vars vpc\n" + + "atmos list vars vpc --abstract\n" + + "atmos list vars vpc --max-columns 5\n" + + "atmos list vars vpc --format json\n" + + "atmos list vars vpc --format yaml\n" + + "atmos list vars vpc --format csv", + Args: cobra.ExactArgs(1), + Run: func(cmd *cobra.Command, args []string) { + // Set the query flag to .vars + if err := cmd.Flags().Set("query", ".vars"); err != nil { + log.Error("failed to set query flag", "error", err, "component", args[0]) + return + } + // Run the values command + listValuesCmd.Run(cmd, args) + }, +} + +func init() { + // Add common flags + fl.AddCommonListFlags(listValuesCmd) + + // Add additional flags + listValuesCmd.PersistentFlags().Bool("abstract", false, "Include abstract components") + listValuesCmd.PersistentFlags().Bool("vars", false, "Show only vars (equivalent to --query .vars)") + + // Add stack pattern completion 
+ AddStackCompletion(listValuesCmd) + + // Add commands to list command + listCmd.AddCommand(listValuesCmd) + listCmd.AddCommand(listVarsCmd) +} + +func listValues(cmd *cobra.Command, args []string) (string, error) { + // Get common flags + commonFlags, err := fl.GetCommonListFlags(cmd) + if err != nil { + return "", fmt.Errorf(ErrFmtWrapErr, ErrGettingCommonFlags, err) + } + + // Get additional flags + abstractFlag, err := cmd.Flags().GetBool("abstract") + if err != nil { + return "", fmt.Errorf(ErrFmtWrapErr, ErrGettingAbstractFlag, err) + } + + varsFlag, err := cmd.Flags().GetBool("vars") + if err != nil { + return "", fmt.Errorf(ErrFmtWrapErr, ErrGettingVarsFlag, err) + } + + // Set appropriate default delimiter based on format + if f.Format(commonFlags.Format) == f.FormatCSV && commonFlags.Delimiter == f.DefaultTSVDelimiter { + commonFlags.Delimiter = f.DefaultCSVDelimiter + } + + // If vars flag is set, override query + if varsFlag { + commonFlags.Query = ".vars" + } + + component := args[0] + + // Initialize CLI config + configAndStacksInfo := schema.ConfigAndStacksInfo{} + atmosConfig, err := config.InitCliConfig(configAndStacksInfo, true) + if err != nil { + return "", fmt.Errorf(ErrFmtWrapErr, ErrInitializingCLIConfig, err) + } + + // Get all stacks + stacksMap, err := e.ExecuteDescribeStacks(atmosConfig, "", nil, nil, nil, false, false, false, false, nil) + if err != nil { + return "", fmt.Errorf(ErrFmtWrapErr, ErrDescribingStacks, err) + } + + // Filter and list component values across stacks + output, err := l.FilterAndListValues(stacksMap, &l.FilterOptions{ + Component: component, + Query: commonFlags.Query, + IncludeAbstract: abstractFlag, + MaxColumns: commonFlags.MaxColumns, + FormatStr: commonFlags.Format, + Delimiter: commonFlags.Delimiter, + StackPattern: commonFlags.Stack, + }) + if err != nil { + return "", err // Return error directly without wrapping + } + + return output, nil +} diff --git a/cmd/markdown/atmos_list_metadata_usage.md 
b/cmd/markdown/atmos_list_metadata_usage.md new file mode 100644 index 0000000000..418322b470 --- /dev/null +++ b/cmd/markdown/atmos_list_metadata_usage.md @@ -0,0 +1,28 @@ +– List all metadata +``` + $ atmos list metadata +``` + +– List metadata for specific stacks +``` + $ atmos list metadata --stack '*-dev-*' +``` + +– List specific metadata fields +``` + $ atmos list metadata --query .metadata.component + $ atmos list metadata --query .metadata.type +``` + +– Output in different formats +``` + $ atmos list metadata --format json + $ atmos list metadata --format yaml + $ atmos list metadata --format csv + $ atmos list metadata --format tsv +``` + +– Filter by stack and specific metadata +``` + $ atmos list metadata --stack '*-ue2-*' --query .metadata.version +``` diff --git a/cmd/markdown/atmos_list_settings_usage.md b/cmd/markdown/atmos_list_settings_usage.md new file mode 100644 index 0000000000..28e5fd6a4f --- /dev/null +++ b/cmd/markdown/atmos_list_settings_usage.md @@ -0,0 +1,28 @@ +– List all settings +``` + $ atmos list settings +``` + +– List settings for specific stacks +``` + $ atmos list settings --stack '*-dev-*' +``` + +– List specific settings using query +``` + $ atmos list settings --query .settings.templates + $ atmos list settings --query .settings.validation +``` + +– Output in different formats +``` + $ atmos list settings --format json + $ atmos list settings --format yaml + $ atmos list settings --format csv + $ atmos list settings --format tsv +``` + +– Filter by stack and specific settings +``` + $ atmos list settings --stack '*-ue2-*' --query .settings.templates.gomplate +``` diff --git a/cmd/markdown/atmos_list_values_usage.md b/cmd/markdown/atmos_list_values_usage.md new file mode 100644 index 0000000000..d5f8f6f94f --- /dev/null +++ b/cmd/markdown/atmos_list_values_usage.md @@ -0,0 +1,44 @@ +– List all values for a component +``` + $ atmos list values +``` + +– List only variables for a component +``` + $ atmos list values --query 
.vars +``` + +– List settings for a specific component in a stack +``` + $ atmos list values vpc --query .settings --stack 'plat-ue2-*' +``` + +– Include abstract components +``` + $ atmos list values vpc --abstract +``` + +– Limit number of columns +``` + $ atmos list values vpc --max-columns 5 +``` + +– Output in different formats +``` + $ atmos list values vpc --format json + $ atmos list values vpc --format yaml + $ atmos list values vpc --format csv + $ atmos list values vpc --format tsv +``` + +– Filter stacks and include abstract components +``` + $ atmos list values vpc --stack '*-prod-*' --abstract +``` + +– Custom query with specific stack pattern +``` + $ atmos list values vpc --query .vars.tags --stack '*-ue2-*' +``` +- Stack patterns support glob matching (e.g., `*-dev-*`, `prod-*`, `*-{dev,staging}-*`) + diff --git a/cmd/markdown/atmos_list_vars_usage.md b/cmd/markdown/atmos_list_vars_usage.md new file mode 100644 index 0000000000..c8a4e9774e --- /dev/null +++ b/cmd/markdown/atmos_list_vars_usage.md @@ -0,0 +1,34 @@ +– List all variables for a component +``` + $ atmos list vars vpc +``` + +– List specific variables using query +``` + $ atmos list vars vpc --query .vars.tags +``` + +– Filter by stack pattern +``` + $ atmos list vars vpc --stack '*-dev-*' +``` + +– Output in different formats +``` + $ atmos list vars vpc --format json + $ atmos list vars vpc --format yaml + $ atmos list vars vpc --format csv + $ atmos list vars vpc --format tsv +``` + +– Include abstract components +``` + $ atmos list vars vpc --abstract +``` + +– Filter by stack and specific variables +``` + $ atmos list vars vpc --stack '*-ue2-*' --query .vars.region +``` +- Stack patterns support glob matching (e.g., `*-dev-*`, `prod-*`, `*-{dev,staging}-*`) + diff --git a/pkg/list/errors/types.go b/pkg/list/errors/types.go new file mode 100644 index 0000000000..9f86a07435 --- /dev/null +++ b/pkg/list/errors/types.go @@ -0,0 +1,140 @@ +package errors + +import ( + "fmt" + "strings" +) + +// NoValuesFoundError represents an error when no values are found
for a component. +type NoValuesFoundError struct { + Component string + Query string +} + +func (e *NoValuesFoundError) Error() string { + if e.Query != "" { + return fmt.Sprintf("no values found for component '%s' with query '%s'", e.Component, e.Query) + } + return fmt.Sprintf("no values found for component '%s'", e.Component) +} + +// InvalidFormatError represents an error when an invalid format is specified. +type InvalidFormatError struct { + Format string + Valid []string +} + +func (e *InvalidFormatError) Error() string { + return fmt.Sprintf("invalid format '%s'. Valid formats are: %s", e.Format, strings.Join(e.Valid, ", ")) +} + +// QueryError represents an error when processing a query. +type QueryError struct { + Query string + Cause error +} + +func (e *QueryError) Error() string { + return fmt.Sprintf("error processing query '%s': %v", e.Query, e.Cause) +} + +func (e *QueryError) Unwrap() error { + return e.Cause +} + +// StackPatternError represents an error with stack pattern matching. +type StackPatternError struct { + Pattern string + Cause error +} + +func (e *StackPatternError) Error() string { + return fmt.Sprintf("invalid stack pattern '%s': %v", e.Pattern, e.Cause) +} + +func (e *StackPatternError) Unwrap() error { + return e.Cause +} + +// NoMetadataFoundError represents an error when no metadata is found with a given query. +type NoMetadataFoundError struct { + Query string +} + +func (e *NoMetadataFoundError) Error() string { + return fmt.Sprintf("no metadata found in any stacks with query '%s'", e.Query) +} + +// MetadataFilteringError represents an error when filtering and listing metadata. +type MetadataFilteringError struct { + Cause error +} + +func (e *MetadataFilteringError) Error() string { + return fmt.Sprintf("error filtering and listing metadata: %v", e.Cause) +} + +func (e *MetadataFilteringError) Unwrap() error { + return e.Cause +} + +// CommonFlagsError represents an error getting common flags. 
+type CommonFlagsError struct { + Cause error +} + +func (e *CommonFlagsError) Error() string { + return fmt.Sprintf("error getting common flags: %v", e.Cause) +} + +func (e *CommonFlagsError) Unwrap() error { + return e.Cause +} + +// InitConfigError represents an error initializing CLI config. +type InitConfigError struct { + Cause error +} + +func (e *InitConfigError) Error() string { + return fmt.Sprintf("error initializing CLI config: %v", e.Cause) +} + +func (e *InitConfigError) Unwrap() error { + return e.Cause +} + +// DescribeStacksError represents an error describing stacks. +type DescribeStacksError struct { + Cause error +} + +func (e *DescribeStacksError) Error() string { + return fmt.Sprintf("error describing stacks: %v", e.Cause) +} + +func (e *DescribeStacksError) Unwrap() error { + return e.Cause +} + +// NoSettingsFoundError represents an error when no settings are found with a given query. +type NoSettingsFoundError struct { + Query string +} + +func (e *NoSettingsFoundError) Error() string { + return fmt.Sprintf("no settings found in any stacks with query '%s'", e.Query) +} + +// SettingsFilteringError represents an error when filtering and listing settings. +type SettingsFilteringError struct { + Cause error +} + +func (e *SettingsFilteringError) Error() string { + return fmt.Sprintf("error filtering and listing settings: %v", e.Cause) +} + +func (e *SettingsFilteringError) Unwrap() error { + return e.Cause +} diff --git a/pkg/list/flags.go b/pkg/list/flags.go new file mode 100644 index 0000000000..b82cc4fc41 --- /dev/null +++ b/pkg/list/flags.go @@ -0,0 +1,76 @@ +package list + +import ( + log "github.com/charmbracelet/log" + "github.com/spf13/cobra" +) + +// CommonListFlags represents the common flags used across list commands. +type CommonListFlags struct { + Query string + MaxColumns int + Format string + Delimiter string + Stack string +} + +// DefaultMaxColumns is the default maximum number of columns to display. 
+const DefaultMaxColumns = 10 + +// AddCommonListFlags adds the common flags to a command. +func AddCommonListFlags(cmd *cobra.Command) { + cmd.PersistentFlags().String("query", "", "JMESPath query to filter values") + cmd.PersistentFlags().Int("max-columns", DefaultMaxColumns, "Maximum number of columns to display") + cmd.PersistentFlags().String("format", "", "Output format (table, json, yaml, csv, tsv)") + cmd.PersistentFlags().String("delimiter", "\t", "Delimiter for csv/tsv output (default: tab for tsv, comma for csv)") + cmd.PersistentFlags().String("stack", "", "Stack pattern to filter (supports glob patterns, e.g., '*-dev-*', 'prod-*')") +} + +// GetCommonListFlags extracts the common flags from a command. +func GetCommonListFlags(cmd *cobra.Command) (*CommonListFlags, error) { + flags := cmd.Flags() + + query, err := flags.GetString("query") + if err != nil { + log.Error("failed to get query flag", "error", err) + return nil, err + } + + maxColumns, err := flags.GetInt("max-columns") + if err != nil { + log.Error("failed to get max-columns flag", "error", err) + return nil, err + } + + format, err := flags.GetString("format") + if err != nil { + log.Error("failed to get format flag", "error", err) + return nil, err + } + + // Validate format if provided + if err := ValidateValuesFormat(format); err != nil { + log.Error("invalid format", "error", err) + return nil, err + } + + delimiter, err := flags.GetString("delimiter") + if err != nil { + log.Error("failed to get delimiter flag", "error", err) + return nil, err + } + + stack, err := flags.GetString("stack") + if err != nil { + log.Error("failed to get stack flag", "error", err) + return nil, err + } + + return &CommonListFlags{ + Query: query, + MaxColumns: maxColumns, + Format: format, + Delimiter: delimiter, + Stack: stack, + }, nil +} diff --git a/pkg/list/flags/flags.go b/pkg/list/flags/flags.go new file mode 100644 index 0000000000..610698fc1f --- /dev/null +++ b/pkg/list/flags/flags.go @@ -0,0 
+1,40 @@ +package flags + +import ( + "github.com/spf13/cobra" +) + +// CommonFlags contains common flags for list commands. +type CommonFlags struct { + Format string + MaxColumns int + Delimiter string + Stack string + Query string +} + +// AddCommonListFlags adds common flags to list commands. +func AddCommonListFlags(cmd *cobra.Command) { + cmd.PersistentFlags().String("format", "", "Output format: table, json, yaml, csv, tsv") + cmd.PersistentFlags().Int("max-columns", 0, "Maximum number of columns to display") + cmd.PersistentFlags().String("delimiter", "", "Delimiter for CSV/TSV output") + cmd.PersistentFlags().String("stack", "", "Stack pattern to filter by") + cmd.PersistentFlags().String("query", "", "Query to filter values (e.g., .vars.region)") +} + +// GetCommonListFlags gets common flags from a command. +func GetCommonListFlags(cmd *cobra.Command) (*CommonFlags, error) { + format, _ := cmd.Flags().GetString("format") + maxColumns, _ := cmd.Flags().GetInt("max-columns") + delimiter, _ := cmd.Flags().GetString("delimiter") + stack, _ := cmd.Flags().GetString("stack") + query, _ := cmd.Flags().GetString("query") + + return &CommonFlags{ + Format: format, + MaxColumns: maxColumns, + Delimiter: delimiter, + Stack: stack, + Query: query, + }, nil +} diff --git a/pkg/list/format/delimited.go b/pkg/list/format/delimited.go new file mode 100644 index 0000000000..231fbe45a7 --- /dev/null +++ b/pkg/list/format/delimited.go @@ -0,0 +1,159 @@ +package format + +import ( + "encoding/json" + "fmt" + "sort" + "strings" + + "github.com/cloudposse/atmos/pkg/utils" +) + +const ( + DefaultCSVDelimiter = "," + DefaultTSVDelimiter = "\t" + ValueKey = "value" +) + +// Format implements the Formatter interface for DelimitedFormatter. 
+func (f *DelimitedFormatter) Format(data map[string]interface{}, options FormatOptions) (string, error) { + f.setDefaultDelimiter(&options) + + keys := extractSortedKeys(data) + valueKeys := getValueKeysFromStacks(data, keys) + header, rows := f.generateHeaderAndRows(keys, valueKeys, data) + + return f.buildOutput(header, rows, options.Delimiter), nil +} + +// setDefaultDelimiter sets the default delimiter if not specified. +func (f *DelimitedFormatter) setDefaultDelimiter(options *FormatOptions) { + if options.Delimiter == "" { + if f.format == FormatCSV { + options.Delimiter = DefaultCSVDelimiter + } else { + options.Delimiter = DefaultTSVDelimiter + } + } +} + +// extractSortedKeys extracts and sorts the keys from data. +func extractSortedKeys(data map[string]interface{}) []string { + var keys []string + for k := range data { + keys = append(keys, k) + } + sort.Strings(keys) + return keys +} + +// getValueKeysFromStacks extracts all possible value keys from the first stack. +func getValueKeysFromStacks(data map[string]interface{}, keys []string) []string { + var valueKeys []string + + for _, stackName := range keys { + if stackData, ok := data[stackName].(map[string]interface{}); ok { + if _, hasValue := stackData[ValueKey]; hasValue { + valueKeys = []string{ValueKey} + break + } + // collect all keys from the map + for k := range stackData { + valueKeys = append(valueKeys, k) + } + break + } + } + sort.Strings(valueKeys) + return valueKeys +} + +// generateHeaderAndRows creates the header and rows for the delimited output. +func (f *DelimitedFormatter) generateHeaderAndRows(keys []string, valueKeys []string, data map[string]interface{}) ([]string, [][]string) { + // Create header + header := []string{"Key"} + header = append(header, keys...) 
+ + var rows [][]string + + // Determine if we have the special case with a "value" key + if len(valueKeys) == 1 && valueKeys[0] == ValueKey { + rows = f.generateValueKeyRows(keys, data) + } else { + rows = f.generatePropertyKeyRows(keys, valueKeys, data) + } + + return header, rows +} + +// generateValueKeyRows creates rows for the special case with a "value" key. +func (f *DelimitedFormatter) generateValueKeyRows(keys []string, data map[string]interface{}) [][]string { + var rows [][]string + // In this special case, we create rows using stack names as the first column + for _, stackName := range keys { + row := []string{stackName} + value := "" + if stackData, ok := data[stackName].(map[string]interface{}); ok { + if val, ok := stackData[ValueKey]; ok { + value = formatValue(val) + } + } + row = append(row, value) + rows = append(rows, row) + } + return rows +} + +// generatePropertyKeyRows creates rows where each row represents a property key with values +// from different stacks as columns. This is different from generateValueKeyRows which handles +// the special case where stacks have a single "value" key. +func (f *DelimitedFormatter) generatePropertyKeyRows(keys []string, valueKeys []string, data map[string]interface{}) [][]string { + var rows [][]string + // Property key case: for each value key, create a row + for _, valueKey := range valueKeys { + row := []string{valueKey} + for _, stackName := range keys { + value := "" + if stackData, ok := data[stackName].(map[string]interface{}); ok { + if val, ok := stackData[valueKey]; ok { + value = formatValue(val) + } + } + row = append(row, value) + } + rows = append(rows, row) + } + return rows +} + +// buildOutput builds the final delimited output string. 
+func (f *DelimitedFormatter) buildOutput(header []string, rows [][]string, delimiter string) string { + var output strings.Builder + output.WriteString(strings.Join(header, delimiter) + utils.GetLineEnding()) + for _, row := range rows { + output.WriteString(strings.Join(row, delimiter) + utils.GetLineEnding()) + } + return output.String() +} + +// FormatValue converts a value to its string representation. +func formatValue(value interface{}) string { + switch v := value.(type) { + case string: + return v + case []interface{}: + var values []string + for _, item := range v { + values = append(values, fmt.Sprintf("%v", item)) + } + return strings.Join(values, ",") + case map[string]interface{}: + jsonBytes, err := json.Marshal(v) + if err != nil { + return fmt.Sprintf("%v", v) + } + return string(jsonBytes) + default: + return fmt.Sprintf("%v", v) + } +} diff --git a/pkg/list/format/formatter.go b/pkg/list/format/formatter.go new file mode 100644 index 0000000000..5a31d92fce --- /dev/null +++ b/pkg/list/format/formatter.go @@ -0,0 +1,88 @@ +package format + +import ( + "github.com/cloudposse/atmos/pkg/list/errors" +) + +// Format represents the output format type. +type Format string + +const ( + FormatTable Format = "table" + FormatJSON Format = "json" + FormatYAML Format = "yaml" + FormatCSV Format = "csv" + FormatTSV Format = "tsv" +) + +// FormatOptions contains options for formatting output. +type FormatOptions struct { + MaxColumns int + Delimiter string + TTY bool + Format Format +} + +// Formatter defines the interface for formatting output. +type Formatter interface { + Format(data map[string]interface{}, options FormatOptions) (string, error) +} + +// DefaultFormatter provides a base implementation of Formatter. +type DefaultFormatter struct { + format Format +} + +// TableFormatter handles table format output. +type TableFormatter struct { + DefaultFormatter +} + +// JSONFormatter handles JSON format output. 
+type JSONFormatter struct { + DefaultFormatter +} + +// YAMLFormatter handles YAML format output. +type YAMLFormatter struct { + DefaultFormatter +} + +// DelimitedFormatter handles CSV and TSV format output. +type DelimitedFormatter struct { + DefaultFormatter + format Format +} + +// NewFormatter creates a new formatter for the specified format. +func NewFormatter(format Format) (Formatter, error) { + switch format { + case FormatTable: + return &TableFormatter{DefaultFormatter{format: format}}, nil + case FormatJSON: + return &JSONFormatter{DefaultFormatter{format: format}}, nil + case FormatYAML: + return &YAMLFormatter{DefaultFormatter{format: format}}, nil + case FormatCSV, FormatTSV: + return &DelimitedFormatter{DefaultFormatter: DefaultFormatter{format: format}, format: format}, nil + default: + return nil, &errors.InvalidFormatError{ + Format: string(format), + Valid: []string{string(FormatTable), string(FormatJSON), string(FormatYAML), string(FormatCSV), string(FormatTSV)}, + } + } +} + +// ValidateFormat checks if the provided format is valid. 
+func ValidateFormat(format string) error { + validFormats := []Format{FormatTable, FormatJSON, FormatYAML, FormatCSV, FormatTSV} + for _, f := range validFormats { + if Format(format) == f { + return nil + } + } + return &errors.InvalidFormatError{ + Format: format, + Valid: []string{string(FormatTable), string(FormatJSON), string(FormatYAML), string(FormatCSV), string(FormatTSV)}, + } +} diff --git a/pkg/list/format/formatter_test.go b/pkg/list/format/formatter_test.go new file mode 100644 index 0000000000..ce19d5198f --- /dev/null +++ b/pkg/list/format/formatter_test.go @@ -0,0 +1,262 @@ +package format + +import ( + "encoding/json" + "strings" + "testing" + + "github.com/cloudposse/atmos/pkg/utils" + "github.com/stretchr/testify/assert" +) + +func TestJSONFormatter(t *testing.T) { + formatter := &JSONFormatter{} + data := map[string]interface{}{ + "stack1": map[string]interface{}{ + "value": "test-value", + }, + } + options := FormatOptions{Format: FormatJSON} + + output, err := formatter.Format(data, options) + assert.NoError(t, err) + + // Verify JSON output + var result map[string]interface{} + err = json.Unmarshal([]byte(output), &result) + assert.NoError(t, err) + assert.Equal(t, data, result) +} + +func TestYAMLFormatter(t *testing.T) { + formatter := &YAMLFormatter{} + data := map[string]interface{}{ + "stack1": map[string]interface{}{ + "value": "test-value", + }, + } + options := FormatOptions{Format: FormatYAML} + + output, err := formatter.Format(data, options) + assert.NoError(t, err) + assert.Contains(t, output, "stack1:") + assert.Contains(t, output, "value: test-value") +} + +func TestDelimitedFormatter(t *testing.T) { + tests := []struct { + name string + format Format + data map[string]interface{} + options FormatOptions + expected []string + }{ + { + name: "CSV format", + format: FormatCSV, + data: map[string]interface{}{ + "stack1": map[string]interface{}{ + "value": "test-value", + }, + }, + options: FormatOptions{ + Format: FormatCSV, + 
Delimiter: DefaultCSVDelimiter, + }, + expected: []string{ + "Key,stack1", + "stack1,test-value", + }, + }, + { + name: "TSV format", + format: FormatTSV, + data: map[string]interface{}{ + "stack1": map[string]interface{}{ + "value": "test-value", + }, + }, + options: FormatOptions{ + Format: FormatTSV, + Delimiter: DefaultTSVDelimiter, + }, + expected: []string{ + "Key\tstack1", + "stack1\ttest-value", + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + formatter := &DelimitedFormatter{format: test.format} + output, err := formatter.Format(test.data, test.options) + assert.NoError(t, err) + + lines := strings.Split(strings.TrimSpace(output), utils.GetLineEnding()) + assert.Equal(t, test.expected, lines) + }) + } +} + +func TestTableFormatter(t *testing.T) { + tests := []struct { + name string + data map[string]interface{} + options FormatOptions + expected []string + }{ + { + name: "TTY output", + data: map[string]interface{}{ + "stack1": map[string]interface{}{ + "value": "test-value", + }, + }, + options: FormatOptions{ + Format: FormatTable, + TTY: true, + }, + expected: []string{ + "Key", + "stack1", + "test-value", + }, + }, + { + name: "Non-TTY output", + data: map[string]interface{}{ + "stack1": map[string]interface{}{ + "value": "test-value", + }, + }, + options: FormatOptions{ + Format: FormatTable, + TTY: false, + Delimiter: DefaultCSVDelimiter, + }, + expected: []string{ + "Key,stack1", + "stack1,test-value", + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + formatter := &TableFormatter{} + output, err := formatter.Format(test.data, test.options) + assert.NoError(t, err) + + for _, expected := range test.expected { + assert.Contains(t, output, expected) + } + }) + } +} + +func TestNewFormatter(t *testing.T) { + tests := []struct { + name string + format Format + expectError bool + }{ + { + name: "JSON formatter", + format: FormatJSON, + expectError: false, + }, + { + name: "YAML 
formatter", + format: FormatYAML, + expectError: false, + }, + { + name: "CSV formatter", + format: FormatCSV, + expectError: false, + }, + { + name: "TSV formatter", + format: FormatTSV, + expectError: false, + }, + { + name: "Table formatter", + format: FormatTable, + expectError: false, + }, + { + name: "Invalid formatter", + format: "invalid", + expectError: true, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + formatter, err := NewFormatter(test.format) + + if test.expectError { + assert.Error(t, err) + assert.Nil(t, formatter) + return + } + + assert.NoError(t, err) + assert.NotNil(t, formatter) + }) + } +} + +func TestValidateFormat(t *testing.T) { + tests := []struct { + name string + format string + expectError bool + }{ + { + name: "Valid JSON format", + format: string(FormatJSON), + expectError: false, + }, + { + name: "Valid YAML format", + format: string(FormatYAML), + expectError: false, + }, + { + name: "Valid CSV format", + format: string(FormatCSV), + expectError: false, + }, + { + name: "Valid TSV format", + format: string(FormatTSV), + expectError: false, + }, + { + name: "Valid Table format", + format: string(FormatTable), + expectError: false, + }, + { + name: "Invalid format", + format: "invalid", + expectError: true, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + err := ValidateFormat(test.format) + + if test.expectError { + assert.Error(t, err) + return + } + + assert.NoError(t, err) + }) + } +} diff --git a/pkg/list/format/json.go b/pkg/list/format/json.go new file mode 100644 index 0000000000..0c381675a1 --- /dev/null +++ b/pkg/list/format/json.go @@ -0,0 +1,15 @@ +package format + +import ( + "encoding/json" + "fmt" +) + +// Format implements the Formatter interface for JSONFormatter. 
+func (f *JSONFormatter) Format(data map[string]interface{}, options FormatOptions) (string, error) { + jsonBytes, err := json.MarshalIndent(data, "", " ") + if err != nil { + return "", fmt.Errorf("error formatting JSON output: %w", err) + } + return string(jsonBytes), nil +} diff --git a/pkg/list/format/table.go b/pkg/list/format/table.go new file mode 100644 index 0000000000..e27420d92d --- /dev/null +++ b/pkg/list/format/table.go @@ -0,0 +1,267 @@ +package format + +import ( + "encoding/json" + "fmt" + "reflect" + "sort" + + "github.com/charmbracelet/lipgloss" + "github.com/charmbracelet/lipgloss/table" + "github.com/cloudposse/atmos/internal/tui/templates" + "github.com/cloudposse/atmos/pkg/ui/theme" + "github.com/cloudposse/atmos/pkg/utils" + "github.com/pkg/errors" +) + +// Constants for table formatting. +const ( + MaxColumnWidth = 60 // Maximum width for a column. + TableColumnPadding = 3 // Padding for table columns. + DefaultKeyWidth = 15 // Default base width for keys. +) + +// Error variables for table formatting. +var ( + ErrTableTooWide = errors.New("the table is too wide to display properly") +) + +// extractAndSortKeys extracts and sorts the keys from the data map. +func extractAndSortKeys(data map[string]interface{}, maxColumns int) []string { + var keys []string + for k := range data { + keys = append(keys, k) + } + sort.Strings(keys) + + if maxColumns > 0 && len(keys) > maxColumns { + keys = keys[:maxColumns] + } + + return keys +} + +// extractValueKeys extracts value keys from the first stack data. +func extractValueKeys(data map[string]interface{}, stackKeys []string) []string { + var valueKeys []string + for _, stackName := range stackKeys { + if stackData, ok := data[stackName].(map[string]interface{}); ok { + // If it's a simple value map with "value" key, return it. 
+ if _, hasValue := stackData["value"]; hasValue { + return []string{"value"} + } + // Otherwise, collect all keys from the map + for k := range stackData { + valueKeys = append(valueKeys, k) + } + break + } + } + sort.Strings(valueKeys) + return valueKeys +} + +// createHeader creates the table header. +func createHeader(stackKeys []string) []string { + header := []string{"Key"} + return append(header, stackKeys...) +} + +// createRows creates the table rows using value keys and stack keys. +func createRows(data map[string]interface{}, valueKeys, stackKeys []string) [][]string { + var rows [][]string + for _, valueKey := range valueKeys { + row := []string{valueKey} + for _, stackName := range stackKeys { + value := "" + if stackData, ok := data[stackName].(map[string]interface{}); ok { + if val, ok := stackData[valueKey]; ok { + value = formatTableCellValue(val) + } + } + row = append(row, value) + } + rows = append(rows, row) + } + return rows +} + +// formatTableCellValue formats a value for display in a table cell. +// This is different from formatValue as it prioritizes compact display over completeness. +func formatTableCellValue(val interface{}) string { + if val == nil { + return "" + } + + // Handle string values directly + if str, ok := val.(string); ok { + return truncateString(str) + } + + // Handle different types with summary information + v := reflect.ValueOf(val) + switch v.Kind() { + case reflect.Map, reflect.Array, reflect.Slice: + return formatCollectionValue(v) + case reflect.Bool, reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, + reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: + return fmt.Sprintf("%v", val) + case reflect.Float32, reflect.Float64: + return fmt.Sprintf("%.2f", val) + default: + return formatComplexValue(val) + } +} + +// truncateString truncates a string if it's longer than MaxColumnWidth. 
+func truncateString(str string) string { + if len(str) > MaxColumnWidth { + return str[:MaxColumnWidth-3] + "..." + } + return str +} + +// formatCollectionValue formats maps, arrays and slices for display. +func formatCollectionValue(v reflect.Value) string { + count := v.Len() + switch v.Kind() { + case reflect.Map: + return fmt.Sprintf("{...} (%d keys)", count) + case reflect.Array, reflect.Slice: + return fmt.Sprintf("[...] (%d items)", count) + default: + return "{unknown collection}" + } +} + +// formatComplexValue formats complex values using JSON. +func formatComplexValue(val interface{}) string { + jsonBytes, err := json.Marshal(val) + if err != nil { + return "{complex value}" + } + return truncateString(string(jsonBytes)) +} + +// createStyledTable creates a styled table with headers and rows. +func createStyledTable(header []string, rows [][]string) string { + t := table.New(). + Border(lipgloss.ThickBorder()). + BorderStyle(lipgloss.NewStyle().Foreground(lipgloss.Color(theme.ColorBorder))). + StyleFunc(func(row, col int) lipgloss.Style { + style := lipgloss.NewStyle().PaddingLeft(1).PaddingRight(1) + if row == -1 { + return style. + Foreground(lipgloss.Color(theme.ColorGreen)). + Bold(true). + Align(lipgloss.Center) + } + return style.Inherit(theme.Styles.Description) + }). + Headers(header...). + Rows(rows...) + + return t.String() + utils.GetLineEnding() +} + +// Format implements the Formatter interface for TableFormatter. 
+func (f *TableFormatter) Format(data map[string]interface{}, options FormatOptions) (string, error) { + if !options.TTY { + // to ensure consistency + csvFormatter := &DelimitedFormatter{format: FormatCSV} + return csvFormatter.Format(data, options) + } + + // Get stack keys + stackKeys := extractAndSortKeys(data, options.MaxColumns) + valueKeys := extractValueKeys(data, stackKeys) + + // Estimate table width before creating it + estimatedWidth := calculateEstimatedTableWidth(data, valueKeys, stackKeys) + terminalWidth := templates.GetTerminalWidth() + + // Check if the table would be too wide + if estimatedWidth > terminalWidth { + return "", errors.Errorf("%s (width: %d > %d).\n\nSuggestions:\n- Use --stack to select specific stacks (examples: --stack 'plat-ue2-dev')\n- Use --query to select specific settings (example: --query '.vpc.validation')\n- Use --format json or --format yaml for complete data viewing", + ErrTableTooWide.Error(), estimatedWidth, terminalWidth) + } + + header := createHeader(stackKeys) + rows := createRows(data, valueKeys, stackKeys) + + return createStyledTable(header, rows), nil +} + +// calculateMaxKeyWidth determines the maximum width needed for the key column. +func calculateMaxKeyWidth(valueKeys []string) int { + maxKeyWidth := DefaultKeyWidth // Base width assumption + for _, key := range valueKeys { + if len(key) > maxKeyWidth { + maxKeyWidth = len(key) + } + } + return maxKeyWidth +} + +// limitWidth ensures a width doesn't exceed MaxColumnWidth. +func limitWidth(width int) int { + if width > MaxColumnWidth { + return MaxColumnWidth + } + return width +} + +// getMaxValueWidth returns the maximum formatted value width in a column. 
+func getMaxValueWidth(stackData map[string]interface{}, valueKeys []string) int { + maxWidth := 0 + + for _, valueKey := range valueKeys { + if val, ok := stackData[valueKey]; ok { + formattedValue := formatTableCellValue(val) + valueWidth := len(formattedValue) + + if valueWidth > maxWidth { + maxWidth = valueWidth + } + } + } + + return limitWidth(maxWidth) +} + +// calculateStackColumnWidth calculates the width for a single stack column. +func calculateStackColumnWidth(stackName string, stackData map[string]interface{}, valueKeys []string) int { + // Start with the width based on stack name + columnWidth := limitWidth(len(stackName)) + + // Check value widths + valueWidth := getMaxValueWidth(stackData, valueKeys) + if valueWidth > columnWidth { + columnWidth = valueWidth + } + + return columnWidth +} + +// calculateEstimatedTableWidth estimates the width of the table based on content. +func calculateEstimatedTableWidth(data map[string]interface{}, valueKeys, stackKeys []string) int { + // Calculate key column width + maxKeyWidth := calculateMaxKeyWidth(valueKeys) + totalWidth := maxKeyWidth + TableColumnPadding + + // Calculate width for each stack column + for _, stackName := range stackKeys { + var columnWidth int + + if stackData, ok := data[stackName].(map[string]interface{}); ok { + columnWidth = calculateStackColumnWidth(stackName, stackData, valueKeys) + } else { + // If no stack data, just use the stack name width + columnWidth = limitWidth(len(stackName)) + } + + totalWidth += columnWidth + TableColumnPadding + } + + return totalWidth +} diff --git a/pkg/list/format/yaml.go b/pkg/list/format/yaml.go new file mode 100644 index 0000000000..d53014f11b --- /dev/null +++ b/pkg/list/format/yaml.go @@ -0,0 +1,16 @@ +package format + +import ( + "fmt" + + "github.com/cloudposse/atmos/pkg/utils" +) + +// Format implements the Formatter interface for YAMLFormatter. 
+func (f *YAMLFormatter) Format(data map[string]interface{}, options FormatOptions) (string, error) { + yamlBytes, err := utils.ConvertToYAML(data) + if err != nil { + return "", fmt.Errorf("error formatting YAML output: %w", err) + } + return yamlBytes, nil +} diff --git a/pkg/list/list_values.go b/pkg/list/list_values.go new file mode 100644 index 0000000000..adfe0b5193 --- /dev/null +++ b/pkg/list/list_values.go @@ -0,0 +1,156 @@ +package list + +import ( + "path/filepath" + "sort" + + "github.com/cloudposse/atmos/internal/tui/templates/term" + listerrors "github.com/cloudposse/atmos/pkg/list/errors" + "github.com/cloudposse/atmos/pkg/list/format" + "github.com/cloudposse/atmos/pkg/list/values" + "github.com/pkg/errors" +) + +// Error variables for list_values package. +var ( + ErrInvalidStackPattern = errors.New("invalid stack pattern") +) + +// FilterOptions contains the options for filtering and listing component values. +type FilterOptions struct { + Component string + Query string + IncludeAbstract bool + MaxColumns int + FormatStr string + Delimiter string + StackPattern string +} + +// FilterAndListValues filters and lists component values across stacks. 
+func FilterAndListValues(stacksMap map[string]interface{}, options *FilterOptions) (string, error) { + // Set default format if not specified + if options.FormatStr == "" { + options.FormatStr = string(format.FormatTable) + } + + if err := format.ValidateFormat(options.FormatStr); err != nil { + return "", err + } + + // Extract stack values + extractedValues, err := extractComponentValues(stacksMap, options.Component, options.IncludeAbstract) + if err != nil { + return "", err + } + + // Apply filters + filteredValues, err := applyFilters(extractedValues, options.StackPattern, options.MaxColumns) + if err != nil { + return "", err + } + + // Apply query to values + queriedValues, err := applyQuery(filteredValues, options.Query) + if err != nil { + return "", err + } + + // Format the output + return formatOutput(queriedValues, options.FormatStr, options.Delimiter, options.MaxColumns) +} + +// extractComponentValues extracts the component values from all stacks. +func extractComponentValues(stacksMap map[string]interface{}, component string, includeAbstract bool) (map[string]interface{}, error) { + extractor := values.NewDefaultExtractor() + return extractor.ExtractStackValues(stacksMap, component, includeAbstract) +} + +// applyFilters applies stack pattern and column limits to the values. +func applyFilters(extractedValues map[string]interface{}, stackPattern string, maxColumns int) (map[string]interface{}, error) { + // Apply stack pattern filter + filteredByPattern, err := filterByStackPattern(extractedValues, stackPattern) + if err != nil { + return nil, err + } + + // Apply column limit + return limitColumns(filteredByPattern, maxColumns), nil +} + +// filterByStackPattern filters values by a glob pattern. 
+func filterByStackPattern(values map[string]interface{}, pattern string) (map[string]interface{}, error) { + if pattern == "" { + return values, nil + } + + filteredValues := make(map[string]interface{}) + for stackName, value := range values { + matched, err := filepath.Match(pattern, stackName) + if err != nil { + return nil, errors.Errorf("invalid stack pattern '%s'", pattern) + } + if matched { + filteredValues[stackName] = value + } + } + return filteredValues, nil +} + +// limitColumns limits the number of columns in the output. +func limitColumns(values map[string]interface{}, maxColumns int) map[string]interface{} { + if maxColumns <= 0 { + return values + } + + limitedValues := make(map[string]interface{}) + var sortedKeys []string + for stackName := range values { + sortedKeys = append(sortedKeys, stackName) + } + sort.Strings(sortedKeys) + + count := len(sortedKeys) + if count > maxColumns { + count = maxColumns + } + + for i := 0; i < count; i++ { + limitedValues[sortedKeys[i]] = values[sortedKeys[i]] + } + return limitedValues +} + +// applyQuery applies a query to the filtered values. +func applyQuery(filteredValues map[string]interface{}, query string) (map[string]interface{}, error) { + extractor := values.NewDefaultExtractor() + return extractor.ApplyValueQuery(filteredValues, query) +} + +// formatOutput formats the output based on the specified format. +func formatOutput(values map[string]interface{}, formatStr, delimiter string, maxColumns int) (string, error) { + formatter, err := format.NewFormatter(format.Format(formatStr)) + if err != nil { + return "", err + } + + options := format.FormatOptions{ + MaxColumns: maxColumns, + Delimiter: delimiter, + TTY: term.IsTTYSupportForStdout(), + Format: format.Format(formatStr), + } + + return formatter.Format(values, options) +} + +// IsNoValuesFoundError checks if an error is a NoValuesFoundError. 
+func IsNoValuesFoundError(err error) bool { + _, ok := err.(*listerrors.NoValuesFoundError) + return ok +} + +// ValidateValuesFormat validates the output format. +func ValidateValuesFormat(formatStr string) error { + return format.ValidateFormat(formatStr) +} diff --git a/pkg/list/list_values_test.go b/pkg/list/list_values_test.go new file mode 100644 index 0000000000..c0e2f6b1cf --- /dev/null +++ b/pkg/list/list_values_test.go @@ -0,0 +1,355 @@ +package list + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestFilterAndListValues(t *testing.T) { + // Mock stacks data + stacksMap := map[string]interface{}{ + "staging": map[string]interface{}{ + "components": map[string]interface{}{ + "terraform": map[string]interface{}{ + "vpc": map[string]interface{}{ + "vars": map[string]interface{}{ + "environment": "staging", + "region": "us-east-1", + "cidr_block": "10.1.0.0/16", + "tags": map[string]interface{}{ + "Environment": "staging", + "Team": "devops", + }, + "subnets": []interface{}{ + "10.1.1.0/24", + "10.1.2.0/24", + }, + }, + }, + }, + }, + "settings": map[string]interface{}{ + "environment": map[string]interface{}{ + "name": "staging", + "region": "us-east-1", + }, + }, + "metadata": map[string]interface{}{ + "team": "platform", + "version": "1.0.0", + }, + }, + "dev": map[string]interface{}{ + "components": map[string]interface{}{ + "terraform": map[string]interface{}{ + "vpc": map[string]interface{}{ + "vars": map[string]interface{}{ + "environment": "dev", + "region": "us-east-1", + "cidr_block": "10.0.0.0/16", + "tags": map[string]interface{}{ + "Environment": "dev", + "Team": "devops", + }, + "subnets": []interface{}{ + "10.0.1.0/24", + "10.0.2.0/24", + }, + }, + }, + }, + }, + "settings": map[string]interface{}{ + "environment": map[string]interface{}{ + "name": "dev", + "region": "us-east-1", + }, + }, + "metadata": map[string]interface{}{ + "team": "platform", + "version": 
"1.0.0", + }, + }, + "prod": map[string]interface{}{ + "components": map[string]interface{}{ + "terraform": map[string]interface{}{ + "vpc": map[string]interface{}{ + "abstract": true, + "vars": map[string]interface{}{ + "environment": "prod", + "region": "us-east-1", + "cidr_block": "10.2.0.0/16", + "tags": map[string]interface{}{ + "Environment": "prod", + "Team": "devops", + }, + "subnets": []interface{}{ + "10.2.1.0/24", + "10.2.2.0/24", + }, + }, + }, + }, + }, + "settings": map[string]interface{}{ + "environment": map[string]interface{}{ + "name": "prod", + "region": "us-east-1", + }, + }, + "metadata": map[string]interface{}{ + "team": "platform", + "version": "1.0.0", + }, + }, + } + + tests := []struct { + name string + component string + query string + includeAbstract bool + maxColumns int + format string + delimiter string + stackPattern string + expectError bool + expectedError string + checkFunc func(t *testing.T, output string) + }{ + { + name: "basic table format", + component: "vpc", + format: "", + checkFunc: func(t *testing.T, output string) { + assert.Contains(t, output, "environment") + assert.Contains(t, output, "region") + assert.Contains(t, output, "cidr_block") + assert.Contains(t, output, "dev") + assert.Contains(t, output, "staging") + assert.NotContains(t, output, "prod") // Abstract component + }, + }, + { + name: "include abstract components", + component: "vpc", + includeAbstract: true, + format: "json", // Changed to JSON to avoid terminal width issues + checkFunc: func(t *testing.T, output string) { + assert.Contains(t, output, "prod") + }, + }, + { + name: "json format", + component: "vpc", + format: "json", + checkFunc: func(t *testing.T, output string) { + var result map[string]interface{} + err := json.Unmarshal([]byte(output), &result) + assert.NoError(t, err) + assert.Contains(t, result, "dev") + assert.Contains(t, result, "staging") + }, + }, + { + name: "yaml format", + component: "vpc", + format: "yaml", + checkFunc: func(t 
*testing.T, output string) { + // YAML format should contain the environment values + assert.Contains(t, output, "dev:") + assert.Contains(t, output, "staging:") + assert.Contains(t, output, "environment: dev") + assert.Contains(t, output, "environment: staging") + assert.Contains(t, output, "cidr_block:") + }, + }, + { + name: "csv format", + component: "vpc", + format: "csv", + delimiter: ",", + checkFunc: func(t *testing.T, output string) { + assert.Contains(t, output, "Key,dev,staging") + assert.Contains(t, output, "environment,dev,staging") + }, + }, + { + name: "tsv format", + component: "vpc", + format: "tsv", + delimiter: "\t", + checkFunc: func(t *testing.T, output string) { + assert.Contains(t, output, "Key\tdev\tstaging") + assert.Contains(t, output, "environment\tdev\tstaging") + }, + }, + { + name: "max columns", + component: "vpc", + maxColumns: 1, + format: "", + checkFunc: func(t *testing.T, output string) { + assert.Contains(t, output, "dev") + assert.NotContains(t, output, "staging") + }, + }, + { + name: "invalid format", + component: "vpc", + format: "invalid", + expectError: true, + }, + { + name: "component not found", + component: "nonexistent", + expectError: true, + expectedError: "no values found for component 'nonexistent'", + }, + { + name: "stack pattern matching", + component: "vpc", + stackPattern: "dev*", + format: "json", + checkFunc: func(t *testing.T, output string) { + var result map[string]interface{} + err := json.Unmarshal([]byte(output), &result) + assert.NoError(t, err) + assert.Contains(t, result, "dev") + assert.NotContains(t, result, "staging") + assert.NotContains(t, result, "prod") + }, + }, + { + name: "settings component", + component: "settings", + format: "json", + checkFunc: func(t *testing.T, output string) { + var result map[string]interface{} + err := json.Unmarshal([]byte(output), &result) + assert.NoError(t, err) + for _, env := range []string{"dev", "staging", "prod"} { + envData, ok := 
result[env].(map[string]interface{}) + assert.True(t, ok) + envSettings, ok := envData["environment"].(map[string]interface{}) + assert.True(t, ok) + assert.Contains(t, envSettings, "name") + assert.Contains(t, envSettings, "region") + } + }, + }, + { + name: "metadata component", + component: "metadata", + format: "json", + checkFunc: func(t *testing.T, output string) { + var result map[string]interface{} + err := json.Unmarshal([]byte(output), &result) + assert.NoError(t, err) + for _, env := range []string{"dev", "staging", "prod"} { + envData, ok := result[env].(map[string]interface{}) + assert.True(t, ok) + assert.Equal(t, "platform", envData["team"]) + assert.Equal(t, "1.0.0", envData["version"]) + } + }, + }, + { + name: "query filtering - nested map", + component: "vpc", + query: ".tags", + format: "json", + checkFunc: func(t *testing.T, output string) { + var result map[string]interface{} + err := json.Unmarshal([]byte(output), &result) + assert.NoError(t, err) + for _, env := range []string{"dev", "staging"} { + envData, ok := result[env].(map[string]interface{}) + assert.True(t, ok) + value, ok := envData["value"].(map[string]interface{}) + assert.True(t, ok) + assert.Equal(t, "devops", value["Team"]) + } + }, + }, + { + name: "query filtering - array", + component: "vpc", + query: ".subnets", + format: "json", + checkFunc: func(t *testing.T, output string) { + var result map[string]interface{} + err := json.Unmarshal([]byte(output), &result) + assert.NoError(t, err) + devData, ok := result["dev"].(map[string]interface{}) + assert.True(t, ok) + value, ok := devData["value"].(string) + assert.True(t, ok) + assert.Contains(t, value, "10.0.1.0/24") + assert.Contains(t, value, "10.0.2.0/24") + }, + }, + { + name: "settings with query", + component: "settings", + query: ".environment.name", + format: "json", + checkFunc: func(t *testing.T, output string) { + var result map[string]interface{} + err := json.Unmarshal([]byte(output), &result) + assert.NoError(t, 
err) + for env, expected := range map[string]string{"dev": "dev", "staging": "staging", "prod": "prod"} { + envData, ok := result[env].(map[string]interface{}) + assert.True(t, ok) + assert.Equal(t, expected, envData["value"]) + } + }, + }, + { + name: "metadata with query", + component: "metadata", + query: ".team", + format: "json", + checkFunc: func(t *testing.T, output string) { + var result map[string]interface{} + err := json.Unmarshal([]byte(output), &result) + assert.NoError(t, err) + for _, env := range []string{"dev", "staging", "prod"} { + envData, ok := result[env].(map[string]interface{}) + assert.True(t, ok) + assert.Equal(t, "platform", envData["value"]) + } + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + output, err := FilterAndListValues(stacksMap, &FilterOptions{ + Component: tt.component, + Query: tt.query, + IncludeAbstract: tt.includeAbstract, + MaxColumns: tt.maxColumns, + FormatStr: tt.format, + Delimiter: tt.delimiter, + StackPattern: tt.stackPattern, + }) + + if tt.expectError { + assert.Error(t, err) + if tt.expectedError != "" { + assert.Equal(t, tt.expectedError, err.Error()) + } + return + } + + require.NoError(t, err) + if tt.checkFunc != nil { + tt.checkFunc(t, output) + } + }) + } +} diff --git a/pkg/list/list_workflows.go b/pkg/list/list_workflows.go index 7d020cbb20..68d1864c3b 100644 --- a/pkg/list/list_workflows.go +++ b/pkg/list/list_workflows.go @@ -22,7 +22,9 @@ import ( const ( FormatTable = "table" FormatJSON = "json" + FormatYAML = "yaml" FormatCSV = "csv" + FormatTSV = "tsv" ) // ValidateFormat checks if the given format is supported @@ -30,7 +32,7 @@ func ValidateFormat(format string) error { if format == "" { return nil } - validFormats := []string{FormatTable, FormatJSON, FormatCSV} + validFormats := []string{FormatTable, FormatJSON, FormatYAML, FormatCSV, FormatTSV} for _, f := range validFormats { if format == f { return nil diff --git a/pkg/list/list_workflows_test.go 
b/pkg/list/list_workflows_test.go index 9ff48c1e0e..0d35b3d363 100644 --- a/pkg/list/list_workflows_test.go +++ b/pkg/list/list_workflows_test.go @@ -41,6 +41,11 @@ func TestValidateFormat(t *testing.T) { format: "csv", wantErr: false, }, + { + name: "valid yaml format", + format: "yaml", + wantErr: false, + }, { name: "invalid format", format: "invalid", @@ -68,7 +73,7 @@ func TestListWorkflows(t *testing.T) { // Create workflow directory structure workflowsDir := filepath.Join(tmpDir, "stacks", "workflows") - err = os.MkdirAll(workflowsDir, 0755) + err = os.MkdirAll(workflowsDir, 0o755) require.NoError(t, err) // Create atmos.yaml with workflow configuration @@ -81,7 +86,7 @@ stacks: workflows: base_path: "stacks/workflows" ` - err = os.WriteFile(filepath.Join(tmpDir, "atmos.yaml"), []byte(atmosConfig), 0644) + err = os.WriteFile(filepath.Join(tmpDir, "atmos.yaml"), []byte(atmosConfig), 0o600) require.NoError(t, err) // Create an empty workflow file @@ -92,7 +97,7 @@ workflows: } emptyWorkflowBytes, err := yaml.Marshal(emptyWorkflow) require.NoError(t, err) - err = os.WriteFile(emptyWorkflowFile, emptyWorkflowBytes, 0644) + err = os.WriteFile(emptyWorkflowFile, emptyWorkflowBytes, 0o600) require.NoError(t, err) // Create a networking workflow file @@ -110,7 +115,7 @@ workflows: } networkingWorkflowBytes, err := yaml.Marshal(networkingWorkflow) require.NoError(t, err) - err = os.WriteFile(networkingWorkflowFile, networkingWorkflowBytes, 0644) + err = os.WriteFile(networkingWorkflowFile, networkingWorkflowBytes, 0o600) require.NoError(t, err) // Create a validation workflow file @@ -128,7 +133,7 @@ workflows: } validationWorkflowBytes, err := yaml.Marshal(validationWorkflow) require.NoError(t, err) - err = os.WriteFile(validationWorkflowFile, validationWorkflowBytes, 0644) + err = os.WriteFile(validationWorkflowFile, validationWorkflowBytes, 0o600) require.NoError(t, err) // Change to the temporary directory for testing @@ -302,7 +307,7 @@ func 
TestListWorkflowsWithFile(t *testing.T) { } testWorkflowBytes, err := yaml.Marshal(testWorkflow) require.NoError(t, err) - err = os.WriteFile(testWorkflowFile, testWorkflowBytes, 0644) + err = os.WriteFile(testWorkflowFile, testWorkflowBytes, 0o600) require.NoError(t, err) listConfig := schema.ListConfig{ diff --git a/pkg/list/utils/utils.go b/pkg/list/utils/utils.go new file mode 100644 index 0000000000..28d041e8bf --- /dev/null +++ b/pkg/list/utils/utils.go @@ -0,0 +1,20 @@ +package utils + +import ( + "github.com/cloudposse/atmos/pkg/list/errors" +) + +// IsNoValuesFoundError checks if an error is a NoValuesFoundError. +func IsNoValuesFoundError(err error) bool { + _, ok := err.(*errors.NoValuesFoundError) + return ok +} + +// Common flag names and descriptions. +const ( + FlagFormat = "format" + FlagMaxColumns = "max-columns" + FlagDelimiter = "delimiter" + FlagStack = "stack" + FlagQuery = "query" +) diff --git a/pkg/list/values/extract.go b/pkg/list/values/extract.go new file mode 100644 index 0000000000..0a86f9fdcc --- /dev/null +++ b/pkg/list/values/extract.go @@ -0,0 +1,308 @@ +package values + +import ( + "fmt" + "strconv" + "strings" + + "github.com/cloudposse/atmos/pkg/list/errors" + "github.com/cloudposse/atmos/pkg/utils" +) + +// handleSpecialComponent processes special components like settings and metadata. +func handleSpecialComponent(stack map[string]interface{}, component string) (map[string]interface{}, bool) { + // First check if the component exists at the top level + if section, ok := stack[component].(map[string]interface{}); ok { + return section, true + } + + // If not found at the top level and component is "settings", look for it in components + if component == "settings" { + return extractSettingsFromComponents(stack) + } + + return nil, false +} + +// extractSettingsFromComponents extracts settings from terraform components. 
+func extractSettingsFromComponents(stack map[string]interface{}) (map[string]interface{}, bool) { + allSettings := make(map[string]interface{}) + + // Try to navigate to terraform components + components, ok := stack["components"].(map[string]interface{}) + if !ok { + return nil, false + } + + terraform, ok := components["terraform"].(map[string]interface{}) + if !ok { + return nil, false + } + + // Collect settings from all terraform components + for componentName, componentData := range terraform { + if settings := extractComponentSettings(componentData); settings != nil { + allSettings[componentName] = settings + } + } + + // Return all settings if we found any + if len(allSettings) > 0 { + return allSettings, true + } + + return nil, false +} + +// extractComponentSettings extracts settings from a component. +func extractComponentSettings(componentData interface{}) interface{} { + comp, ok := componentData.(map[string]interface{}) + if !ok { + return nil + } + + settings, ok := comp["settings"].(map[string]interface{}) + if !ok { + return nil + } + + return deepCopyToStringMap(settings) +} + +// deepCopyToStringMap creates a deep copy of a map, ensuring all keys are strings. +// This helps with JSON marshaling which requires string keys. +func deepCopyToStringMap(m interface{}) interface{} { + switch m := m.(type) { + case map[string]interface{}: + copy := make(map[string]interface{}) + for k, v := range m { + copy[k] = deepCopyToStringMap(v) + } + return copy + case map[interface{}]interface{}: + copy := make(map[string]interface{}) + for k, v := range m { + copy[fmt.Sprintf("%v", k)] = deepCopyToStringMap(v) + } + return copy + case []interface{}: + copy := make([]interface{}, len(m)) + for i, v := range m { + copy[i] = deepCopyToStringMap(v) + } + return copy + default: + return m + } +} + +// handleTerraformComponent processes regular terraform components. 
+func handleTerraformComponent(stack map[string]interface{}, component string, includeAbstract bool) (map[string]interface{}, bool) { + components, ok := stack["components"].(map[string]interface{}) + if !ok { + return nil, false + } + + terraform, ok := components["terraform"].(map[string]interface{}) + if !ok { + return nil, false + } + + componentName := strings.TrimPrefix(component, "terraform/") + comp, ok := terraform[componentName].(map[string]interface{}) + if !ok { + return nil, false + } + + if !includeAbstract { + if isAbstract, ok := comp["abstract"].(bool); ok && isAbstract { + return nil, false + } + } + + vars, ok := comp["vars"].(map[string]interface{}) + if !ok { + return nil, false + } + + return vars, true +} + +// formatArrayValue converts an array to a comma-separated string. +func formatArrayValue(value interface{}) interface{} { + if arr, ok := value.([]interface{}); ok { + strValues := make([]string, len(arr)) + for i, v := range arr { + strValues[i] = fmt.Sprintf("%v", v) + } + return strings.Join(strValues, ",") + } + return value +} + +// ExtractStackValues implements the ValueExtractor interface for DefaultExtractor. +func (e *DefaultExtractor) ExtractStackValues(stacksMap map[string]interface{}, component string, includeAbstract bool) (map[string]interface{}, error) { + values := make(map[string]interface{}) + + for stackName, stackData := range stacksMap { + stack, ok := stackData.(map[string]interface{}) + if !ok { + continue + } + + // Handle special components (settings, metadata). + if component == "settings" || component == "metadata" { + if section, ok := handleSpecialComponent(stack, component); ok { + values[stackName] = section + } + continue + } + + // Handle regular terraform components. 
+ if vars, ok := handleTerraformComponent(stack, component, includeAbstract); ok { + values[stackName] = vars + } + } + + if len(values) == 0 { + return nil, &errors.NoValuesFoundError{Component: component} + } + + return values, nil +} + +// ApplyValueQuery implements the ValueExtractor interface for DefaultExtractor. +func (e *DefaultExtractor) ApplyValueQuery(values map[string]interface{}, query string) (map[string]interface{}, error) { + if query == "" { + return values, nil + } + + result := make(map[string]interface{}) + for stackName, stackData := range values { + data, ok := stackData.(map[string]interface{}) + if !ok { + continue + } + + // Get value using query path. + value := getValueFromPath(data, query) + if value != nil { + result[stackName] = map[string]interface{}{ + "value": formatArrayValue(value), + } + } + } + + if len(result) == 0 { + return nil, &errors.QueryError{ + Query: query, + Cause: &errors.NoValuesFoundError{Component: "query", Query: query}, + } + } + + return result, nil +} + +// getValueFromPath gets a value from a nested map using a dot-separated path. +func getValueFromPath(data map[string]interface{}, path string) interface{} { + if path == "" { + return data + } + + parts := strings.Split(strings.TrimPrefix(path, "."), ".") + return navigatePath(data, parts) +} + +// navigatePath follows a path of parts through nested data structures. +func navigatePath(data interface{}, parts []string) interface{} { + current := data + + for _, part := range parts { + if part == "" { + continue + } + + switch v := current.(type) { + case map[string]interface{}: + var found bool + current, found = processMapPart(v, part) + if !found { + return nil + } + case []interface{}: + var found bool + current, found = processArrayPart(v, part) + if !found { + return current // Return array if we can't process part + } + default: + return nil + } + } + + return current +} + +// processMapPart handles traversing a map with the given part key. 
+func processMapPart(mapData map[string]interface{}, part string) (interface{}, bool) { + // Check for direct key match first + if val, exists := mapData[part]; exists { + return val, true + } + + // If the part contains a wildcard pattern, check all keys + if strings.Contains(part, "*") { + return processWildcardPattern(mapData, part) + } + + // No match found + return nil, false +} + +// processWildcardPattern handles wildcard matching in map keys. +func processWildcardPattern(mapData map[string]interface{}, pattern string) (interface{}, bool) { + matchFound := false + result := make(map[string]interface{}) + + for key, val := range mapData { + matched, err := utils.MatchWildcard(pattern, key) + if err == nil && matched { + matchFound = true + result[key] = val + } + } + + if !matchFound { + return nil, false + } + + // If only one match, continue with that value + if len(result) == 1 { + for _, val := range result { + return val, true + } + } + + // Otherwise return the map of all matches + return result, true +} + +// processArrayPart handles traversing an array with the given part. 
+func processArrayPart(arrayData []interface{}, part string) (interface{}, bool) { + // If part is a number, get that specific index + if idx, err := strconv.Atoi(part); err == nil && idx >= 0 && idx < len(arrayData) { + return arrayData[idx], true + } + + // If array has map elements, try to access by key + if len(arrayData) > 0 { + if mapElement, ok := arrayData[0].(map[string]interface{}); ok { + if val, exists := mapElement[part]; exists { + return val, true + } + } + } + + // Return false to indicate we should return the array itself + return nil, false +} diff --git a/pkg/list/values/extract_test.go b/pkg/list/values/extract_test.go new file mode 100644 index 0000000000..e56770c475 --- /dev/null +++ b/pkg/list/values/extract_test.go @@ -0,0 +1,201 @@ +package values + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestExtractStackValues(t *testing.T) { + tests := []struct { + name string + stacksMap map[string]interface{} + component string + includeAbstract bool + expectedValues map[string]interface{} + expectError bool + }{ + { + name: "extract regular component values", + stacksMap: map[string]interface{}{ + "stack1": map[string]interface{}{ + "components": map[string]interface{}{ + "terraform": map[string]interface{}{ + "vpc": map[string]interface{}{ + "vars": map[string]interface{}{ + "region": "us-west-2", + }, + }, + }, + }, + }, + }, + component: "vpc", + expectedValues: map[string]interface{}{ + "stack1": map[string]interface{}{ + "region": "us-west-2", + }, + }, + }, + { + name: "extract settings component", + stacksMap: map[string]interface{}{ + "stack1": map[string]interface{}{ + "settings": map[string]interface{}{ + "region": "us-west-2", + }, + }, + }, + component: "settings", + expectedValues: map[string]interface{}{ + "stack1": map[string]interface{}{ + "region": "us-west-2", + }, + }, + }, + { + name: "skip abstract component", + stacksMap: map[string]interface{}{ + "stack1": map[string]interface{}{ + "components": 
map[string]interface{}{ + "terraform": map[string]interface{}{ + "vpc": map[string]interface{}{ + "abstract": true, + "vars": map[string]interface{}{ + "region": "us-west-2", + }, + }, + }, + }, + }, + }, + component: "vpc", + includeAbstract: false, + expectError: true, + }, + { + name: "include abstract component", + stacksMap: map[string]interface{}{ + "stack1": map[string]interface{}{ + "components": map[string]interface{}{ + "terraform": map[string]interface{}{ + "vpc": map[string]interface{}{ + "abstract": true, + "vars": map[string]interface{}{ + "region": "us-west-2", + }, + }, + }, + }, + }, + }, + component: "vpc", + includeAbstract: true, + expectedValues: map[string]interface{}{ + "stack1": map[string]interface{}{ + "region": "us-west-2", + }, + }, + }, + } + + extractor := NewDefaultExtractor() + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + values, err := extractor.ExtractStackValues(test.stacksMap, test.component, test.includeAbstract) + + if test.expectError { + assert.Error(t, err) + return + } + + assert.NoError(t, err) + assert.Equal(t, test.expectedValues, values) + }) + } +} + +func TestApplyValueQuery(t *testing.T) { + tests := []struct { + name string + values map[string]interface{} + query string + expectedResult map[string]interface{} + expectError bool + }{ + { + name: "simple query", + values: map[string]interface{}{ + "stack1": map[string]interface{}{ + "region": "us-west-2", + }, + }, + query: "region", + expectedResult: map[string]interface{}{ + "stack1": map[string]interface{}{ + "value": "us-west-2", + }, + }, + }, + { + name: "nested query", + values: map[string]interface{}{ + "stack1": map[string]interface{}{ + "vpc": map[string]interface{}{ + "cidr": "10.0.0.0/16", + }, + }, + }, + query: "vpc.cidr", + expectedResult: map[string]interface{}{ + "stack1": map[string]interface{}{ + "value": "10.0.0.0/16", + }, + }, + }, + { + name: "array query", + values: map[string]interface{}{ + "stack1": 
map[string]interface{}{ + "subnets": []interface{}{ + "10.0.1.0/24", + "10.0.2.0/24", + }, + }, + }, + query: "subnets.0", + expectedResult: map[string]interface{}{ + "stack1": map[string]interface{}{ + "value": "10.0.1.0/24", + }, + }, + }, + { + name: "invalid query", + values: map[string]interface{}{ + "stack1": map[string]interface{}{ + "region": "us-west-2", + }, + }, + query: "invalid.path", + expectError: true, + }, + } + + extractor := NewDefaultExtractor() + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + result, err := extractor.ApplyValueQuery(test.values, test.query) + + if test.expectError { + assert.Error(t, err) + return + } + + assert.NoError(t, err) + assert.Equal(t, test.expectedResult, result) + }) + } +} diff --git a/pkg/list/values/types.go b/pkg/list/values/types.go new file mode 100644 index 0000000000..aa23b25d25 --- /dev/null +++ b/pkg/list/values/types.go @@ -0,0 +1,38 @@ +package values + +// ValueExtractor handles the extraction of values from stack configurations. +type ValueExtractor interface { + // ExtractStackValues extracts values from stack configurations for a given component. + ExtractStackValues(stacksMap map[string]interface{}, component string, includeAbstract bool) (map[string]interface{}, error) + + // ApplyValueQuery applies a query to extracted values and returns the filtered results. + ApplyValueQuery(values map[string]interface{}, query string) (map[string]interface{}, error) +} + +// StackValue represents a value extracted from a stack. +type StackValue struct { + Value interface{} + IsAbstract bool + Stack string +} + +// QueryResult represents the result of applying a query to stack values. +type QueryResult struct { + Values map[string]interface{} + Query string +} + +// ExtractOptions contains options for value extraction. 
+type ExtractOptions struct { + Component string + IncludeAbstract bool + StackPattern string +} + +// DefaultExtractor provides a default implementation of ValueExtractor. +type DefaultExtractor struct{} + +// NewDefaultExtractor creates a new DefaultExtractor. +func NewDefaultExtractor() *DefaultExtractor { + return &DefaultExtractor{} +} diff --git a/pkg/utils/wildcard.go b/pkg/utils/wildcard.go new file mode 100644 index 0000000000..b50e56c8e6 --- /dev/null +++ b/pkg/utils/wildcard.go @@ -0,0 +1,28 @@ +package utils + +import ( + "path/filepath" + + "github.com/bmatcuk/doublestar/v4" +) + +// MatchWildcard checks if a string matches a wildcard pattern. +// The pattern can include glob patterns: +// - '*' matches any sequence of non-separator characters. +// - '?' matches any single non-separator character. +// - '[abc]' matches any character within the brackets. +// - '[a-z]' matches any character in the range. +// - '**' matches any number of directories or files recursively. +// - '{abc,xyz}` matches the string "abc" or "xyz". 
+func MatchWildcard(pattern, str string) (bool, error) { + // Handle empty pattern as match all + if pattern == "" { + return true, nil + } + + // Convert pattern to filepath-style pattern + pattern = filepath.ToSlash(pattern) + str = filepath.ToSlash(str) + + return doublestar.PathMatch(pattern, str) +} diff --git a/pkg/utils/wildcard_test.go b/pkg/utils/wildcard_test.go new file mode 100644 index 0000000000..b1ff392751 --- /dev/null +++ b/pkg/utils/wildcard_test.go @@ -0,0 +1,167 @@ +package utils + +import ( + "testing" +) + +func TestMatchWildcard(t *testing.T) { + tests := []struct { + name string + pattern string + str string + want bool + wantErr bool + }{ + { + name: "empty pattern", + pattern: "", + str: "anything", + want: true, + wantErr: false, + }, + { + name: "exact match", + pattern: "file.txt", + str: "file.txt", + want: true, + wantErr: false, + }, + { + name: "single star", + pattern: "*.txt", + str: "file.txt", + want: true, + wantErr: false, + }, + { + name: "single star no match", + pattern: "*.txt", + str: "file.log", + want: false, + wantErr: false, + }, + { + name: "question mark", + pattern: "file.???", + str: "file.txt", + want: true, + wantErr: false, + }, + { + name: "character class", + pattern: "file.[tl]og", + str: "file.log", + want: true, + wantErr: false, + }, + { + name: "character range", + pattern: "file[a-z].txt", + str: "filea.txt", + want: true, + wantErr: false, + }, + { + name: "double star directory matching", + pattern: "dir/**/*.txt", + str: "dir/subdir/file.txt", + want: true, + wantErr: false, + }, + { + name: "double star deep directory matching", + pattern: "dir/**/*.txt", + str: "dir/subdir/another/deep/file.txt", + want: true, + wantErr: false, + }, + { + name: "double star with single file match", + pattern: "dir/**", + str: "dir/file.txt", + want: true, + wantErr: false, + }, + { + name: "double star no match", + pattern: "dir/**/*.txt", + str: "other/subdir/file.txt", + want: false, + wantErr: false, + }, + // 
Stack name pattern tests + { + name: "stack environment pattern match", + pattern: "*-dev-*", + str: "tenant1-dev-us-east-1", + want: true, + wantErr: false, + }, + { + name: "stack environment pattern no match", + pattern: "*-dev-*", + str: "tenant1-prod-us-east-1", + want: false, + wantErr: false, + }, + { + name: "stack environment brace expansion match dev", + pattern: "*-{dev,staging}-*", + str: "tenant1-dev-us-east-1", + want: true, + wantErr: false, + }, + { + name: "stack environment brace expansion match staging", + pattern: "*-{dev,staging}-*", + str: "tenant1-staging-us-east-1", + want: true, + wantErr: false, + }, + { + name: "stack environment brace expansion no match", + pattern: "*-{dev,staging}-*", + str: "tenant1-prod-us-east-1", + want: false, + wantErr: false, + }, + { + name: "stack with region pattern match", + pattern: "*-us-east-*", + str: "tenant1-prod-us-east-1", + want: true, + wantErr: false, + }, + { + name: "stack with region and environment pattern match", + pattern: "*-dev-*-east-*", + str: "tenant1-dev-us-east-1", + want: true, + wantErr: false, + }, + { + name: "stack with tenant pattern match", + pattern: "tenant1-*", + str: "tenant1-dev-us-east-1", + want: true, + wantErr: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := MatchWildcard(tt.pattern, tt.str) + + // Check error + if (err != nil) != tt.wantErr { + t.Errorf("MatchWildcard() error = %v, wantErr %v", err, tt.wantErr) + return + } + + // Check result + if got != tt.want { + t.Errorf("MatchWildcard() = %v, want %v", got, tt.want) + } + }) + } +} diff --git a/tests/snapshots/TestCLICommands_atmos_--help.stdout.golden b/tests/snapshots/TestCLICommands_atmos_--help.stdout.golden index 203e5d92c4..df65ddc317 100644 --- a/tests/snapshots/TestCLICommands_atmos_--help.stdout.golden +++ b/tests/snapshots/TestCLICommands_atmos_--help.stdout.golden @@ -17,7 +17,7 @@ Available Commands: docs Open Atmos documentation or display 
component-specific docs helmfile Manage Helmfile-based Kubernetes deployments help Display help information for Atmos commands - list List available stacks and components + list List Atmos resources and configurations pro Access premium features integrated with app.cloudposse.com support Show Atmos support options terraform Execute Terraform commands (e.g., plan, apply, destroy) using Atmos stack configurations diff --git a/tests/snapshots/TestCLICommands_atmos_terraform_--help_alias_subcommand_check.stdout.golden b/tests/snapshots/TestCLICommands_atmos_terraform_--help_alias_subcommand_check.stdout.golden index c08724b291..6001c3a35b 100644 --- a/tests/snapshots/TestCLICommands_atmos_terraform_--help_alias_subcommand_check.stdout.golden +++ b/tests/snapshots/TestCLICommands_atmos_terraform_--help_alias_subcommand_check.stdout.golden @@ -7,7 +7,7 @@ Usage: Aliases: - terraform, tf, tr + terraform, tf Available Commands: @@ -47,11 +47,6 @@ Native terraform Commands: version Show the current Terraform version workspace Manage Terraform workspaces -Subcommand Aliases: - - ta Alias of "atmos terraform apply" command - tp Alias of "atmos terraform plan" command - Flags: diff --git a/website/docs/cli/commands/list/list-metadata.mdx b/website/docs/cli/commands/list/list-metadata.mdx new file mode 100644 index 0000000000..e76a4438af --- /dev/null +++ b/website/docs/cli/commands/list/list-metadata.mdx @@ -0,0 +1,104 @@ +--- +title: "atmos list metadata" +id: "list-metadata" +--- + +# atmos list metadata + +The `atmos list metadata` command displays component metadata across all stacks. + +## Usage + +```shell +atmos list metadata [flags] +``` + +## Description + +The `atmos list metadata` command helps you inspect component metadata across different stacks. 
It provides a tabular view where: + +- Each column represents a stack (e.g., dev-ue1, staging-ue1, prod-ue1) +- Each row represents a key in the component's metadata +- Cells contain the metadata values for each key in each stack + +The command is particularly useful for: +- Comparing component metadata across different environments +- Verifying component types and versions across stacks +- Understanding component organization patterns across your infrastructure + +## Flags + +
+
`--query string`
+
Dot-notation path query to filter metadata (e.g., `.metadata.component`). Uses a simplified path syntax, not full JMESPath (default: `.metadata`)
+
`--max-columns int`
+
Maximum number of columns to display (default: `50`)
+
`--format string`
+
Output format: `table`, `json`, `yaml`, `csv`, `tsv` (default: `table`)
+
`--delimiter string`
+
Delimiter for csv/tsv output (default: `,` for csv, `\t` for tsv)
+
`--stack string`
+
Filter by stack pattern (e.g., `*-dev-*`, `prod-*`, `*-{dev,staging}-*`)
+
+ +## Examples + +List all metadata: +```shell +atmos list metadata +``` + +List metadata for specific stacks: +```shell +# List metadata for dev stacks +atmos list metadata --stack '*-dev-*' + +# List metadata for production stacks +atmos list metadata --stack 'prod-*' +``` + +List specific metadata using JMESPath queries: +```shell +# Query component names +atmos list metadata --query '.metadata.component' + +# Query component types +atmos list metadata --query '.metadata.type' + +# Query component versions +atmos list metadata --query '.metadata.version' +``` + +Output in different formats: +```shell +# JSON format for machine processing +atmos list metadata --format json + +# YAML format for configuration files +atmos list metadata --format yaml + +# CSV format for spreadsheet compatibility +atmos list metadata --format csv + +# TSV format with tab delimiters +atmos list metadata --format tsv +``` + +## Example Output + +```shell +> atmos list metadata +┌──────────────┬──────────────┬──────────────┬──────────────┐ +│ │ dev-ue1 │ staging-ue1 │ prod-ue1 │ +├──────────────┼──────────────┼──────────────┼──────────────┤ +│ component │ vpc │ vpc │ vpc │ +│ type │ terraform │ terraform │ terraform │ +│ version │ 1.0.0 │ 1.0.0 │ 1.0.0 │ +└──────────────┴──────────────┴──────────────┴──────────────┘ +``` + +:::tip +- For wide tables, try using more specific queries or reduce the number of stacks +- Stack patterns support glob matching (e.g., `*-dev-*`, `prod-*`, `*-{dev,staging}-*`) +- Metadata is typically found under component configurations +::: diff --git a/website/docs/cli/commands/list/list-settings.mdx b/website/docs/cli/commands/list/list-settings.mdx new file mode 100644 index 0000000000..ae3ba8b7c2 --- /dev/null +++ b/website/docs/cli/commands/list/list-settings.mdx @@ -0,0 +1,104 @@ +--- +title: "atmos list settings" +id: "list-settings" +--- + +# atmos list settings + +The `atmos list settings` command displays component settings across all stacks. 
+ +## Usage + +```shell +atmos list settings [flags] +``` + +## Description + +The `atmos list settings` command helps you inspect component settings across different stacks. It provides a tabular view where: + +- Each column represents a stack (e.g., dev-ue1, staging-ue1, prod-ue1) +- Each row represents a key in the component's settings +- Cells contain the settings values for each key in each stack (only scalars at this time) + +The command is particularly useful for: +- Comparing component settings across different environments +- Verifying settings are configured correctly in each stack +- Understanding component configuration patterns across your infrastructure + +## Flags + +
+
`--query string`
+
Dot-notation path query to filter settings (e.g., `.settings.templates`). Uses a simplified path syntax, not full JMESPath.
+
`--max-columns int`
+
Maximum number of columns to display (default: `50`)
+
`--format string`
+
Output format: `table`, `json`, `yaml`, `csv`, `tsv` (default: `table`)
+
`--delimiter string`
+
Delimiter for csv/tsv output (default: `,` for csv, `\t` for tsv)
+
`--stack string`
+
Filter stacks by wildcard pattern (e.g., `*-dev-*`, `prod-*`, `*-{dev,staging}-*`)
+
+ +## Examples + +List all settings: +```shell +atmos list settings +``` + +List settings for specific stacks: +```shell +# List settings for dev stacks +atmos list settings --stack '*-dev-*' + +# List settings for production stacks +atmos list settings --stack 'prod-*' +``` + +List specific settings using path queries: +```shell +# Query template settings +atmos list settings --query '.settings.templates' + +# Query validation settings +atmos list settings --query '.settings.validation' + +# Query specific template configurations +atmos list settings --query '.settings.templates.gomplate' +``` + +Output in different formats: +```shell +# JSON format for machine processing +atmos list settings --format json + +# YAML format for configuration files +atmos list settings --format yaml + +# CSV format for spreadsheet compatibility +atmos list settings --format csv + +# TSV format with tab delimiters +atmos list settings --format tsv +``` + +## Example Output + +```shell +> atmos list settings +┌──────────────┬──────────────┬──────────────┬──────────────┐ +│ │ dev-ue1 │ staging-ue1 │ prod-ue1 │ +├──────────────┼──────────────┼──────────────┼──────────────┤ +│ templates │ {...} │ {...} │ {...} │ +│ validation │ {...} │ {...} │ {...} │ +└──────────────┴──────────────┴──────────────┴──────────────┘ +``` + + +:::tip +- For wide tables, try using more specific queries or reduce the number of stacks +- Stack patterns support glob matching (e.g., `*-dev-*`, `prod-*`, `*-{dev,staging}-*`) +- Settings are typically found under component configurations +::: diff --git a/website/docs/cli/commands/list/list-values.mdx b/website/docs/cli/commands/list/list-values.mdx new file mode 100644 index 0000000000..6044aa060f --- /dev/null +++ b/website/docs/cli/commands/list/list-values.mdx @@ -0,0 +1,138 @@ +--- +title: "atmos list values" +id: "list-values" +--- + +# atmos list values + +The `atmos list values` command displays component values across all stacks where the component is 
used. + +## Usage + +```shell +atmos list values [component] [flags] +``` + +## Description + +The `atmos list values` command helps you inspect component values across different stacks. It provides a tabular view where: + +- Each column represents a stack (e.g., dev-ue1, staging-ue1, prod-ue1) +- Each row represents a key in the component's configuration +- Cells contain the values for each key in each stack + +The command is particularly useful for: +- Comparing component configurations across different environments +- Verifying values are set correctly in each stack +- Understanding how a component is configured across your infrastructure + +## Flags + +
+
`--query string`
+
Dot-notation path query to filter values (e.g., `.vars.enabled`). Uses a simplified path syntax, not full JMESPath.
+
`--abstract`
+
Include abstract components in the output
+
`--max-columns int`
+
Maximum number of columns to display (default: `10`)
+
`--format string`
+
Output format: `table`, `json`, `yaml`, `csv`, `tsv` (default: `table`)
+
`--delimiter string`
+
Delimiter for csv/tsv output (default: `,` for csv, `\t` for tsv)
+
+ +## Examples + +List all values for a component: +```shell +atmos list values vpc +``` + +List only variables for a component (using the alias): +```shell +atmos list vars vpc +``` + +List values with a custom path query: +```shell +# Query specific variables +atmos list values vpc --query .vars.enabled + +# Query environment settings +atmos list values vpc --query .vars.environment + +# Query network configuration +atmos list values vpc --query .vars.ipv4_primary_cidr_block +``` + +Include abstract components: +```shell +atmos list values vpc --abstract +``` + +Limit the number of columns: +```shell +atmos list values vpc --max-columns 5 +``` + +Output in different formats: +```shell +# JSON format for machine processing +atmos list values vpc --format json + +# CSV format for spreadsheet compatibility +atmos list values vpc --format csv + +# TSV format with tab delimiters +atmos list values vpc --format tsv + +# Note: Use JSON or CSV formats when dealing with wide datasets +# The table format will show a width error if the data is too wide for your terminal +``` + +## Example Output + +```shell +> atmos list vars vpc +┌──────────────┬──────────────┬──────────────┬──────────────┐ +│ │ dev-ue1 │ staging-ue1 │ prod-ue1 │ +├──────────────┼──────────────┼──────────────┼──────────────┤ +│ enabled │ true │ true │ true │ +│ name │ dev-vpc │ staging-vpc │ prod-vpc │ +│ cidr_block │ 10.0.0.0/16 │ 10.1.0.0/16 │ 10.2.0.0/16 │ +│ environment │ dev │ staging │ prod │ +│ namespace │ example │ example │ example │ +│ stage │ dev │ staging │ prod │ +│ region │ us-east-1 │ us-east-1 │ us-east-1 │ +└──────────────┴──────────────┴──────────────┴──────────────┘ +``` + +### Nested Object Display + +When listing values that contain nested objects: + +1. In table format, nested objects appear as `{...}` placeholders +2. Use `--format json` or `--format yaml` to see the complete nested structure +3. 
You can query specific nested paths using the dot notation: `--query .vars.tags.Environment` + +Example JSON output with nested objects: +```json +{ + "dev-ue1": { + "cidr_block": "10.0.0.0/16", + "tags": { + "Environment": "dev", + "Team": "devops" + }, + "subnets": [ + "10.0.1.0/24", + "10.0.2.0/24" + ] + } +} +``` + +## Related Commands + +- [atmos list components](/cli/commands/list/component) - List available components +- [atmos describe component](/cli/commands/describe/component) - Show detailed information about a component