From 22f95c42dc08725877bbd2e0c7ef112e8ad19748 Mon Sep 17 00:00:00 2001 From: Costi Muraru Date: Thu, 8 Aug 2019 18:19:17 +0300 Subject: [PATCH] Add helmfile/terraform runner with hierarchical configuration support (#44) * Add helmfile/terraform runner with hiera-like configuration support Signed-off-by: Constantin Muraru * Fix tests Signed-off-by: Constantin Muraru * Work1 * Integrate with existing terraform Signed-off-by: Constantin Muraru * Rename ee to hierarchical * Minor tweaks * Update requirements.txt * Add example * Fix ansible warning * Fix build Signed-off-by: cmuraru * Tweaks Signed-off-by: Constantin Muraru * Tweaks * Fixes * Fixes * Fixes * Update readme * Downgrade aws-cli until it works in Spinnaker Signed-off-by: cmuraru * Add epilog Signed-off-by: cmuraru Signed-off-by: Constantin Muraru --- .gitignore | 4 + README.md | 10 +- build_scripts/Dockerfile | 1 + build_scripts/docker_push.sh | 4 +- .../terraform-hierarchical/.opsconfig.yaml | 9 + .../features/terraform-hierarchical/README.md | 15 + .../compositions/terraform/cluster/main.tf | 10 + .../compositions/terraform/network/main.tf | 6 + .../composition=cluster/conf.yaml | 0 .../composition=network/conf.yaml | 0 .../config/env=dev/cluster=cluster1/conf.yaml | 2 + .../composition=cluster/conf.yaml | 0 .../composition=network/conf.yaml | 0 .../config/env=dev/cluster=cluster2/conf.yaml | 2 + .../config/env=dev/default.yaml | 18 + .../modules/cluster/main.tf | 5 + .../modules/network/main.tf | 17 + requirements.txt | 10 +- setup.py | 2 +- src/ops/__init__.py | 1 + src/ops/cli/config.py | 6 + src/ops/cli/config_generator.py | 68 +++ src/ops/cli/helmfile.py | 75 +++ src/ops/cli/terraform.py | 570 ++++-------------- src/ops/hierarchical/__init__.py | 0 .../composition_config_generator.py | 118 ++++ src/ops/hierarchical/config_generator.py | 238 ++++++++ src/ops/hierarchical/inject_secrets.py | 62 ++ src/ops/hierarchical/interpolation.py | 103 ++++ src/ops/hierarchical/remote_state.py | 32 + 
src/ops/hierarchical/secret_resolvers.py | 59 ++ src/ops/main.py | 16 +- src/ops/simplessm.py | 1 - src/ops/terraform/__init__.py | 0 src/ops/terraform/terraform_cmd_generator.py | 467 ++++++++++++++ tests/e2e/common.py | 8 + .../unit/test_composition_config_generator.py | 19 + 37 files changed, 1490 insertions(+), 468 deletions(-) create mode 100644 examples/features/terraform-hierarchical/.opsconfig.yaml create mode 100644 examples/features/terraform-hierarchical/README.md create mode 100644 examples/features/terraform-hierarchical/compositions/terraform/cluster/main.tf create mode 100644 examples/features/terraform-hierarchical/compositions/terraform/network/main.tf create mode 100644 examples/features/terraform-hierarchical/config/env=dev/cluster=cluster1/composition=cluster/conf.yaml create mode 100644 examples/features/terraform-hierarchical/config/env=dev/cluster=cluster1/composition=network/conf.yaml create mode 100644 examples/features/terraform-hierarchical/config/env=dev/cluster=cluster1/conf.yaml create mode 100644 examples/features/terraform-hierarchical/config/env=dev/cluster=cluster2/composition=cluster/conf.yaml create mode 100644 examples/features/terraform-hierarchical/config/env=dev/cluster=cluster2/composition=network/conf.yaml create mode 100644 examples/features/terraform-hierarchical/config/env=dev/cluster=cluster2/conf.yaml create mode 100644 examples/features/terraform-hierarchical/config/env=dev/default.yaml create mode 100644 examples/features/terraform-hierarchical/modules/cluster/main.tf create mode 100644 examples/features/terraform-hierarchical/modules/network/main.tf create mode 100644 src/ops/cli/config_generator.py create mode 100644 src/ops/cli/helmfile.py create mode 100644 src/ops/hierarchical/__init__.py create mode 100644 src/ops/hierarchical/composition_config_generator.py create mode 100755 src/ops/hierarchical/config_generator.py create mode 100644 src/ops/hierarchical/inject_secrets.py create mode 100644 
src/ops/hierarchical/interpolation.py create mode 100644 src/ops/hierarchical/remote_state.py create mode 100644 src/ops/hierarchical/secret_resolvers.py create mode 100644 src/ops/terraform/__init__.py create mode 100644 src/ops/terraform/terraform_cmd_generator.py create mode 100644 tests/e2e/common.py create mode 100644 tests/unit/test_composition_config_generator.py diff --git a/.gitignore b/.gitignore index 1e8b169..a66194c 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,8 @@ ops.egg-info/ +*.plan +*.tf.json +*.tfvars.json +*.tfstate .cache/ *.pyc .terraform diff --git a/README.md b/README.md index 4f89ba9..70453fd 100644 --- a/README.md +++ b/README.md @@ -59,6 +59,10 @@ ops clusters/mycluster.yaml terraform --path-name aws-eks apply ![ops-terraform](https://user-images.githubusercontent.com/952836/52021396-9bc1b580-24fd-11e9-9da8-00fb68bd5c72.png) +## Run terraform by using hierarchical configs + +See [examples/features/terraform-hierarchical](https://github.com/adobe/ops-cli/tree/master/examples/features/terraform-hierarchical) + ## Create Kubernetes cluster (using AWS EKS) See [examples/aws-kubernetes](https://github.com/adobe/ops-cli/tree/master/examples/aws-kubernetes) @@ -85,8 +89,8 @@ pip2 install -U virtualenv virtualenv ops source ops/bin/activate -# install opswrapper v0.36 stable release -pip2 install --upgrade https://github.com/adobe/ops-cli/releases/download/0.36/ops-0.36.tar.gz +# install opswrapper v1.0 stable release +pip2 install --upgrade https://github.com/adobe/ops-cli/releases/download/1.0/ops-1.0.tar.gz # Optionally, install terraform to be able to access terraform plugin # See https://www.terraform.io/intro/getting-started/install.html @@ -99,7 +103,7 @@ You can try out `ops-cli`, by using docker. 
The docker image has all required pr To start out a container, running the latest `ops-cli` docker image run: ```sh -docker run -it adobe/ops-cli:0.36 bash +docker run -it adobe/ops-cli:1.0 bash ``` After the container has started, you can start using `ops-cli`: diff --git a/build_scripts/Dockerfile b/build_scripts/Dockerfile index da02d37..bf21241 100644 --- a/build_scripts/Dockerfile +++ b/build_scripts/Dockerfile @@ -73,6 +73,7 @@ RUN curl -sSL https://github.com/databus23/helm-diff/releases/download/v${HELM_D USER root RUN HELM_HOME=/home/ops/.helm helm plugin install https://github.com/futuresimple/helm-secrets +RUN HELM_HOME=/home/ops/.helm helm plugin install https://github.com/rimusz/helm-tiller RUN chown -R ops:ops /home/ops/.helm/plugins COPY --from=compile-image /azure-cli /home/ops/.local/azure-cli diff --git a/build_scripts/docker_push.sh b/build_scripts/docker_push.sh index 2a4838f..496c31b 100644 --- a/build_scripts/docker_push.sh +++ b/build_scripts/docker_push.sh @@ -2,5 +2,5 @@ set -e echo "$DOCKER_PASSWORD" | docker login -u "$DOCKER_USERNAME" --password-stdin -docker tag ops adobe/ops-cli:0.36 -docker push adobe/ops-cli:0.36 +docker tag ops adobe/ops-cli:1.0 +docker push adobe/ops-cli:1.0 diff --git a/examples/features/terraform-hierarchical/.opsconfig.yaml b/examples/features/terraform-hierarchical/.opsconfig.yaml new file mode 100644 index 0000000..fe91426 --- /dev/null +++ b/examples/features/terraform-hierarchical/.opsconfig.yaml @@ -0,0 +1,9 @@ +--- +compositions_order: + terraform: + - account + - network + - cluster + - spinnaker + helmfile: + - helmfiles diff --git a/examples/features/terraform-hierarchical/README.md b/examples/features/terraform-hierarchical/README.md new file mode 100644 index 0000000..aac0d8f --- /dev/null +++ b/examples/features/terraform-hierarchical/README.md @@ -0,0 +1,15 @@ +1. 
Run 'terraform plan' for all compositions for a given cluster: +```sh +# generates config and runs terraform +ops config/env=dev/cluster=cluster1 terraform plan +``` + +2. Run 'terraform apply' for all compositions for a given cluster: +```sh +ops config/env=dev/cluster=cluster1 terraform apply --skip-plan +``` + +3. Run a single composition: +```sh +ops config/env=dev/cluster=cluster1/composition=network terraform apply --skip-plan +``` \ No newline at end of file diff --git a/examples/features/terraform-hierarchical/compositions/terraform/cluster/main.tf b/examples/features/terraform-hierarchical/compositions/terraform/cluster/main.tf new file mode 100644 index 0000000..95acf1e --- /dev/null +++ b/examples/features/terraform-hierarchical/compositions/terraform/cluster/main.tf @@ -0,0 +1,10 @@ +variable "config" {} + +module "cluster" { + source = "../../../modules/cluster" + config = var.config +} + +output "cluster_name" { + value = var.config.cluster.name +} diff --git a/examples/features/terraform-hierarchical/compositions/terraform/network/main.tf b/examples/features/terraform-hierarchical/compositions/terraform/network/main.tf new file mode 100644 index 0000000..d1c0034 --- /dev/null +++ b/examples/features/terraform-hierarchical/compositions/terraform/network/main.tf @@ -0,0 +1,6 @@ +variable "config" {} + +module "network" { + source = "../../../modules/network" + config = var.config +} diff --git a/examples/features/terraform-hierarchical/config/env=dev/cluster=cluster1/composition=cluster/conf.yaml b/examples/features/terraform-hierarchical/config/env=dev/cluster=cluster1/composition=cluster/conf.yaml new file mode 100644 index 0000000..e69de29 diff --git a/examples/features/terraform-hierarchical/config/env=dev/cluster=cluster1/composition=network/conf.yaml b/examples/features/terraform-hierarchical/config/env=dev/cluster=cluster1/composition=network/conf.yaml new file mode 100644 index 0000000..e69de29 diff --git 
a/examples/features/terraform-hierarchical/config/env=dev/cluster=cluster1/conf.yaml b/examples/features/terraform-hierarchical/config/env=dev/cluster=cluster1/conf.yaml new file mode 100644 index 0000000..e79e8e8 --- /dev/null +++ b/examples/features/terraform-hierarchical/config/env=dev/cluster=cluster1/conf.yaml @@ -0,0 +1,2 @@ +cluster: + name: cluster1 diff --git a/examples/features/terraform-hierarchical/config/env=dev/cluster=cluster2/composition=cluster/conf.yaml b/examples/features/terraform-hierarchical/config/env=dev/cluster=cluster2/composition=cluster/conf.yaml new file mode 100644 index 0000000..e69de29 diff --git a/examples/features/terraform-hierarchical/config/env=dev/cluster=cluster2/composition=network/conf.yaml b/examples/features/terraform-hierarchical/config/env=dev/cluster=cluster2/composition=network/conf.yaml new file mode 100644 index 0000000..e69de29 diff --git a/examples/features/terraform-hierarchical/config/env=dev/cluster=cluster2/conf.yaml b/examples/features/terraform-hierarchical/config/env=dev/cluster=cluster2/conf.yaml new file mode 100644 index 0000000..3a15c11 --- /dev/null +++ b/examples/features/terraform-hierarchical/config/env=dev/cluster=cluster2/conf.yaml @@ -0,0 +1,2 @@ +cluster: + name: cluster2 diff --git a/examples/features/terraform-hierarchical/config/env=dev/default.yaml b/examples/features/terraform-hierarchical/config/env=dev/default.yaml new file mode 100644 index 0000000..3ea81fd --- /dev/null +++ b/examples/features/terraform-hierarchical/config/env=dev/default.yaml @@ -0,0 +1,18 @@ +account: + cloud_provider: + aws: + profile: test_profile + +env: + name: dev + +region: + location: us-east-1 + name: va6 + +project: + prefix: ee + +# This value will be overridden +cluster: + name: default diff --git a/examples/features/terraform-hierarchical/modules/cluster/main.tf b/examples/features/terraform-hierarchical/modules/cluster/main.tf new file mode 100644 index 0000000..ac3efb8 --- /dev/null +++ 
b/examples/features/terraform-hierarchical/modules/cluster/main.tf @@ -0,0 +1,5 @@ +variable "config" {} + +output "cluster_name" { + value = var.config.cluster.name +} diff --git a/examples/features/terraform-hierarchical/modules/network/main.tf b/examples/features/terraform-hierarchical/modules/network/main.tf new file mode 100644 index 0000000..f2dd5eb --- /dev/null +++ b/examples/features/terraform-hierarchical/modules/network/main.tf @@ -0,0 +1,17 @@ +variable "config" {} + +locals { + env = var.config["env"] + region = var.config["region"]["location"] + project = var.config["project"]["prefix"] +} + +#resource "aws_s3_bucket" "bucket" { +# bucket = "${local.env}-${local.region}-${local.project}-test-bucket" +# acl = "private" + +# tags = { +# Name = "My bucket" +# Environment = "na" +# } +#} \ No newline at end of file diff --git a/requirements.txt b/requirements.txt index 812d3c5..cccc0e9 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,9 +1,8 @@ simpledi>=0.2 -awscli==1.16.206 -ansible==2.7.10 -boto3==1.9.196 +awscli==1.16.170 +boto3==1.9.110 boto==2.49.0 -botocore==1.12.196 +ansible==2.7.12 PyYAML==3.13 azure-common==1.1.20 azure==4.0.0 @@ -15,3 +14,6 @@ hvac==0.9.3 passgen inflection==0.3.1 kubernetes==9.0.0 +deepmerge==0.0.5 +lru_cache==0.2.3 +backports.functools_lru_cache==1.5 diff --git a/setup.py b/setup.py index 5f819ee..f71775b 100644 --- a/setup.py +++ b/setup.py @@ -16,7 +16,7 @@ _requires = [ r for r in open(os.path.sep.join((_mydir,'requirements.txt')), "r").read().split('\n') if len(r)>1 ] setup( name='ops', - version='0.36', + version='1.0', description='Ops simple wrapper', author='Adobe', author_email='noreply@adobe.com', diff --git a/src/ops/__init__.py b/src/ops/__init__.py index a6f3d0d..a0ababd 100644 --- a/src/ops/__init__.py +++ b/src/ops/__init__.py @@ -58,3 +58,4 @@ def shadow_credentials(self, cmd): class OpsException(Exception): pass + diff --git a/src/ops/cli/config.py b/src/ops/cli/config.py index 8aae07a..9a66a49 
100644 --- a/src/ops/cli/config.py +++ b/src/ops/cli/config.py @@ -52,6 +52,9 @@ def all(self): def __contains__(self, item): return item in self.conf or item in self.ops_config + def __setitem__(self, key, val): + self.conf[key] = val + def __getitem__(self, item): if item not in self.conf and item not in self.ops_config: msg = "Configuration value %s not found; update your %s" % (item, self.cluster_config_path) @@ -116,6 +119,9 @@ def __init__(self, console_args, cluster_config_path, template): self.console_args = console_args def get(self): + if os.path.isdir(self.cluster_config_path): + return {"cluster": None, "inventory": None} + data_loader = DataLoader() # data_loader.set_vault_password('627VR8*;YU99B') variable_manager = VariableManager(loader=data_loader) diff --git a/src/ops/cli/config_generator.py b/src/ops/cli/config_generator.py new file mode 100644 index 0000000..ad0c5aa --- /dev/null +++ b/src/ops/cli/config_generator.py @@ -0,0 +1,68 @@ +#Copyright 2019 Adobe. All rights reserved. +#This file is licensed to you under the Apache License, Version 2.0 (the "License"); +#you may not use this file except in compliance with the License. You may obtain a copy +#of the License at http://www.apache.org/licenses/LICENSE-2.0 + +#Unless required by applicable law or agreed to in writing, software distributed under +#the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR REPRESENTATIONS +#OF ANY KIND, either express or implied. See the License for the specific language +#governing permissions and limitations under the License. 
+ +import os +import logging +from ops.hierarchical.config_generator import ConfigProcessor +from ops.cli.parser import SubParserConfig + + +class ConfigGeneratorParserConfig(SubParserConfig): + def get_name(self): + return 'config' + + def get_help(self): + return 'Wrap common terraform tasks with full templated configuration support' + + def configure(self, parser): + parser.add_argument('--cwd', dest='cwd', type=str, default="", + help='the working directory') + parser.add_argument('--print-data', action='store_true', + help='print generated data on screen') + parser.add_argument('--enclosing-key', dest='enclosing_key', type=str, + help='enclosing key of the generated data') + parser.add_argument('--output-file', dest='output_file', type=str, + help='output file location') + parser.add_argument('--format', dest='output_format', type=str, default="yaml", + help='output file format') + parser.add_argument('--filter', dest='filter', action='append', + help='keep these keys from the generated data') + parser.add_argument('--exclude', dest='exclude', action='append', + help='exclude these keys from generated data') + parser.add_argument('--skip-interpolation-validation', action='store_true', + help='will not throw an error if interpolations can not be resolved') + parser.add_argument('--skip-interpolation-resolving', action='store_true', + help='do not perform any AWS calls to resolve interpolations') + return parser + + def get_epilog(self): + return ''' + + ''' + + +class ConfigGeneratorRunner(object): + def __init__(self, cluster_config_path): + self.cluster_config_path = cluster_config_path + + def run(self, args): + logging.basicConfig(level=logging.INFO) + args.path = self.cluster_config_path + if args.output_file is None: + args.print_data = True + cwd = args.cwd if args.cwd else os.getcwd() + filters = args.filter if args.filter else () + excluded_keys = args.exclude if args.exclude else () + + generator = ConfigProcessor() + generator.process(cwd, args.path, 
filters, excluded_keys, args.enclosing_key, args.output_format, + args.print_data, + args.output_file, args.skip_interpolation_resolving, args.skip_interpolation_validation, + display_command=False) diff --git a/src/ops/cli/helmfile.py b/src/ops/cli/helmfile.py new file mode 100644 index 0000000..9a2f55b --- /dev/null +++ b/src/ops/cli/helmfile.py @@ -0,0 +1,75 @@ +#Copyright 2019 Adobe. All rights reserved. +#This file is licensed to you under the Apache License, Version 2.0 (the "License"); +#you may not use this file except in compliance with the License. You may obtain a copy +#of the License at http://www.apache.org/licenses/LICENSE-2.0 + +#Unless required by applicable law or agreed to in writing, software distributed under +#the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR REPRESENTATIONS +#OF ANY KIND, either express or implied. See the License for the specific language +#governing permissions and limitations under the License. + + +import os +import logging +from ops.cli.parser import SubParserConfig +from ops.hierarchical.composition_config_generator import CompositionConfigGenerator + +logger = logging.getLogger(__name__) + + +class HelmfileParserConfig(SubParserConfig): + def get_name(self): + return 'helmfile' + + def get_help(self): + return 'Wrap common helmfile tasks using hierarchical configuration support' + + def configure(self, parser): + parser.add_argument('subcommand', help='plan | sync | apply | template', type=str) + parser.add_argument('extra_args', type=str, nargs='*', help='Extra args') + parser.add_argument('--helmfile-path', type=str, default=None, help='Dir to where helmfile.yaml is located') + return parser + + def get_epilog(self): + return ''' + Examples: + # Run helmfile sync + ops data/env=dev/region=va6/project=ee/cluster=experiments/composition=helmfiles helmfile sync + # Run helmfile sync for a single chart + ops data/env=dev/region=va6/project=ee/cluster=experiments/composition=helmfiles helmfile sync -- 
--selector chart=nginx-controller + ''' + + +class HelmfileRunner(CompositionConfigGenerator, object): + def __init__(self, ops_config, cluster_config_path): + super(HelmfileRunner, self).__init__(["helmfiles"]) + logging.basicConfig(level=logging.INFO) + self.ops_config = ops_config + self.cluster_config_path = cluster_config_path + + def run(self, args): + config_path_prefix = os.path.join(self.cluster_config_path, '') + args.helmfile_path = '../ee-k8s-infra/compositions/helmfiles' if args.helmfile_path is None else os.path.join(args.helmfile_path, '') + + compositions= self.get_sorted_compositions(config_path_prefix) + if len(compositions) == 0 or compositions[0] != "helmfiles": + raise Exception("Please provide the full path to composition=helmfiles") + composition = compositions[0] + conf_path = self.get_config_path_for_composition(config_path_prefix, composition) + self.generate_helmfile_config(conf_path, args) + + command = self.get_helmfile_command(args) + return dict(command=command) + + def generate_helmfile_config(self, path, args): + output_file = args.helmfile_path + "/hiera-generated.yaml" + logger.info('Generating helmfiles config %s', output_file) + self.generator.process(path=path, + filters=["helm"], + output_format="yaml", + output_file=output_file, + print_data=True) + + def get_helmfile_command(self, args): + cmd = ' '.join(args.extra_args + [args.subcommand]) + return "cd {} && helmfile {}".format(args.helmfile_path, cmd) diff --git a/src/ops/cli/terraform.py b/src/ops/cli/terraform.py index 54e77f2..389ec2b 100644 --- a/src/ops/cli/terraform.py +++ b/src/ops/cli/terraform.py @@ -1,24 +1,21 @@ -#Copyright 2019 Adobe. All rights reserved. -#This file is licensed to you under the Apache License, Version 2.0 (the "License"); -#you may not use this file except in compliance with the License. You may obtain a copy -#of the License at http://www.apache.org/licenses/LICENSE-2.0 +# Copyright 2019 Adobe. All rights reserved. 
+# This file is licensed to you under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. You may obtain a copy +# of the License at http://www.apache.org/licenses/LICENSE-2.0 -#Unless required by applicable law or agreed to in writing, software distributed under -#the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR REPRESENTATIONS -#OF ANY KIND, either express or implied. See the License for the specific language -#governing permissions and limitations under the License. +# Unless required by applicable law or agreed to in writing, software distributed under +# the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR REPRESENTATIONS +# OF ANY KIND, either express or implied. See the License for the specific language +# governing permissions and limitations under the License. import os -import re -import shutil - -from jinja2 import Environment, FileSystemLoader -from jinja2.runtime import DebugUndefined, StrictUndefined - +import hashlib +import logging from ops.cli.parser import SubParserConfig -from subprocess import Popen, PIPE +from ops.terraform.terraform_cmd_generator import TerraformCommandGenerator +from ops.hierarchical.composition_config_generator import TerraformConfigGenerator -from . 
import aws, err, display +logger = logging.getLogger(__name__) class TerraformParserConfig(SubParserConfig): @@ -29,24 +26,47 @@ def get_help(self): return 'Wrap common terraform tasks with full templated configuration support' def configure(self, parser): - parser.add_argument('subcommand', help='apply | console | destroy | import | output | plan | refresh | show | taint | template | untaint | validate', type=str) + parser.add_argument('subcommand', + help='apply | console | destroy | import | output | plan | refresh | show | taint | template | untaint | validate', + type=str) parser.add_argument('--var', help='the output var to show', type=str, default='') - parser.add_argument('--module', help='for use with "taint", "untaint" and "import". The module to use. e.g.: vpc', type=str) - parser.add_argument('--resource', help='for use with "taint", "untaint" and "import". The resource to target. e.g.: aws_instance.nat', type=str) - parser.add_argument('--name', help='for use with "import". The name or ID of the imported resource. e.g.: i-abcd1234', type=str) - parser.add_argument('--plan', help='for use with "show", show the plan instead of the statefile', action='store_true') - parser.add_argument('-i', '--interactive', help='for use with "apply", use the new interactive apply workflow introduced in TF 0.11.0', action='store_true') - parser.add_argument('--state-location', help='control how the remote states are used', choices=[ 'local', 'remote', 'any'], default='any', type=str) - parser.add_argument('--force-copy', help='for use with "plan" to do force state change automatically during init phase', action='store_true') - parser.add_argument('--template-location', help='for use with "template". The folder where to save the tf files, without showing', type=str) - parser.add_argument('--skip-refresh', help='for use with "plan". 
Skip refresh of statefile', action='store_false', dest='do_refresh') + parser.add_argument('--module', + help='for use with "taint", "untaint" and "import". The module to use. e.g.: vpc', type=str) + parser.add_argument('--resource', + help='for use with "taint", "untaint" and "import". The resource to target. e.g.: aws_instance.nat', + type=str) + parser.add_argument('--name', + help='for use with "import". The name or ID of the imported resource. e.g.: i-abcd1234', + type=str) + parser.add_argument('--plan', help='for use with "show", show the plan instead of the statefile', + action='store_true') + parser.add_argument('--state-location', help='control how the remote states are used', + choices=['local', 'remote', 'any'], default='any', type=str) + parser.add_argument('--force-copy', + help='for use with "plan" to do force state change automatically during init phase', + action='store_true') + parser.add_argument('--template-location', + help='for use with "template". The folder where to save the tf files, without showing', + type=str) + parser.add_argument('--skip-refresh', help='for use with "plan". Skip refresh of statefile', + action='store_false', dest='do_refresh') parser.set_defaults(do_refresh=True) - parser.add_argument('--raw-output', help='for use with "plan". Show raw plan output without piping through terraform landscape - ' - 'https://github.com/coinbase/terraform-landscape (if terraform landscape is not enabled in opsconfig.yaml ' - 'this will have no impact)', action='store_true', + parser.add_argument('--raw-output', + help='for use with "plan". 
Show raw plan output without piping through terraform landscape - ' + 'https://github.com/coinbase/terraform-landscape (if terraform landscape is not enabled in opsconfig.yaml ' + 'this will have no impact)', action='store_true', dest='raw_plan_output') parser.set_defaults(raw_plan_output=False) - parser.add_argument('--path-name', help='in case multiple terraform paths are defined, this allows to specify which one to use when running terraform', type=str) + parser.add_argument('--path-name', + help='in case multiple terraform paths are defined, this allows to specify which one to use when running terraform', + type=str) + parser.add_argument('--terraform-path', type=str, default=None, help='Path to terraform files') + parser.add_argument('--skip-plan', + help='for use with "apply"; runs terraform apply without running a plan first', + action='store_true') + parser.add_argument('--auto-approve', + help='for use with "apply". Proceeds with the apply without waiting for user confirmation.', + action='store_true') parser.add_argument('terraform_args', type=str, nargs='*', help='Extra terraform args') return parser @@ -54,18 +74,12 @@ def configure(self, parser): def get_epilog(self): return ''' Examples: - # Create a new cluster with Terraform - ops clusters/qe1.yaml terraform plan - ops clusters/qe1.yaml terraform apply - - # Update an existing cluster + # Create/update a new cluster with Terraform ops clusters/qe1.yaml terraform plan ops clusters/qe1.yaml terraform apply - # Run Terraform apply without running a plan first, runs in an interactive mode - ops clusters/qe1.yaml terraform apply --interactive - # or - ops clusters/qe1.yaml terraform apply -i + # Run Terraform apply without running a plan first + ops clusters/qe1.yaml terraform apply --skip-plan # Get rid of a cluster and all of its components ops clusters/qe1.yaml terraform destroy @@ -105,431 +119,77 @@ def get_epilog(self): # Specify which terraform path to use ops clusters/qe1.yaml terraform plan 
--path-name terraformFolder1 + + # Run terraform v2 integration + ops data/env=dev/region=va6/project=ee/cluster=experiments terraform plan ''' class TerraformRunner(object): - def __init__(self, root_dir, cluster_config, inventory_generator, ops_config, template): + def __init__(self, root_dir, cluster_config_path, cluster_config, inventory_generator, ops_config, template, + execute): + self.cluster_config_path = cluster_config_path self.cluster_config = cluster_config self.root_dir = root_dir self.inventory_generator = inventory_generator self.ops_config = ops_config self.template = template + self.execute = execute def run(self, args): - - self.selected_terraform_path = args.path_name - self.set_current_working_dir() - current_terraform_version = self.check_terraform_version() - config = self.cluster_config - - current_terraform_version_major = int(current_terraform_version.split('.')[1]) - if 'enable_consul_remote_state' in config['terraform']: - terraform_remote_state = config['terraform']['enable_consul_remote_state'] - elif config['terraform'].get('state', {'type': None}).get('type') == 's3': - terraform_remote_state = 'true' - else: - terraform_remote_state = 'false' - - terraform_config = config.get('terraform', {}) - terraform_path = self.get_terraform_path() - generate_module_templates = False - - plan_variables = terraform_config.get('vars', {}) - plan_variables['cluster'] = config['cluster'] - if self.cluster_config.has_ssh_keys: - plan_variables['has_ssh_keys'] = True - plan_variables['cluster_ssh_pubkey_file'] = self.cluster_config.cluster_ssh_pubkey_file - plan_variables['cluster_ssh_prvkey_file'] = self.cluster_config.cluster_ssh_prvkey_file - if terraform_config.get('boto_profile'): - self.add_profile_vars(plan_variables, terraform_config.get('boto_profile')) - - vars = '' - for key, val in plan_variables.items(): - vars += " -var '%s=%s' " % (key, val) - - state_file = 'terraform.{cluster}.tfstate'.format(cluster=config['cluster']) - plan_file = 
'terraform.{cluster}.plan'.format(cluster=config['cluster']) - landscape = '' - - if current_terraform_version_major >= 9: - if args.force_copy: - terraform_init_command = 'terraform init -force-copy && ' - else: - terraform_init_command = 'terraform init && ' - # regarding state location we give priority to the cli parameter - if args.state_location == 'remote': - state_argument = '' - state_out_argument = '' - elif args.state_location == 'local': - state_argument = "-state={state_file}".format( - state_file = state_file - ) - state_out_argument = "-state-out={state_file}".format( - state_file = state_file - ) - else: - # no cli parameter, decide based on config file - if terraform_remote_state == 'true': - state_argument = '' - state_out_argument = '' - else: - state_argument = "-state={state_file}".format( - state_file=state_file - ) - state_out_argument = "-state-out={state_file}".format( - state_file=state_file - ) - else: - state_argument = "-state={state_file}".format( - state_file=state_file - ) - state_out_argument = "-state-out={state_file}".format( - state_file=state_file - ) - terraform_init_command = '' - - if args.subcommand == 'template': - if args.template_location: - self.copy_static_files(args.template_location, terraform_path) - self.write_module_templates(args.template_location) - self.write_var_file(os.path.join(args.template_location, terraform_path), plan_variables) - else: - for original, fname, contents in self.get_templated_files(): - display("# %s -> %s" % (original, fname), color="green") - display("# --------------", color="green") - display(contents) - return - - if args.subcommand == 'plan': - generate_module_templates = True - terraform_refresh_command = remove_local_cache = '' - if args.do_refresh: - terraform_refresh_command = "terraform refresh -input=false {vars} {state_argument} && ".format(vars=vars, state_argument=state_argument) - - if self.ops_config['terraform.landscape'] and not args.raw_plan_output: - landscape = '| 
landscape' - - if self.ops_config['terraform.remove_local_cache']: - remove_local_cache = 'rm -rf .terraform && ' - - cmd = "cd {root_dir}/{terraform_path} && " \ - "{remove_local_cache}" \ - "terraform get -update && " \ - "{terraform_init_command}" \ - "{terraform_refresh_command}" \ - "terraform plan " \ - "-out={plan_file} -refresh=false -input=false {vars} {state_argument}".format( - root_dir=self.root_dir, - terraform_path=terraform_path, - terraform_init_command=terraform_init_command, - vars=vars, - state_argument=state_argument, - plan_file=plan_file, - terraform_refresh_command=terraform_refresh_command, - remove_local_cache=remove_local_cache - ) - - elif args.subcommand == 'apply': - # the following is to have auxiliary rendered/templated files like cloudinit.yaml - # that also needs templating. Without it, plan works but apply does not for this kind of files - # todo maybe this deserves a better implementation later - generate_module_templates = True - - self.inventory_generator.clear_cache() - if args.interactive: - cmd = "cd {root_dir}/{terraform_path} && {terraform_init_command}" \ - "rm -f {plan_file} && terraform apply {vars}" \ - "-refresh=true {state_argument}".format( - plan_file=plan_file, - root_dir=self.root_dir, - state_argument=state_argument, - terraform_init_command=terraform_init_command, - terraform_path=terraform_path, - vars=vars, - ) - else: - cmd = "cd {root_dir}/{terraform_path} && " \ - "terraform apply " \ - "-refresh=true {state_out_argument} {plan_file}; code=$?; rm {plan_file}; exit $code".format( - plan_file=plan_file, - root_dir=self.root_dir, - state_out_argument=state_out_argument, - terraform_path=terraform_path, - vars=vars, - ) - - elif args.subcommand == 'destroy': - generate_module_templates = True - remove_local_cache = '' - - if self.ops_config['terraform.remove_local_cache']: - remove_local_cache = 'rm -rf .terraform && ' - cmd = "cd {root_dir}/{terraform_path} && " \ - "{remove_local_cache}" \ - 
"{terraform_init_command}" \ - "terraform plan -destroy " \ - "-refresh=true {vars} {state_argument} && " \ - "terraform destroy {vars} {state_argument} -refresh=true".format( - root_dir=self.root_dir, - terraform_path=terraform_path, - vars=vars, - state_argument=state_argument, - terraform_init_command=terraform_init_command, - remove_local_cache=remove_local_cache - ) - elif args.subcommand == 'output': - cmd = "cd {root_dir}/{terraform_path} && " \ - "terraform output {state_argument} {output}".format( - root_dir=self.root_dir, - terraform_path=terraform_path, - output=args.var, - state_argument=state_argument - ) - elif args.subcommand == 'refresh': - generate_module_templates = True - cmd = "cd {root_dir}/{terraform_path} && " \ - "terraform get -update && " \ - "terraform refresh {state_argument} {vars}".format( - root_dir=self.root_dir, - terraform_path=terraform_path, - vars=vars, - state_argument=state_argument - ) - elif args.subcommand == 'taint' or args.subcommand == 'untaint': - cmd = "cd {root_dir}/{terraform_path} && " \ - "{terraform_init_command}" \ - "terraform {command} {state_argument} -module={module} {resource}".format( - root_dir=self.root_dir, - command=args.subcommand, - terraform_path=terraform_path, - resource=args.resource, - module=args.module, - state_argument=state_argument, - terraform_init_command=terraform_init_command - ) - elif args.subcommand == 'show': - if args.plan: - state=plan_file - else: - state=state_file - - cmd = "cd {root_dir}/{terraform_path} && " \ - "terraform show {state}".format( - root_dir=self.root_dir, - terraform_path=terraform_path, - state=state - ) - elif args.subcommand == 'import': - generate_module_templates = True - cmd = "cd {root_dir}/{terraform_path} && " \ - "terraform import {state_argument} {vars} module.{module}.{resource} {name}".format( - root_dir=self.root_dir, - command=args.subcommand, - terraform_path=terraform_path, - resource=args.resource, - module=args.module, - name=args.name, - 
state_argument=state_argument, - vars=vars, - ) - elif args.subcommand == 'console': - generate_module_templates = True - cmd = "cd {root_dir}/{terraform_path} && " \ - "terraform {command} {state_argument} {vars}".format( - root_dir=self.root_dir, - command=args.subcommand, - terraform_path=terraform_path, - state_argument=state_argument, - vars=vars, - ) - elif args.subcommand == 'validate': - generate_module_templates = True - cmd = "cd {root_dir}/{terraform_path} && {terraform_init_command} " \ - "terraform {command} {vars}".format( - command=args.subcommand, - root_dir=self.root_dir, - terraform_init_command=terraform_init_command, - terraform_path=terraform_path, - vars=vars, - ) - elif args.subcommand is not None: - # Examples: - # - command = "state push errored.tfstate" - # - command = "force-unlock " - generate_module_templates = True - cmd = "cd {root_dir}/{terraform_path} && {terraform_init_command} " \ - "terraform {command}".format( - command=args.subcommand, - root_dir=self.root_dir, - terraform_init_command=terraform_init_command, - terraform_path=terraform_path, - ) + if os.path.isdir(self.cluster_config_path): + return self.run_v2_integration(args) else: - display('Terraform subcommand \'%s\' not found' % args.subcommand, color='red') - return - - if generate_module_templates: - self.write_module_templates() - post_actions = [self.remove_module_template] - else: - post_actions = [] - - # pass on the terraform args to the terraform command line - cmd = ' '.join([cmd] + args.terraform_args + [landscape]) - - return dict( - command=cmd, - post_actions=post_actions - ) - - def add_profile_vars(self, plan_variables, profile_name): - plan_variables['profile'] = '"%s"' % profile_name - - home_dir = os.environ.get('HOME') - plan_variables['shared_credentials_file'] = '"{}/.aws/credentials"'.format(home_dir) - # plan_variables['access_key'] = '"%s"' % aws.acess_key(profile_name) - # plan_variables['secret_key'] = '"%s"' % aws.secret_key(profile_name) - - 
def get_terraform_path(self): - if 'path' in self.cluster_config['terraform']: - return self.cluster_config['terraform']['path'] - - if 'paths' not in self.cluster_config['terraform']: - raise Exception("Could not find 'terraform.path' / 'terraform.paths' in the cluster configuration") - - paths = self.cluster_config['terraform']['paths'] - selected = self.selected_terraform_path - if selected is None: - raise Exception('You need to specify which path you want to use with --path-name. Options are: %s ' % paths.keys()) - - try: - return paths[selected] - except KeyError: - raise Exception("Could not find path '%s' in 'terraform.paths'. Options are: %s" % (selected, paths.keys())) - - def get_terraform_src_paths(self): - return [self.get_terraform_path()] - - def check_terraform_version(self): - expected_version = self.ops_config['terraform.version'] - - try: - execution = Popen(['terraform', '--version'], stdin=PIPE, stdout=PIPE, stderr=PIPE) - except Exception as e: - err('Terraform does not appear to be installed, please ensure terraform is in your PATH') - raise e - current_version, execution_error = execution.communicate() - current_version = current_version.replace('Terraform ', '').split('\n', 1)[0] - if expected_version == 'latest': - return current_version - - if current_version != expected_version and execution.returncode == 0: - raise Exception("Terraform should be %s, but you have %s. Please change your version." 
% ( - expected_version, current_version)) - - return current_version - - def get_templated_files(self): - for path in self.get_terraform_src_paths(): - for source, dest, content in self.template_files(path): - yield source, dest, content - - def copy_static_files(self, path, terraform_path): - shutil.copytree(os.path.join(self.root_dir, terraform_path), os.path.join(path, terraform_path)) - shutil.copytree(os.path.join(self.root_dir, 'modules'), os.path.join(path, 'modules')) - - def write_var_file(self, path, variables): - fname = os.path.join(path, 'ops.auto.tfvars') - with open(fname, 'w') as f: - for key, val in variables.items(): - if val[0] != '"': - val = '"{}"'.format(val) - f.write("{key} = {val}\n".format(key=key, val=val)) - - def write_module_templates(self, path=''): - for original, fname, result in self.get_templated_files(): - if path: - fname = os.path.join(path, fname) - folder = os.path.dirname(fname) - if not os.path.exists(folder): - os.makedirs(folder) - with open(fname, 'w') as f: - f.write(result.encode('utf8')) - - def remove_module_template(self): - filenames = set() - for source, dest, content in self.get_templated_files(): - filenames.add(dest) - for filename in filenames: - try: - os.remove(filename) - except: - err('Could not remove file %s' % filename) - - def get_terraform_module_paths(self, rendered): - """ Return list of relative module paths that are included in a terraform - config file """ - - return re.findall('source\s*=\s*"(.+?)"', rendered) - - def template_files(self, path): - result = [] - terraform_file_contents = self.get_terraform_files(path) - - for source in self.list_jinja_templates(path): - dest = source.replace(".jinja2", "") - config_all = self.cluster_config.all() - # Allow access to configuration values in Jinja2. Replace '.' 
with '_' to make them valid variable names - config_all['opsconfig'] = {k.replace('.', '_'): v for k, v in self.ops_config.all().items()} - config_all['selected_terraform_path'] = self.selected_terraform_path - if config_all.get('terraform', {}).get('boto_profile'): - self.add_profile_vars(config_all, config_all['terraform']['boto_profile']) - rendered = self.template.render(source, config_all) - - terraform_file_contents.append(rendered) - - result.append((source, dest, rendered)) - - # search for module references in all terraform files in this path, including rendered templates - for discovered_module in self.find_referenced_modules(path, terraform_file_contents): - result.extend(self.template_files(discovered_module)) - - return result - - def find_referenced_modules(self, base_path, terraform_files): - # look for terraform module references in this path - ret = set() - - for rendered in terraform_files: - for relative_module_path in self.get_terraform_module_paths(rendered): - new_path = os.path.normpath(base_path + '/' + relative_module_path) - ret.add(new_path) - - return ret - - def list_files(self, path, extension): - template_paths = [] - loader = FileSystemLoader(path) - for fname in loader.list_templates(): - name, ext = os.path.splitext(fname) - template_path = path + '/' + fname - # Do not go into terraform community provided modules - if ext == extension and '.terraform' not in template_path: - template_paths.append(template_path) - - return template_paths - - def get_terraform_files(self, path): - ret = [] - for fname in self.list_files(path, '.tf'): - with open(fname) as f: - ret.append(f.read()) - - return ret - - def list_jinja_templates(self, path): - return self.list_files(path, '.jinja2') - - def set_current_working_dir(self): - os.chdir(self.root_dir) + return self.run_v1_integration(args) + + def run_v1_integration(self, args): + return self.run_composition(args, self.cluster_config) + + def run_composition(self, args, config): + generator = 
TerraformCommandGenerator(self.root_dir, + config, + self.inventory_generator, + self.ops_config, + self.template) + return generator.generate(args) + + def run_v2_integration(self, args): + logging.basicConfig(level=logging.INFO) + config_path = os.path.join(self.cluster_config_path, '') + terraform_path = '../ee-k8s-infra/' if args.terraform_path is None else os.path.join(args.terraform_path, '') + terraform_path = '{}compositions/terraform/'.format(terraform_path) + composition_order = self.cluster_config.ops_config.config["compositions_order"]["terraform"] + + tf_config_generator = TerraformConfigGenerator(composition_order) + reverse_order = "destroy" == args.subcommand + compositions = tf_config_generator.get_sorted_compositions(config_path, reverse=reverse_order) + if len(compositions) == 0: + raise Exception("No terraform compositions were detected in %s." % config_path) + + return self.run_v2_compositions(args, config_path, tf_config_generator, terraform_path, compositions) + + def run_v2_compositions(self, args, config_path, tf_config_generator, terraform_path, compositions): + should_finish = False + return_code = 0 + for composition in compositions: + if should_finish: + logger.info("Skipping 'terraform %s' for composition '%s' because of previous failure.", args.subcommand, composition) + continue + + logger.info("Running composition: %s", composition) + tf_config_generator.generate_files(config_path, terraform_path, composition) + command = self.run_v2_composition(args, terraform_path, composition) + return_code = self.execute(command) + if return_code != 0: + should_finish = True + logger.error("Command finished with nonzero exit code for composition '%s'. 
Will skip remaining compositions.", composition) + + return return_code + + def run_v2_composition(self, args, terraform_path, composition): + config = self.cluster_config + config['terraform'] = {} + config['terraform']["path"] = "{}{}".format(terraform_path, composition) + config['terraform']["variables_file"] = "variables.tfvars.json" + config['cluster'] = hashlib.md5(self.cluster_config_path).hexdigest()[:6] + return self.run_composition(args, config) diff --git a/src/ops/hierarchical/__init__.py b/src/ops/hierarchical/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/ops/hierarchical/composition_config_generator.py b/src/ops/hierarchical/composition_config_generator.py new file mode 100644 index 0000000..75a0572 --- /dev/null +++ b/src/ops/hierarchical/composition_config_generator.py @@ -0,0 +1,118 @@ +# Copyright 2019 Adobe. All rights reserved. +# This file is licensed to you under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. You may obtain a copy +# of the License at http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software distributed under +# the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR REPRESENTATIONS +# OF ANY KIND, either express or implied. See the License for the specific language +# governing permissions and limitations under the License. 
+ +from config_generator import ConfigProcessor + +from ops import Executor +import logging +import os + +logger = logging.getLogger(__name__) + + +class CompositionConfigGenerator: + + def __init__(self, composition_order): + self.composition_sorter = CompositionSorter(composition_order) + self.generator = ConfigProcessor() + + def get_sorted_compositions(self, path, reverse=False): + all_compositions = self.discover_all_compositions(path) + compositions = self.sort_compositions(all_compositions, reverse) + return compositions + + def discover_all_compositions(self, path): + path_params = dict(self.split_path(x) for x in path.split('/')) + + composition = path_params.get("composition", None) + if composition: + return [composition] + + return self.get_compositions_in_path(path) + + def get_compositions_in_path(self, path): + compositions = [] + subpaths = os.listdir(path) + for subpath in subpaths: + if "composition=" in subpath: + composition = self.split_path(subpath)[1] + compositions.append(composition) + return compositions + + def run_sh(self, command, cwd=None, exit_on_error=True): + args = {"command": command} + execute = Executor() + exit_code = execute(args, cwd=cwd) + if exit_code != 0: + logger.error("Command finished with non zero exit code.") + if exit_on_error: + exit(exit_code) + + def split_path(self, value, separator='='): + if separator in value: + return value.split(separator) + return [value, ""] + + def sort_compositions(self, all_compositions, reverse=False): + return self.composition_sorter.get_sorted_compositions(all_compositions, reverse) + + def get_config_path_for_composition(self, path_prefix, composition): + prefix = os.path.join(path_prefix, '') + return path_prefix if "composition=" in path_prefix else "{}composition={}".format(prefix, composition) + + def get_terraform_path_for_composition(self, path_prefix, composition): + prefix = os.path.join(path_prefix, '') + return path_prefix if composition in path_prefix else 
"{}{}/".format(prefix, composition) + +class TerraformConfigGenerator(CompositionConfigGenerator, object): + + def __init__(self, composition_order): + super(TerraformConfigGenerator, self).__init__(composition_order) + + def generate_files(self, config_path, composition_path, composition): + config_path = self.get_config_path_for_composition(config_path, composition) + composition_path = self.get_terraform_path_for_composition(composition_path, composition) + self.generate_provider_config(config_path, composition_path) + self.generate_variables_config(config_path, composition_path) + + def generate_provider_config(self, config_path, composition_path): + output_file = "{}provider.tf.json".format(composition_path) + logger.info('Generating terraform config %s', output_file) + self.generator.process(path=config_path, + filters=["provider", "terraform"], + output_format="json", + output_file=output_file, + skip_interpolation_validation=True, + print_data=True) + + def generate_variables_config(self, config_path, composition_path): + output_file = "{}variables.tfvars.json".format(composition_path) + logger.info('Generating terraform config %s', output_file) + self.generator.process(path=config_path, + exclude_keys=["helm", "provider"], + enclosing_key="config", + output_format="json", + output_file=output_file, + + # skip validation, since some interpolations might not be able to be resolved + # at this point (eg. 
{{outputs.*}}, which reads from a terraform state file + # that might not yet be created) + skip_interpolation_validation=True, + print_data=True) + + +class CompositionSorter(object): + def __init__(self, composition_order): + self.composition_order = composition_order + + def get_sorted_compositions(self, compositions, reverse=False): + result = filter(lambda x: x in compositions, self.composition_order) + return tuple(reversed(result)) if reverse else result + diff --git a/src/ops/hierarchical/config_generator.py b/src/ops/hierarchical/config_generator.py new file mode 100755 index 0000000..eecf458 --- /dev/null +++ b/src/ops/hierarchical/config_generator.py @@ -0,0 +1,238 @@ +#Copyright 2019 Adobe. All rights reserved. +#This file is licensed to you under the Apache License, Version 2.0 (the "License"); +#you may not use this file except in compliance with the License. You may obtain a copy +#of the License at http://www.apache.org/licenses/LICENSE-2.0 + +#Unless required by applicable law or agreed to in writing, software distributed under +#the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR REPRESENTATIONS +#OF ANY KIND, either express or implied. See the License for the specific language +#governing permissions and limitations under the License. + +import os +from collections import OrderedDict +import pathlib2 +from deepmerge import Merger +import yaml +import json +from interpolation import InterpolationResolver, InterpolationValidator +from remote_state import S3TerraformRemoteStateRetriever +from ops.cli import display + + +class ConfigGenerator(object): + """ + this class is used to create a config generator object which will be used to generate cluster definition files + from the hierarchy of directories. The class implements methods that performs deep merging on dicts so the end result + will contain merged data on each layer. 
+ """ + + def __init__(self, cwd, path): + self.cwd = cwd + self.path = path + self.hierarchy = self.generate_hierarchy() + self.generated_data = OrderedDict() + self.interpolation_validator = InterpolationValidator() + + @staticmethod + def yaml_dumper(): + try: + from yaml import CLoader as Loader, CDumper as Dumper + except ImportError: + from yaml import Loader, Dumper + from yaml.representer import SafeRepresenter + _mapping_tag = yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG + + def dict_representer(dumper, data): + return dumper.represent_dict(data.iteritems()) + + def dict_constructor(loader, node): + return OrderedDict(loader.construct_pairs(node)) + + Dumper.add_representer(OrderedDict, dict_representer) + Loader.add_constructor(_mapping_tag, dict_constructor) + + Dumper.add_representer(str, + SafeRepresenter.represent_str) + + Dumper.add_representer(unicode, + SafeRepresenter.represent_unicode) + return Dumper + + @staticmethod + def get_yaml_from_path(working_directory, path): + yaml_files = [] + for yaml_file in os.listdir(path): + if yaml_file.endswith(".yaml"): + yaml_files.append(os.path.join(working_directory, yaml_file)) + return sorted(yaml_files) + + @staticmethod + def yaml_get_content(yaml_file): + with open(yaml_file, 'r') as f: + content = yaml.load(f) + return content if content else {} + + @staticmethod + def merge_value(reference, new_value): + merger = Merger([(list, ["append"]), (dict, ["merge"])], ["override"], ["override"]) + if isinstance(new_value, (list, set, dict)): + new_reference = merger.merge(reference, new_value) + else: + raise TypeError("Cannot handle merge_value of type {}".format(type(new_value))) + return new_reference + + @staticmethod + def merge_yamls(values, yaml_content): + for key, value in yaml_content.iteritems(): + if key in values and type(values[key]) != type(value): + raise Exception("Failed to merge key '{}', because of mismatch in type: {} vs {}" + .format(key, type(values[key]), type(value))) + if key in 
values and not isinstance(value, (basestring, int, bool)): + values[key] = ConfigGenerator.merge_value(values[key], value) + else: + values[key] = value + + def generate_hierarchy(self): + """ + the method will go through the hierarchy of directories and create an ordered list of directories to be used + when merging data at each layer + :return: returns a list of directories in a priority order (from less specific to more specific) + """ + hierarchy = [] + full_path = pathlib2.Path(self.path) + for path in full_path.parts: + os.chdir(path) + new_path = os.path.relpath(os.getcwd(), self.cwd) + hierarchy.append(self.get_yaml_from_path(new_path, os.getcwd())) + os.chdir(self.cwd) + return hierarchy + + def process_hierarchy(self): + merged_values = OrderedDict() + for yaml_files in self.hierarchy: + for yaml_file in yaml_files: + yaml_content = self.yaml_get_content(yaml_file) + self.merge_yamls(merged_values, yaml_content) + self.generated_data = merged_values + + def get_values_from_dir_path(self): + values = {} + full_path = pathlib2.Path(self.path) + for path in full_path.parts[1:]: + split_value = path.split('=') + values[split_value[0]] = split_value[1] + return values + + def output_yaml_data(self, data): + return yaml.dump(data, Dumper=ConfigGenerator.yaml_dumper(), default_flow_style=False) + + def yaml_to_json(self, yaml_data): + return json.dumps(yaml.load(yaml_data), indent=4) + + def output_data(self, data, format): + yaml_data = self.output_yaml_data(data) + if "yaml" in format: + return yaml_data + elif "json" in format: + return self.yaml_to_json(yaml_data) + raise Exception("Unknown output format: {}".format(format)) + + def add_enclosing_key(self, key): + return {key: self.generated_data} + + def filter_data(self, keys): + self.generated_data = {key: self.generated_data[key] for key in keys if key in self.generated_data} + + def exclude_keys(self, keys): + for key in keys: + if key in self.generated_data: + del self.generated_data[key] + + def 
add_dynamic_data(self): + remote_state_retriever = S3TerraformRemoteStateRetriever() + if "remote_states" in self.generated_data: + state_files = self.generated_data["remote_states"] + remote_states = remote_state_retriever.get_dynamic_data(state_files) + self.merge_value(self.generated_data, remote_states) + + def resolve_interpolations(self): + resolver = InterpolationResolver(self.generated_data) + self.generated_data = resolver.resolve_interpolations(self.generated_data) + + def validate_interpolations(self): + self.interpolation_validator.check_all_interpolations_resolved(self.generated_data) + + +class ConfigProcessor(object): + + def process(self, cwd=None, path=None, filters=(), exclude_keys=(), enclosing_key=None, output_format="yaml", print_data=False, + output_file=None, skip_interpolations=False, skip_interpolation_validation=False, display_command=True): + + path = self.get_relative_path(path) + if display_command: + command = self.get_sh_command(path, filters, enclosing_key, output_format, print_data, output_file, + skip_interpolations, skip_interpolation_validation) + display(command, color='yellow') + + if skip_interpolations: + skip_interpolation_validation = True + + if cwd is None: + cwd = os.getcwd() + + generator = ConfigGenerator(cwd, path) + generator.generate_hierarchy() + generator.process_hierarchy() + + if not skip_interpolations: + generator.resolve_interpolations() + generator.add_dynamic_data() + generator.resolve_interpolations() + + if len(filters) > 0: + generator.filter_data(filters) + + if len(exclude_keys) > 0: + generator.exclude_keys(exclude_keys) + + if not skip_interpolation_validation: + generator.validate_interpolations() + + data = generator.add_enclosing_key(enclosing_key) if enclosing_key else generator.generated_data + + formatted_data = generator.output_data(data, output_format) + + if print_data: + print(formatted_data) + + if output_file: + with open(output_file, 'w') as f: + f.write(formatted_data) + + return data + + 
def get_relative_path(self, path): + cwd = os.path.join(os.getcwd(), '') + if path.startswith(cwd): + return path[len(cwd):] + return path + + + @staticmethod + def get_sh_command(path, filters, enclosing_key, output_format, print_data, + output_file, skip_interpolations, skip_interpolation_validation): + command = "ops {} config --format {}".format(path, output_format) + for filter in filters: + command += " --filter {}".format(filter) + if enclosing_key: + command += " --enclosing-key {}".format(enclosing_key) + if output_file: + command += " --output-file {}".format(output_file) + if print_data: + command += " --print-data" + if skip_interpolations: + command += " --skip-interpolation-resolving" + if skip_interpolation_validation: + command += " --skip-interpolation-validation" + + return command diff --git a/src/ops/hierarchical/inject_secrets.py b/src/ops/hierarchical/inject_secrets.py new file mode 100644 index 0000000..519c397 --- /dev/null +++ b/src/ops/hierarchical/inject_secrets.py @@ -0,0 +1,62 @@ +# Copyright 2019 Adobe. All rights reserved. +# This file is licensed to you under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. You may obtain a copy +# of the License at http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software distributed under +# the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR REPRESENTATIONS +# OF ANY KIND, either express or implied. See the License for the specific language +# governing permissions and limitations under the License. 
+ +from secret_resolvers import AggregatedSecretResolver +try: + from functools import lru_cache +except ImportError: + from backports.functools_lru_cache import lru_cache + +class SecretInjector(object): + """ + Resolve secrets in the form: + {{ssm.path(/aam/artifactory/grafana/password).aws_profile(aam-npe)}} + or + {{vault.kv2.path(ethos/k8s-ethos-config/thrash/aws/ClusterIngressTLS).field(Key)}} + """ + + def __init__(self): + self.resolver = AggregatedSecretResolver() + + def is_interpolation(self, value): + return value.startswith('{{') and value.endswith('}}') + + @lru_cache(maxsize=2048) + def inject_secret(self, line): + """ + Check if value is an interpolation and try to resolve it. + Uses a cache, in order to not fetch same secret multiple times. + """ + if not self.is_interpolation(line): + return line + + # remove {{ and }} + updated_line = line[2:-2] + + # parse each key/value (eg. path=my_pwd) + parts = updated_line.split('.') + if len(parts) <= 1: + return line + + secret_type = parts[0] + + secret_params = {} + for part in parts: + if '(' not in part: + secret_params[part] = None + else: + key = part.split('(')[0] + value = part.split('(')[1].split(')')[0] + secret_params[key] = value + + if self.resolver.supports(secret_type): + return self.resolver.resolve(secret_type, secret_params) + else: + return line diff --git a/src/ops/hierarchical/interpolation.py b/src/ops/hierarchical/interpolation.py new file mode 100644 index 0000000..7984357 --- /dev/null +++ b/src/ops/hierarchical/interpolation.py @@ -0,0 +1,103 @@ +# Copyright 2019 Adobe. All rights reserved. +# This file is licensed to you under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
You may obtain a copy +# of the License at http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software distributed under +# the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR REPRESENTATIONS +# OF ANY KIND, either express or implied. See the License for the specific language +# governing permissions and limitations under the License. + +from inject_secrets import SecretInjector + + +def is_interpolation(input): + return '{{' in input and '}}' in input + + +class DictIterator(): + + def loop_all_items(self, data, process_func): + if isinstance(data, basestring): + return process_func(data) + if isinstance(data, list): + items = [] + for item in data: + items.append(self.loop_all_items(item, process_func)) + return items + if isinstance(data, dict): + for key in data: + value = data[key] + resolved_value = self.loop_all_items(value, process_func) + data[key] = resolved_value + return data + + +class InterpolationResolver(DictIterator): + + def __init__(self, data, secrets_injector=SecretInjector()): + self.generated_data = data + self.secrets_injector = secrets_injector + self.from_dict_injector = FromDictInjector() + + def resolve_interpolations(self, data): + return self.loop_all_items(data, self.resolve_interpolation) + + def resolve_interpolation(self, line): + if not is_interpolation(line): + return line + + updated_line = self.secrets_injector.inject_secret(line) + updated_line = self.from_dict_injector.resolve(updated_line, self.generated_data) + return updated_line + + +class InterpolationValidator(DictIterator): + + def __init__(self): + pass + + def check_all_interpolations_resolved(self, data): + return self.loop_all_items(data, self.validate_value) + + def validate_value(self, value): + if is_interpolation(value): + raise Exception("Interpolation could not be resolved {} and strict validation was enabled.".format(value)) + return value + + +class FromDictInjector(): + + def 
__init__(self): + self.results = {} + + def resolve(self, line, data): + """ + :param input: {{env.name}} + :param data: (env: name: dev) + :return: dev + """ + + self.parse_leaves(data, "") + for key, value in self.results.iteritems(): + placeholder = "{{" + key + "}}" + if placeholder not in line: + continue + elif isinstance(value, (int, bool)): + return value + else: + line = line.replace(placeholder, value) + return line + + def parse_leaves(self, data, partial_key): + if isinstance(data, (basestring, int, bool)): + self.results[partial_key] = data + return + if isinstance(data, dict): + for key in data: + value = data[key] + new_key = partial_key + if new_key: + new_key += "." + new_key += key + self.parse_leaves(value, new_key) diff --git a/src/ops/hierarchical/remote_state.py b/src/ops/hierarchical/remote_state.py new file mode 100644 index 0000000..d943619 --- /dev/null +++ b/src/ops/hierarchical/remote_state.py @@ -0,0 +1,32 @@ +#Copyright 2019 Adobe. All rights reserved. +#This file is licensed to you under the Apache License, Version 2.0 (the "License"); +#you may not use this file except in compliance with the License. You may obtain a copy +#of the License at http://www.apache.org/licenses/LICENSE-2.0 + +#Unless required by applicable law or agreed to in writing, software distributed under +#the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR REPRESENTATIONS +#OF ANY KIND, either express or implied. See the License for the specific language +#governing permissions and limitations under the License. 
+ +import boto3 +import json + + +class S3TerraformRemoteStateRetriever: + @staticmethod + def get_s3_client(bucket_name, bucket_key, boto_profile): + session = boto3.session.Session(profile_name=boto_profile) + client = session.client('s3') + try: + bucket_object = client.get_object(Bucket=bucket_name, Key=bucket_key)["Body"].read() + return json.loads(bucket_object) + except (client.exceptions.NoSuchKey, client.exceptions.NoSuchBucket): + return [] + + def get_dynamic_data(self, remote_states): + generated_data = {"outputs": {}} + for state in remote_states: + bucket_object = self.get_s3_client(state["s3_bucket"], state["s3_key"], state["aws_profile"]) + if "outputs" in bucket_object: + generated_data["outputs"][state["name"]] = bucket_object["outputs"] + return generated_data diff --git a/src/ops/hierarchical/secret_resolvers.py b/src/ops/hierarchical/secret_resolvers.py new file mode 100644 index 0000000..6031c17 --- /dev/null +++ b/src/ops/hierarchical/secret_resolvers.py @@ -0,0 +1,59 @@ +#Copyright 2019 Adobe. All rights reserved. +#This file is licensed to you under the Apache License, Version 2.0 (the "License"); +#you may not use this file except in compliance with the License. You may obtain a copy +#of the License at http://www.apache.org/licenses/LICENSE-2.0 + +#Unless required by applicable law or agreed to in writing, software distributed under +#the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR REPRESENTATIONS +#OF ANY KIND, either express or implied. See the License for the specific language +#governing permissions and limitations under the License. 
class SecretResolver:
    """Base interface for secret resolvers; each subclass handles one secret type."""

    def supports(self, secret_type):
        """Return True if this resolver can handle *secret_type*."""
        return False

    def resolve(self, secret_type, secret_params):
        """Return the secret value, or None when the type is unsupported."""
        return None


class SSMSecretResolver(SecretResolver):
    """Resolves secrets stored in AWS SSM Parameter Store."""

    def supports(self, secret_type):
        return secret_type == "ssm"

    def resolve(self, secret_type, secret_params):
        """Fetch the SSM parameter described by *secret_params*.

        Requires "path" and "aws_profile"; "region_name" defaults to us-east-1.
        """
        path = self.get_param_or_exception("path", secret_params)
        aws_profile = self.get_param_or_exception("aws_profile", secret_params)
        region_name = secret_params.get("region_name", "us-east-1")
        ssm = SimpleSSM(aws_profile, region_name)
        return ssm.get(path)

    def get_param_or_exception(self, key, params):
        """Return params[key], raising a descriptive error when it is missing."""
        if key not in params:
            raise Exception("Could not find required key '{}' in the secret params: {}".format(key, params))
        return params[key]


# TODO - vault resolver
class VaultSecretResolver(SecretResolver):
    """Placeholder for a future HashiCorp Vault resolver; supports nothing yet."""

    def supports(self, secret_type):
        return False

    def resolve(self, secret_type, secret_params):
        return None


class AggregatedSecretResolver(SecretResolver):
    """Dispatches to the first registered resolver that supports the secret type."""

    SECRET_RESOLVERS = (SSMSecretResolver(), VaultSecretResolver())

    def supports(self, secret_type):
        # Fixed idiom: generator expression avoids building a throwaway list.
        return any(resolver.supports(secret_type) for resolver in self.SECRET_RESOLVERS)

    def resolve(self, secret_type, secret_params):
        for resolver in self.SECRET_RESOLVERS:
            if resolver.supports(secret_type):
                return resolver.resolve(secret_type, secret_params)

        raise Exception("Could not resolve secret type '{}' with params {}".format(secret_type, secret_params))
def run(args=None):
    """App entry point.

    Builds the application container and runs the selected command. Runners
    may either return an int exit code (forwarded to the caller as-is) or a
    command descriptor, which is executed and terminates the process via
    sys.exit with the command's status.
    """
    app_container = AppContainer(args)
    output = app_container.run()

    # Fixed idiom: isinstance instead of `type(output) is int`; runners hand
    # back plain exit codes, and isinstance is the conventional check.
    if isinstance(output, int):
        return output
    else:
        ret = app_container.execute(output)
        sys.exit(ret)
b/src/ops/simplessm.py @@ -10,7 +10,6 @@ #!/usr/bin/env python -from ops.cli import display from botocore.exceptions import ClientError import boto3 import os diff --git a/src/ops/terraform/__init__.py b/src/ops/terraform/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/ops/terraform/terraform_cmd_generator.py b/src/ops/terraform/terraform_cmd_generator.py new file mode 100644 index 0000000..c646b22 --- /dev/null +++ b/src/ops/terraform/terraform_cmd_generator.py @@ -0,0 +1,467 @@ +# Copyright 2019 Adobe. All rights reserved. +# This file is licensed to you under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. You may obtain a copy +# of the License at http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software distributed under +# the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR REPRESENTATIONS +# OF ANY KIND, either express or implied. See the License for the specific language +# governing permissions and limitations under the License. 
import os
import re
import shutil

from jinja2 import FileSystemLoader
from subprocess import Popen, PIPE
from ops.cli import err, display


class TerraformCommandGenerator(object):
    """Builds the shell command line (plus post actions) for a terraform run.

    The single public entry point is generate(args); the other methods are
    helpers for path selection, version checking and Jinja2 templating of
    terraform sources.
    """

    def __init__(self, root_dir, cluster_config, inventory_generator, ops_config, template):
        self.cluster_config = cluster_config
        self.root_dir = root_dir
        self.inventory_generator = inventory_generator
        self.ops_config = ops_config
        self.template = template

    def generate(self, args):
        """Return dict(command=<shell string>, post_actions=[...]) for args.subcommand.

        Returns None for the 'template' subcommand (handled inline) and for an
        unknown/missing subcommand (an error is displayed instead).
        """

        self.selected_terraform_path = args.path_name
        self.set_current_working_dir()
        current_terraform_version = self.check_terraform_version()
        config = self.cluster_config

        # Terraform 0.x era: this extracts the *minor* component of "v0.X.Y"
        # and treats it as the feature level (>= 9 means `terraform init` exists).
        current_terraform_version_major = int(current_terraform_version.split('.')[1])
        if 'enable_consul_remote_state' in config['terraform']:
            terraform_remote_state = config['terraform']['enable_consul_remote_state']
        elif config['terraform'].get('state', {'type': None}).get('type') == 's3':
            terraform_remote_state = 'true'
        else:
            terraform_remote_state = 'false'

        terraform_config = config.get('terraform', {})
        terraform_path = self.get_terraform_path()
        generate_module_templates = False

        plan_variables = terraform_config.get('vars', {})
        # plan_variables['cluster'] = config['cluster']
        if self.cluster_config.has_ssh_keys:
            plan_variables['has_ssh_keys'] = True
            plan_variables['cluster_ssh_pubkey_file'] = self.cluster_config.cluster_ssh_pubkey_file
            plan_variables['cluster_ssh_prvkey_file'] = self.cluster_config.cluster_ssh_prvkey_file
        if terraform_config.get('boto_profile'):
            self.add_profile_vars(plan_variables, terraform_config.get('boto_profile'))

        # NOTE(review): `vars` shadows the builtin; kept as-is to avoid touching code.
        vars = ''
        for key, val in plan_variables.items():
            vars += " -var '%s=%s' " % (key, val)

        state_file = 'terraform.{cluster}.tfstate'.format(cluster=config['cluster'])
        plan_file = 'terraform.{cluster}.plan'.format(cluster=config['cluster'])
        landscape = ''

        if current_terraform_version_major >= 9:
            if args.force_copy:
                terraform_init_command = 'terraform init -force-copy && '
            else:
                terraform_init_command = 'terraform init && '
            # regarding state location we give priority to the cli parameter
            if args.state_location == 'remote':
                state_argument = ''
                state_out_argument = ''
            elif args.state_location == 'local':
                state_argument = "-state={state_file}".format(
                    state_file=state_file
                )
                state_out_argument = "-state-out={state_file}".format(
                    state_file=state_file
                )
            else:
                # no cli parameter, decide based on config file
                if terraform_remote_state == 'true':
                    state_argument = ''
                    state_out_argument = ''
                else:
                    state_argument = "-state={state_file}".format(
                        state_file=state_file
                    )
                    state_out_argument = "-state-out={state_file}".format(
                        state_file=state_file
                    )
        else:
            # pre-0.9 terraform: no `init` subcommand, always use local state files
            state_argument = "-state={state_file}".format(
                state_file=state_file
            )
            state_out_argument = "-state-out={state_file}".format(
                state_file=state_file
            )
            terraform_init_command = ''

        if args.subcommand == 'template':
            if args.template_location:
                self.copy_static_files(args.template_location, terraform_path)
                self.write_module_templates(args.template_location)
                self.write_var_file(os.path.join(args.template_location, terraform_path), plan_variables)
            else:
                for original, fname, contents in self.get_templated_files():
                    display("# %s -> %s" % (original, fname), color="green")
                    display("# --------------", color="green")
                    display(contents)
            return

        if "variables_file" in config['terraform']:
            variables_file = ' -var-file="{}" '.format(config['terraform']["variables_file"])
        else:
            variables_file = ' '

        if args.subcommand == 'plan':
            generate_module_templates = True
            terraform_refresh_command = remove_local_cache = ''
            if args.do_refresh:
                terraform_refresh_command = "terraform refresh" \
                                            "{variables_file}" \
                                            " -input=false {vars} {state_argument} && ".format(vars=vars,
                                                                                               state_argument=state_argument,
                                                                                               variables_file=variables_file)

            if self.ops_config['terraform.landscape'] and not args.raw_plan_output:
                # pipe plan output through the `landscape` pretty-printer
                landscape = '| landscape'

            if self.ops_config['terraform.remove_local_cache']:
                remove_local_cache = 'rm -rf .terraform && '

            cmd = "cd {root_dir}/{terraform_path} && " \
                  "{remove_local_cache}" \
                  "terraform get -update && " \
                  "{terraform_init_command}" \
                  "{terraform_refresh_command}" \
                  "terraform plan " \
                  "{variables_file}" \
                  "-out={plan_file} -refresh=false -input=false {vars} {state_argument}".format(
                      root_dir=self.root_dir,
                      terraform_path=terraform_path,
                      terraform_init_command=terraform_init_command,
                      vars=vars,
                      state_argument=state_argument,
                      plan_file=plan_file,
                      terraform_refresh_command=terraform_refresh_command,
                      remove_local_cache=remove_local_cache,
                      variables_file=variables_file
                  )

        elif args.subcommand == 'apply':
            # the following is to have auxiliary rendered/templated files like cloudinit.yaml
            # that also needs templating. Without it, plan works but apply does not for this kind of files
            # todo maybe this deserves a better implementation later
            generate_module_templates = True

            auto_approve = ''
            if args.auto_approve:
                auto_approve = '-auto-approve'

            self.inventory_generator.clear_cache()
            if args.skip_plan:
                # Run Terraform apply without running a plan first
                cmd = "cd {root_dir}/{terraform_path} && {terraform_init_command}" \
                      "rm -f {plan_file} && terraform apply {vars}" \
                      "-refresh=true {state_argument} {variables_file} {auto_approve}".format(
                          plan_file=plan_file,
                          root_dir=self.root_dir,
                          state_argument=state_argument,
                          terraform_init_command=terraform_init_command,
                          terraform_path=terraform_path,
                          vars=vars,
                          variables_file=variables_file,
                          auto_approve=auto_approve
                      )
            else:
                # apply a previously generated plan; always delete the plan file
                # afterwards while preserving terraform's exit code
                cmd = "cd {root_dir}/{terraform_path} && " \
                      "terraform apply " \
                      "-refresh=true {state_out_argument} {plan_file} {variables_file}; code=$?; rm -f {plan_file}; exit $code".format(
                          plan_file=plan_file,
                          root_dir=self.root_dir,
                          state_out_argument=state_out_argument,
                          terraform_path=terraform_path,
                          vars=vars,
                          variables_file=variables_file
                      )

        elif args.subcommand == 'destroy':
            generate_module_templates = True
            remove_local_cache = ''

            if self.ops_config['terraform.remove_local_cache']:
                remove_local_cache = 'rm -rf .terraform && '
            # show a destroy plan first, then destroy
            cmd = "cd {root_dir}/{terraform_path} && " \
                  "{remove_local_cache}" \
                  "{terraform_init_command}" \
                  "terraform plan -destroy " \
                  "-refresh=true {vars} {variables_file} {state_argument} && " \
                  "terraform destroy {vars} {variables_file} {state_argument} -refresh=true".format(
                      root_dir=self.root_dir,
                      terraform_path=terraform_path,
                      variables_file=variables_file,
                      vars=vars,
                      state_argument=state_argument,
                      terraform_init_command=terraform_init_command,
                      remove_local_cache=remove_local_cache
                  )
        elif args.subcommand == 'output':
            cmd = "cd {root_dir}/{terraform_path} && " \
                  "terraform output {state_argument} {output}".format(
                      root_dir=self.root_dir,
                      terraform_path=terraform_path,
                      output=args.var,
                      state_argument=state_argument
                  )
        elif args.subcommand == 'refresh':
            generate_module_templates = True
            cmd = "cd {root_dir}/{terraform_path} && " \
                  "terraform get -update && " \
                  "terraform refresh {variables_file} {state_argument} {vars}".format(
                      root_dir=self.root_dir,
                      terraform_path=terraform_path,
                      vars=vars,
                      variables_file=variables_file,
                      state_argument=state_argument
                  )
        elif args.subcommand == 'taint' or args.subcommand == 'untaint':
            cmd = "cd {root_dir}/{terraform_path} && " \
                  "{terraform_init_command}" \
                  "terraform {command} {state_argument} -module={module} {resource}".format(
                      root_dir=self.root_dir,
                      command=args.subcommand,
                      terraform_path=terraform_path,
                      resource=args.resource,
                      module=args.module,
                      state_argument=state_argument,
                      terraform_init_command=terraform_init_command
                  )
        elif args.subcommand == 'show':
            # `show` can display either the saved plan or the state file
            if args.plan:
                state = plan_file
            else:
                state = state_file

            cmd = "cd {root_dir}/{terraform_path} && " \
                  "terraform show {state}".format(
                      root_dir=self.root_dir,
                      terraform_path=terraform_path,
                      state=state
                  )
        elif args.subcommand == 'import':
            generate_module_templates = True
            # NOTE(review): the `command=` kwarg below is unused by the format string
            cmd = "cd {root_dir}/{terraform_path} && " \
                  "terraform import {state_argument} {vars} module.{module}.{resource} {name}".format(
                      root_dir=self.root_dir,
                      command=args.subcommand,
                      terraform_path=terraform_path,
                      resource=args.resource,
                      module=args.module,
                      name=args.name,
                      state_argument=state_argument,
                      vars=vars,
                  )
        elif args.subcommand == 'console':
            generate_module_templates = True
            cmd = "cd {root_dir}/{terraform_path} && " \
                  "terraform {command} {state_argument} {vars}".format(
                      root_dir=self.root_dir,
                      command=args.subcommand,
                      terraform_path=terraform_path,
                      state_argument=state_argument,
                      vars=vars,
                  )
        elif args.subcommand == 'validate':
            generate_module_templates = True
            cmd = "cd {root_dir}/{terraform_path} && {terraform_init_command} " \
                  "terraform {command} {vars} {variables_file}".format(
                      command=args.subcommand,
                      root_dir=self.root_dir,
                      terraform_init_command=terraform_init_command,
                      terraform_path=terraform_path,
                      vars=vars,
                      variables_file=variables_file
                  )
        elif args.subcommand is not None:
            # Pass-through for any other terraform subcommand string.
            # Examples:
            # - command = "state push errored.tfstate"
            # - command = "force-unlock <LOCK_ID>"
            generate_module_templates = True
            cmd = "cd {root_dir}/{terraform_path} && {terraform_init_command} " \
                  "terraform {command}".format(
                      command=args.subcommand,
                      root_dir=self.root_dir,
                      terraform_init_command=terraform_init_command,
                      terraform_path=terraform_path,
                  )
        else:
            display('Terraform subcommand \'%s\' not found' % args.subcommand, color='red')
            return

        if generate_module_templates:
            # render Jinja2 templates now; schedule their removal after the run
            self.write_module_templates()
            post_actions = [self.remove_module_template]
        else:
            post_actions = []

        # pass on the terraform args to the terraform command line
        cmd = ' '.join([cmd] + args.terraform_args + [landscape])

        return dict(
            command=cmd,
            post_actions=post_actions
        )

    def add_profile_vars(self, plan_variables, profile_name):
        """Inject AWS profile and shared-credentials-file vars (pre-quoted) into *plan_variables*."""
        plan_variables['profile'] = '"%s"' % profile_name

        home_dir = os.environ.get('HOME')
        plan_variables['shared_credentials_file'] = '"{}/.aws/credentials"'.format(home_dir)
        # plan_variables['access_key'] = '"%s"' % aws.acess_key(profile_name)
        # plan_variables['secret_key'] = '"%s"' % aws.secret_key(profile_name)

    def get_terraform_path(self):
        """Resolve the terraform source dir from 'terraform.path' or 'terraform.paths' + --path-name."""
        if 'path' in self.cluster_config['terraform']:
            return self.cluster_config['terraform']['path']

        if 'paths' not in self.cluster_config['terraform']:
            raise Exception("Could not find 'terraform.path' / 'terraform.paths' in the cluster configuration")

        paths = self.cluster_config['terraform']['paths']
        selected = self.selected_terraform_path
        if selected is None:
            raise Exception(
                'You need to specify which path you want to use with --path-name. Options are: %s ' % paths.keys())

        try:
            return paths[selected]
        except KeyError:
            raise Exception("Could not find path '%s' in 'terraform.paths'. Options are: %s" % (selected, paths.keys()))

    def get_terraform_src_paths(self):
        """Return the list of terraform source paths to template (currently one)."""
        return [self.get_terraform_path()]

    def check_terraform_version(self):
        """Return the installed terraform version string; raise if it mismatches 'terraform.version'.

        'latest' in the ops config disables the check.
        """
        expected_version = self.ops_config['terraform.version']

        try:
            execution = Popen(['terraform', '--version'], stdin=PIPE, stdout=PIPE, stderr=PIPE)
        except Exception as e:
            err('Terraform does not appear to be installed, please ensure terraform is in your PATH')
            raise e
        # first line of `terraform --version` output, stripped of the prefix
        current_version, execution_error = execution.communicate()
        current_version = current_version.replace('Terraform ', '').split('\n', 1)[0]
        if expected_version == 'latest':
            return current_version

        if current_version != expected_version and execution.returncode == 0:
            raise Exception("Terraform should be %s, but you have %s. Please change your version." % (
                expected_version, current_version))

        return current_version

    def get_templated_files(self):
        """Yield (source, dest, rendered_content) for every Jinja2 template found."""
        for path in self.get_terraform_src_paths():
            for source, dest, content in self.template_files(path):
                yield source, dest, content

    def copy_static_files(self, path, terraform_path):
        """Copy the terraform sources and the modules tree under *path*."""
        shutil.copytree(os.path.join(self.root_dir, terraform_path), os.path.join(path, terraform_path))
        shutil.copytree(os.path.join(self.root_dir, 'modules'), os.path.join(path, 'modules'))

    def write_var_file(self, path, variables):
        """Write *variables* as an ops.auto.tfvars file under *path*, quoting bare values."""
        fname = os.path.join(path, 'ops.auto.tfvars')
        with open(fname, 'w') as f:
            for key, val in variables.items():
                if val[0] != '"':
                    val = '"{}"'.format(val)
                f.write("{key} = {val}\n".format(key=key, val=val))

    def write_module_templates(self, path=''):
        """Render every Jinja2 template to disk, optionally relocated under *path*."""
        for original, fname, result in self.get_templated_files():
            if path:
                fname = os.path.join(path, fname)
            folder = os.path.dirname(fname)
            if not os.path.exists(folder):
                os.makedirs(folder)
            with open(fname, 'w') as f:
                # NOTE(review): writing encoded bytes to a text-mode file is a
                # Python 2 pattern; on Python 3 this raises TypeError — verify.
                f.write(result.encode('utf8'))

    def remove_module_template(self):
        """Delete the files previously rendered from templates (post-action)."""
        filenames = set()
        for source, dest, content in self.get_templated_files():
            filenames.add(dest)
        for filename in filenames:
            try:
                os.remove(filename)
            except:
                # best-effort cleanup; failure is only reported, never raised
                err('Could not remove file %s' % filename)

    def get_terraform_module_paths(self, rendered):
        """ Return list of relative module paths that are included in a terraform
        config file """

        return re.findall('source\s*=\s*"(.+?)"', rendered)

    def template_files(self, path):
        """Render all Jinja2 templates in *path* and, recursively, in referenced modules.

        :return: list of (source, dest, rendered) tuples
        """
        result = []
        terraform_file_contents = self.get_terraform_files(path)

        for source in self.list_jinja_templates(path):
            dest = source.replace(".jinja2", "")
            config_all = self.cluster_config.all()
            # Allow access to configuration values in Jinja2. Replace '.' with '_' to make them valid variable names
            config_all['opsconfig'] = {k.replace('.', '_'): v for k, v in self.ops_config.all().items()}
            config_all['selected_terraform_path'] = self.selected_terraform_path
            if config_all.get('terraform', {}).get('boto_profile'):
                self.add_profile_vars(config_all, config_all['terraform']['boto_profile'])
            rendered = self.template.render(source, config_all)

            terraform_file_contents.append(rendered)

            result.append((source, dest, rendered))

        # search for module references in all terraform files in this path, including rendered templates
        for discovered_module in self.find_referenced_modules(path, terraform_file_contents):
            result.extend(self.template_files(discovered_module))

        return result

    def find_referenced_modules(self, base_path, terraform_files):
        # look for terraform module references in this path
        ret = set()

        for rendered in terraform_files:
            for relative_module_path in self.get_terraform_module_paths(rendered):
                new_path = os.path.normpath(base_path + '/' + relative_module_path)
                ret.add(new_path)

        return ret

    def list_files(self, path, extension):
        """Return files under *path* with the given extension, skipping .terraform caches."""
        template_paths = []
        loader = FileSystemLoader(path)
        for fname in loader.list_templates():
            name, ext = os.path.splitext(fname)
            template_path = path + '/' + fname
            # Do not go into terraform community provided modules
            if ext == extension and '.terraform' not in template_path:
                template_paths.append(template_path)

        return template_paths

    def get_terraform_files(self, path):
        """Return the contents of every .tf file found under *path*."""
        ret = []
        for fname in self.list_files(path, '.tf'):
            with open(fname) as f:
                ret.append(f.read())

        return ret

    def list_jinja_templates(self, path):
        """Return the .jinja2 template files under *path*."""
        return self.list_files(path, '.jinja2')

    def set_current_working_dir(self):
        # terraform commands are generated relative to the repo root
        os.chdir(self.root_dir)
def test_composition_discovery():
    # Input order must not matter: output follows the configured order.
    configured = ["comp1", "compB", "comp3"]
    sorter = CompositionSorter(composition_order=configured)
    result = sorter.get_sorted_compositions(["comp3", "comp1", "compB"])
    assert result == configured


def test_unknown_composition_is_ignored():
    # Compositions absent from the configured order are silently dropped.
    configured = ["comp1", "comp2"]
    sorter = CompositionSorter(composition_order=configured)
    result = sorter.get_sorted_compositions(["comp2", "comp1", "unknown_composition"])
    assert result == configured


def test_reverse_order():
    # reverse=True yields the configured order backwards (as a tuple).
    sorter = CompositionSorter(composition_order=["comp1", "comp2"])
    result = sorter.get_sorted_compositions(["comp1", "comp2"], reverse=True)
    assert result == ("comp2", "comp1")