add __version__ file (#239) (#240)
* add __version__ file (#239)

* add __version__ file

* update orbs

* support backend migration (#237)

* support backend migration

* heal diff

* treat thing

* bump version

* Adding notifications.

* try to run this test again

* Remove unnecessary context.

* Remove unnecessary post-step.

* add sleep between plan storage and run

* attach at workspace

* fix more test stuff

* fix more test stuff

* more test playing

* check if debug is where it all started

* change test order?

* change notifications.

* bump examples submodule

* log to info diff in plan

* update how we calculate diff in tf plugin

* improve plan diff

* more test fix

* Updated config.yml

---------

Co-authored-by: Nely Nehemia <[email protected]>
Co-authored-by: Bartosz Kosciug <[email protected]>
Co-authored-by: bartoszkosciug <[email protected]>
4 people authored Apr 10, 2023
1 parent 6bb246e commit 1faee85
Showing 17 changed files with 523 additions and 138 deletions.
21 changes: 19 additions & 2 deletions .circleci/config.yml
@@ -1,10 +1,24 @@
version: 2.1

job-post-steps: &job-post-steps
post-steps:
- slack/notify_failed

unittest-post-steps: &unittest-post-steps
post-steps:
- store_test_results:
path: /home/circleci/project/nosetests.xml
- store_artifacts:
path: /home/circleci/project/coverage.xml
prefix: tests
- slack/notify_failed

orbs:
node: cloudify/public-unittest-orb@1 #orb version
wagonorb: cloudify/wagon-bulder-orb@2 #orb version
releaseorb: cloudify/release-orb@1 #orb version
managerorb: cloudify/manager-orb@2
slack: cloudify/notify-slack@2

checkout:
post:
@@ -31,6 +45,8 @@ commands:

prepare_test_manager:
steps:
- attach_workspace:
at: workspace
- run: ecosystem-test prepare-test-manager -l $TEST_LICENSE --generate-new-aws-token -es aws_access_key_id=$aws_access_key_id -es aws_secret_access_key=$aws_secret_access_key --yum-package python-netaddr --yum-package git -p $(find ~/project/workspace/build/ -name *centos-Core*x86_64.wgn) ~/project/plugin.yaml
- run: ecosystem-test upload-plugin -PN utilities

@@ -80,8 +96,6 @@ workflows:
- integration_tests_py3:
requires:
- wagonorb/wagon
- wagonorb/arch64_wagon
- wagonorb/rhel_wagon
filters:
branches:
only: /([0-9\.]*\-build|master|dev)/
@@ -127,6 +141,9 @@ workflows:
branches:
only: /([0-9\.]*\-build|master|dev)/
- integration_tests_py3:
context:
- slack-secrets
<<: *job-post-steps
requires:
- wagonorb/wagon
- wagonorb/rhel_wagon
105 changes: 67 additions & 38 deletions .circleci/test_features.py
@@ -13,7 +13,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import time
from os import environ
from json import loads, JSONDecodeError
from contextlib import contextmanager

import pytest
@@ -26,6 +28,24 @@

TEST_ID = environ.get('__ECOSYSTEM_TEST_ID', 'virtual-machine')

source = 'https://github.com/cloudify-community/tf-source/archive/refs/heads/main.zip' # noqa

public_params = {
'source': source,
'source_path': 'template/modules/public_vm',
}

private_params = {
'source': source,
'source_path': 'template/modules/private_vm',
}

private_params_force = {
'source': source,
'source_path': 'template/modules/private_vm',
'force': False
}


@contextmanager
def test_cleaner_upper():
@@ -36,65 +56,57 @@ def test_cleaner_upper():
raise


@pytest.mark.dependency(depends=['test_plan_protection'])
def test_drifts(*_, **__):
with test_cleaner_upper():
before_props = cloud_resources_node_instance_runtime_properties()
change_a_resource(before_props)
executions_start('refresh_terraform_resources', TEST_ID, 150)
after_props = cloud_resources_node_instance_runtime_properties()
drifts = after_props.get('drifts')
logger.info('Drifts: {drifts}'.format(drifts=drifts))
if drifts:
return
raise Exception('The test_drifts test failed.')


@pytest.mark.dependency()
def test_plan_protection(*_, **__):
with test_cleaner_upper():
params = {
'source': 'https://github.com/cloudify-community/tf-source/archive/refs/heads/main.zip', # noqa
'source_path': 'template/modules/public_vm',
}
executions_start('terraform_plan', TEST_ID, 300, params)
executions_start('terraform_plan', TEST_ID, 300, public_params)
logger.info('Wrap plan for public VM. '
'Now we will run reload_terraform_template for private VM '
'and it should fail.')
params = {
'source': 'https://github.com/cloudify-community/tf-source/archive/refs/heads/main.zip', # noqa
'source_path': 'template/modules/private_vm',
'force': False
}
try:
executions_start('reload_terraform_template', TEST_ID, 300, params)
executions_start(
'reload_terraform_template', TEST_ID, 300, private_params_force)
except EcosystemTestException:
logger.info('Apply caught our plan mismatch.')
logger.info('Apply caught our plan mismatch.'.upper())
else:
raise EcosystemTestException(
'Apply did not catch the plan mismatch.')
del params['force']
executions_start('terraform_plan', TEST_ID, 300, params)
logger.info('Now rerunning apply with a matching plan.')
executions_start('terraform_plan', TEST_ID, 300, private_params)
time.sleep(10)
before = cloud_resources_node_instance_runtime_properties()
logger.info('Before outputs: {before}'.format(
before=before.get('outputs')))
logger.info('Now rerunning plan.')
params['force'] = False
executions_start('reload_terraform_template', TEST_ID, 300, params)
executions_start(
'reload_terraform_template', TEST_ID, 300, private_params_force)
after = cloud_resources_node_instance_runtime_properties()
logger.info('After outputs: {after}'.format(
after=after.get('outputs')))
if after['outputs'] == before['outputs']:
raise Exception('Outputs should not match after reload.')


@pytest.mark.dependency(depends=['test_plan_protection'])
def test_drifts(*_, **__):
with test_cleaner_upper():
before_props = cloud_resources_node_instance_runtime_properties()
change_a_resource(before_props)
executions_start('refresh_terraform_resources', TEST_ID, 150)
after_props = cloud_resources_node_instance_runtime_properties()
drifts = after_props.get('drifts')
logger.info('Drifts: {drifts}'.format(drifts=drifts))
if drifts:
return
raise Exception('The test_drifts test failed.')


def nodes():
return cloudify_exec('cfy nodes list')


def node_instances():
return cloudify_exec('cfy node-instances list -d {}'.format(TEST_ID))
return cloudify_exec(
'cfy node-instances list -d {}'.format(TEST_ID), log=False)


def node_instance_by_name(name):
@@ -106,7 +118,7 @@ def node_instance_by_name(name):

def node_instance_runtime_properties(name):
node_instance = cloudify_exec(
'cfy node-instance get {name}'.format(name=name))
'cfy node-instance get {name}'.format(name=name), log=False)
return node_instance['runtime_properties']


@@ -118,8 +130,6 @@ def cloud_resources_node_instance_runtime_properties():
raise RuntimeError('No cloud_resources node instances found.')
runtime_properties = node_instance_runtime_properties(
node_instance['id'])
logger.info('Runtime properties: {runtime_properties}'.format(
runtime_properties=runtime_properties))
if not runtime_properties:
raise RuntimeError('No cloud_resources runtime_properties found.')
return runtime_properties
@@ -128,13 +138,32 @@ def cloud_resources_node_instance_runtime_properties():
def change_a_resource(props):
group = props['resources']['example_security_group']
sg_id = group['instances'][0]['attributes']['id']
environ['AWS_DEFAULT_REGION'] = \
props['resource_config']['variables']['aws_region']
ec2 = client('ec2')
terraform_vars = props['resource_config']['variables']
environ['AWS_DEFAULT_REGION'] = terraform_vars['aws_region']
access = get_secret(terraform_vars['access_key'])
secret = get_secret(terraform_vars['secret_key'])
client_kwargs = dict(
aws_access_key_id=access,
aws_secret_access_key=secret,
)
if 'token' in terraform_vars:
token = get_secret(terraform_vars['token'])
client_kwargs.update({'aws_session_token': token})
ec2 = client('ec2', **client_kwargs)
ec2.authorize_security_group_ingress(
GroupId=sg_id,
IpProtocol="tcp",
CidrIp="0.0.0.0/0",
FromPort=53,
ToPort=53
)


def get_secret(value):
try:
loaded_value = loads(value)
except JSONDecodeError:
return value
secret_name = loaded_value['get_secret']
value = cloudify_exec('cfy secrets get {}'.format(secret_name), log=False)
return value.get('value')
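
For context, get_secret above accepts either a plain value or a serialized {"get_secret": ...} intrinsic. A brief illustration of the two paths (the key value and secret name here are made up):

# Plain string: loads() raises JSONDecodeError, so the value is returned as-is.
get_secret('AKIAEXAMPLEKEY')
# Serialized intrinsic: resolved on the manager via `cfy secrets get aws_access_key_id`.
get_secret('{"get_secret": "aws_access_key_id"}')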
3 changes: 3 additions & 0 deletions CHANGELOG.txt
@@ -1,3 +1,6 @@
0.20.0:
- CYBL-2014 Support backend migration from local to hosted.
- add __version__.py file in cloudify_tf folder
0.19.15: Do not delete files when debug node used.
0.19.14:
- Fix check status and check drift
1 change: 1 addition & 0 deletions cloudify_tf/__version__.py
@@ -0,0 +1 @@
version = '0.20.0'
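
A quick sketch of reading the new version marker at runtime (assuming the cloudify_tf package is on the import path):

from cloudify_tf.__version__ import version

print(version)  # '0.20.0'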
32 changes: 27 additions & 5 deletions cloudify_tf/tasks.py
@@ -16,6 +16,7 @@
import os
import sys

from deepdiff import DeepDiff
from cloudify.decorators import operation
from cloudify import ctx as ctx_from_imports
from cloudify.utils import exception_to_error_cause
@@ -173,14 +174,35 @@ def apply(ctx, tf, force=False, **kwargs):
_apply(tf, old_plan, force)


@operation
@with_terraform
def migrate_state(ctx, tf, backend, backend_config, **_):
name = backend.get('name')
options = backend.get('options')
credentials = backend.get('credentials', {})
if credentials:
ctx.logger.info('Credentials are not used in migrate-state.')
tf.migrate_state(name, options, backend_config)
resource_config = utils.get_resource_config()
resource_config.update({'backend': backend})
utils.update_resource_config(resource_config)
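
For illustration, a hypothetical set of inputs to the new migrate_state operation; the parameter names (name, options, credentials, backend_config) come from the code above, while every value below is invented:

backend = {
    'name': 's3',                      # illustrative backend name; stored in the backend definition
    'options': {                       # illustrative backend options; applied via put_backend()
        'bucket': 'example-tfstate-bucket',
        'key': 'vm/terraform.tfstate',
        'region': 'us-east-1',
    },
    'credentials': {},                 # not used by migrate-state; a log message notes this
}
backend_config = {                     # each key=value pair becomes a -backend-config argument to terraform init
    'bucket': 'example-tfstate-bucket',
    'key': 'vm/terraform.tfstate',
    'region': 'us-east-1',
}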


class FailedPlanValidation(NonRecoverableError):
pass


def compare_plan_results(new_plan, old_plan, force):
if old_plan != new_plan:
ctx_from_imports.logger.debug('New plan and old plan diff {}'.format(
set(old_plan) ^ set(new_plan)))
def compare_plan_results(new_plan, old_plan):

left = sorted(old_plan.get('resource_changes', []),
key=lambda d: d['address'])
right = sorted(new_plan.get('resource_changes', []),
key=lambda d: d['address'])

diff = DeepDiff(left, right)
if diff:
ctx_from_imports.logger.info(
'Old plan and new plan diff {}'.format(diff))
raise FailedPlanValidation(
'The new plan differs from the old plan. '
'Please rerun the plan workflow before executing the apply workflow.')
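
To make the new comparison concrete, here is a standalone sketch (not taken from the plugin's tests) of what DeepDiff reports when two plans differ in a single resource change:

from deepdiff import DeepDiff

old_plan = {'resource_changes': [
    {'address': 'aws_instance.example', 'change': {'actions': ['no-op']}},
]}
new_plan = {'resource_changes': [
    {'address': 'aws_instance.example', 'change': {'actions': ['update']}},
]}

left = sorted(old_plan.get('resource_changes', []), key=lambda d: d['address'])
right = sorted(new_plan.get('resource_changes', []), key=lambda d: d['address'])

# A non-empty diff is what makes compare_plan_results raise FailedPlanValidation.
print(DeepDiff(left, right))
# roughly: {'values_changed': {"root[0]['change']['actions'][0]":
#           {'new_value': 'update', 'old_value': 'no-op'}}}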
@@ -193,7 +215,7 @@ def _apply(tf, old_plan=None, force=False):
tf.run_terratag()
if old_plan and not force:
new_plan = tf.plan_and_show()
compare_plan_results(new_plan, old_plan, force)
compare_plan_results(new_plan, old_plan)
if not force:
tf.check_tflint()
tf.check_tfsec()
30 changes: 24 additions & 6 deletions cloudify_tf/terraform/__init__.py
@@ -310,8 +310,7 @@ def plan_file(self):
json_result, _ = self.plan_and_show_two_formats()
with tempfile.NamedTemporaryFile(
'w',
suffix='.json',
delete=delete_debug()) as plan_file:
suffix='.json') as plan_file:
plan_file.write(json.dumps(json_result))
yield plan_file.name

@@ -389,18 +388,37 @@ def tfvars(self):
def tfvars(self, value):
self._tfvars = value

def init(self, command_line_args=None):
cmdline = ['init', '-no-color', '-input=false']
def init(self, command_line_args=None, prefix=None, no_input=True):
cmdline = ['init', '-no-color']
if no_input:
cmdline.append('-input=false')
if self.plugins_dir:
cmdline.append('--plugin-dir=%s' % self.plugins_dir)
if self.provider_upgrade:
cmdline.append('--upgrade')
command = self._tf_command(cmdline)
if command_line_args:
command.extend(command_line_args)
if prefix:
command[:0] = prefix
with self.runtime_file(command):
return self.execute(command)

def migrate_state(self, name, options, backend_config):
migrate_args = []
answer_yes = ['echo', 'yes', '|']
self._backend = {
'name': name,
'options': options,
}
self.put_backend()
for key, value in backend_config.items():
migrate_args.append(
'-backend-config="{key}={value}"'.format(
key=key, value=value))
migrate_args.append('-migrate-state')
self.init(migrate_args, answer_yes, no_input=False)
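
As a rough sketch of what this produces: for a made-up backend_config of two keys, the arguments assembled above boil down to an init invocation along these lines (the literal 'terraform' stands in for the binary path resolved by _tf_command):

# Mirrors the argument assembly in migrate_state() for an invented backend_config.
backend_config = {'bucket': 'example-tfstate-bucket', 'key': 'vm/terraform.tfstate'}

migrate_args = ['-backend-config="{key}={value}"'.format(key=k, value=v)
                for k, v in backend_config.items()]
migrate_args.append('-migrate-state')

command = ['echo', 'yes', '|', 'terraform', 'init', '-no-color'] + migrate_args
print(' '.join(command))
# echo yes | terraform init -no-color -backend-config="bucket=example-tfstate-bucket"
#     -backend-config="key=vm/terraform.tfstate" -migrate-state   (shown wrapped)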

def destroy(self):
command = self._tf_command(['destroy',
'-auto-approve',
@@ -493,7 +511,7 @@ def plan_and_show_two_formats(self):
Execute terraform plan,
then terraform show on the generated tfplan file
"""
with tempfile.NamedTemporaryFile(delete=delete_debug()) as plan_file:
with tempfile.NamedTemporaryFile() as plan_file:
self.plan(plan_file.name)
json_result = self.show(plan_file.name)
plain_text_result = self.show_plain_text(plan_file.name)
@@ -505,7 +523,7 @@ def plan_and_show_state(self):
then terraform show on the generated tfplan file
"""
status_problems = []
with tempfile.NamedTemporaryFile(delete=delete_debug()) as plan_file:
with tempfile.NamedTemporaryFile() as plan_file:
self.plan(plan_file.name)
plan = self.show(plan_file.name)
self.refresh()