Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Running helm from lambda POC #40

Draft
wants to merge 5 commits into
base: master
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
39 changes: 17 additions & 22 deletions cdk/domino_cdk/aws_configurator.py
Original file line number Diff line number Diff line change
@@ -1,49 +1,41 @@
from os.path import isfile
from os import path
from re import MULTILINE
from re import split as re_split

import aws_cdk.aws_ec2 as ec2
import aws_cdk.aws_eks as eks
import requests
from aws_cdk import core as cdk
from requests import get as requests_get
from yaml import safe_load as yaml_safe_load

from .provisioners.lambda_utils import helm_lambda

dirname = path.dirname(path.abspath(__file__))

manifests = [
[
"calico",
"https://raw.githubusercontent.com/aws/amazon-vpc-cni-k8s/v1.7.10/config/master/calico.yaml",
]
{
"vendored": path.join(dirname, "manifests", "calico.yaml"), # in cwd
"alternative_url": "https://raw.githubusercontent.com/aws/amazon-vpc-cni-k8s/v1.7.10/config/master/calico.yaml",
},
]


# Currently this just installs calico directly via manifest, but will
# ultimately become a lambda that handles various tasks (calico,
# deprovisioning efs backups/route53, tagging the eks cluster until
# the CloudFormation api supports it, etc.)
class DominoAwsConfigurator:
def __init__(self, scope: cdk.Construct, eks_cluster: eks.Cluster, vpc: ec2.Vpc):
    """Capture the CDK scope, cluster and VPC, then kick off configuration.

    Runs both the manifest-based calico install and the lambda-backed
    helm install as part of construction.
    """
    # Stash the construct context needed by the install steps.
    self.vpc = vpc
    self.eks_cluster = eks_cluster
    self.scope = scope

    # Apply calico: first via raw manifests, then via the helm lambda POC.
    self.install_calico()
    self._install_calico_helm_lambda()

def install_calico(self):
# This produces an obnoxious diff on every subsequent run
# Using a helm chart does not, so we should switch to that
# However, we need to figure out how to get the helm chart
# accessible by the CDK lambda first. Not clear how to give
# s3 perms to it programmatically, and while ECR might be
# an option it also doesn't seem like there's a way to push
# the chart with existing api calls.
# Probably need to do some custom lambda thing.
for manifest in manifests:
filename = f"{manifest[0]}.yaml"
if isfile(filename):
filename = manifest["vendored"]
if path.isfile(filename):
with open(filename) as f:
manifest_text = f.read()
else:
manifest_text = requests_get(manifest[1]).text
manifest_text = requests.get(manifest["alternative_url"]).text
loaded_manifests = [yaml_safe_load(i) for i in re_split("^---$", manifest_text, flags=MULTILINE) if i]
crds = eks.KubernetesManifest(
self.scope,
Expand All @@ -58,3 +50,6 @@ def install_calico(self):
manifest=[notcrd for notcrd in loaded_manifests if notcrd["kind"] != "CustomResourceDefinition"],
)
non_crds.node.add_dependency(crds)

def _install_calico_helm_lambda(self):
    """Provision the helm-install lambda that deploys calico onto the cluster."""
    # Delegate to the shared lambda provisioner; it needs the CDK scope,
    # the target EKS cluster and the VPC to attach the lambda to.
    helm_lambda(
        scope=self.scope,
        name="install_calico",
        cluster=self.eks_cluster,
        vpc=self.vpc,
    )
Loading