From 9881911b7747fe0f3e385a48c206e2fba7addb99 Mon Sep 17 00:00:00 2001
From: Wilmer Bandres
Date: Mon, 17 Jul 2023 23:47:06 -0700
Subject: [PATCH 1/6] Fixing script bug reordering logic

Fixing types
updating setuptools
upgrading pyyaml
updating setuptools
Adding try catch for k8s 1.22
Trying new setup tools
Downgrading pyyaml again
going back to old setuptools version
using new yaml version
docker-compose as apt-get installed
docker-compose as apt-get installed
docker-compose as apt-get installed
docker-compose as apt-get installed
docker-compose as apt-get installed
docker-compose as apt-get installed
no pinned version docker-compose
pinning github
Pinning version
---
 .github/workflows/ci.yml                      |  2 +
 paasta_tools/cli/cmds/spark_run.py            |  2 +-
 paasta_tools/kubernetes_tools.py              | 22 ++++--
 paasta_tools/setup_kubernetes_crd.py          | 78 ++++++++++++-------
 paasta_tools/setup_kubernetes_internal_crd.py | 22 ++----
 requirements.txt                              |  2 +-
 tox.ini                                       |  3 -
 7 files changed, 76 insertions(+), 55 deletions(-)

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 8f63af1de0..f957bd6f62 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -36,6 +36,8 @@ jobs:
       DOCKER_REGISTRY: ""
     steps:
       - uses: actions/checkout@v2
+      - name: Install docker-compose
+        run: sudo apt-get update && sudo apt-get install -y docker-compose=1.25.0-1
       - uses: actions/setup-python@v2
         with:
           python-version: 3.7
diff --git a/paasta_tools/cli/cmds/spark_run.py b/paasta_tools/cli/cmds/spark_run.py
index 568cbd45ba..1d0d73bb47 100644
--- a/paasta_tools/cli/cmds/spark_run.py
+++ b/paasta_tools/cli/cmds/spark_run.py
@@ -1203,7 +1203,7 @@ def paasta_spark_run(args):
     document = POD_TEMPLATE.format(
         spark_pod_label=limit_size_with_hash(f"exec-{app_base_name}"),
     )
-    parsed_pod_template = yaml.load(document)
+    parsed_pod_template = yaml.full_load(document)
     with open(pod_template_path, "w") as f:
         yaml.dump(parsed_pod_template, f)
diff --git a/paasta_tools/kubernetes_tools.py b/paasta_tools/kubernetes_tools.py
index 9801f699bf..4518fc12c3 100644
--- a/paasta_tools/kubernetes_tools.py
+++ b/paasta_tools/kubernetes_tools.py
@@ -48,6 +48,7 @@
 from kubernetes.client import V1Affinity
 from kubernetes.client import V1AWSElasticBlockStoreVolumeSource
 from kubernetes.client import V1beta1CustomResourceDefinition
+from kubernetes.client import V1beta1CustomResourceDefinitionList
 from kubernetes.client import V1beta1PodDisruptionBudget
 from kubernetes.client import V1beta1PodDisruptionBudgetSpec
 from kubernetes.client import V1Capabilities
@@ -3859,27 +3860,32 @@ def mode_to_int(mode: Optional[Union[str, int]]) -> Optional[int]:
 
 
 def update_crds(
-    apiextensions: Union[
-        kube_client.ApiextensionsV1Api, kube_client.ApiextensionsV1beta1Api
-    ],
+    kube_client: KubeClient,
     desired_crds: Collection[
         Union[V1CustomResourceDefinition, V1beta1CustomResourceDefinition]
     ],
-    existing_crds: V1CustomResourceDefinitionList,
+    existing_crds: Union[
+        V1CustomResourceDefinitionList, V1beta1CustomResourceDefinitionList
+    ],
 ) -> bool:
-    success = True
     for desired_crd in desired_crds:
         existing_crd = None
         for crd in existing_crds.items:
             if crd.metadata.name == desired_crd.metadata["name"]:
                 existing_crd = crd
                 break
-        try:
+
+        if "beta" in desired_crd.api_version:
+            apiextensions = kube_client.apiextensions_v1_beta1
+        else:
+            apiextensions = kube_client.apiextensions
+
             if existing_crd:
                 desired_crd.metadata[
                     "resourceVersion"
                 ] = existing_crd.metadata.resource_version
+
                 apiextensions.replace_custom_resource_definition(
                     name=desired_crd.metadata["name"], body=desired_crd
                 )
@@ -3901,9 +3907,9 @@ def update_crds(
                 f"status: {exc.status}, reason: {exc.reason}"
             )
             log.debug(exc.body)
-            success = False
+            return False
 
-    return success
+    return True
 
 
 def sanitise_label_value(value: str) -> str:
diff --git a/paasta_tools/setup_kubernetes_crd.py b/paasta_tools/setup_kubernetes_crd.py
index 932917c2b9..04a78c1451 100644
--- a/paasta_tools/setup_kubernetes_crd.py
+++ b/paasta_tools/setup_kubernetes_crd.py
@@ -29,6 +29,7 @@
 import service_configuration_lib
 from kubernetes.client import V1beta1CustomResourceDefinition
 from kubernetes.client import V1CustomResourceDefinition
+from kubernetes.client.exceptions import ApiException
 
 from paasta_tools.kubernetes_tools import KubeClient
 from paasta_tools.kubernetes_tools import paasta_prefixed
@@ -101,29 +102,48 @@ def setup_kube_crd(
     services: Sequence[str],
     soa_dir: str = DEFAULT_SOA_DIR,
 ) -> bool:
-    for apiextension, crd_class in [
-        (kube_client.apiextensions, V1CustomResourceDefinition),
-        (kube_client.apiextensions_v1_beta1, V1beta1CustomResourceDefinition),
-    ]:
-        existing_crds = apiextension.list_custom_resource_definition(
-            label_selector=paasta_prefixed("service")
-        )
+    existing_crds = kube_client.apiextensions.list_custom_resource_definition(
+        label_selector=paasta_prefixed("service")
+    )
 
-        desired_crds = []
-        for service in services:
-            crd_config = service_configuration_lib.read_extra_service_information(
-                service, f"crd-{cluster}", soa_dir=soa_dir
+    # This step can fail in k8s 1.22 since this version is not existing anymore
+    # we need to support this for the transition
+    try:
+        existing_crds_v1_beta1 = (
+            kube_client.apiextensions_v1_beta1.list_custom_resource_definition(
+                label_selector=paasta_prefixed("service")
+            )
+        )
+    except ApiException as e:
+        existing_crds_v1_beta1 = []
+        log.debug(e)
+
+    desired_crds = []
+    desired_crds_v1_beta_1 = []
+    for service in services:
+        crd_config = service_configuration_lib.read_extra_service_information(
+            service, f"crd-{cluster}", soa_dir=soa_dir
+        )
+        if not crd_config:
+            log.info("nothing to deploy")
+            continue
+
+        metadata = crd_config.get("metadata", {})
+        if "labels" not in metadata:
+            metadata["labels"] = {}
+        metadata["labels"]["yelp.com/paasta_service"] = service
+        metadata["labels"][paasta_prefixed("service")] = service
+
+        if "beta" in crd_config["apiVersion"]:
+            desired_crd = V1beta1CustomResourceDefinition(
+                api_version=crd_config.get("apiVersion"),
+                kind=crd_config.get("kind"),
+                metadata=metadata,
+                spec=crd_config.get("spec"),
             )
-            if not crd_config:
-                log.info("nothing to deploy")
-                continue
-
-            metadata = crd_config.get("metadata", {})
-            if "labels" not in metadata:
-                metadata["labels"] = {}
-            metadata["labels"]["yelp.com/paasta_service"] = service
-            metadata["labels"][paasta_prefixed("service")] = service
-            desired_crd = crd_class(
+            desired_crds_v1_beta_1.append(desired_crd)
+        else:
+            desired_crd = V1CustomResourceDefinition(
                 api_version=crd_config.get("apiVersion"),
                 kind=crd_config.get("kind"),
                 metadata=metadata,
@@ -131,13 +151,15 @@
             )
             desired_crds.append(desired_crd)
 
-        if update_crds(
-            apiextensions=apiextension,
-            desired_crds=desired_crds,
-            existing_crds=existing_crds,
-        ):
-            return True
-    return False
+    return update_crds(
+        kube_client=kube_client,
+        desired_crds=desired_crds,
+        existing_crds=existing_crds,
+    ) and update_crds(
+        kube_client=kube_client,
+        desired_crds=desired_crds_v1_beta_1,
+        existing_crds=existing_crds_v1_beta1,
+    )
 
 
 if __name__ == "__main__":
diff --git a/paasta_tools/setup_kubernetes_internal_crd.py b/paasta_tools/setup_kubernetes_internal_crd.py
index 6e95874b48..ae84e08e5d 100755
--- a/paasta_tools/setup_kubernetes_internal_crd.py
+++ b/paasta_tools/setup_kubernetes_internal_crd.py
@@ -140,20 +140,14 @@ def main() -> None:
 def setup_kube_internal_crd(
     kube_client: KubeClient,
 ) -> bool:
-    for apiextension in [
-        kube_client.apiextensions,
-        kube_client.apiextensions_v1_beta1,
-    ]:
-        existing_crds = apiextension.list_custom_resource_definition(
-            label_selector=paasta_prefixed("internal")
-        )
-        if update_crds(
-            apiextensions=apiextension,
-            desired_crds=INTERNAL_CRDS,
-            existing_crds=existing_crds,
-        ):
-            return True
-    return False
+    existing_crds = kube_client.apiextensions.list_custom_resource_definition(
+        label_selector=paasta_prefixed("internal")
+    )
+    return update_crds(
+        kube_client=kube_client,
+        desired_crds=INTERNAL_CRDS,
+        existing_crds=existing_crds,
+    )
 
 
 if __name__ == "__main__":
diff --git a/requirements.txt b/requirements.txt
index 89da4ec80b..30e16b3f68 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -79,7 +79,7 @@ python-iptables==1.0.1
 python-utils==2.0.1
 pytimeparse==1.1.5
 pytz==2016.10
-pyyaml==5.4.1
+pyyaml==6.0.1
 repoze.lru==0.6
 requests==2.25.0
 requests-cache==0.6.3
diff --git a/tox.ini b/tox.ini
index 09bf738423..12f45ad0af 100644
--- a/tox.ini
+++ b/tox.ini
@@ -90,7 +90,6 @@ basepython = python3.7
 whitelist_externals = bash
 deps =
     urllib3<2.0
-    docker-compose=={[tox]docker_compose_version}
 setenv =
 passenv = KIND_CLUSTER
@@ -118,8 +117,6 @@ commands =
 [testenv:example_cluster]
 changedir=example_cluster/
 passenv = DOCKER_TLS_VERIFY DOCKER_HOST DOCKER_CERT_PATH
-deps =
-    docker-compose=={[tox]docker_compose_version}
 commands =
     docker-compose down
     docker-compose --verbose build --build-arg DOCKER_REGISTRY={env:DOCKER_REGISTRY:docker-dev.yelpcorp.com/} --build-arg PIP_INDEX_URL={env:PIP_INDEX_URL:https://pypi.yelpcorp.com/simple}

From dd4fa8b19347acb589fe79ad6707393ba1d91fb0 Mon Sep 17 00:00:00 2001
From: Wilmer Bandres
Date: Tue, 18 Jul 2023 09:53:23 -0700
Subject: [PATCH 2/6] adding PR feedback

---
 paasta_tools/cli/cmds/spark_run.py   |  2 +-
 paasta_tools/kubernetes_tools.py     |  2 +-
 paasta_tools/setup_kubernetes_crd.py | 14 ++++++++------
 3 files changed, 10 insertions(+), 8 deletions(-)

diff --git a/paasta_tools/cli/cmds/spark_run.py b/paasta_tools/cli/cmds/spark_run.py
index 1d0d73bb47..9638cd0467 100644
--- a/paasta_tools/cli/cmds/spark_run.py
+++ b/paasta_tools/cli/cmds/spark_run.py
@@ -1203,7 +1203,7 @@ def paasta_spark_run(args):
     document = POD_TEMPLATE.format(
         spark_pod_label=limit_size_with_hash(f"exec-{app_base_name}"),
     )
-    parsed_pod_template = yaml.full_load(document)
+    parsed_pod_template = yaml.safe_load(document)
     with open(pod_template_path, "w") as f:
         yaml.dump(parsed_pod_template, f)
diff --git a/paasta_tools/kubernetes_tools.py b/paasta_tools/kubernetes_tools.py
index 4518fc12c3..694025f1ac 100644
--- a/paasta_tools/kubernetes_tools.py
+++ b/paasta_tools/kubernetes_tools.py
@@ -3876,7 +3876,7 @@ def update_crds(
                 break
 
         try:
-            if "beta" in desired_crd.api_version:
+            if "apiextensions.k8s.io/v1beta1" == desired_crd.api_version:
                 apiextensions = kube_client.apiextensions_v1_beta1
             else:
                 apiextensions = kube_client.apiextensions
diff --git a/paasta_tools/setup_kubernetes_crd.py b/paasta_tools/setup_kubernetes_crd.py
index 04a78c1451..eabc667c06 100644
--- a/paasta_tools/setup_kubernetes_crd.py
+++ b/paasta_tools/setup_kubernetes_crd.py
@@ -114,12 +114,14 @@ def setup_kube_crd(
                 label_selector=paasta_prefixed("service")
             )
         )
-    except ApiException as e:
+    except ApiException:
         existing_crds_v1_beta1 = []
-        log.debug(e)
+        log.debug(
+            "Listing CRDs with apiextensions/v1beta1 not supported on this cluster, falling back to v1"
+        )
 
     desired_crds = []
-    desired_crds_v1_beta_1 = []
+    desired_crds_v1_beta1 = []
     for service in services:
         crd_config = service_configuration_lib.read_extra_service_information(
             service, f"crd-{cluster}", soa_dir=soa_dir
@@ -134,14 +136,14 @@ def setup_kube_crd(
         metadata["labels"]["yelp.com/paasta_service"] = service
         metadata["labels"][paasta_prefixed("service")] = service
 
-        if "beta" in crd_config["apiVersion"]:
+        if "apiextensions.k8s.io/v1beta1" == crd_config["apiVersion"]:
             desired_crd = V1beta1CustomResourceDefinition(
                 api_version=crd_config.get("apiVersion"),
                 kind=crd_config.get("kind"),
                 metadata=metadata,
                 spec=crd_config.get("spec"),
             )
-            desired_crds_v1_beta_1.append(desired_crd)
+            desired_crds_v1_beta1.append(desired_crd)
         else:
             desired_crd = V1CustomResourceDefinition(
                 api_version=crd_config.get("apiVersion"),
@@ -157,7 +159,7 @@ def setup_kube_crd(
         existing_crds=existing_crds,
     ) and update_crds(
         kube_client=kube_client,
-        desired_crds=desired_crds_v1_beta_1,
+        desired_crds=desired_crds_v1_beta1,
         existing_crds=existing_crds_v1_beta1,
     )

From 4596d2a4fbb1d3a1f8b4e45b294f5685dd784447 Mon Sep 17 00:00:00 2001
From: Wilmer Bandres
Date: Tue, 18 Jul 2023 13:23:49 -0700
Subject: [PATCH 3/6] Removing fix for tests

---
 requirements.txt | 2 +-
 tox.ini          | 3 +++
 2 files changed, 4 insertions(+), 1 deletion(-)

diff --git a/requirements.txt b/requirements.txt
index 30e16b3f68..89da4ec80b 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -79,7 +79,7 @@ python-iptables==1.0.1
 python-utils==2.0.1
 pytimeparse==1.1.5
 pytz==2016.10
-pyyaml==6.0.1
+pyyaml==5.4.1
 repoze.lru==0.6
 requests==2.25.0
 requests-cache==0.6.3
diff --git a/tox.ini b/tox.ini
index 12f45ad0af..09bf738423 100644
--- a/tox.ini
+++ b/tox.ini
@@ -90,6 +90,7 @@ basepython = python3.7
 whitelist_externals = bash
 deps =
     urllib3<2.0
+    docker-compose=={[tox]docker_compose_version}
 setenv =
 passenv = KIND_CLUSTER
@@ -117,6 +118,8 @@ commands =
 [testenv:example_cluster]
 changedir=example_cluster/
 passenv = DOCKER_TLS_VERIFY DOCKER_HOST DOCKER_CERT_PATH
+deps =
+    docker-compose=={[tox]docker_compose_version}
 commands =
     docker-compose down
     docker-compose --verbose build --build-arg DOCKER_REGISTRY={env:DOCKER_REGISTRY:docker-dev.yelpcorp.com/} --build-arg PIP_INDEX_URL={env:PIP_INDEX_URL:https://pypi.yelpcorp.com/simple}

From 6bf359c5b75bf86c25d507499769d50585f2b1b1 Mon Sep 17 00:00:00 2001
From: Wilmer Bandres
Date: Tue, 18 Jul 2023 13:24:43 -0700
Subject: [PATCH 4/6] Rolling back safe_load

---
 paasta_tools/cli/cmds/spark_run.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/paasta_tools/cli/cmds/spark_run.py b/paasta_tools/cli/cmds/spark_run.py
index 9638cd0467..de176823ab 100644
--- a/paasta_tools/cli/cmds/spark_run.py
+++ b/paasta_tools/cli/cmds/spark_run.py
@@ -1203,7 +1203,7 @@ def paasta_spark_run(args):
     document = POD_TEMPLATE.format(
         spark_pod_label=limit_size_with_hash(f"exec-{app_base_name}"),
     )
-    parsed_pod_template = yaml.safe_load(document)
+    parsed_pod_template = yaml.safe(document)
     with open(pod_template_path, "w") as f:
         yaml.dump(parsed_pod_template, f)

From 8356392ba587de7bb49411eabcd7dac57dde3b8e Mon Sep 17 00:00:00 2001
From: Wilmer Bandres
Date: Tue, 18 Jul 2023 13:28:15 -0700
Subject: [PATCH 5/6] Fixing spark load

---
 paasta_tools/cli/cmds/spark_run.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/paasta_tools/cli/cmds/spark_run.py b/paasta_tools/cli/cmds/spark_run.py
index de176823ab..568cbd45ba 100644
--- a/paasta_tools/cli/cmds/spark_run.py
+++ b/paasta_tools/cli/cmds/spark_run.py
@@ -1203,7 +1203,7 @@ def paasta_spark_run(args):
     document = POD_TEMPLATE.format(
         spark_pod_label=limit_size_with_hash(f"exec-{app_base_name}"),
     )
-    parsed_pod_template = yaml.safe(document)
+    parsed_pod_template = yaml.load(document)
     with open(pod_template_path, "w") as f:
         yaml.dump(parsed_pod_template, f)

From e88f9e7e2255dbc1aca5b3ca09b960d9dfe6cf21 Mon Sep 17 00:00:00 2001
From: Wilmer Bandres
Date: Tue, 18 Jul 2023 13:43:01 -0700
Subject: [PATCH 6/6] Removing ci workflow

---
 .github/workflows/ci.yml           | 2 --
 paasta_tools/cli/cmds/spark_run.py | 2 +-
 2 files changed, 1 insertion(+), 3 deletions(-)

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index f957bd6f62..8f63af1de0 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -36,8 +36,6 @@ jobs:
       DOCKER_REGISTRY: ""
     steps:
       - uses: actions/checkout@v2
-      - name: Install docker-compose
-        run: sudo apt-get update && sudo apt-get install -y docker-compose=1.25.0-1
       - uses: actions/setup-python@v2
         with:
           python-version: 3.7
diff --git a/paasta_tools/cli/cmds/spark_run.py b/paasta_tools/cli/cmds/spark_run.py
index 568cbd45ba..9638cd0467 100644
--- a/paasta_tools/cli/cmds/spark_run.py
+++ b/paasta_tools/cli/cmds/spark_run.py
@@ -1203,7 +1203,7 @@ def paasta_spark_run(args):
     document = POD_TEMPLATE.format(
         spark_pod_label=limit_size_with_hash(f"exec-{app_base_name}"),
     )
-    parsed_pod_template = yaml.load(document)
+    parsed_pod_template = yaml.safe_load(document)
     with open(pod_template_path, "w") as f:
         yaml.dump(parsed_pod_template, f)