Skip to content

Commit

Permalink
Merge branch 'master' into COMPINFRA-2833
Browse files Browse the repository at this point in the history
  • Loading branch information
ajayOO8 authored Aug 2, 2023
2 parents 733a69f + 7970984 commit 3546a24
Show file tree
Hide file tree
Showing 5 changed files with 51 additions and 7 deletions.
11 changes: 11 additions & 0 deletions debian/changelog
Original file line number Diff line number Diff line change
@@ -1,3 +1,14 @@
paasta-tools (0.192.0) xenial; urgency=medium

* 0.192.0 tagged with 'make release'
Commit: Add support for eks cluster not matching --cluster spark arg
(#3636) We may make a different cluster for spark in dev, but we
don't really want to deal with a soaconfigs migration in case we
decide to rollback. This PR transparently sets the right eks cluster
params based on whether or not eks usage is toggled on or off.

-- Luis Perez <[email protected]> Tue, 01 Aug 2023 12:24:29 -0700

paasta-tools (0.191.2) xenial; urgency=medium

* 0.191.2 tagged with 'make release'
Expand Down
2 changes: 1 addition & 1 deletion paasta_tools/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,4 +17,4 @@
# setup phase, the dependencies may not exist on disk yet.
#
# Don't bump version manually. See `make release` docs in ./Makefile
__version__ = "0.191.2"
__version__ = "0.192.0"
39 changes: 34 additions & 5 deletions paasta_tools/cli/cmds/spark_run.py
Original file line number Diff line number Diff line change
Expand Up @@ -1101,8 +1101,28 @@ def _validate_pool(args, system_paasta_config):


def _get_k8s_url_for_cluster(cluster: str) -> Optional[str]:
    """Return the k8s apiserver URL for ``cluster``, or ``None`` if unknown.

    Annoyingly, there's two layers of aliases: one to figure out what
    k8s server url to use (this one) and another to figure out what
    soaconfigs filename to use ;_;

    This exists so that we can map something like ``--cluster pnw-devc``
    into spark-pnw-devc's k8s apiserver url without needing to update
    any soaconfigs/alter folk's muscle memory.

    Ideally we can get rid of this entirely once spark-run reads soaconfigs
    in a manner more closely aligned to what we do with other paasta workloads
    (i.e., have it automatically determine where to run based on soaconfigs
    filenames - and not rely on explicit config)

    :param cluster: cluster name as passed on the command line (may be an
        EKS alias).
    :return: the ``server`` URL for the realized cluster, or ``None`` when
        the cluster (or its alias target) has no kube_clusters entry.
    """
    # Load system config once; the original called this twice per invocation.
    system_paasta_config = load_system_paasta_config()
    # Resolve the EKS alias first so the kube_clusters lookup uses the
    # real cluster name (e.g., pnw-devc -> spark-pnw-devc).
    realized_cluster = system_paasta_config.get_eks_cluster_aliases().get(
        cluster, cluster
    )
    return (
        system_paasta_config.get_kube_clusters()
        .get(realized_cluster, {})
        .get("server")
    )


Expand Down Expand Up @@ -1137,14 +1157,16 @@ def paasta_spark_run(args):
if not _validate_pool(args, system_paasta_config):
return 1

# annoyingly, there's two layers of aliases: one for the soaconfigs to read from
# (that's this alias lookup) - and then another layer later when figuring out what
# k8s server url to use ;_;
cluster = system_paasta_config.get_cluster_aliases().get(args.cluster, args.cluster)
# Use the default spark:client instance configs if not provided
try:
instance_config = get_instance_config(
service=args.service,
instance=args.instance,
cluster=system_paasta_config.get_cluster_aliases().get(
args.cluster, args.cluster
),
cluster=cluster,
load_deployments=args.build is False and args.image is None,
soa_dir=args.yelpsoa_config_root,
)
Expand Down Expand Up @@ -1231,12 +1253,19 @@ def paasta_spark_run(args):

use_eks = decide_final_eks_toggle_state(args.use_eks_override)
k8s_server_address = _get_k8s_url_for_cluster(args.cluster) if use_eks else None
paasta_cluster = (
args.cluster
if not use_eks
else load_system_paasta_config()
.get_eks_cluster_aliases()
.get(args.cluster, args.cluster)
)
spark_conf = get_spark_conf(
cluster_manager=args.cluster_manager,
spark_app_base_name=app_base_name,
docker_img=docker_image_digest,
user_spark_opts=user_spark_opts,
paasta_cluster=args.cluster,
paasta_cluster=paasta_cluster,
paasta_pool=args.pool,
paasta_service=args.service,
paasta_instance=paasta_instance,
Expand Down
4 changes: 4 additions & 0 deletions paasta_tools/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -2029,6 +2029,7 @@ class SystemPaastaConfigDict(TypedDict, total=False):
kube_clusters: Dict
spark_use_eks_default: bool
sidecar_requirements_config: Dict[str, KubeContainerResourceRequest]
eks_cluster_aliases: Dict[str, str]


def load_system_paasta_config(
Expand Down Expand Up @@ -2754,6 +2755,9 @@ def get_skip_cpu_burst_validation_services(self) -> List[str]:
def get_cluster_aliases(self) -> Dict[str, str]:
    """Return the configured cluster alias map (empty dict when unset)."""
    aliases: Dict[str, str] = self.config_dict.get("cluster_aliases", {})
    return aliases

def get_eks_cluster_aliases(self) -> Dict[str, str]:
    """Return the configured EKS cluster alias map (empty dict when unset)."""
    eks_aliases: Dict[str, str] = self.config_dict.get("eks_cluster_aliases", {})
    return eks_aliases

def get_cluster_pools(self) -> Dict[str, List[str]]:
return self.config_dict.get("allowed_pools", {})

Expand Down
2 changes: 1 addition & 1 deletion yelp_package/Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@
# limitations under the License.

# Edit this release and run "make release"
RELEASE=0.191.2
RELEASE=0.192.0

SHELL=/bin/bash

Expand Down

0 comments on commit 3546a24

Please sign in to comment.