From bb285d69c17c8d459eb5417a008d2f3058d4bc22 Mon Sep 17 00:00:00 2001
From: Yee Hing Tong
Date: Wed, 21 Feb 2024 12:41:33 -0800
Subject: [PATCH] Remove eph storage (#4929)

* remove ephemeral storage from flyte core helm chart values

Signed-off-by: Yee Hing Tong

* make helm

Signed-off-by: Yee Hing Tong

---------

Signed-off-by: Yee Hing Tong
---
 charts/flyte-core/README.md                            | 4 ++--
 charts/flyte-core/values.yaml                          | 2 --
 deployment/eks/flyte_aws_scheduler_helm_generated.yaml | 4 +---
 deployment/eks/flyte_helm_controlplane_generated.yaml  | 6 ++----
 deployment/eks/flyte_helm_generated.yaml               | 6 ++----
 deployment/gcp/flyte_helm_controlplane_generated.yaml  | 6 ++----
 deployment/gcp/flyte_helm_generated.yaml               | 6 ++----
 deployment/sandbox/flyte_helm_generated.yaml           | 6 ++----
 8 files changed, 13 insertions(+), 27 deletions(-)

diff --git a/charts/flyte-core/README.md b/charts/flyte-core/README.md
index e287e67633..460151fcbb 100644
--- a/charts/flyte-core/README.md
+++ b/charts/flyte-core/README.md
@@ -115,8 +115,8 @@ helm install gateway bitnami/contour -n flyte
 | configmap.schedulerConfig.scheduler.profilerPort | int | `10254` |  |
 | configmap.task_logs | object | `{"plugins":{"logs":{"cloudwatch-enabled":false,"kubernetes-enabled":false}}}` | Section that configures how the Task logs are displayed on the UI. This has to be changed based on your actual logging provider. Refer to [structure](https://pkg.go.dev/github.com/lyft/flyteplugins/go/tasks/logs#LogConfig) to understand how to configure various logging engines |
 | configmap.task_logs.plugins.logs.cloudwatch-enabled | bool | `false` | One option is to enable cloudwatch logging for EKS, update the region and log group accordingly |
-| configmap.task_resource_defaults | object | `{"task_resources":{"defaults":{"cpu":"100m","ephemeralStorage":"500Mi","memory":"500Mi"},"limits":{"cpu":2,"ephemeralStorage":"20Mi","gpu":1,"memory":"1Gi"}}}` | Task default resources configuration Refer to the full [structure](https://pkg.go.dev/github.com/lyft/flyteadmin@v0.3.37/pkg/runtime/interfaces#TaskResourceConfiguration). |
-| configmap.task_resource_defaults.task_resources | object | `{"defaults":{"cpu":"100m","ephemeralStorage":"500Mi","memory":"500Mi"},"limits":{"cpu":2,"ephemeralStorage":"20Mi","gpu":1,"memory":"1Gi"}}` | Task default resources parameters |
+| configmap.task_resource_defaults | object | `{"task_resources":{"defaults":{"cpu":"100m","memory":"500Mi"},"limits":{"cpu":2,"gpu":1,"memory":"1Gi"}}}` | Task default resources configuration Refer to the full [structure](https://pkg.go.dev/github.com/lyft/flyteadmin@v0.3.37/pkg/runtime/interfaces#TaskResourceConfiguration). |
+| configmap.task_resource_defaults.task_resources | object | `{"defaults":{"cpu":"100m","memory":"500Mi"},"limits":{"cpu":2,"gpu":1,"memory":"1Gi"}}` | Task default resources parameters |
 | daskoperator | object | `{"enabled":false}` | Optional: Dask Plugin using the Dask Operator |
 | daskoperator.enabled | bool | `false` | - enable or disable the dask operator deployment installation |
 | databricks | object | `{"enabled":false,"plugin_config":{"plugins":{"databricks":{"databricksInstance":"dbc-a53b7a3c-614c","entrypointFile":"dbfs:///FileStore/tables/entrypoint.py"}}}}` | Optional: Databricks Plugin allows us to run the spark job on the Databricks platform. |
diff --git a/charts/flyte-core/values.yaml b/charts/flyte-core/values.yaml
index 4f6d9d12bc..109710edef 100755
--- a/charts/flyte-core/values.yaml
+++ b/charts/flyte-core/values.yaml
@@ -692,11 +692,9 @@ configmap:
       defaults:
         cpu: 100m
         memory: 500Mi
-        ephemeralStorage: 500Mi
       limits:
         cpu: 2
         memory: 1Gi
-        ephemeralStorage: 20Mi
         gpu: 1
 
   # -- Admin Client configuration [structure](https://pkg.go.dev/github.com/flyteorg/flytepropeller/pkg/controller/nodes/subworkflow/launchplan#AdminConfig)
diff --git a/deployment/eks/flyte_aws_scheduler_helm_generated.yaml b/deployment/eks/flyte_aws_scheduler_helm_generated.yaml
index 18c60208b8..324828a966 100644
--- a/deployment/eks/flyte_aws_scheduler_helm_generated.yaml
+++ b/deployment/eks/flyte_aws_scheduler_helm_generated.yaml
@@ -192,12 +192,10 @@ data:
     task_resources:
       defaults:
         cpu: 1000m
-        ephemeralStorage: 500Mi
         memory: 1000Mi
         storage: 1000Mi
       limits:
         cpu: 2
-        ephemeralStorage: 20Mi
         gpu: 1
         memory: 1Gi
         storage: 2000Mi
@@ -849,7 +847,7 @@ spec:
   template:
     metadata:
       annotations:
-        configChecksum: "85f2694a4138443026b87878dbbc5f1e9f52aa54eb87ef4c64117d1d91e1a7f"
+        configChecksum: "2b5c85969f2bd85bb51a084f9fd72c20c3aca94be99e53cb4c4e9f78e77ebc5"
       labels:
         app.kubernetes.io/name: flyteadmin
         app.kubernetes.io/instance: flyte
diff --git a/deployment/eks/flyte_helm_controlplane_generated.yaml b/deployment/eks/flyte_helm_controlplane_generated.yaml
index 520e7f2a03..166446ce79 100644
--- a/deployment/eks/flyte_helm_controlplane_generated.yaml
+++ b/deployment/eks/flyte_helm_controlplane_generated.yaml
@@ -173,12 +173,10 @@ data:
     task_resources:
       defaults:
         cpu: 1000m
-        ephemeralStorage: 500Mi
         memory: 1000Mi
         storage: 1000Mi
       limits:
         cpu: 2
-        ephemeralStorage: 20Mi
         gpu: 1
         memory: 1Gi
         storage: 2000Mi
@@ -555,7 +553,7 @@ spec:
   template:
     metadata:
       annotations:
-        configChecksum: "61fa8a4eebe7e96a3e25b0b2c4baaf7d6af84924167f57e569632fdd282b442"
+        configChecksum: "053b20ebc40227f6ed8ddc61f5997ee7997c604158f773779f20ec61af11a2f"
       labels:
         app.kubernetes.io/name: flyteadmin
         app.kubernetes.io/instance: flyte
@@ -975,7 +973,7 @@ spec:
   template:
     metadata:
       annotations:
-        configChecksum: "61fa8a4eebe7e96a3e25b0b2c4baaf7d6af84924167f57e569632fdd282b442"
+        configChecksum: "053b20ebc40227f6ed8ddc61f5997ee7997c604158f773779f20ec61af11a2f"
       labels:
         app.kubernetes.io/name: flytescheduler
         app.kubernetes.io/instance: flyte
diff --git a/deployment/eks/flyte_helm_generated.yaml b/deployment/eks/flyte_helm_generated.yaml
index ab009db4cb..94743f22aa 100644
--- a/deployment/eks/flyte_helm_generated.yaml
+++ b/deployment/eks/flyte_helm_generated.yaml
@@ -204,12 +204,10 @@ data:
     task_resources:
       defaults:
         cpu: 1000m
-        ephemeralStorage: 500Mi
         memory: 1000Mi
         storage: 1000Mi
       limits:
         cpu: 2
-        ephemeralStorage: 20Mi
         gpu: 1
         memory: 1Gi
         storage: 2000Mi
@@ -880,7 +878,7 @@ spec:
   template:
     metadata:
       annotations:
-        configChecksum: "61fa8a4eebe7e96a3e25b0b2c4baaf7d6af84924167f57e569632fdd282b442"
+        configChecksum: "053b20ebc40227f6ed8ddc61f5997ee7997c604158f773779f20ec61af11a2f"
       labels:
         app.kubernetes.io/name: flyteadmin
         app.kubernetes.io/instance: flyte
@@ -1300,7 +1298,7 @@ spec:
   template:
     metadata:
      annotations:
-        configChecksum: "61fa8a4eebe7e96a3e25b0b2c4baaf7d6af84924167f57e569632fdd282b442"
+        configChecksum: "053b20ebc40227f6ed8ddc61f5997ee7997c604158f773779f20ec61af11a2f"
       labels:
         app.kubernetes.io/name: flytescheduler
         app.kubernetes.io/instance: flyte
diff --git a/deployment/gcp/flyte_helm_controlplane_generated.yaml b/deployment/gcp/flyte_helm_controlplane_generated.yaml
index 3ede3cd5be..38dfe201dd 100644
--- a/deployment/gcp/flyte_helm_controlplane_generated.yaml
+++ b/deployment/gcp/flyte_helm_controlplane_generated.yaml
@@ -178,12 +178,10 @@ data:
    task_resources:
      defaults:
        cpu: 500m
-        ephemeralStorage: 500Mi
        memory: 500Mi
        storage: 500Mi
      limits:
        cpu: 2
-        ephemeralStorage: 20Mi
        gpu: 1
        memory: 1Gi
        storage: 2000Mi
@@ -570,7 +568,7 @@ spec:
   template:
     metadata:
       annotations:
-        configChecksum: "c7d43aa7ff4bf67124616d00a83d3c45926ea5ca36bdebdfac1cbcd0e465270"
+        configChecksum: "2e169a911a8234dd42d06ca0887279093f4ed36033d0543749ce126b26b50f3"
       labels:
         app.kubernetes.io/name: flyteadmin
         app.kubernetes.io/instance: flyte
@@ -990,7 +988,7 @@ spec:
   template:
     metadata:
       annotations:
-        configChecksum: "c7d43aa7ff4bf67124616d00a83d3c45926ea5ca36bdebdfac1cbcd0e465270"
+        configChecksum: "2e169a911a8234dd42d06ca0887279093f4ed36033d0543749ce126b26b50f3"
       labels:
         app.kubernetes.io/name: flytescheduler
         app.kubernetes.io/instance: flyte
diff --git a/deployment/gcp/flyte_helm_generated.yaml b/deployment/gcp/flyte_helm_generated.yaml
index a3ead16e95..659babf838 100644
--- a/deployment/gcp/flyte_helm_generated.yaml
+++ b/deployment/gcp/flyte_helm_generated.yaml
@@ -209,12 +209,10 @@ data:
    task_resources:
      defaults:
        cpu: 500m
-        ephemeralStorage: 500Mi
        memory: 500Mi
        storage: 500Mi
      limits:
        cpu: 2
-        ephemeralStorage: 20Mi
        gpu: 1
        memory: 1Gi
        storage: 2000Mi
@@ -903,7 +901,7 @@ spec:
   template:
     metadata:
       annotations:
-        configChecksum: "c7d43aa7ff4bf67124616d00a83d3c45926ea5ca36bdebdfac1cbcd0e465270"
+        configChecksum: "2e169a911a8234dd42d06ca0887279093f4ed36033d0543749ce126b26b50f3"
       labels:
         app.kubernetes.io/name: flyteadmin
         app.kubernetes.io/instance: flyte
@@ -1323,7 +1321,7 @@ spec:
   template:
     metadata:
       annotations:
-        configChecksum: "c7d43aa7ff4bf67124616d00a83d3c45926ea5ca36bdebdfac1cbcd0e465270"
+        configChecksum: "2e169a911a8234dd42d06ca0887279093f4ed36033d0543749ce126b26b50f3"
       labels:
         app.kubernetes.io/name: flytescheduler
         app.kubernetes.io/instance: flyte
diff --git a/deployment/sandbox/flyte_helm_generated.yaml b/deployment/sandbox/flyte_helm_generated.yaml
index c1145fb724..7417c9bdf2 100644
--- a/deployment/sandbox/flyte_helm_generated.yaml
+++ b/deployment/sandbox/flyte_helm_generated.yaml
@@ -334,12 +334,10 @@ data:
    task_resources:
      defaults:
        cpu: 100m
-        ephemeralStorage: 500Mi
        memory: 200Mi
        storage: 5Mi
      limits:
        cpu: 2
-        ephemeralStorage: 20Mi
        gpu: 1
        memory: 1Gi
        storage: 20Mi
@@ -6686,7 +6684,7 @@ spec:
   template:
     metadata:
       annotations:
-        configChecksum: "82d6ffa2a2dd83eb11c491a95af43fdede659d6b5b400b6edcd88291a28c4f4"
+        configChecksum: "45f0232531c0d1494809cf83387a95b2fc802019ea095de7a24ccd4f8de86ec"
       labels:
         app.kubernetes.io/name: flyteadmin
         app.kubernetes.io/instance: flyte
@@ -7077,7 +7075,7 @@ spec:
   template:
     metadata:
       annotations:
-        configChecksum: "82d6ffa2a2dd83eb11c491a95af43fdede659d6b5b400b6edcd88291a28c4f4"
+        configChecksum: "45f0232531c0d1494809cf83387a95b2fc802019ea095de7a24ccd4f8de86ec"
       labels:
         app.kubernetes.io/name: flytescheduler
         app.kubernetes.io/instance: flyte
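
Editor's note: after this patch, the flyte-core chart no longer ships `ephemeralStorage` under `configmap.task_resource_defaults.task_resources`. A deployment that still wants platform-wide ephemeral-storage defaults could supply them in its own values override. This is a minimal sketch, assuming the installed flyteadmin version still accepts `ephemeralStorage` in its task resource configuration; the file name `my-values.yaml` and the quantities shown are illustrative, not part of the patch:

```yaml
# my-values.yaml -- hypothetical user-supplied override for the flyte-core chart
# Keys mirror configmap.task_resource_defaults.task_resources documented in the README above.
configmap:
  task_resource_defaults:
    task_resources:
      defaults:
        cpu: 100m
        memory: 500Mi
        ephemeralStorage: 500Mi   # re-adds the default this patch removes from the chart
      limits:
        cpu: 2
        memory: 1Gi
        gpu: 1
        ephemeralStorage: 20Mi    # re-adds the limit this patch removes from the chart
```

Such an override would be applied with something like `helm upgrade flyte charts/flyte-core -n flyte -f my-values.yaml` (release name and chart path illustrative), after which the rendered flyteadmin configmap would again contain the two ephemeral-storage entries.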