diff --git a/.github/workflows/checks.yml b/.github/workflows/checks.yml index 2e61f36f47..3a1f9869dc 100644 --- a/.github/workflows/checks.yml +++ b/.github/workflows/checks.yml @@ -9,6 +9,7 @@ on: push: branches: - master + - 'release-v**' env: GO_VERSION: "1.22" PRIORITIES: "P0" @@ -32,8 +33,7 @@ jobs: component: - datacatalog - flyteadmin - # TODO(monorepo): Enable lint flytecopilot - # - flytecopilot + - flytecopilot - flytectl - flyteidl - flyteplugins diff --git a/.github/workflows/flytectl-install.yml b/.github/workflows/flytectl-install.yml index cbdb3795e9..3d56a805e2 100644 --- a/.github/workflows/flytectl-install.yml +++ b/.github/workflows/flytectl-install.yml @@ -8,6 +8,7 @@ on: pull_request: paths: - flytectl/** push: branches: - master + - 'release-v**' diff --git a/.github/workflows/flyteidl-buf-publish.yml b/.github/workflows/flyteidl-buf-publish.yml index f11bf3d44a..aef8c94e28 100644 --- a/.github/workflows/flyteidl-buf-publish.yml +++ b/.github/workflows/flyteidl-buf-publish.yml @@ -6,6 +6,7 @@ on: - artifacts-shell-2 - artifacts - master + - 'release-v**' paths: - 'flyteidl/**' jobs: diff --git a/.github/workflows/flyteidl-checks.yml b/.github/workflows/flyteidl-checks.yml index f8a1d0f4d2..781b173e40 100644 --- a/.github/workflows/flyteidl-checks.yml +++ b/.github/workflows/flyteidl-checks.yml @@ -9,6 +9,7 @@ on: push: branches: - master + - 'release-v**' env: GO_VERSION: "1.22" jobs: diff --git a/.github/workflows/helm-charts.yaml b/.github/workflows/helm-charts.yaml index 63e81adf3a..6c87eda61c 100644 --- a/.github/workflows/helm-charts.yaml +++ b/.github/workflows/helm-charts.yaml @@ -10,6 +10,7 @@ on: branches: - master - rc/* + - 'release-v**' workflow_dispatch: jobs: diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index fe2f8535af..506eca5ae9 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -59,7 +59,7 @@ jobs: - name: Before Build run: ${{ inputs.before-build }} - name: Build 
and Push Image - uses: docker/build-push-action@v3 + uses: docker/build-push-action@v6 with: context: . file: ${{ inputs.dockerfile }} diff --git a/.github/workflows/sandbox.yml b/.github/workflows/sandbox.yml index 0899ec83e6..1e18109277 100644 --- a/.github/workflows/sandbox.yml +++ b/.github/workflows/sandbox.yml @@ -53,7 +53,7 @@ jobs: username: "${{ secrets.FLYTE_BOT_USERNAME }}" password: "${{ secrets.FLYTE_BOT_PAT }}" - name: Build and push DIND Image - uses: docker/build-push-action@v2 + uses: docker/build-push-action@v6 with: context: . platforms: linux/arm64, linux/amd64 diff --git a/.github/workflows/single-binary.yml b/.github/workflows/single-binary.yml index 4f3fdea994..2849c1eac4 100644 --- a/.github/workflows/single-binary.yml +++ b/.github/workflows/single-binary.yml @@ -10,6 +10,7 @@ on: branches: - master - rc/* + - 'release-v**' workflow_dispatch: jobs: @@ -72,7 +73,7 @@ jobs: run: | mkdir -p docker/sandbox-bundled/images/tar/{arm64,amd64} - name: Export ARM64 Image - uses: docker/build-push-action@v3 + uses: docker/build-push-action@v6 with: context: . platforms: linux/arm64 @@ -83,7 +84,7 @@ jobs: file: Dockerfile outputs: type=docker,dest=docker/sandbox-bundled/images/tar/arm64/flyte-binary.tar - name: Export AMD64 Image - uses: docker/build-push-action@v3 + uses: docker/build-push-action@v6 with: context: . platforms: linux/amd64 @@ -107,7 +108,7 @@ jobs: password: "${{ secrets.FLYTE_BOT_PAT }}" - name: Build and push Image if: ${{ github.event_name == 'push' || github.event_name == 'workflow_dispatch' }} - uses: docker/build-push-action@v3 + uses: docker/build-push-action@v6 with: context: . 
platforms: linux/arm64, linux/amd64 @@ -143,7 +144,7 @@ jobs: driver-opts: image=moby/buildkit:master buildkitd-flags: "--allow-insecure-entitlement security.insecure" - name: Build sandbox image for functional tests - uses: docker/build-push-action@v3 + uses: docker/build-push-action@v6 with: context: docker/sandbox-bundled load: true @@ -158,6 +159,8 @@ jobs: with: python-version: "3.12" - uses: unionai/flytectl-setup-action@v0.0.3 + with: + version: '0.9.2' - name: Setup sandbox run: | mkdir -p ~/.flyte/sandbox @@ -196,6 +199,9 @@ jobs: --version ${{ env.FLYTESNACKS_VERSION }} \ flytesnacks/$line; done < flytesnacks/flyte_tests.txt + - name: Install Pytest + run: | + pip install pytest - name: End2End run: | make end2end_execute @@ -239,7 +245,7 @@ jobs: username: "${{ secrets.FLYTE_BOT_USERNAME }}" password: "${{ secrets.FLYTE_BOT_PAT }}" - name: Build and push multi-arch image - uses: docker/build-push-action@v3 + uses: docker/build-push-action@v6 with: context: docker/sandbox-bundled allow: "security.insecure" diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 0571e60eea..8915d88fbe 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -8,6 +8,7 @@ on: push: branches: - master + - 'release-v**' pull_request: jobs: compile: diff --git a/.github/workflows/validate-helm-charts.yaml b/.github/workflows/validate-helm-charts.yaml index 8e40ffe8d7..1bf450a858 100644 --- a/.github/workflows/validate-helm-charts.yaml +++ b/.github/workflows/validate-helm-charts.yaml @@ -4,6 +4,7 @@ on: pull_request: branches: - master + - 'release-v**' paths: - "charts/**" - "deployment/**" diff --git a/.gitignore b/.gitignore index 6b280884f9..80120c5095 100644 --- a/.gitignore +++ b/.gitignore @@ -38,3 +38,4 @@ docs/examples docs/_src docs/_projects docs/tests +empty-config.yaml diff --git a/CHANGELOG/CHANGELOG-v1.14.0.md b/CHANGELOG/CHANGELOG-v1.14.0.md new file mode 100644 index 0000000000..132210dfc8 --- /dev/null +++ 
b/CHANGELOG/CHANGELOG-v1.14.0.md @@ -0,0 +1,207 @@ +# Flyte 1.14.0 Release Notes + +## Added +- Support for FlyteDirectory as input to ContainerTask (#5715) + +A significant update to the flytekit storage interface enables downloading multi-part blobs. This allows Flyte to copy a FlyteDirectory as an input to ContainerTasks, enabling higher flexibility in workflow development with raw containers. +## Fixed + +- Better handling of CORS in TLS connections (#5855) + +When using Flyte with TLS certificates, CORS options were not enabled causing issues like the Flyte console UI not showing any project when multiple were created. This scenario came with relative frequency among users evaluating Flyte in non-production environments. Now, CORS will be enabled on TLS connections too. + + +## Changed +- Enhanced flexibility for Ray plugin configurations (#5933) + +This release makes the configuration of RayJobs more flexible, letting you pass Pod specs independently for each Ray node type: Worker, Head, and Submitter. This enables you to declare configuration for each group to better align with your infrastructure requirements: +```python +ray_config = RayJobConfig( + head_node_config=HeadNodeConfig( + requests=Resources(mem="64Gi", cpu="4"), + limits=Resources(mem="64Gi", cpu="4"), + pod_template_name = "ray_head_node" + ), + worker_node_config=[ + WorkerNodeConfig( + group_name="V100-group", + replicas=4, + requests=Resources(mem="256Gi", cpu="64", gpu="1"), + limits=Resources(mem="256Gi", cpu="64", gpu="1"), + pod_template = V1PodSpec(node_selector={"node_group": "V100"}), + ), + WorkerNodeConfig( + group_name="A100-group", + replicas=2, + requests=Resources(mem="480Gi", cpu="60", gpu="2"), + limits=Resources(mem="480Gi", cpu="60", gpu="2"), + pod_template = V1PodSpec(node_selector={"node_group": "A100"}), + ) + ], +) +``` + +## Breaking +As Python 3.8 hit the End of Life period in October 2024, starting with this release, flytekit requires Python >=3.9. 
+ + +## Full changelog +* Flyte docs overhaul (phase 1) by @neverett in https://github.com/flyteorg/flyte/pull/5772 +* [Flyte][3][flytepropeller][Attribute Access][flytectl] Binary IDL With MessagePack by @Future-Outlier in https://github.com/flyteorg/flyte/pull/5763 +* Update aws-go-sdk to v1.47.11 to support EKS Pod Identity by @mthemis-provenir in https://github.com/flyteorg/flyte/pull/5796 +* Add minimal version to failure nodes docs by @eapolinario in https://github.com/flyteorg/flyte/pull/5812 +* Add finalizer to avoid premature CRD Deletion by @RRap0so in https://github.com/flyteorg/flyte/pull/5788 +* [Docs] Use Pure Dataclass In Example by @Future-Outlier in https://github.com/flyteorg/flyte/pull/5829 +* Link to github for flytekit docs by @thomasjpfan in https://github.com/flyteorg/flyte/pull/5831 +* Added the documentation about uniqueness of execution IDs by @Murdock9803 in https://github.com/flyteorg/flyte/pull/5828 +* DOC-648 Add wandb and neptune dependencies by @neverett in https://github.com/flyteorg/flyte/pull/5842 +* Update contributing docs by @neverett in https://github.com/flyteorg/flyte/pull/5830 +* Update schedules.md by @RaghavMangla in https://github.com/flyteorg/flyte/pull/5826 +* [flytectl] Use Protobuf Struct as dataclass Input for backward compatibility by @Future-Outlier in https://github.com/flyteorg/flyte/pull/5840 +* Add David as codeowner of deployment docs by @neverett in https://github.com/flyteorg/flyte/pull/5841 +* Add an error for file size exceeded to prevent system retries by @wild-endeavor in https://github.com/flyteorg/flyte/pull/5725 +* Add perian dependency for PERIAN plugin by @neverett in https://github.com/flyteorg/flyte/pull/5848 +* [FlyteCopilot] Binary IDL Attribute Access Primitive Input by @Future-Outlier in https://github.com/flyteorg/flyte/pull/5850 +* [Docs] Update for image spec/fast register notes by @wild-endeavor in https://github.com/flyteorg/flyte/pull/5726 +* [Docs] add practical example to improve data 
management doc by @DenChenn in https://github.com/flyteorg/flyte/pull/5844 +* Handle CORS in secure connections by @eapolinario in https://github.com/flyteorg/flyte/pull/5855 +* Update Grafana User dashboard by @davidmirror-ops in https://github.com/flyteorg/flyte/pull/5703 +* RFC: Community plugins by @davidmirror-ops in https://github.com/flyteorg/flyte/pull/5610 +* Fix link to contributing docs by @Sovietaced in https://github.com/flyteorg/flyte/pull/5869 +* [Docs] add streaming support example for file and directory by @DenChenn in https://github.com/flyteorg/flyte/pull/5879 +* [Docs] Align Code lines of StructuredDataset with Flytesnacks Example by @JiangJiaWei1103 in https://github.com/flyteorg/flyte/pull/5874 +* Sync client should call CloseSend when done sending by @RRap0so in https://github.com/flyteorg/flyte/pull/5884 +* Upstream contributions from Union.ai by @andrewwdye in https://github.com/flyteorg/flyte/pull/5769 +* Remove duplicate recovery interceptor by @Sovietaced in https://github.com/flyteorg/flyte/pull/5870 +* fix: failed to make scheduler under flyteadmin by @lowc1012 in https://github.com/flyteorg/flyte/pull/5866 +* [Docs] Recover the expected behaviors of example workflows by @JiangJiaWei1103 in https://github.com/flyteorg/flyte/pull/5880 +* Use child context in watchAgents to avoid goroutine leak by @pingsutw in https://github.com/flyteorg/flyte/pull/5888 +* [docs]add cache information in raw containers doc by @popojk in https://github.com/flyteorg/flyte/pull/5876 +* Clarify behavior of interruptible map tasks by @RaghavMangla in https://github.com/flyteorg/flyte/pull/5845 +* [Docs] Simplifying for better user understanding by @10sharmashivam in https://github.com/flyteorg/flyte/pull/5878 +* Update ray.go not to fail when going suspend state. 
by @aminmaghsodi in https://github.com/flyteorg/flyte/pull/5816 +* Add dependency review gh workflow by @eapolinario in https://github.com/flyteorg/flyte/pull/5902 +* [Docs] improve contribute docs by @DenChenn in https://github.com/flyteorg/flyte/pull/5862 +* Fix CONTRIBUTING.md and update by @taieeuu in https://github.com/flyteorg/flyte/pull/5873 +* Add pod template support for init containers by @Sovietaced in https://github.com/flyteorg/flyte/pull/5750 +* Update monitoring docs by @davidmirror-ops in https://github.com/flyteorg/flyte/pull/5903 +* feat: add support for seedProjects with descriptions via seedProjectsWithDetails by @Terryhung in https://github.com/flyteorg/flyte/pull/5864 +* Quick fix for monitoring docs YAML by @davidmirror-ops in https://github.com/flyteorg/flyte/pull/5917 +* Added documentation about Fixed and unique domains by @Murdock9803 in https://github.com/flyteorg/flyte/pull/5923 +* Remove unnecessary joins for list node and task execution entities in flyteadmin db queries by @katrogan in https://github.com/flyteorg/flyte/pull/5935 +* Binary IDL Attribute Access for Map Task by @Future-Outlier in https://github.com/flyteorg/flyte/pull/5942 +* Improve Documentation for Registering Workflows to avoid confusion by @sumana-2705 in https://github.com/flyteorg/flyte/pull/5941 +* Added some CICD best practices to the documentation by @Murdock9803 in https://github.com/flyteorg/flyte/pull/5827 +* [Docs]Document clarifying notes about the data lifecycle by @popojk in https://github.com/flyteorg/flyte/pull/5922 +* fixed [Docs] Spot/interruptible docs imply retries come from the user… Closes #3956 by @ap0calypse8 in https://github.com/flyteorg/flyte/pull/5938 +* Added new page under Data types and IO section for tensorflow_types in flyte documentation by @sumana-2705 in https://github.com/flyteorg/flyte/pull/5807 +* Add basic SASL and TLS support for Kafka cloud events by @Sovietaced in https://github.com/flyteorg/flyte/pull/5814 +* Fix broken 
links by @ppiegaze in https://github.com/flyteorg/flyte/pull/5946 +* feat: improve registration patterns docs by @siiddhantt in https://github.com/flyteorg/flyte/pull/5808 +* Improve literal type string representation handling by @pingsutw in https://github.com/flyteorg/flyte/pull/5932 +* Update propeller sharding docs - types needs to be capitalized by @cpaulik in https://github.com/flyteorg/flyte/pull/5860 +* fix: align the default config output by @Terryhung in https://github.com/flyteorg/flyte/pull/5947 +* [Docs] Fix doc links to blob literal and type by @JiangJiaWei1103 in https://github.com/flyteorg/flyte/pull/5952 +* Fix remaining misuses of capturing the default file descriptors in flytectl unit tests by @eapolinario in https://github.com/flyteorg/flyte/pull/5950 +* Reduce where clause fanout when updating workflow, node & task executions by @katrogan in https://github.com/flyteorg/flyte/pull/5953 +* [flyteadmin][API] get control plane version by @Future-Outlier in https://github.com/flyteorg/flyte/pull/5934 +* Add multi file error aggregation strategy by @bgedik in https://github.com/flyteorg/flyte/pull/5795 +* Fix: avoid log spam for log links generated during the pod's pending phase by @fg91 in https://github.com/flyteorg/flyte/pull/5945 +* [Docs] Align Note with the Output Naming Convention by @JiangJiaWei1103 in https://github.com/flyteorg/flyte/pull/5919 +* [DOC] add copy command examples and description by @mao3267 in https://github.com/flyteorg/flyte/pull/5782 +* Hide generated launch plans starting with .flytegen in the UI by @troychiu in https://github.com/flyteorg/flyte/pull/5949 +* Fix link in flyteidl README.md by @amitani in https://github.com/flyteorg/flyte/pull/5957 +* Fix indentation for security block in auth_setup.rst by @jkhales in https://github.com/flyteorg/flyte/pull/5968 +* [copilot][flytedirectory] multipart blob download by @wayner0628 in https://github.com/flyteorg/flyte/pull/5715 +* Mark NamedEntityState reserved enum values by 
@katrogan in https://github.com/flyteorg/flyte/pull/5975 +* [flytepropeller] Add tests in v1alpha by @DenChenn in https://github.com/flyteorg/flyte/pull/5896 +* fix(admin): validate cron expression in launch plan schedule by @peterxcli in https://github.com/flyteorg/flyte/pull/5951 +* [flytectl][MSGPACK IDL] Gate feature by setting ENV by @Future-Outlier in https://github.com/flyteorg/flyte/pull/5976 +* Ray Plugin - Use serviceAccount from Config if set by @peterghaddad in https://github.com/flyteorg/flyte/pull/5928 +* DOC-666 Change "Accessing attributes" to "Accessing attributes in workflows" by @ppiegaze in https://github.com/flyteorg/flyte/pull/5886 +* [Docs] Align code lines of TensorFlow types with flytesnacks by @JiangJiaWei1103 in https://github.com/flyteorg/flyte/pull/5983 +* Decouple ray submitter, worker, and head resources by @Sovietaced in https://github.com/flyteorg/flyte/pull/5933 +* [Upstream] [COR-2297/] Fix nested offloaded type validation (#552) by @pmahindrakar-oss in https://github.com/flyteorg/flyte/pull/5996 +* docs(contribute): rewrite flyte contribute docs based on 4960 by @bearomorphism in https://github.com/flyteorg/flyte/pull/5260 +* Upstream changes to fix token validity and utilizing inmemory creds source by @pmahindrakar-oss in https://github.com/flyteorg/flyte/pull/6001 +* [Housekeeping] Enable lint flytecopilot by @wayner0628 in https://github.com/flyteorg/flyte/pull/6003 +* Version flyte-binary helm chart and use flyte-binary-release docker images in releases by @eapolinario in https://github.com/flyteorg/flyte/pull/6010 +* Docs rli bug by @cosmicBboy in https://github.com/flyteorg/flyte/pull/6008 +* Remove unused environment variable by @bgedik in https://github.com/flyteorg/flyte/pull/5987 +* Correct stow listing function by @bgedik in https://github.com/flyteorg/flyte/pull/6013 +* Replace other instances of rli by @eapolinario in https://github.com/flyteorg/flyte/pull/6014 +* Set HttpOnly and Secure flags in session cookies by 
@eapolinario in https://github.com/flyteorg/flyte/pull/5911 +* Run CI in release branches by @eapolinario in https://github.com/flyteorg/flyte/pull/5901 +* Update CODEOWNERS - remove nikki by @eapolinario in https://github.com/flyteorg/flyte/pull/6015 +* Adopt protogetter by @eapolinario in https://github.com/flyteorg/flyte/pull/5981 +* Add visibility control to dynamic log links by @pingsutw in https://github.com/flyteorg/flyte/pull/6000 +* Bump github.com/golang-jwt/jwt/v4 from 4.5.0 to 4.5.1 by @dependabot in https://github.com/flyteorg/flyte/pull/6017 +* Bump github.com/golang-jwt/jwt/v4 from 4.5.0 to 4.5.1 in /flyteadmin by @dependabot in https://github.com/flyteorg/flyte/pull/6020 +* fix: return the config file not found error by @Terryhung in https://github.com/flyteorg/flyte/pull/5972 +* Add option to install CRD as a part of chart install in flyte-binary by @marrrcin in https://github.com/flyteorg/flyte/pull/5967 +* Deterministic error propagation for distributed (training) tasks by @fg91 in https://github.com/flyteorg/flyte/pull/5598 +* Add DedupingBucketRateLimiter by @andrewwdye in https://github.com/flyteorg/flyte/pull/6025 +* docs: update command for running the single binary by @machichima in https://github.com/flyteorg/flyte/pull/6031 +* Inject offloading literal env vars by @eapolinario in https://github.com/flyteorg/flyte/pull/6027 +* Fix Union type with dataclass ambiguous error and support superset comparison by @mao3267 in https://github.com/flyteorg/flyte/pull/5858 +* [Fix] Add logger for compiler and marshal while comparing union by @mao3267 in https://github.com/flyteorg/flyte/pull/6034 +* How to verify that the grpc service of flyteadmin works as expected by @popojk in https://github.com/flyteorg/flyte/pull/5958 +* auto-update contributors by @flyte-bot in https://github.com/flyteorg/flyte/pull/3601 +* [Docs] MessagePack IDL, Pydantic Support, and Attribute Access by @Future-Outlier in https://github.com/flyteorg/flyte/pull/6022 +* Revert 
"[COR-2297/] Fix nested offloaded type validation (#552) (#5996)" by @eapolinario in https://github.com/flyteorg/flyte/pull/6045 +* Bump docker/build-push-action to v6 by @lowc1012 in https://github.com/flyteorg/flyte/pull/5747 +* Add a new AgentError field in the agent protos by @RRap0so in https://github.com/flyteorg/flyte/pull/5916 +* Update contribute_code.rst by @davidlin20dev in https://github.com/flyteorg/flyte/pull/6009 +* Mark execution mode enum reserved by @katrogan in https://github.com/flyteorg/flyte/pull/6040 +* Add is_eager bit to indicate eager tasks in flyte system by @pmahindrakar-oss in https://github.com/flyteorg/flyte/pull/6041 +* Fix: Make appProtocols optional in flyteadmin and flyteconsole services in helm chart by @fg91 in https://github.com/flyteorg/flyte/pull/5944 +* Fix: customize demo cluster port by @vincent0426 in https://github.com/flyteorg/flyte/pull/5969 +* helm: Add support for passing env variables to flyteadmin using envFrom by @ctso in https://github.com/flyteorg/flyte/pull/5216 +* Adding support for downloading offloaded literal in copilot by @pmahindrakar-oss in https://github.com/flyteorg/flyte/pull/6048 +* Add tolerations for extended resources by @troychiu in https://github.com/flyteorg/flyte/pull/6033 +* [Docs][flyteagent] Added description of exception deletion cases. 
by @SZL741023 in https://github.com/flyteorg/flyte/pull/6039 +* Update note about Terraform reference implementations by @davidmirror-ops in https://github.com/flyteorg/flyte/pull/6056 +* Minor fixes to single-cloud page by @davidmirror-ops in https://github.com/flyteorg/flyte/pull/6059 +* [Docs] Align sd code lines with Flytesnacks example by @JiangJiaWei1103 in https://github.com/flyteorg/flyte/pull/6063 +* [copilot] rename sidecar to uploader by @wayner0628 in https://github.com/flyteorg/flyte/pull/6043 +* add sub_node_interface field to array node by @pvditt in https://github.com/flyteorg/flyte/pull/6018 +* [demo sandbox] Add env vars to plugin config by @wild-endeavor in https://github.com/flyteorg/flyte/pull/6072 +* Fix inconsistent code examples in caching.md documentation by @davidlin20dev in https://github.com/flyteorg/flyte/pull/6077 +* Support ArrayNode subNode timeouts by @hamersaw in https://github.com/flyteorg/flyte/pull/6054 +* [WIP] docs: add raise user error section by @machichima in https://github.com/flyteorg/flyte/pull/6084 +* Enable literal offloading in flyte-binary and flyte-core by @eapolinario in https://github.com/flyteorg/flyte/pull/6087 +* Special-case type annotations to force cache invalidations for msgpack-binary literals by @eapolinario in https://github.com/flyteorg/flyte/pull/6078 +* Regenerate flytectl docs by @neverett in https://github.com/flyteorg/flyte/pull/5914 +* [ Docs ] fix: contribute link by @taieeuu in https://github.com/flyteorg/flyte/pull/6076 +* Update single-binary.yml end2end_execute by @taieeuu in https://github.com/flyteorg/flyte/pull/6061 +* fix: modify deprecated functions by @machichima in https://github.com/flyteorg/flyte/pull/6052 +* Use an empty config file to produce docs by @eapolinario in https://github.com/flyteorg/flyte/pull/6092 + +## New Contributors +* @mthemis-provenir made their first contribution in https://github.com/flyteorg/flyte/pull/5796 +* @Murdock9803 made their first contribution in 
https://github.com/flyteorg/flyte/pull/5828 +* @RaghavMangla made their first contribution in https://github.com/flyteorg/flyte/pull/5826 +* @DenChenn made their first contribution in https://github.com/flyteorg/flyte/pull/5844 +* @JiangJiaWei1103 made their first contribution in https://github.com/flyteorg/flyte/pull/5874 +* @popojk made their first contribution in https://github.com/flyteorg/flyte/pull/5876 +* @10sharmashivam made their first contribution in https://github.com/flyteorg/flyte/pull/5878 +* @aminmaghsodi made their first contribution in https://github.com/flyteorg/flyte/pull/5816 +* @taieeuu made their first contribution in https://github.com/flyteorg/flyte/pull/5873 +* @Terryhung made their first contribution in https://github.com/flyteorg/flyte/pull/5864 +* @sumana-2705 made their first contribution in https://github.com/flyteorg/flyte/pull/5941 +* @ap0calypse8 made their first contribution in https://github.com/flyteorg/flyte/pull/5938 +* @siiddhantt made their first contribution in https://github.com/flyteorg/flyte/pull/5808 +* @mao3267 made their first contribution in https://github.com/flyteorg/flyte/pull/5782 +* @amitani made their first contribution in https://github.com/flyteorg/flyte/pull/5957 +* @jkhales made their first contribution in https://github.com/flyteorg/flyte/pull/5968 +* @peterxcli made their first contribution in https://github.com/flyteorg/flyte/pull/5951 +* @bearomorphism made their first contribution in https://github.com/flyteorg/flyte/pull/5260 +* @marrrcin made their first contribution in https://github.com/flyteorg/flyte/pull/5967 +* @machichima made their first contribution in https://github.com/flyteorg/flyte/pull/6031 +* @davidlin20dev made their first contribution in https://github.com/flyteorg/flyte/pull/6009 +* @vincent0426 made their first contribution in https://github.com/flyteorg/flyte/pull/5969 +* @ctso made their first contribution in https://github.com/flyteorg/flyte/pull/5216 +* @SZL741023 made their 
first contribution in https://github.com/flyteorg/flyte/pull/6039 + +**Link to full changelog**: https://github.com/flyteorg/flyte/compare/v1.13.3...v1.14.0 + diff --git a/CHANGELOG/CHANGELOG-v1.14.1.md b/CHANGELOG/CHANGELOG-v1.14.1.md new file mode 100644 index 0000000000..64b95dd4be --- /dev/null +++ b/CHANGELOG/CHANGELOG-v1.14.1.md @@ -0,0 +1,19 @@ +# Flyte 1.14.1 Release Notes + +* Update flytestdlib and affected tools (copilot) for missing config. + +## What's Changed +* docs: Refactor merge sort code example to use literalinclude by @davidlin20dev in https://github.com/flyteorg/flyte/pull/6091 +* [DOCS] Using ImageSpec in ContainerTask by @machichima in https://github.com/flyteorg/flyte/pull/6095 +* Eager doc updates by @wild-endeavor in https://github.com/flyteorg/flyte/pull/6099 +* Revert "fix: return the config file not found error" by @eapolinario in https://github.com/flyteorg/flyte/pull/6100 +* Remove notes on deprecated Batch size by @wild-endeavor in https://github.com/flyteorg/flyte/pull/6102 +* Upstream: Add labels to published execution events by @katrogan in https://github.com/flyteorg/flyte/pull/6104 +* Fix: Make distributed error aggregation opt-in by @fg91 in https://github.com/flyteorg/flyte/pull/6103 +* Add default labels and annotations to Ray worker pods too. 
by @katrogan in https://github.com/flyteorg/flyte/pull/6107 +* Fix: Remove the default search dialog if it exists (on CMD + K) by @chmod77 in https://github.com/flyteorg/flyte/pull/6106 + +## New Contributors +* @chmod77 made their first contribution in https://github.com/flyteorg/flyte/pull/6106 + +**Full Changelog**: https://github.com/flyteorg/flyte/compare/v1.14.0...v1.14.1 diff --git a/CODEOWNERS b/CODEOWNERS index da2c5bdd5c..635c0d4326 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -1,3 +1,3 @@ # Automatically request docs team for docs PR review -/docs/ @neverett @ppiegaze +/docs/ @ppiegaze /docs/deployment/ @davidmirror-ops diff --git a/Dockerfile b/Dockerfile index fc32351ebe..ead022d036 100644 --- a/Dockerfile +++ b/Dockerfile @@ -5,8 +5,8 @@ FROM ghcr.io/flyteorg/flyteconsole:${FLYTECONSOLE_VERSION} AS flyteconsole FROM --platform=${BUILDPLATFORM} golang:1.22-bookworm AS flytebuilder ARG TARGETARCH -ENV GOARCH "${TARGETARCH}" -ENV GOOS linux +ENV GOARCH="${TARGETARCH}" +ENV GOOS=linux WORKDIR /flyteorg/build @@ -29,10 +29,10 @@ RUN --mount=type=cache,target=/root/.cache/go-build --mount=type=cache,target=/r FROM debian:bookworm-slim ARG FLYTE_VERSION -ENV FLYTE_VERSION "${FLYTE_VERSION}" +ENV FLYTE_VERSION="${FLYTE_VERSION}" -ENV DEBCONF_NONINTERACTIVE_SEEN true -ENV DEBIAN_FRONTEND noninteractive +ENV DEBCONF_NONINTERACTIVE_SEEN=true +ENV DEBIAN_FRONTEND=noninteractive # Install core packages RUN apt-get update && apt-get install --no-install-recommends --yes \ diff --git a/Dockerfile.datacatalog b/Dockerfile.datacatalog index b6bfc8707d..80f683a40f 100644 --- a/Dockerfile.datacatalog +++ b/Dockerfile.datacatalog @@ -3,12 +3,12 @@ # # TO OPT OUT OF UPDATES, SEE https://github.com/flyteorg/boilerplate/blob/master/Readme.rst -FROM --platform=${BUILDPLATFORM} golang:1.22-alpine3.18 as builder +FROM --platform=${BUILDPLATFORM} golang:1.22-alpine3.18 AS builder ARG TARGETARCH -ENV GOARCH "${TARGETARCH}" -ENV GOOS linux +ENV GOARCH="${TARGETARCH}" +ENV 
GOOS=linux RUN apk add git openssh-client make curl diff --git a/Dockerfile.flyteadmin b/Dockerfile.flyteadmin index 2fe21cccc2..9e33229959 100644 --- a/Dockerfile.flyteadmin +++ b/Dockerfile.flyteadmin @@ -3,11 +3,11 @@ # # TO OPT OUT OF UPDATES, SEE https://github.com/lyft/boilerplate/blob/master/Readme.rst -FROM --platform=${BUILDPLATFORM} golang:1.22-alpine3.18 as builder +FROM --platform=${BUILDPLATFORM} golang:1.22-alpine3.18 AS builder ARG TARGETARCH -ENV GOARCH "${TARGETARCH}" -ENV GOOS linux +ENV GOARCH="${TARGETARCH}" +ENV GOOS=linux RUN apk add git openssh-client make curl @@ -41,7 +41,7 @@ ENV PATH="/artifacts:${PATH}" # This will eventually move to centurylink/ca-certs:latest for minimum possible image size FROM alpine:3.18 -LABEL org.opencontainers.image.source https://github.com/flyteorg/flyteadmin +LABEL org.opencontainers.image.source=https://github.com/flyteorg/flyteadmin COPY --from=builder /artifacts /bin diff --git a/Dockerfile.flytecopilot b/Dockerfile.flytecopilot index 44107f1097..66e1d2c295 100644 --- a/Dockerfile.flytecopilot +++ b/Dockerfile.flytecopilot @@ -3,12 +3,12 @@ # # TO OPT OUT OF UPDATES, SEE https://github.com/lyft/boilerplate/blob/master/Readme.rst -FROM --platform=${BUILDPLATFORM} golang:1.22-alpine3.18 as builder +FROM --platform=${BUILDPLATFORM} golang:1.22-alpine3.18 AS builder ARG TARGETARCH -ENV GOARCH "${TARGETARCH}" -ENV GOOS linux +ENV GOARCH="${TARGETARCH}" +ENV GOOS=linux RUN apk add git openssh-client make curl @@ -32,7 +32,7 @@ ENV PATH="/artifacts:${PATH}" # This will eventually move to centurylink/ca-certs:latest for minimum possible image size FROM alpine:3.18 -LABEL org.opencontainers.image.source https://github.com/lyft/flyteplugins +LABEL org.opencontainers.image.source=https://github.com/lyft/flyteplugins COPY --from=builder /artifacts /bin diff --git a/Dockerfile.flytepropeller b/Dockerfile.flytepropeller index 058d78b219..4963a530a4 100644 --- a/Dockerfile.flytepropeller +++ b/Dockerfile.flytepropeller @@ 
-4,12 +4,12 @@ # TO OPT OUT OF UPDATES, SEE https://github.com/lyft/boilerplate/blob/master/Readme.rst -FROM --platform=${BUILDPLATFORM} golang:1.22-alpine3.18 as builder +FROM --platform=${BUILDPLATFORM} golang:1.22-alpine3.18 AS builder ARG TARGETARCH -ENV GOARCH "${TARGETARCH}" -ENV GOOS linux +ENV GOARCH="${TARGETARCH}" +ENV GOOS=linux RUN apk add git openssh-client make curl @@ -33,7 +33,7 @@ ENV PATH="/artifacts:${PATH}" # This will eventually move to centurylink/ca-certs:latest for minimum possible image size FROM alpine:3.18 -LABEL org.opencontainers.image.source https://github.com/flyteorg/flytepropeller +LABEL org.opencontainers.image.source=https://github.com/flyteorg/flytepropeller COPY --from=builder /artifacts /bin diff --git a/Dockerfile.flytescheduler b/Dockerfile.flytescheduler index b22d050fc9..a7119d0d9b 100644 --- a/Dockerfile.flytescheduler +++ b/Dockerfile.flytescheduler @@ -4,12 +4,12 @@ # TO OPT OUT OF UPDATES, SEE https://github.com/lyft/boilerplate/blob/master/Readme.rst -FROM --platform=${BUILDPLATFORM} golang:1.22-alpine3.18 as builder +FROM --platform=${BUILDPLATFORM} golang:1.22-alpine3.18 AS builder ARG TARGETARCH -ENV GOARCH "${TARGETARCH}" -ENV GOOS linux +ENV GOARCH="${TARGETARCH}" +ENV GOOS=linux RUN apk add git openssh-client make curl @@ -36,7 +36,7 @@ ENV PATH="/artifacts:${PATH}" # This will eventually move to centurylink/ca-certs:latest for minimum possible image size FROM alpine:3.18 -LABEL org.opencontainers.image.source https://github.com/flyteorg/flyteadmin +LABEL org.opencontainers.image.source=https://github.com/flyteorg/flyteadmin COPY --from=builder /artifacts /bin diff --git a/Makefile b/Makefile index a8ac961f02..eacc4c69ae 100644 --- a/Makefile +++ b/Makefile @@ -135,6 +135,7 @@ go-tidy: make -C flyteplugins go-tidy make -C flytestdlib go-tidy make -C flytecopilot go-tidy + make -C flytectl go-tidy .PHONY: lint-helm-charts lint-helm-charts: diff --git a/README.md b/README.md index 3bf96ba0c6..d5f2aab54f 100644 --- 
a/README.md +++ b/README.md @@ -151,15 +151,13 @@ There are many ways to get involved in Flyte, including: - Submitting [bugs](https://github.com/flyteorg/flyte/issues/new?assignees=&labels=bug%2Cuntriaged&template=bug_report.yaml&title=%5BBUG%5D+) and [feature requests](https://github.com/flyteorg/flyte/issues/new?assignees=&labels=enhancement%2Cuntriaged&template=feature_request.yaml&title=%5BCore+feature%5D+) for various components. - Reviewing [the documentation](https://docs.flyte.org/en/latest/) and submitting [pull requests](https://github.com/flyteorg/flytesnacks) for anything from fixing typos to adding new content. - Speaking or writing about Flyte or any other ecosystem integration and [letting us know](https://flyte-org.slack.com/archives/C02JMT8KTEE)! -- Taking on a [`help wanted`](https://github.com/flyteorg/flyte/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22+) or [`good-first-issue`](https://github.com/flyteorg/flyte/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22) and following the [CONTRIBUTING](https://docs.flyte.org/en/latest/community/contribute.html) guide to submit changes to the codebase. +- Taking on a [`help wanted`](https://github.com/flyteorg/flyte/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22+) or [`good-first-issue`](https://github.com/flyteorg/flyte/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22) and following the [CONTRIBUTING](https://docs.flyte.org/en/latest/community/contribute/index.html) guide to submit changes to the codebase. - Upvoting [popular feature requests](https://github.com/flyteorg/flyte/issues?q=is%3Aopen+is%3Aissue+label%3Aenhancement+sort%3Areactions-%2B1-desc) to show your support. 
### We :heart: our contributors - -[![953358](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/953358?v=4&w=50&h=50&mask=circle)](https://github.com/katrogan)[![37090125](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/37090125?v=4&w=50&h=50&mask=circle)](https://github.com/lyft-metaservice-3)[![7597118](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/7597118?v=4&w=50&h=50&mask=circle)](https://github.com/matthewphsmith)[![27159](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/27159?v=4&w=50&h=50&mask=circle)](https://github.com/EngHabu)[![29843943](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/29843943?v=4&w=50&h=50&mask=circle)](https://github.com/goreleaserbot)[![10830562](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/10830562?v=4&w=50&h=50&mask=circle)](https://github.com/evalsocket)[![8888115](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/8888115?v=4&w=50&h=50&mask=circle)](https://github.com/hamersaw)[![78108056](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/78108056?v=4&w=50&h=50&mask=circle)](https://github.com/flyte-bot)[![158892](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/158892?v=4&w=50&h=50&mask=circle)](https://github.com/honnix)[![18408237](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/18408237?v=4&w=50&h=50&mask=circle)](https://github.com/anandswaminathan)[![2896568](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/2896568?v=4&w=50&h=50&mask=circle)](https://github.com/wild-endeavor)[![1518524](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1518524?v=4&w=50&h=50&mask=circle)](https://github.com/bnsblue)[![37936015](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/37936015?v=4&w=50&h=50&mask=circle)](https://github.com/pingsutw)[![27724763](https://images.we
serv.nl/?url=https://avatars.githubusercontent.com/u/27724763?v=4&w=50&h=50&mask=circle)](https://github.com/iaroslav-ciupin)[![16888709](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/16888709?v=4&w=50&h=50&mask=circle)](https://github.com/kumare3)[![27777173](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/27777173?v=4&w=50&h=50&mask=circle)](https://github.com/samhita-alla)[![452166](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/452166?v=4&w=50&h=50&mask=circle)](https://github.com/MorpheusXAUT)[![4748985](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/4748985?v=4&w=50&h=50&mask=circle)](https://github.com/aliabbasjaffri)[![6562898](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/6562898?v=4&w=50&h=50&mask=circle)](https://github.com/ckiosidis)[![6239450](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/6239450?v=4&w=50&h=50&mask=circle)](https://github.com/mayitbeegh)[![8805803](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/8805803?v=4&w=50&h=50&mask=circle)](https://github.com/alexlipa91)[![5032356](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/5032356?v=4&w=50&h=50&mask=circle)](https://github.com/brucearctor)[![77798312](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/77798312?v=4&w=50&h=50&mask=circle)](https://github.com/pmahindrakar-oss)[![23062603](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/23062603?v=4&w=50&h=50&mask=circle)](https://github.com/Antaxify)[![653394](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/653394?v=4&w=50&h=50&mask=circle)](https://github.com/eapolinario)[![5725707](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/5725707?v=4&w=50&h=50&mask=circle)](https://github.com/andrewwdye)[![8122852](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/812
2852?v=4&w=50&h=50&mask=circle)](https://github.com/ariefrahmansyah)[![10869815](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/10869815?v=4&w=50&h=50&mask=circle)](https://github.com/jeevb)[![3880645](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/3880645?v=4&w=50&h=50&mask=circle)](https://github.com/jonathanburns)[![3936213](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/3936213?v=4&w=50&h=50&mask=circle)](https://github.com/lu4nm3)[![26174213](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/26174213?v=4&w=50&h=50&mask=circle)](https://github.com/lyft-metaservice-2)[![9142716](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/9142716?v=4&w=50&h=50&mask=circle)](https://github.com/2uasimojo)[![5487021](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/5487021?v=4&w=50&h=50&mask=circle)](https://github.com/veggiemonk)[![1815175](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1815175?v=4&w=50&h=50&mask=circle)](https://github.com/schottra)[![46989299](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/46989299?v=4&w=50&h=50&mask=circle)](https://github.com/supreeth7)[![2816689](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/2816689?v=4&w=50&h=50&mask=circle)](https://github.com/cosmicBboy)[![19375241](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/19375241?v=4&w=50&h=50&mask=circle)](https://github.com/migueltol22)[![6065051](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/6065051?v=4&w=50&h=50&mask=circle)](https://github.com/milton0825)[![70988](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/70988?v=4&w=50&h=50&mask=circle)](https://github.com/slai)[![94349093](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/94349093?v=4&w=50&h=50&mask=circle)](https://github.com/SmritiSatyanV)[![160
90976](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/16090976?v=4&w=50&h=50&mask=circle)](https://github.com/surindersinghp)[![43610471](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/43610471?v=4&w=50&h=50&mask=circle)](https://github.com/TheYk98)[![53313394](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/53313394?v=4&w=50&h=50&mask=circle)](https://github.com/kosigz-lyft)[![4967458](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/4967458?v=4&w=50&h=50&mask=circle)](https://github.com/chanadian)[![467927](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/467927?v=4&w=50&h=50&mask=circle)](https://github.com/kanterov)[![248688](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/248688?v=4&w=50&h=50&mask=circle)](https://github.com/hanzo)[![1330233](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1330233?v=4&w=50&h=50&mask=circle)](https://github.com/igorvalko)[![31255434](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/31255434?v=4&w=50&h=50&mask=circle)](https://github.com/kennyworkman)[![1472826](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1472826?v=4&w=50&h=50&mask=circle)](https://github.com/maximsmol)[![5026554](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/5026554?v=4&w=50&h=50&mask=circle)](https://github.com/vsbus)[![34587798](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/34587798?v=4&w=50&h=50&mask=circle)](https://github.com/akhurana001)[![11799671](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/11799671?v=4&w=50&h=50&mask=circle)](https://github.com/bstadlbauer)[![95110820](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/95110820?v=4&w=50&h=50&mask=circle)](https://github.com/jerempy)[![38207208](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/3
8207208?v=4&w=50&h=50&mask=circle)](https://github.com/tnsetting)[![8200209](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/8200209?v=4&w=50&h=50&mask=circle)](https://github.com/catalinii)[![24364830](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/24364830?v=4&w=50&h=50&mask=circle)](https://github.com/ByronHsu)[![43587819](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/43587819?v=4&w=50&h=50&mask=circle)](https://github.com/chetcode)[![163899](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/163899?v=4&w=50&h=50&mask=circle)](https://github.com/regadas)[![36511035](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/36511035?v=4&w=50&h=50&mask=circle)](https://github.com/fg91)[![22784654](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/22784654?v=4&w=50&h=50&mask=circle)](https://github.com/aybidi)[![1316881](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1316881?v=4&w=50&h=50&mask=circle)](https://github.com/akashkatipally)[![1777447](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1777447?v=4&w=50&h=50&mask=circle)](https://github.com/goyalankit)[![1360529](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1360529?v=4&w=50&h=50&mask=circle)](https://github.com/clairemcginty)[![104257](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/104257?v=4&w=50&h=50&mask=circle)](https://github.com/flixr)[![2538760](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/2538760?v=4&w=50&h=50&mask=circle)](https://github.com/akumor)[![11970258](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/11970258?v=4&w=50&h=50&mask=circle)](https://github.com/niliayu)[![19733683](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/19733683?v=4&w=50&h=50&mask=circle)](https://github.com/snyk-bot)[![155087](https://images.we
serv.nl/?url=https://avatars.githubusercontent.com/u/155087?v=4&w=50&h=50&mask=circle)](https://github.com/derwiki)[![1399455](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1399455?v=4&w=50&h=50&mask=circle)](https://github.com/th0114nd)[![21109744](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/21109744?v=4&w=50&h=50&mask=circle)](https://github.com/AlekhyaSasi)[![49699333](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/in/29110?v=4&w=50&h=50&mask=circle)](https://github.com/apps/dependabot)[![1810591](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1810591?v=4&w=50&h=50&mask=circle)](https://github.com/asottile)[![80421934](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/80421934?v=4&w=50&h=50&mask=circle)](https://github.com/SandraGH5)[![3939659](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/3939659?v=4&w=50&h=50&mask=circle)](https://github.com/sbrunk)[![9609986](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/9609986?v=4&w=50&h=50&mask=circle)](https://github.com/sonjaer)[![12219405](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/12219405?v=4&w=50&h=50&mask=circle)](https://github.com/fediazgon)[![98349643](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/98349643?v=4&w=50&h=50&mask=circle)](https://github.com/rahul-theorem)[![16509490](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/16509490?v=4&w=50&h=50&mask=circle)](https://github.com/ryankarlos)[![6774758](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/6774758?v=4&w=50&h=50&mask=circle)](https://github.com/ddhirajkumar)[![18337807](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/18337807?v=4&w=50&h=50&mask=circle)](https://github.com/max-hoffman)[![322624](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/322624?v=4&w=50&h
=50&mask=circle)](https://github.com/AdrianoKF)[![1168692](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1168692?v=4&w=50&h=50&mask=circle)](https://github.com/dennisobrien)[![91385411](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/91385411?v=4&w=50&h=50&mask=circle)](https://github.com/Ln11211)[![30621230](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/30621230?v=4&w=50&h=50&mask=circle)](https://github.com/aeioulisa)[![54334265](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/54334265?v=4&w=50&h=50&mask=circle)](https://github.com/michaels-lyft)[![48736656](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/48736656?v=4&w=50&h=50&mask=circle)](https://github.com/murilommen)[![17165004](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/17165004?v=4&w=50&h=50&mask=circle)](https://github.com/RobertoRRW)[![30375389](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/30375389?v=4&w=50&h=50&mask=circle)](https://github.com/bimtauer)[![97543480](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/97543480?v=4&w=50&h=50&mask=circle)](https://github.com/esadler-hbo)[![69013027](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/69013027?v=4&w=50&h=50&mask=circle)](https://github.com/ggydush-fn)[![116700206](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/116700206?v=4&w=50&h=50&mask=circle)](https://github.com/kiliangojek)[![422486](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/422486?v=4&w=50&h=50&mask=circle)](https://github.com/bethebunny)[![54333860](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/54333860?v=4&w=50&h=50&mask=circle)](https://github.com/aalavian)[![7005765](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/7005765?v=4&w=50&h=50&mask=circle)](https://github.com/convexquad)[![4025771
](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/4025771?v=4&w=50&h=50&mask=circle)](https://github.com/andresgomezfrr)[![48966647](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/48966647?v=4&w=50&h=50&mask=circle)](https://github.com/asahalyft)[![77167782](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/77167782?v=4&w=50&h=50&mask=circle)](https://github.com/apatel-fn)[![23013825](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/23013825?v=4&w=50&h=50&mask=circle)](https://github.com/arpitbhardwaj)[![31381038](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/31381038?v=4&w=50&h=50&mask=circle)](https://github.com/lordnodd)[![4396228](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/4396228?v=4&w=50&h=50&mask=circle)](https://github.com/bryanwweber)[![6288302](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/6288302?v=4&w=50&h=50&mask=circle)](https://github.com/CalvinLeather)[![23107192](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/23107192?v=4&w=50&h=50&mask=circle)](https://github.com/YmirKhang)[![121866694](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/121866694?v=4&w=50&h=50&mask=circle)](https://github.com/franco-bocci)[![7358951](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/7358951?v=4&w=50&h=50&mask=circle)](https://github.com/frsann)[![33652917](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/33652917?v=4&w=50&h=50&mask=circle)](https://github.com/hfurkanvural)[![6984748](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/6984748?v=4&w=50&h=50&mask=circle)](https://github.com/jbrambleDC)[![488594](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/488594?v=4&w=50&h=50&mask=circle)](https://github.com/jcugat)[![20173739](https://images.weserv.nl/?url=https://avatars.githubuserc
ontent.com/u/20173739?v=4&w=50&h=50&mask=circle)](https://github.com/madhur-tandon)[![34498039](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/34498039?v=4&w=50&h=50&mask=circle)](https://github.com/matheusMoreno)[![19853373](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/19853373?v=4&w=50&h=50&mask=circle)](https://github.com/NotMatthewGriffin)[![10376195](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/10376195?v=4&w=50&h=50&mask=circle)](https://github.com/myz540)[![125105](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/125105?v=4&w=50&h=50&mask=circle)](https://github.com/tekumara)[![1153481](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1153481?v=4&w=50&h=50&mask=circle)](https://github.com/ppiegaze)[![37170063](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/37170063?v=4&w=50&h=50&mask=circle)](https://github.com/Qiwen-Yu)[![2614101](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/2614101?v=4&w=50&h=50&mask=circle)](https://github.com/RobinKa)[![4308533](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/4308533?v=4&w=50&h=50&mask=circle)](https://github.com/rubenbarragan)[![10201242](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/10201242?v=4&w=50&h=50&mask=circle)](https://github.com/sugatoray)[![11269256](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/11269256?v=4&w=50&h=50&mask=circle)](https://github.com/sushrut111)[![61228633](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/61228633?v=4&w=50&h=50&mask=circle)](https://github.com/Tat-V)[![13070236](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/13070236?v=4&w=50&h=50&mask=circle)](https://github.com/TeoZosa)[![8817639](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/8817639?v=4&w=50&h=50&mask=circle)](https://github.com
/ThomVett)[![17309187](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/17309187?v=4&w=50&h=50&mask=circle)](https://github.com/datability-io)[![26834658](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/26834658?v=4&w=50&h=50&mask=circle)](https://github.com/techytushar)[![5092599](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/5092599?v=4&w=50&h=50&mask=circle)](https://github.com/vchowdhary)[![57967031](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/57967031?v=4&w=50&h=50&mask=circle)](https://github.com/varshaparthay)[![67166843](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/67166843?v=4&w=50&h=50&mask=circle)](https://github.com/vvasavada-fn)[![1778407](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1778407?v=4&w=50&h=50&mask=circle)](https://github.com/ybubnov)[![51814063](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/51814063?v=4&w=50&h=50&mask=circle)](https://github.com/Yicheng-Lu-llll)[![3741621](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/3741621?v=4&w=50&h=50&mask=circle)](https://github.com/palchicz)[![12450632](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/12450632?v=4&w=50&h=50&mask=circle)](https://github.com/ajsalow)[![35151789](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/35151789?v=4&w=50&h=50&mask=circle)](https://github.com/ggydush)[![13331724](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/13331724?v=4&w=50&h=50&mask=circle)](https://github.com/martinlyra)[![119345186](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/119345186?v=4&w=50&h=50&mask=circle)](https://github.com/mcloney-ddm)[![1521126](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1521126?v=4&w=50&h=50&mask=circle)](https://github.com/pbrogan12)[![73247359](https://images.weserv.nl/?url=
https://avatars.githubusercontent.com/u/73247359?v=4&w=50&h=50&mask=circle)](https://github.com/stef-stripe)[![50860453](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/50860453?v=4&w=50&h=50&mask=circle)](https://github.com/charlie0220)[![6506810](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/6506810?v=4&w=50&h=50&mask=circle)](https://github.com/stephen37)[![55718143](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/55718143?v=4&w=50&h=50&mask=circle)](https://github.com/anrusina)[![65977800](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/65977800?v=4&w=50&h=50&mask=circle)](https://github.com/service-github-lyft-semantic-release)[![6610300](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/6610300?v=4&w=50&h=50&mask=circle)](https://github.com/ursucarina)[![84735036](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/84735036?v=4&w=50&h=50&mask=circle)](https://github.com/jsonporter)[![85753828](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/85753828?v=4&w=50&h=50&mask=circle)](https://github.com/csirius)[![101579322](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/101579322?v=4&w=50&h=50&mask=circle)](https://github.com/olga-union)[![26953709](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/26953709?v=4&w=50&h=50&mask=circle)](https://github.com/Pianist038801)[![105876962](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/105876962?v=4&w=50&h=50&mask=circle)](https://github.com/james-union)[![25038146](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/25038146?v=4&w=50&h=50&mask=circle)](https://github.com/eugenejahn)[![88684372](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/88684372?v=4&w=50&h=50&mask=circle)](https://github.com/4nalog)[![99441958](https://images.weserv.nl/?url=https://avatars.githubuserco
ntent.com/u/99441958?v=4&w=50&h=50&mask=circle)](https://github.com/apTalya)[![1388071](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1388071?v=4&w=50&h=50&mask=circle)](https://github.com/aviaviavi)[![58770001](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/58770001?v=4&w=50&h=50&mask=circle)](https://github.com/Professional0321)[![20668349](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/20668349?v=4&w=50&h=50&mask=circle)](https://github.com/HiromuHota)[![100569684](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/100569684?v=4&w=50&h=50&mask=circle)](https://github.com/rafaelraposospot)[![17351764](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/17351764?v=4&w=50&h=50&mask=circle)](https://github.com/daniel-shuy)[![6399428](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/6399428?v=4&w=50&h=50&mask=circle)](https://github.com/live-wire)[![25695302](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/25695302?v=4&w=50&h=50&mask=circle)](https://github.com/sisco0)[![18363301](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/18363301?v=4&w=50&h=50&mask=circle)](https://github.com/jimbobby5)[![4023015](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/4023015?v=4&w=50&h=50&mask=circle)](https://github.com/pradithya)[![3451399](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/3451399?v=4&w=50&h=50&mask=circle)](https://github.com/skiptomyliu)[![25364490](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/25364490?v=4&w=50&h=50&mask=circle)](https://github.com/haoyuez)[![50679871](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/50679871?v=4&w=50&h=50&mask=circle)](https://github.com/lupasarin)[![7548823](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/7548823?v=4&w=50&h=50&mask=circle)](https://gi
thub.com/Dread1982)[![7515359](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/7515359?v=4&w=50&h=50&mask=circle)](https://github.com/narape)[![31982395](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/31982395?v=4&w=50&h=50&mask=circle)](https://github.com/alexapdev)[![62209650](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/62209650?v=4&w=50&h=50&mask=circle)](https://github.com/3t8)[![1892175](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1892175?v=4&w=50&h=50&mask=circle)](https://github.com/zeryx)[![200401](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/200401?v=4&w=50&h=50&mask=circle)](https://github.com/arturdryomov)[![13770222](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/13770222?v=4&w=50&h=50&mask=circle)](https://github.com/ChickenTarm)[![2380665](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/2380665?v=4&w=50&h=50&mask=circle)](https://github.com/DavidMertz)[![24739949](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/24739949?v=4&w=50&h=50&mask=circle)](https://github.com/felixwang9817)[![10430635](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/10430635?v=4&w=50&h=50&mask=circle)](https://github.com/juandiegopalomino)[![31911175](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/31911175?v=4&w=50&h=50&mask=circle)](https://github.com/kanyesthaker)[![104152793](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/104152793?v=4&w=50&h=50&mask=circle)](https://github.com/marc-union)[![27818609](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/27818609?v=4&w=50&h=50&mask=circle)](https://github.com/michaeltinsley)[![6486584](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/6486584?v=4&w=50&h=50&mask=circle)](https://github.com/mucahitkantepe)[![321459](https://images.weserv.nl
/?url=https://avatars.githubusercontent.com/u/321459?v=4&w=50&h=50&mask=circle)](https://github.com/oyevtushok)[![35962310](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/35962310?v=4&w=50&h=50&mask=circle)](https://github.com/trishitapingolia)[![91927689](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/91927689?v=4&w=50&h=50&mask=circle)](https://github.com/Smartmind12)[![726061](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/726061?v=4&w=50&h=50&mask=circle)](https://github.com/huxuan)[![47872044](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/47872044?v=4&w=50&h=50&mask=circle)](https://github.com/privatedumbo)[![105229971](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/105229971?v=4&w=50&h=50&mask=circle)](https://github.com/tjKairos)[![405480](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/405480?v=4&w=50&h=50&mask=circle)](https://github.com/georgesnelling)[![1004789](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1004789?v=4&w=50&h=50&mask=circle)](https://github.com/dschaller)[![82604841](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/82604841?v=4&w=50&h=50&mask=circle)](https://github.com/davidmirror-ops)[![480621](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/480621?v=4&w=50&h=50&mask=circle)](https://github.com/davidxia)[![1335881](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1335881?v=4&w=50&h=50&mask=circle)](https://github.com/hoyajigi)[![100597998](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/100597998?v=4&w=50&h=50&mask=circle)](https://github.com/MrKrishnaAgarwal)[![4830700](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/4830700?v=4&w=50&h=50&mask=circle)](https://github.com/NitinAgg)[![69161722](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/69161722?
v=4&w=50&h=50&mask=circle)](https://github.com/noobkid2411)[![43336767](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/43336767?v=4&w=50&h=50&mask=circle)](https://github.com/yongchand)[![25391173](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/25391173?v=4&w=50&h=50&mask=circle)](https://github.com/nicklofaso)[![66388192](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/66388192?v=4&w=50&h=50&mask=circle)](https://github.com/mounesi)[![14992189](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/14992189?v=4&w=50&h=50&mask=circle)](https://github.com/eanakhl)[![1175392](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1175392?v=4&w=50&h=50&mask=circle)](https://github.com/adinin)[![7475946](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/7475946?v=4&w=50&h=50&mask=circle)](https://github.com/anton-malakhov)[![11796986](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/11796986?v=4&w=50&h=50&mask=circle)](https://github.com/avan-sh)[![304786](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/304786?v=4&w=50&h=50&mask=circle)](https://github.com/kinow)[![24402505](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/24402505?v=4&w=50&h=50&mask=circle)](https://github.com/Daeruin)[![1659415](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1659415?v=4&w=50&h=50&mask=circle)](https://github.com/dav009)[![86911142](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/86911142?v=4&w=50&h=50&mask=circle)](https://github.com/idivyanshbansal)[![11456773](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/11456773?v=4&w=50&h=50&mask=circle)](https://github.com/fvde)[![7490199](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/7490199?v=4&w=50&h=50&mask=circle)](https://github.com/Lundez)[![10345184](https://images.we
serv.nl/?url=https://avatars.githubusercontent.com/u/10345184?v=4&w=50&h=50&mask=circle)](https://github.com/hasukmistry)[![29532638](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/29532638?v=4&w=50&h=50&mask=circle)](https://github.com/rokrokss)[![14008978](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/14008978?v=4&w=50&h=50&mask=circle)](https://github.com/jeremydonahue)[![9272376](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/9272376?v=4&w=50&h=50&mask=circle)](https://github.com/jonasdebeukelaer)[![1633460](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1633460?v=4&w=50&h=50&mask=circle)](https://github.com/jmcarp)[![3033592](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/3033592?v=4&w=50&h=50&mask=circle)](https://github.com/kazesberger)[![19229049](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/19229049?v=4&w=50&h=50&mask=circle)](https://github.com/lsena)[![36594527](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/36594527?v=4&w=50&h=50&mask=circle)](https://github.com/mishmanners)[![8755869](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/8755869?v=4&w=50&h=50&mask=circle)](https://github.com/paravatha)[![6528449](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/6528449?v=4&w=50&h=50&mask=circle)](https://github.com/uschi2000)[![790725](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/790725?v=4&w=50&h=50&mask=circle)](https://github.com/rodrigobaron)[![576968](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/576968?v=4&w=50&h=50&mask=circle)](https://github.com/ronaldosaheki)[![36827492](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/36827492?v=4&w=50&h=50&mask=circle)](https://github.com/shahwar9)[![133936](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/133936?v=4&w=50
&h=50&mask=circle)](https://github.com/shihgianlee)[![10438373](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/10438373?v=4&w=50&h=50&mask=circle)](https://github.com/SKalt)[![33272587](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/33272587?v=4&w=50&h=50&mask=circle)](https://github.com/samuel-sujith)[![580328](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/580328?v=4&w=50&h=50&mask=circle)](https://github.com/ilikedata)[![1027207](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1027207?v=4&w=50&h=50&mask=circle)](https://github.com/orf)[![16526627](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/16526627?v=4&w=50&h=50&mask=circle)](https://github.com/vijaysaravana)[![10526540](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/10526540?v=4&w=50&h=50&mask=circle)](https://github.com/yubofredwang)[![5346764](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/5346764?v=4&w=50&h=50&mask=circle)](https://github.com/fsz285)[![22917741](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/22917741?v=4&w=50&h=50&mask=circle)](https://github.com/gigi-at-zymergen)[![40143026](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/40143026?v=4&w=50&h=50&mask=circle)](https://github.com/hampusrosvall)[![77197126](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/77197126?v=4&w=50&h=50&mask=circle)](https://github.com/hitarth01)[![300315](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/300315?v=4&w=50&h=50&mask=circle)](https://github.com/jcourteau)[![106815366](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/106815366?v=4&w=50&h=50&mask=circle)](https://github.com/jw0515)[![1568889](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1568889?v=4&w=50&h=50&mask=circle)](https://github.com/leorleor)[![937967](https://
images.weserv.nl/?url=https://avatars.githubusercontent.com/u/937967?v=4&w=50&h=50&mask=circle)](https://github.com/moose007)[![114232404](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/114232404?v=4&w=50&h=50&mask=circle)](https://github.com/sanjaychouhan-adf)[![14996868](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/14996868?v=4&w=50&h=50&mask=circle)](https://github.com/v01dXYZ)[![93438190](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/93438190?v=4&w=50&h=50&mask=circle)](https://github.com/wanderer163)[![1043051](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1043051?v=4&w=50&h=50&mask=circle)](https://github.com/kylewaynebenson)[![21953442](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/21953442?v=4&w=50&h=50&mask=circle)](https://github.com/Gui11aum3)[![16461847](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/16461847?v=4&w=50&h=50&mask=circle)](https://github.com/JakeNeyer)[![64676594](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/64676594?v=4&w=50&h=50&mask=circle)](https://github.com/abhijeet007rocks8)[![1174730](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1174730?v=4&w=50&h=50&mask=circle)](https://github.com/mouuff)[![20135478](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/20135478?v=4&w=50&h=50&mask=circle)](https://github.com/Juneezee)[![151841](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/151841?v=4&w=50&h=50&mask=circle)](https://github.com/goodgravy)[![44368997](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/44368997?v=4&w=50&h=50&mask=circle)](https://github.com/radiantly)[![36989112](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/36989112?v=4&w=50&h=50&mask=circle)](https://github.com/nishantwrp)[![7144772](https://images.weserv.nl/?url=https://avatars.githubuserconten
t.com/u/7144772?v=4&w=50&h=50&mask=circle)](https://github.com/sighingnow)[![697033](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/697033?v=4&w=50&h=50&mask=circle)](https://github.com/vglocus)[![2845540](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/2845540?v=4&w=50&h=50&mask=circle)](https://github.com/RustedBones)[![4056828](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/4056828?v=4&w=50&h=50&mask=circle)](https://github.com/pablocasares)[![1071153](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1071153?v=4&w=50&h=50&mask=circle)](https://github.com/evdokim)[![5732047](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/5732047?v=4&w=50&h=50&mask=circle)](https://github.com/stormy-ua)[![471021](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/471021?v=4&w=50&h=50&mask=circle)](https://github.com/marschall)[![71284190](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/71284190?v=4&w=50&h=50&mask=circle)](https://github.com/gdungca-fn)[![26265392](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/26265392?v=4&w=50&h=50&mask=circle)](https://github.com/ttanay)[![85021780](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/85021780?v=4&w=50&h=50&mask=circle)](https://github.com/Abdullahi-Ahmed)[![48512530](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/48512530?v=4&w=50&h=50&mask=circle)](https://github.com/amaleelhamri)[![3275593](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/3275593?v=4&w=50&h=50&mask=circle)](https://github.com/pradyunsg)[![66853113](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/in/68672?v=4&w=50&h=50&mask=circle)](https://github.com/apps/pre-commit-ci)[![1834509](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1834509?v=4&w=50&h=50&mask=circle)](https://github.com/jdkn
ight)[![107893](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/107893?v=4&w=50&h=50&mask=circle)](https://github.com/kmike)[![1324225](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1324225?v=4&w=50&h=50&mask=circle)](https://github.com/hugovk)[![1300022](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1300022?v=4&w=50&h=50&mask=circle)](https://github.com/sirosen)[![244656](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/244656?v=4&w=50&h=50&mask=circle)](https://github.com/humitos)[![467294](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/467294?v=4&w=50&h=50&mask=circle)](https://github.com/bastimeyer)[![71486](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/71486?v=4&w=50&h=50&mask=circle)](https://github.com/asmeurer)[![20280470](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/20280470?v=4&w=50&h=50&mask=circle)](https://github.com/drewyh)[![3533182](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/3533182?v=4&w=50&h=50&mask=circle)](https://github.com/polyzen)[![199429](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/199429?v=4&w=50&h=50&mask=circle)](https://github.com/dvarrazzo)[![1032633](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1032633?v=4&w=50&h=50&mask=circle)](https://github.com/dbitouze)[![1313087](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1313087?v=4&w=50&h=50&mask=circle)](https://github.com/idryzhov)[![521097](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/521097?v=4&w=50&h=50&mask=circle)](https://github.com/pauloxnet)[![63936253](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/63936253?v=4&w=50&h=50&mask=circle)](https://github.com/ichard26)[![18519037](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/18519037?v=4&w=50&h=50&mask=ci
rcle)](https://github.com/sethmlarson)[![413772](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/413772?v=4&w=50&h=50&mask=circle)](https://github.com/graingert)[![11478411](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/11478411?v=4&w=50&h=50&mask=circle)](https://github.com/stonecharioteer)[![6739793](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/6739793?v=4&w=50&h=50&mask=circle)](https://github.com/yeraydiazdiaz)[![83365562](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/83365562?v=4&w=50&h=50&mask=circle)](https://github.com/eviau-sat)[![6670894](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/6670894?v=4&w=50&h=50&mask=circle)](https://github.com/rozsasarpi)[![86675](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/86675?v=4&w=50&h=50&mask=circle)](https://github.com/estan)[![4748863](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/4748863?v=4&w=50&h=50&mask=circle)](https://github.com/pseudomuto)[![181308](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/181308?v=4&w=50&h=50&mask=circle)](https://github.com/htdvisser)[![1390277](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1390277?v=4&w=50&h=50&mask=circle)](https://github.com/jacobtolar)[![1391982](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1391982?v=4&w=50&h=50&mask=circle)](https://github.com/ezimanyi)[![3880001](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/3880001?v=4&w=50&h=50&mask=circle)](https://github.com/lpabon)[![770392](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/770392?v=4&w=50&h=50&mask=circle)](https://github.com/ArcEye)[![6178510](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/6178510?v=4&w=50&h=50&mask=circle)](https://github.com/mingrammer)[![5111931](https://images.weserv.nl/?url=https://avat
ars.githubusercontent.com/u/5111931?v=4&w=50&h=50&mask=circle)](https://github.com/aschrijver)[![873434](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/873434?v=4&w=50&h=50&mask=circle)](https://github.com/panzerfahrer)[![16724](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/16724?v=4&w=50&h=50&mask=circle)](https://github.com/glasser)[![17330872](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/17330872?v=4&w=50&h=50&mask=circle)](https://github.com/murph0)[![419419](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/419419?v=4&w=50&h=50&mask=circle)](https://github.com/zetaron)[![1014](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1014?v=4&w=50&h=50&mask=circle)](https://github.com/sunfmin)[![504507](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/504507?v=4&w=50&h=50&mask=circle)](https://github.com/guozheng)[![8841470](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/8841470?v=4&w=50&h=50&mask=circle)](https://github.com/suusan2go)[![901479](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/901479?v=4&w=50&h=50&mask=circle)](https://github.com/mhaberler)[![6400253](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/6400253?v=4&w=50&h=50&mask=circle)](https://github.com/s4ichi)[![353644](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/353644?v=4&w=50&h=50&mask=circle)](https://github.com/dreampuf)[![12421077](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/12421077?v=4&w=50&h=50&mask=circle)](https://github.com/UnicodingUnicorn)[![809865](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/809865?v=4&w=50&h=50&mask=circle)](https://github.com/philiptzou)[![19378](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/19378?v=4&w=50&h=50&mask=circle)](https://github.com/timabell)[![1113245](https://i
mages.weserv.nl/?url=https://avatars.githubusercontent.com/u/1113245?v=4&w=50&h=50&mask=circle)](https://github.com/jasonhancock)[![101659](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/101659?v=4&w=50&h=50&mask=circle)](https://github.com/matryer)[![4730508](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/4730508?v=4&w=50&h=50&mask=circle)](https://github.com/piotrrojek)[![33036160](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/33036160?v=4&w=50&h=50&mask=circle)](https://github.com/jasonsattler)[![470810](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/470810?v=4&w=50&h=50&mask=circle)](https://github.com/sbward)[![7592392](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/7592392?v=4&w=50&h=50&mask=circle)](https://github.com/Pisush)[![94814](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/94814?v=4&w=50&h=50&mask=circle)](https://github.com/tamalsaha)[![8147854](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/8147854?v=4&w=50&h=50&mask=circle)](https://github.com/marianina8)[![1005](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1005?v=4&w=50&h=50&mask=circle)](https://github.com/ernesto-jimenez)[![17263167](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/17263167?v=4&w=50&h=50&mask=circle)](https://github.com/jsteenb2)[![2807589](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/2807589?v=4&w=50&h=50&mask=circle)](https://github.com/darwayne)[![1683714](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1683714?v=4&w=50&h=50&mask=circle)](https://github.com/naysayer)[![6386887](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/6386887?v=4&w=50&h=50&mask=circle)](https://github.com/AgrimPrasad)[![615811](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/615811?v=4&w=50&h=50&mask=circle)]
(https://github.com/dahernan)[![75184](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/75184?v=4&w=50&h=50&mask=circle)](https://github.com/jtarchie)[![469669](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/469669?v=4&w=50&h=50&mask=circle)](https://github.com/jdtobe)[![28523](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/28523?v=4&w=50&h=50&mask=circle)](https://github.com/alrs)[![10113228](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/10113228?v=4&w=50&h=50&mask=circle)](https://github.com/urisimchoni)[![5751464](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/5751464?v=4&w=50&h=50&mask=circle)](https://github.com/Xercoy)[![2405410](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/2405410?v=4&w=50&h=50&mask=circle)](https://github.com/marbergq)[![5082160](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/5082160?v=4&w=50&h=50&mask=circle)](https://github.com/anothrNick)[![11335612](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/11335612?v=4&w=50&h=50&mask=circle)](https://github.com/fermoya)[![23391642](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/23391642?v=4&w=50&h=50&mask=circle)](https://github.com/sbe-arg)[![1024762](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1024762?v=4&w=50&h=50&mask=circle)](https://github.com/PeerXu)[![7390781](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/7390781?v=4&w=50&h=50&mask=circle)](https://github.com/reececomo)[![49680](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/49680?v=4&w=50&h=50&mask=circle)](https://github.com/dmerrick)[![87524](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/87524?v=4&w=50&h=50&mask=circle)](https://github.com/andrewcole)[![866505](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/866505?v
=4&w=50&h=50&mask=circle)](https://github.com/phish108)[![2611549](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/2611549?v=4&w=50&h=50&mask=circle)](https://github.com/endrjuskr)[![49961058](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/49961058?v=4&w=50&h=50&mask=circle)](https://github.com/bevans-HD)[![5655837](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/5655837?v=4&w=50&h=50&mask=circle)](https://github.com/gukoff)[![8320753](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/8320753?v=4&w=50&h=50&mask=circle)](https://github.com/lovromazgon)[![16513382](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/16513382?v=4&w=50&h=50&mask=circle)](https://github.com/117)[![3807434](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/3807434?v=4&w=50&h=50&mask=circle)](https://github.com/tomsolem)[![118945041](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/118945041?v=4&w=50&h=50&mask=circle)](https://github.com/vq-ambiata)[![8232503](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/8232503?v=4&w=50&h=50&mask=circle)](https://github.com/sjauld)[![69170839](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/69170839?v=4&w=50&h=50&mask=circle)](https://github.com/adam-berrio)[![6388483](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/6388483?v=4&w=50&h=50&mask=circle)](https://github.com/zsedem)[![8296645](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/8296645?v=4&w=50&h=50&mask=circle)](https://github.com/imdanielsp)[![17337515](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/17337515?v=4&w=50&h=50&mask=circle)](https://github.com/fabricepipart)[![10090384](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/10090384?v=4&w=50&h=50&mask=circle)](https://github.com/ivanpk)[![2302957](https://images.we
serv.nl/?url=https://avatars.githubusercontent.com/u/2302957?v=4&w=50&h=50&mask=circle)](https://github.com/JeremyLWright)[![995707](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/995707?v=4&w=50&h=50&mask=circle)](https://github.com/OskarStark)[![25486791](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/25486791?v=4&w=50&h=50&mask=circle)](https://github.com/pavyarov)[![5067549](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/5067549?v=4&w=50&h=50&mask=circle)](https://github.com/pellared)[![53085803](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/53085803?v=4&w=50&h=50&mask=circle)](https://github.com/cuttingedge1109)[![62775347](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/62775347?v=4&w=50&h=50&mask=circle)](https://github.com/okozachenko1203)[![25625597](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/25625597?v=4&w=50&h=50&mask=circle)](https://github.com/zero-below)[![282792](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/282792?v=4&w=50&h=50&mask=circle)](https://github.com/asford)[![38894122](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/38894122?v=4&w=50&h=50&mask=circle)](https://github.com/bmcconeghy)[![16698198](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/16698198?v=4&w=50&h=50&mask=circle)](https://github.com/conda-forge-admin)[![36490558](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/36490558?v=4&w=50&h=50&mask=circle)](https://github.com/regro-cf-autotick-bot)[![79913779](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/in/102928?v=4&w=50&h=50&mask=circle)](https://github.com/apps/conda-forge-curator)[![41898282](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/in/15368?v=4&w=50&h=50&mask=circle)](https://github.com/apps/github-actions)[![18567580](https://images.weserv.nl/?url=https
://avatars.githubusercontent.com/u/18567580?v=4&w=50&h=50&mask=circle)](https://github.com/conda-forge-linter)[![26092524](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/26092524?v=4&w=50&h=50&mask=circle)](https://github.com/fellhorn)

[![953358](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/953358?v=4&w=50&h=50&mask=circle)](https://github.com/katrogan)[![37090125](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/37090125?v=4&w=50&h=50&mask=circle)](https://github.com/lyft-metaservice-3)[![7597118](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/7597118?v=4&w=50&h=50&mask=circle)](https://github.com/matthewphsmith)[![27159](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/27159?v=4&w=50&h=50&mask=circle)](https://github.com/EngHabu)[![29843943](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/29843943?v=4&w=50&h=50&mask=circle)](https://github.com/goreleaserbot)[![8888115](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/8888115?v=4&w=50&h=50&mask=circle)](https://github.com/hamersaw)[![10830562](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/10830562?v=4&w=50&h=50&mask=circle)](https://github.com/yindia)[![78108056](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/78108056?v=4&w=50&h=50&mask=circle)](https://github.com/flyte-bot)[![158892](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/158892?v=4&w=50&h=50&mask=circle)](https://github.com/honnix)[![18408237](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/18408237?v=4&w=50&h=50&mask=circle)](https://github.com/anandswaminathan)[![2896568](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/2896568?v=4&w=50&h=50&mask=circle)](https://github.com/wild-endeavor)[![37936015](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/37936015?v=4&w=50
&h=50&mask=circle)](https://github.com/pingsutw)[![653394](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/653394?v=4&w=50&h=50&mask=circle)](https://github.com/eapolinario)[![1518524](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1518524?v=4&w=50&h=50&mask=circle)](https://github.com/bnsblue)[![27724763](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/27724763?v=4&w=50&h=50&mask=circle)](https://github.com/iaroslav-ciupin)[![16888709](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/16888709?v=4&w=50&h=50&mask=circle)](https://github.com/kumare3)[![27777173](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/27777173?v=4&w=50&h=50&mask=circle)](https://github.com/samhita-alla)[![23062603](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/23062603?v=4&w=50&h=50&mask=circle)](https://github.com/Antaxify)[![77798312](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/77798312?v=4&w=50&h=50&mask=circle)](https://github.com/pmahindrakar-oss)[![5032356](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/5032356?v=4&w=50&h=50&mask=circle)](https://github.com/brucearctor)[![8805803](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/8805803?v=4&w=50&h=50&mask=circle)](https://github.com/alexlipa91)[![6239450](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/6239450?v=4&w=50&h=50&mask=circle)](https://github.com/mayitbeegh)[![452166](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/452166?v=4&w=50&h=50&mask=circle)](https://github.com/MorpheusXAUT)[![15335863](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/15335863?v=4&w=50&h=50&mask=circle)](https://github.com/gvashishtha)[![6562898](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/6562898?v=4&w=50&h=50&mask=circle)](https://github.com/ckiosidis)[![4748985]
(https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/4748985?v=4&w=50&h=50&mask=circle)](https://github.com/aliabbasjaffri)[![76461262](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/76461262?v=4&w=50&h=50&mask=circle)](https://github.com/Future-Outlier)[![5725707](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/5725707?v=4&w=50&h=50&mask=circle)](https://github.com/andrewwdye)[![8122852](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/8122852?v=4&w=50&h=50&mask=circle)](https://github.com/ariefrahmansyah)[![10869815](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/10869815?v=4&w=50&h=50&mask=circle)](https://github.com/jeevb)[![3880645](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/3880645?v=4&w=50&h=50&mask=circle)](https://github.com/jonathanburns)[![3936213](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/3936213?v=4&w=50&h=50&mask=circle)](https://github.com/lu4nm3)[![26174213](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/26174213?v=4&w=50&h=50&mask=circle)](https://github.com/lyft-metaservice-2)[![126913098](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/126913098?v=4&w=50&h=50&mask=circle)](https://github.com/squiishyy)[![46989299](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/46989299?v=4&w=50&h=50&mask=circle)](https://github.com/supreeth7)[![1815175](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1815175?v=4&w=50&h=50&mask=circle)](https://github.com/schottra)[![37558497](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/37558497?v=4&w=50&h=50&mask=circle)](https://github.com/pvditt)[![5487021](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/5487021?v=4&w=50&h=50&mask=circle)](https://github.com/veggiemonk)[![9142716](https://images.weserv.nl/?url=https://avatars.githubus
ercontent.com/u/9142716?v=4&w=50&h=50&mask=circle)](https://github.com/2uasimojo)[![2816689](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/2816689?v=4&w=50&h=50&mask=circle)](https://github.com/cosmicBboy)[![19375241](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/19375241?v=4&w=50&h=50&mask=circle)](https://github.com/migueltol22)[![24364830](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/24364830?v=4&w=50&h=50&mask=circle)](https://github.com/ByronHsu)[![53313394](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/53313394?v=4&w=50&h=50&mask=circle)](https://github.com/kosigz-lyft)[![43610471](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/43610471?v=4&w=50&h=50&mask=circle)](https://github.com/yk-x-25)[![10526540](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/10526540?v=4&w=50&h=50&mask=circle)](https://github.com/yubofredwang)[![16090976](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/16090976?v=4&w=50&h=50&mask=circle)](https://github.com/surindersinghp)[![94349093](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/94349093?v=4&w=50&h=50&mask=circle)](https://github.com/SmritiSatyanV)[![70988](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/70988?v=4&w=50&h=50&mask=circle)](https://github.com/slai)[![6065051](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/6065051?v=4&w=50&h=50&mask=circle)](https://github.com/milton0825)[![38207208](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/38207208?v=4&w=50&h=50&mask=circle)](https://github.com/tnsetting)[![95110820](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/95110820?v=4&w=50&h=50&mask=circle)](https://github.com/jerempy)[![11799671](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/11799671?v=4&w=50&h=50&mask=circle)](https://githu
b.com/bstadlbauer)[![34587798](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/34587798?v=4&w=50&h=50&mask=circle)](https://github.com/akhurana001)[![5026554](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/5026554?v=4&w=50&h=50&mask=circle)](https://github.com/vsbus)[![1472826](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1472826?v=4&w=50&h=50&mask=circle)](https://github.com/maximsmol)[![31255434](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/31255434?v=4&w=50&h=50&mask=circle)](https://github.com/kennyworkman)[![1330233](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1330233?v=4&w=50&h=50&mask=circle)](https://github.com/igorvalko)[![248688](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/248688?v=4&w=50&h=50&mask=circle)](https://github.com/hanzo)[![467927](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/467927?v=4&w=50&h=50&mask=circle)](https://github.com/kanterov)[![36511035](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/36511035?v=4&w=50&h=50&mask=circle)](https://github.com/fg91)[![4967458](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/4967458?v=4&w=50&h=50&mask=circle)](https://github.com/chanadian)[![8200209](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/8200209?v=4&w=50&h=50&mask=circle)](https://github.com/catalinii)[![43587819](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/43587819?v=4&w=50&h=50&mask=circle)](https://github.com/chetcode)[![163899](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/163899?v=4&w=50&h=50&mask=circle)](https://github.com/regadas)[![54248170](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/54248170?v=4&w=50&h=50&mask=circle)](https://github.com/nicholasjng)[![2538760](https://images.weserv.nl/?url=https://avatars.githubusercontent.c
om/u/2538760?v=4&w=50&h=50&mask=circle)](https://github.com/akumor)[![104257](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/104257?v=4&w=50&h=50&mask=circle)](https://github.com/flixr)[![92917168](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/92917168?v=4&w=50&h=50&mask=circle)](https://github.com/edwinyyyu)[![1360529](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1360529?v=4&w=50&h=50&mask=circle)](https://github.com/clairemcginty)[![1777447](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1777447?v=4&w=50&h=50&mask=circle)](https://github.com/goyalankit)[![1316881](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1316881?v=4&w=50&h=50&mask=circle)](https://github.com/akashkatipally)[![22784654](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/22784654?v=4&w=50&h=50&mask=circle)](https://github.com/aybidi)[![5402633](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/5402633?v=4&w=50&h=50&mask=circle)](https://github.com/thomasjpfan)[![49699333](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/in/29110?v=4&w=50&h=50&mask=circle)](https://github.com/apps/dependabot)[![72752478](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/72752478?v=4&w=50&h=50&mask=circle)](https://github.com/Mecoli1219)[![19733683](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/19733683?v=4&w=50&h=50&mask=circle)](https://github.com/snyk-bot)[![114708546](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/114708546?v=4&w=50&h=50&mask=circle)](https://github.com/troychiu)[![35886692](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/35886692?v=4&w=50&h=50&mask=circle)](https://github.com/austin362667)[![47914085](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/47914085?v=4&w=50&h=50&mask=circle)](https://github.com/Morta
lHappiness)[![9131935](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/9131935?v=4&w=50&h=50&mask=circle)](https://github.com/Tom-Newton)[![155087](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/155087?v=4&w=50&h=50&mask=circle)](https://github.com/derwiki)[![40698988](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/40698988?v=4&w=50&h=50&mask=circle)](https://github.com/dansola)[![14800485](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/14800485?v=4&w=50&h=50&mask=circle)](https://github.com/jasonlai1218)[![62143443](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/62143443?v=4&w=50&h=50&mask=circle)](https://github.com/mao3267)[![31577879](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/31577879?v=4&w=50&h=50&mask=circle)](https://github.com/pryce-turner)[![1399455](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1399455?v=4&w=50&h=50&mask=circle)](https://github.com/th0114nd)[![58504997](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/58504997?v=4&w=50&h=50&mask=circle)](https://github.com/novahow)[![46030368](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/46030368?v=4&w=50&h=50&mask=circle)](https://github.com/ChungYujoyce)[![21109744](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/21109744?v=4&w=50&h=50&mask=circle)](https://github.com/AlekhyaSasi)[![1810591](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1810591?v=4&w=50&h=50&mask=circle)](https://github.com/asottile)[![54340816](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/54340816?v=4&w=50&h=50&mask=circle)](https://github.com/granthamtaylor)[![89976021](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/89976021?v=4&w=50&h=50&mask=circle)](https://github.com/fiedlerNr9)[![51814063](https://images.weserv.nl/?url=https://ava
tars.githubusercontent.com/u/51814063?v=4&w=50&h=50&mask=circle)](https://github.com/Yicheng-Lu-llll)[![9609986](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/9609986?v=4&w=50&h=50&mask=circle)](https://github.com/sonjaer)[![1153481](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1153481?v=4&w=50&h=50&mask=circle)](https://github.com/ppiegaze)[![35151789](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/35151789?v=4&w=50&h=50&mask=circle)](https://github.com/ggydush)[![140021987](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/140021987?v=4&w=50&h=50&mask=circle)](https://github.com/ddl-rliu)[![138256885](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/138256885?v=4&w=50&h=50&mask=circle)](https://github.com/ysysys3074)[![3939659](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/3939659?v=4&w=50&h=50&mask=circle)](https://github.com/sbrunk)[![80421934](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/80421934?v=4&w=50&h=50&mask=circle)](https://github.com/SandraGH5)[![52046377](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/52046377?v=4&w=50&h=50&mask=circle)](https://github.com/hhcs9527)[![4406268](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/4406268?v=4&w=50&h=50&mask=circle)](https://github.com/otarabai)[![16709018](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/16709018?v=4&w=50&h=50&mask=circle)](https://github.com/noahjax)[![417209](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/417209?v=4&w=50&h=50&mask=circle)](https://github.com/neverett)[![27844407](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/27844407?v=4&w=50&h=50&mask=circle)](https://github.com/ringohoffman)[![106939297](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/106939297?v=4&w=50&h=50&mask=circle)](https:/
/github.com/chaohengstudent)[![380854](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/380854?v=4&w=50&h=50&mask=circle)](https://github.com/bgedik)[![18337807](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/18337807?v=4&w=50&h=50&mask=circle)](https://github.com/max-hoffman)[![1276867](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1276867?v=4&w=50&h=50&mask=circle)](https://github.com/JackUrb)[![115421902](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/115421902?v=4&w=50&h=50&mask=circle)](https://github.com/wayner0628)[![36886416](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/36886416?v=4&w=50&h=50&mask=circle)](https://github.com/JiangJiaWei1103)[![134093844](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/134093844?v=4&w=50&h=50&mask=circle)](https://github.com/rdeaton-freenome)[![106936600](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/106936600?v=4&w=50&h=50&mask=circle)](https://github.com/peridotml)[![26268253](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/26268253?v=4&w=50&h=50&mask=circle)](https://github.com/arbaobao)[![16509490](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/16509490?v=4&w=50&h=50&mask=circle)](https://github.com/ryankarlos)[![98242479](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/98242479?v=4&w=50&h=50&mask=circle)](https://github.com/RichhLi)[![98349643](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/98349643?v=4&w=50&h=50&mask=circle)](https://github.com/rahul-theorem)[![12219405](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/12219405?v=4&w=50&h=50&mask=circle)](https://github.com/fediazgon)[![322624](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/322624?v=4&w=50&h=50&mask=circle)](https://github.com/AdrianoKF)[![953385](https://images.
weserv.nl/?url=https://avatars.githubusercontent.com/u/953385?v=4&w=50&h=50&mask=circle)](https://github.com/blaketastic2)[![30375389](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/30375389?v=4&w=50&h=50&mask=circle)](https://github.com/bimtauer)[![92072956](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/92072956?v=4&w=50&h=50&mask=circle)](https://github.com/PudgyPigeon)[![97543480](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/97543480?v=4&w=50&h=50&mask=circle)](https://github.com/esadler-hbo)[![69013027](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/69013027?v=4&w=50&h=50&mask=circle)](https://github.com/ggydush-fn)[![116700206](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/116700206?v=4&w=50&h=50&mask=circle)](https://github.com/kiliangojek)[![1521126](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1521126?v=4&w=50&h=50&mask=circle)](https://github.com/pbrogan12)[![120470035](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/120470035?v=4&w=50&h=50&mask=circle)](https://github.com/redartera)[![4025771](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/4025771?v=4&w=50&h=50&mask=circle)](https://github.com/andresgomezfrr)[![422486](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/422486?v=4&w=50&h=50&mask=circle)](https://github.com/bethebunny)[![26092524](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/26092524?v=4&w=50&h=50&mask=circle)](https://github.com/fellhorn)[![1168692](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1168692?v=4&w=50&h=50&mask=circle)](https://github.com/dennisobrien)[![33652917](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/33652917?v=4&w=50&h=50&mask=circle)](https://github.com/hfurkanvural)[![45017130](https://images.weserv.nl/?url=https://avatars.githubusercontent.co
m/u/45017130?v=4&w=50&h=50&mask=circle)](https://github.com/helenzhangyc)[![1659910](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1659910?v=4&w=50&h=50&mask=circle)](https://github.com/oliverhu)[![91385411](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/91385411?v=4&w=50&h=50&mask=circle)](https://github.com/Ln11211)[![30621230](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/30621230?v=4&w=50&h=50&mask=circle)](https://github.com/aeioulisa)[![54334265](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/54334265?v=4&w=50&h=50&mask=circle)](https://github.com/michaels-lyft)[![48736656](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/48736656?v=4&w=50&h=50&mask=circle)](https://github.com/murilommen)[![150836163](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/150836163?v=4&w=50&h=50&mask=circle)](https://github.com/neilisaur)[![17165004](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/17165004?v=4&w=50&h=50&mask=circle)](https://github.com/RobertoRRW)[![81233629](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/81233629?v=4&w=50&h=50&mask=circle)](https://github.com/101rakibulhasan)[![38955457](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/38955457?v=4&w=50&h=50&mask=circle)](https://github.com/RRK1000)[![2614101](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/2614101?v=4&w=50&h=50&mask=circle)](https://github.com/RobinKa)[![4308533](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/4308533?v=4&w=50&h=50&mask=circle)](https://github.com/rubenbarragan)[![10201242](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/10201242?v=4&w=50&h=50&mask=circle)](https://github.com/sugatoray)[![11269256](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/11269256?v=4&w=50&h=50&mask=circle)](https://github
.com/sushrut111)[![61228633](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/61228633?v=4&w=50&h=50&mask=circle)](https://github.com/Tat-V)[![13070236](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/13070236?v=4&w=50&h=50&mask=circle)](https://github.com/TeoZosa)[![8817639](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/8817639?v=4&w=50&h=50&mask=circle)](https://github.com/ThomVett)[![17309187](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/17309187?v=4&w=50&h=50&mask=circle)](https://github.com/datability-io)[![2640499](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/2640499?v=4&w=50&h=50&mask=circle)](https://github.com/wirthual)[![97332401](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/97332401?v=4&w=50&h=50&mask=circle)](https://github.com/RaghavMangla)[![100569684](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/100569684?v=4&w=50&h=50&mask=circle)](https://github.com/RRap0so)[![147648834](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/147648834?v=4&w=50&h=50&mask=circle)](https://github.com/quinten-flwls)[![37170063](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/37170063?v=4&w=50&h=50&mask=circle)](https://github.com/Qiwen-Yu)[![43886578](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/43886578?v=4&w=50&h=50&mask=circle)](https://github.com/400Ping)[![125105](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/125105?v=4&w=50&h=50&mask=circle)](https://github.com/tekumara)[![37547264](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/37547264?v=4&w=50&h=50&mask=circle)](https://github.com/Nan2018)[![49385643](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/49385643?v=4&w=50&h=50&mask=circle)](https://github.com/MinuraPunchihewa)[![10376195](https://images.weserv.nl/?url=https:
//avatars.githubusercontent.com/u/10376195?v=4&w=50&h=50&mask=circle)](https://github.com/myz540)[![4417105](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/4417105?v=4&w=50&h=50&mask=circle)](https://github.com/Terryhung)[![73247359](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/73247359?v=4&w=50&h=50&mask=circle)](https://github.com/stef-stripe)[![12913704](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/12913704?v=4&w=50&h=50&mask=circle)](https://github.com/mg515)[![119345186](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/119345186?v=4&w=50&h=50&mask=circle)](https://github.com/mcloney-ddm)[![13331724](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/13331724?v=4&w=50&h=50&mask=circle)](https://github.com/martinlyra)[![24611279](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/24611279?v=4&w=50&h=50&mask=circle)](https://github.com/ericwudayi)[![6333870](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/6333870?v=4&w=50&h=50&mask=circle)](https://github.com/demmerichs)[![4023015](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/4023015?v=4&w=50&h=50&mask=circle)](https://github.com/pradithya)[![12450632](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/12450632?v=4&w=50&h=50&mask=circle)](https://github.com/ajsalow)[![3741621](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/3741621?v=4&w=50&h=50&mask=circle)](https://github.com/palchicz)[![43726198](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/43726198?v=4&w=50&h=50&mask=circle)](https://github.com/yundai424)[![131146298](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/131146298?v=4&w=50&h=50&mask=circle)](https://github.com/yini7777)[![29053051](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/29053051?v=4&w=50&h=50&mask=circle)](
https://github.com/XinEDprob)[![52355146](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/52355146?v=4&w=50&h=50&mask=circle)](https://github.com/lowc1012)[![40901950](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/40901950?v=4&w=50&h=50&mask=circle)](https://github.com/WebOfNakedFancies)[![67166843](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/67166843?v=4&w=50&h=50&mask=circle)](https://github.com/vvasavada-fn)[![15071835](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/15071835?v=4&w=50&h=50&mask=circle)](https://github.com/va6996)[![3391550](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/3391550?v=4&w=50&h=50&mask=circle)](https://github.com/devictr)[![57967031](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/57967031?v=4&w=50&h=50&mask=circle)](https://github.com/varshaparthay)[![5092599](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/5092599?v=4&w=50&h=50&mask=circle)](https://github.com/vchowdhary)[![26834658](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/26834658?v=4&w=50&h=50&mask=circle)](https://github.com/techytushar)[![14007150](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/14007150?v=4&w=50&h=50&mask=circle)](https://github.com/deepyaman)[![2380665](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/2380665?v=4&w=50&h=50&mask=circle)](https://github.com/DavidMertz)[![16297104](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/16297104?v=4&w=50&h=50&mask=circle)](https://github.com/danpf)[![10463690](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/10463690?v=4&w=50&h=50&mask=circle)](https://github.com/cjidboon94)[![26920893](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/26920893?v=4&w=50&h=50&mask=circle)](https://github.com/chinghongfang)[![27000005](https://image
s.weserv.nl/?url=https://avatars.githubusercontent.com/u/27000005?v=4&w=50&h=50&mask=circle)](https://github.com/supercharleszhu)[![420942](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/420942?v=4&w=50&h=50&mask=circle)](https://github.com/cameronraysmith)[![6288302](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/6288302?v=4&w=50&h=50&mask=circle)](https://github.com/CalvinLeather)[![179035736](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/179035736?v=4&w=50&h=50&mask=circle)](https://github.com/bryan-hunted)[![4396228](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/4396228?v=4&w=50&h=50&mask=circle)](https://github.com/bryanwweber)[![7422223](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/7422223?v=4&w=50&h=50&mask=circle)](https://github.com/bcvanmeurs)[![234145](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/234145?v=4&w=50&h=50&mask=circle)](https://github.com/benoistlaurent)[![31381038](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/31381038?v=4&w=50&h=50&mask=circle)](https://github.com/lordnodd)[![49250723](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/49250723?v=4&w=50&h=50&mask=circle)](https://github.com/ArthurBook)[![58334441](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/58334441?v=4&w=50&h=50&mask=circle)](https://github.com/wckdman)[![23013825](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/23013825?v=4&w=50&h=50&mask=circle)](https://github.com/arpitbhardwaj)[![77167782](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/77167782?v=4&w=50&h=50&mask=circle)](https://github.com/apatel-fn)[![48966647](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/48966647?v=4&w=50&h=50&mask=circle)](https://github.com/asahalyft)[![7005765](https://images.weserv.nl/?url=https://avatars.githubusercont
ent.com/u/7005765?v=4&w=50&h=50&mask=circle)](https://github.com/convexquad)[![54333860](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/54333860?v=4&w=50&h=50&mask=circle)](https://github.com/aalavian)[![110886184](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/110886184?v=4&w=50&h=50&mask=circle)](https://github.com/aditya7302)[![19853373](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/19853373?v=4&w=50&h=50&mask=circle)](https://github.com/NotMatthewGriffin)[![34498039](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/34498039?v=4&w=50&h=50&mask=circle)](https://github.com/matheusMoreno)[![20173739](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/20173739?v=4&w=50&h=50&mask=circle)](https://github.com/madhur-tandon)[![4410453](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/4410453?v=4&w=50&h=50&mask=circle)](https://github.com/mdjong1)[![113847439](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/113847439?v=4&w=50&h=50&mask=circle)](https://github.com/LunarMarathon)[![131469540](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/131469540?v=4&w=50&h=50&mask=circle)](https://github.com/knordstrom-muon)[![488594](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/488594?v=4&w=50&h=50&mask=circle)](https://github.com/jcugat)[![6984748](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/6984748?v=4&w=50&h=50&mask=circle)](https://github.com/jbrambleDC)[![28351896](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/28351896?v=4&w=50&h=50&mask=circle)](https://github.com/JasonZhu1313)[![1274471](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1274471?v=4&w=50&h=50&mask=circle)](https://github.com/Sovietaced)[![7358951](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/7358951?v=4&w=50&h=50&mask=circl
e)](https://github.com/frsann)[![121866694](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/121866694?v=4&w=50&h=50&mask=circle)](https://github.com/franco-bocci)[![1530049](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1530049?v=4&w=50&h=50&mask=circle)](https://github.com/felixmulder)[![111539728](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/111539728?v=4&w=50&h=50&mask=circle)](https://github.com/ddl-ebrown)[![23107192](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/23107192?v=4&w=50&h=50&mask=circle)](https://github.com/YmirKhang)[![6596957](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/6596957?v=4&w=50&h=50&mask=circle)](https://github.com/elibixby)[![173942673](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/173942673?v=4&w=50&h=50&mask=circle)](https://github.com/dylanspag-lmco)[![103009868](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/103009868?v=4&w=50&h=50&mask=circle)](https://github.com/douenergy)[![6774758](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/6774758?v=4&w=50&h=50&mask=circle)](https://github.com/ddhirajkumar)[![50860453](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/50860453?v=4&w=50&h=50&mask=circle)](https://github.com/charlie0220)[![6506810](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/6506810?v=4&w=50&h=50&mask=circle)](https://github.com/stephen37)[![6610300](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/6610300?v=4&w=50&h=50&mask=circle)](https://github.com/ursucarina)[![55718143](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/55718143?v=4&w=50&h=50&mask=circle)](https://github.com/anrusina)[![65977800](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/65977800?v=4&w=50&h=50&mask=circle)](https://github.com/service-github-lyft-semantic-r
elease)[![84735036](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/84735036?v=4&w=50&h=50&mask=circle)](https://github.com/jsonporter)[![85753828](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/85753828?v=4&w=50&h=50&mask=circle)](https://github.com/govalt)[![105876962](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/105876962?v=4&w=50&h=50&mask=circle)](https://github.com/james-union)[![101579322](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/101579322?v=4&w=50&h=50&mask=circle)](https://github.com/olga-union)[![26953709](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/26953709?v=4&w=50&h=50&mask=circle)](https://github.com/Pianist038801)[![25038146](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/25038146?v=4&w=50&h=50&mask=circle)](https://github.com/eugenejahn)[![88684372](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/88684372?v=4&w=50&h=50&mask=circle)](https://github.com/4nalog)[![8129392](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/8129392?v=4&w=50&h=50&mask=circle)](https://github.com/FrankFlitton)[![99441958](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/99441958?v=4&w=50&h=50&mask=circle)](https://github.com/apTalya)[![59022542](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/59022542?v=4&w=50&h=50&mask=circle)](https://github.com/lyonlu13)[![72861891](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/72861891?v=4&w=50&h=50&mask=circle)](https://github.com/xwk1246)[![1902623](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1902623?v=4&w=50&h=50&mask=circle)](https://github.com/trutx)[![59891164](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/59891164?v=4&w=50&h=50&mask=circle)](https://github.com/K-Kumar-01)[![20668349](https://images.weserv.nl/?url=https://avatars
.githubusercontent.com/u/20668349?v=4&w=50&h=50&mask=circle)](https://github.com/HiromuHota)[![58770001](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/58770001?v=4&w=50&h=50&mask=circle)](https://github.com/Professional0321)[![1388071](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1388071?v=4&w=50&h=50&mask=circle)](https://github.com/aviaviavi)[![18363301](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/18363301?v=4&w=50&h=50&mask=circle)](https://github.com/jimbobby5)[![25695302](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/25695302?v=4&w=50&h=50&mask=circle)](https://github.com/sisco0)[![6399428](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/6399428?v=4&w=50&h=50&mask=circle)](https://github.com/live-wire)[![17351764](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/17351764?v=4&w=50&h=50&mask=circle)](https://github.com/daniel-shuy)[![31982395](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/31982395?v=4&w=50&h=50&mask=circle)](https://github.com/alexapdev)[![7515359](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/7515359?v=4&w=50&h=50&mask=circle)](https://github.com/narape)[![7548823](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/7548823?v=4&w=50&h=50&mask=circle)](https://github.com/manuelrombach)[![50679871](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/50679871?v=4&w=50&h=50&mask=circle)](https://github.com/lupasarin)[![25364490](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/25364490?v=4&w=50&h=50&mask=circle)](https://github.com/haoyuez)[![3451399](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/3451399?v=4&w=50&h=50&mask=circle)](https://github.com/skiptomyliu)[![66767992](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/66767992?v=4&w=50&h=50&mask=circle)](htt
ps://github.com/10sharmashivam)[![62209650](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/62209650?v=4&w=50&h=50&mask=circle)](https://github.com/3t8)[![82604841](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/82604841?v=4&w=50&h=50&mask=circle)](https://github.com/davidmirror-ops)[![1892175](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1892175?v=4&w=50&h=50&mask=circle)](https://github.com/zeryx)[![66259759](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/66259759?v=4&w=50&h=50&mask=circle)](https://github.com/popojk)[![64233065](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/64233065?v=4&w=50&h=50&mask=circle)](https://github.com/rachfop)[![11166516](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/11166516?v=4&w=50&h=50&mask=circle)](https://github.com/hebiao064)[![110307215](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/110307215?v=4&w=50&h=50&mask=circle)](https://github.com/sumana-2705)[![35962310](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/35962310?v=4&w=50&h=50&mask=circle)](https://github.com/trishitapingolia)[![91927689](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/91927689?v=4&w=50&h=50&mask=circle)](https://github.com/Smartmind12)[![726061](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/726061?v=4&w=50&h=50&mask=circle)](https://github.com/huxuan)[![42114946](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/42114946?v=4&w=50&h=50&mask=circle)](https://github.com/DenChenn)[![47872044](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/47872044?v=4&w=50&h=50&mask=circle)](https://github.com/privatedumbo)[![105229971](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/105229971?v=4&w=50&h=50&mask=circle)](https://github.com/tjKairos)[![200401](https://images.weserv.n
l/?url=https://avatars.githubusercontent.com/u/200401?v=4&w=50&h=50&mask=circle)](https://github.com/arturdryomov)[![13770222](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/13770222?v=4&w=50&h=50&mask=circle)](https://github.com/ChickenTarm)[![117322020](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/117322020?v=4&w=50&h=50&mask=circle)](https://github.com/cdreetz)[![24739949](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/24739949?v=4&w=50&h=50&mask=circle)](https://github.com/felixwang9817)[![64864908](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/64864908?v=4&w=50&h=50&mask=circle)](https://github.com/xshen8888)[![10430635](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/10430635?v=4&w=50&h=50&mask=circle)](https://github.com/juandiegopalomino)[![31911175](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/31911175?v=4&w=50&h=50&mask=circle)](https://github.com/kanyesthaker)[![104152793](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/104152793?v=4&w=50&h=50&mask=circle)](https://github.com/marc-union)[![27818609](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/27818609?v=4&w=50&h=50&mask=circle)](https://github.com/michaeltinsley)[![22797900](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/22797900?v=4&w=50&h=50&mask=circle)](https://github.com/stolarczyk)[![6486584](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/6486584?v=4&w=50&h=50&mask=circle)](https://github.com/mucahitkantepe)[![321459](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/321459?v=4&w=50&h=50&mask=circle)](https://github.com/oyevtushok)[![405480](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/405480?v=4&w=50&h=50&mask=circle)](https://github.com/georgesnelling)[![54046807](https://images.weserv.nl/?url=https://avatars.githubuserco
ntent.com/u/54046807?v=4&w=50&h=50&mask=circle)](https://github.com/kamaleybov)[![1004789](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1004789?v=4&w=50&h=50&mask=circle)](https://github.com/dschaller)[![1659415](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1659415?v=4&w=50&h=50&mask=circle)](https://github.com/dav009)[![1031759](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1031759?v=4&w=50&h=50&mask=circle)](https://github.com/agiron123)[![107633597](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/107633597?v=4&w=50&h=50&mask=circle)](https://github.com/peterghaddad)[![50983601](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/50983601?v=4&w=50&h=50&mask=circle)](https://github.com/zychen5186)[![136724527](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/136724527?v=4&w=50&h=50&mask=circle)](https://github.com/Murdock9803)[![144381122](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/144381122?v=4&w=50&h=50&mask=circle)](https://github.com/vraiyaninv)[![24486999](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/24486999?v=4&w=50&h=50&mask=circle)](https://github.com/suravshrestha)[![69161722](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/69161722?v=4&w=50&h=50&mask=circle)](https://github.com/noobkid2411)[![790725](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/790725?v=4&w=50&h=50&mask=circle)](https://github.com/rodrigobaron)[![43336767](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/43336767?v=4&w=50&h=50&mask=circle)](https://github.com/yongchand)[![36594527](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/36594527?v=4&w=50&h=50&mask=circle)](https://github.com/mishmanners)[![25391173](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/25391173?v=4&w=50&h=50&mask=circle)](ht
tps://github.com/nicklofaso)[![86911142](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/86911142?v=4&w=50&h=50&mask=circle)](https://github.com/idivyanshbansal)[![380927](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/380927?v=4&w=50&h=50&mask=circle)](https://github.com/cpaulik)[![480621](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/480621?v=4&w=50&h=50&mask=circle)](https://github.com/davidxia)[![1335881](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1335881?v=4&w=50&h=50&mask=circle)](https://github.com/hoyajigi)[![100597998](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/100597998?v=4&w=50&h=50&mask=circle)](https://github.com/MrKrishnaAgarwal)[![4830700](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/4830700?v=4&w=50&h=50&mask=circle)](https://github.com/NitinAgg)[![139771199](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/139771199?v=4&w=50&h=50&mask=circle)](https://github.com/taieeuu)[![33272587](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/33272587?v=4&w=50&h=50&mask=circle)](https://github.com/samuel-sujith)[![10438373](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/10438373?v=4&w=50&h=50&mask=circle)](https://github.com/SKalt)[![24543401](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/24543401?v=4&w=50&h=50&mask=circle)](https://github.com/asoundarya96)[![141538510](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/141538510?v=4&w=50&h=50&mask=circle)](https://github.com/SophieTech88)[![47355538](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/47355538?v=4&w=50&h=50&mask=circle)](https://github.com/siiddhantt)[![54034701](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/54034701?v=4&w=50&h=50&mask=circle)](https://github.com/peterxcli)[![580328](https://images.we
serv.nl/?url=https://avatars.githubusercontent.com/u/580328?v=4&w=50&h=50&mask=circle)](https://github.com/ilikedata)[![26265392](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/26265392?v=4&w=50&h=50&mask=circle)](https://github.com/ttanay)[![7144772](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/7144772?v=4&w=50&h=50&mask=circle)](https://github.com/sighingnow)[![61864060](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/61864060?v=4&w=50&h=50&mask=circle)](https://github.com/HuangTing-Yao)[![1027207](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1027207?v=4&w=50&h=50&mask=circle)](https://github.com/orf)[![78115767](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/78115767?v=4&w=50&h=50&mask=circle)](https://github.com/trevormcguire)[![8755869](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/8755869?v=4&w=50&h=50&mask=circle)](https://github.com/paravatha)[![141313113](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/141313113?v=4&w=50&h=50&mask=circle)](https://github.com/robert-ulbrich-mercedes-benz)[![6528449](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/6528449?v=4&w=50&h=50&mask=circle)](https://github.com/uschi2000)[![576968](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/576968?v=4&w=50&h=50&mask=circle)](https://github.com/ronaldosaheki)[![10095462](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/10095462?v=4&w=50&h=50&mask=circle)](https://github.com/GRomR1)[![144255851](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/144255851?v=4&w=50&h=50&mask=circle)](https://github.com/Sennuno)[![36827492](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/36827492?v=4&w=50&h=50&mask=circle)](https://github.com/shahwar9)[![34468461](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/344
68461?v=4&w=50&h=50&mask=circle)](https://github.com/sshardool)[![1908193](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1908193?v=4&w=50&h=50&mask=circle)](https://github.com/shengyu7697)[![133936](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/133936?v=4&w=50&h=50&mask=circle)](https://github.com/shihgianlee)[![119912892](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/119912892?v=4&w=50&h=50&mask=circle)](https://github.com/Virtual4087)[![46835608](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/46835608?v=4&w=50&h=50&mask=circle)](https://github.com/shreyas44)[![40143026](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/40143026?v=4&w=50&h=50&mask=circle)](https://github.com/hampusrosvall)[![77197126](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/77197126?v=4&w=50&h=50&mask=circle)](https://github.com/hitarth01)[![300315](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/300315?v=4&w=50&h=50&mask=circle)](https://github.com/jcourteau)[![1220444](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1220444?v=4&w=50&h=50&mask=circle)](https://github.com/jkhales)[![106815366](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/106815366?v=4&w=50&h=50&mask=circle)](https://github.com/jw0515)[![1568889](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1568889?v=4&w=50&h=50&mask=circle)](https://github.com/leorleor)[![168411899](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/168411899?v=4&w=50&h=50&mask=circle)](https://github.com/mthemis-provenir)[![937967](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/937967?v=4&w=50&h=50&mask=circle)](https://github.com/moose007)[![73983677](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/73983677?v=4&w=50&h=50&mask=circle)](https://github.com/omahs)[![114
232404](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/114232404?v=4&w=50&h=50&mask=circle)](https://github.com/sanjaychouhan-adf)[![11962777](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/11962777?v=4&w=50&h=50&mask=circle)](https://github.com/ssen85)[![14996868](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/14996868?v=4&w=50&h=50&mask=circle)](https://github.com/v01dXYZ)[![93438190](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/93438190?v=4&w=50&h=50&mask=circle)](https://github.com/wanderer163)[![16526627](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/16526627?v=4&w=50&h=50&mask=circle)](https://github.com/vijaysaravana)[![697033](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/697033?v=4&w=50&h=50&mask=circle)](https://github.com/vglocus)[![2272137](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/2272137?v=4&w=50&h=50&mask=circle)](https://github.com/Dlougach)[![39889](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/39889?v=4&w=50&h=50&mask=circle)](https://github.com/yarikoptic)[![12821510](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/12821510?v=4&w=50&h=50&mask=circle)](https://github.com/ongkong)[![26526132](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/26526132?v=4&w=50&h=50&mask=circle)](https://github.com/bearomorphism)[![43691987](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/43691987?v=4&w=50&h=50&mask=circle)](https://github.com/desihsu)[![5346764](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/5346764?v=4&w=50&h=50&mask=circle)](https://github.com/fsz285)[![143190185](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/143190185?v=4&w=50&h=50&mask=circle)](https://github.com/gdabisias)[![22917741](https://images.weserv.nl/?url=https://avatars.githubuserconten
t.com/u/22917741?v=4&w=50&h=50&mask=circle)](https://github.com/gigi-at-zymergen)[![11796986](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/11796986?v=4&w=50&h=50&mask=circle)](https://github.com/avan-sh)[![489331](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/489331?v=4&w=50&h=50&mask=circle)](https://github.com/brndnblck)[![304786](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/304786?v=4&w=50&h=50&mask=circle)](https://github.com/kinow)[![156356273](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/156356273?v=4&w=50&h=50&mask=circle)](https://github.com/cratiu222)[![24402505](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/24402505?v=4&w=50&h=50&mask=circle)](https://github.com/Daeruin)[![102558755](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/102558755?v=4&w=50&h=50&mask=circle)](https://github.com/dyu-bot)[![146735585](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/146735585?v=4&w=50&h=50&mask=circle)](https://github.com/nnsW3)[![20135478](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/20135478?v=4&w=50&h=50&mask=circle)](https://github.com/Juneezee)[![1627021](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1627021?v=4&w=50&h=50&mask=circle)](https://github.com/EraYaN)[![11456773](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/11456773?v=4&w=50&h=50&mask=circle)](https://github.com/fvde)[![64676594](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/64676594?v=4&w=50&h=50&mask=circle)](https://github.com/abhijeet007rocks8)[![132337675](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/132337675?v=4&w=50&h=50&mask=circle)](https://github.com/adarsh-jha-dev)[![1627770](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1627770?v=4&w=50&h=50&mask=circle)](https://github.com/amitan
i)[![128223364](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/128223364?v=4&w=50&h=50&mask=circle)](https://github.com/blindaks)[![66388192](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/66388192?v=4&w=50&h=50&mask=circle)](https://github.com/mounesi)[![13237080](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/13237080?v=4&w=50&h=50&mask=circle)](https://github.com/aminmaghsodi)[![14992189](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/14992189?v=4&w=50&h=50&mask=circle)](https://github.com/eanakhl)[![1175392](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1175392?v=4&w=50&h=50&mask=circle)](https://github.com/adinin)[![26172355](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/26172355?v=4&w=50&h=50&mask=circle)](https://github.com/ALMerrill)[![48056316](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/48056316?v=4&w=50&h=50&mask=circle)](https://github.com/ap0calypse8)[![7475946](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/7475946?v=4&w=50&h=50&mask=circle)](https://github.com/anton-malakhov)[![1174730](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1174730?v=4&w=50&h=50&mask=circle)](https://github.com/mouuff)[![93093775](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/93093775?v=4&w=50&h=50&mask=circle)](https://github.com/Ash0807)[![44368997](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/44368997?v=4&w=50&h=50&mask=circle)](https://github.com/radiantly)[![16404204](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/16404204?v=4&w=50&h=50&mask=circle)](https://github.com/Jeinhaus)[![3033592](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/3033592?v=4&w=50&h=50&mask=circle)](https://github.com/kazesberger)[![13591898](https://images.weserv.nl/?url=https://avatars.githubuser
content.com/u/13591898?v=4&w=50&h=50&mask=circle)](https://github.com/lauralindy)[![19229049](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/19229049?v=4&w=50&h=50&mask=circle)](https://github.com/lsena)[![123787712](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/123787712?v=4&w=50&h=50&mask=circle)](https://github.com/mark-thm)[![2236795](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/2236795?v=4&w=50&h=50&mask=circle)](https://github.com/mhotan)[![10829864](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/10829864?v=4&w=50&h=50&mask=circle)](https://github.com/mcanueste)[![36989112](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/36989112?v=4&w=50&h=50&mask=circle)](https://github.com/nishantwrp)[![260015](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/260015?v=4&w=50&h=50&mask=circle)](https://github.com/ossareh)[![6987428](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/6987428?v=4&w=50&h=50&mask=circle)](https://github.com/guyarad)[![1596283](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1596283?v=4&w=50&h=50&mask=circle)](https://github.com/guy4261)[![7490199](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/7490199?v=4&w=50&h=50&mask=circle)](https://github.com/Lundez)[![10345184](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/10345184?v=4&w=50&h=50&mask=circle)](https://github.com/hasukmistry)[![91054457](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/91054457?v=4&w=50&h=50&mask=circle)](https://github.com/HeetVekariya)[![29532638](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/29532638?v=4&w=50&h=50&mask=circle)](https://github.com/rokrokss)[![22633385](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/22633385?v=4&w=50&h=50&mask=circle)](https://github.com/eltociear)[![1
51841](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/151841?v=4&w=50&h=50&mask=circle)](https://github.com/goodgravy)[![46633758](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/46633758?v=4&w=50&h=50&mask=circle)](https://github.com/jsong336)[![14008978](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/14008978?v=4&w=50&h=50&mask=circle)](https://github.com/jeremydonahue)[![9272376](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/9272376?v=4&w=50&h=50&mask=circle)](https://github.com/jonasdebeukelaer)[![1633460](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1633460?v=4&w=50&h=50&mask=circle)](https://github.com/jmcarp)[![1043051](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1043051?v=4&w=50&h=50&mask=circle)](https://github.com/kylewaynebenson)[![21953442](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/21953442?v=4&w=50&h=50&mask=circle)](https://github.com/Gui11aum3)[![16461847](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/16461847?v=4&w=50&h=50&mask=circle)](https://github.com/JakeNeyer)[![299421](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/299421?v=4&w=50&h=50&mask=circle)](https://github.com/aliavni)[![2845540](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/2845540?v=4&w=50&h=50&mask=circle)](https://github.com/RustedBones)[![4056828](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/4056828?v=4&w=50&h=50&mask=circle)](https://github.com/pablocasares)[![138898](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/138898?v=4&w=50&h=50&mask=circle)](https://github.com/andyczerwonka)[![150935185](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/150935185?v=4&w=50&h=50&mask=circle)](https://github.com/jschuchart-spot)[![471021](https://images.weserv.nl/?url=https://avatars.gith
ubusercontent.com/u/471021?v=4&w=50&h=50&mask=circle)](https://github.com/marschall)[![5732047](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/5732047?v=4&w=50&h=50&mask=circle)](https://github.com/stormy-ua)[![1071153](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1071153?v=4&w=50&h=50&mask=circle)](https://github.com/evdokim)[![13670774](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/13670774?v=4&w=50&h=50&mask=circle)](https://github.com/AndersonReyes)[![438217](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/438217?v=4&w=50&h=50&mask=circle)](https://github.com/acet)[![71284190](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/71284190?v=4&w=50&h=50&mask=circle)](https://github.com/gdungca-fn)[![85021780](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/85021780?v=4&w=50&h=50&mask=circle)](https://github.com/Abdullahi-Ahmed)[![48512530](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/48512530?v=4&w=50&h=50&mask=circle)](https://github.com/amaleelhamri)[![3275593](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/3275593?v=4&w=50&h=50&mask=circle)](https://github.com/pradyunsg)[![66853113](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/in/68672?v=4&w=50&h=50&mask=circle)](https://github.com/apps/pre-commit-ci)[![1834509](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1834509?v=4&w=50&h=50&mask=circle)](https://github.com/jdknight)[![107893](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/107893?v=4&w=50&h=50&mask=circle)](https://github.com/kmike)[![1324225](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1324225?v=4&w=50&h=50&mask=circle)](https://github.com/hugovk)[![1300022](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1300022?v=4&w=50&h=50&mask=circle)](https://github.com/sir
osen)[![244656](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/244656?v=4&w=50&h=50&mask=circle)](https://github.com/humitos)[![467294](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/467294?v=4&w=50&h=50&mask=circle)](https://github.com/bastimeyer)[![71486](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/71486?v=4&w=50&h=50&mask=circle)](https://github.com/asmeurer)[![20280470](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/20280470?v=4&w=50&h=50&mask=circle)](https://github.com/drewyh)[![3533182](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/3533182?v=4&w=50&h=50&mask=circle)](https://github.com/polyzen)[![199429](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/199429?v=4&w=50&h=50&mask=circle)](https://github.com/dvarrazzo)[![1032633](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1032633?v=4&w=50&h=50&mask=circle)](https://github.com/dbitouze)[![1313087](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1313087?v=4&w=50&h=50&mask=circle)](https://github.com/idryzhov)[![521097](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/521097?v=4&w=50&h=50&mask=circle)](https://github.com/pauloxnet)[![63936253](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/63936253?v=4&w=50&h=50&mask=circle)](https://github.com/ichard26)[![18519037](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/18519037?v=4&w=50&h=50&mask=circle)](https://github.com/sethmlarson)[![413772](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/413772?v=4&w=50&h=50&mask=circle)](https://github.com/graingert)[![11478411](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/11478411?v=4&w=50&h=50&mask=circle)](https://github.com/stonecharioteer)[![6739793](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/6739793?v=
4&w=50&h=50&mask=circle)](https://github.com/yeraydiazdiaz)[![83365562](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/83365562?v=4&w=50&h=50&mask=circle)](https://github.com/eviau-sat)[![6670894](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/6670894?v=4&w=50&h=50&mask=circle)](https://github.com/rozsasarpi)[![86675](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/86675?v=4&w=50&h=50&mask=circle)](https://github.com/estan)[![4748863](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/4748863?v=4&w=50&h=50&mask=circle)](https://github.com/pseudomuto)[![181308](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/181308?v=4&w=50&h=50&mask=circle)](https://github.com/htdvisser)[![1390277](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1390277?v=4&w=50&h=50&mask=circle)](https://github.com/jacobtolar)[![1391982](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1391982?v=4&w=50&h=50&mask=circle)](https://github.com/ezimanyi)[![135130171](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/135130171?v=4&w=50&h=50&mask=circle)](https://github.com/hmacias-avaya)[![3880001](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/3880001?v=4&w=50&h=50&mask=circle)](https://github.com/lpabon)[![770392](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/770392?v=4&w=50&h=50&mask=circle)](https://github.com/ArcEye)[![6178510](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/6178510?v=4&w=50&h=50&mask=circle)](https://github.com/mingrammer)[![5111931](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/5111931?v=4&w=50&h=50&mask=circle)](https://github.com/aschrijver)[![148219809](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/148219809?v=4&w=50&h=50&mask=circle)](https://github.com/panzerfahrer)[![16724](https://images.we
serv.nl/?url=https://avatars.githubusercontent.com/u/16724?v=4&w=50&h=50&mask=circle)](https://github.com/glasser)[![17330872](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/17330872?v=4&w=50&h=50&mask=circle)](https://github.com/murph0)[![419419](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/419419?v=4&w=50&h=50&mask=circle)](https://github.com/zetaron)[![1014](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1014?v=4&w=50&h=50&mask=circle)](https://github.com/sunfmin)[![504507](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/504507?v=4&w=50&h=50&mask=circle)](https://github.com/guozheng)[![8841470](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/8841470?v=4&w=50&h=50&mask=circle)](https://github.com/suusan2go)[![901479](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/901479?v=4&w=50&h=50&mask=circle)](https://github.com/mhaberler)[![353644](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/353644?v=4&w=50&h=50&mask=circle)](https://github.com/dreampuf)[![12421077](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/12421077?v=4&w=50&h=50&mask=circle)](https://github.com/UnicodingUnicorn)[![809865](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/809865?v=4&w=50&h=50&mask=circle)](https://github.com/philiptzou)[![19378](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/19378?v=4&w=50&h=50&mask=circle)](https://github.com/timabell)[![614934](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/614934?v=4&w=50&h=50&mask=circle)](https://github.com/adzenith)[![1113245](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1113245?v=4&w=50&h=50&mask=circle)](https://github.com/jasonhancock)[![101659](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/101659?v=4&w=50&h=50&mask=circle)](https://github.com/matry
er)[![4730508](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/4730508?v=4&w=50&h=50&mask=circle)](https://github.com/piotrrojek)[![33036160](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/33036160?v=4&w=50&h=50&mask=circle)](https://github.com/jasonsattler)[![470810](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/470810?v=4&w=50&h=50&mask=circle)](https://github.com/sbward)[![7592392](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/7592392?v=4&w=50&h=50&mask=circle)](https://github.com/Pisush)[![94814](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/94814?v=4&w=50&h=50&mask=circle)](https://github.com/tamalsaha)[![8147854](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/8147854?v=4&w=50&h=50&mask=circle)](https://github.com/marianina8)[![1683714](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1683714?v=4&w=50&h=50&mask=circle)](https://github.com/naysayer)[![2807589](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/2807589?v=4&w=50&h=50&mask=circle)](https://github.com/darwayne)[![17263167](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/17263167?v=4&w=50&h=50&mask=circle)](https://github.com/jsteenb2)[![1005](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1005?v=4&w=50&h=50&mask=circle)](https://github.com/ernesto-jimenez)[![6386887](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/6386887?v=4&w=50&h=50&mask=circle)](https://github.com/AgrimPrasad)[![615811](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/615811?v=4&w=50&h=50&mask=circle)](https://github.com/dahernan)[![75184](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/75184?v=4&w=50&h=50&mask=circle)](https://github.com/jtarchie)[![469669](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/469669?v=4&w=50&h
=50&mask=circle)](https://github.com/jdtobe)[![28523](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/28523?v=4&w=50&h=50&mask=circle)](https://github.com/alrs)[![426880](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/426880?v=4&w=50&h=50&mask=circle)](https://github.com/tkent)[![10113228](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/10113228?v=4&w=50&h=50&mask=circle)](https://github.com/urisimchoni)[![5751464](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/5751464?v=4&w=50&h=50&mask=circle)](https://github.com/Xercoy)[![2405410](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/2405410?v=4&w=50&h=50&mask=circle)](https://github.com/marbergq)[![5082160](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/5082160?v=4&w=50&h=50&mask=circle)](https://github.com/anothrNick)[![11335612](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/11335612?v=4&w=50&h=50&mask=circle)](https://github.com/fermoya)[![23391642](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/23391642?v=4&w=50&h=50&mask=circle)](https://github.com/sbe-arg)[![1024762](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/1024762?v=4&w=50&h=50&mask=circle)](https://github.com/PeerXu)[![7390781](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/7390781?v=4&w=50&h=50&mask=circle)](https://github.com/reececomo)[![49680](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/49680?v=4&w=50&h=50&mask=circle)](https://github.com/dmerrick)[![87524](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/87524?v=4&w=50&h=50&mask=circle)](https://github.com/andrewcole)[![866505](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/866505?v=4&w=50&h=50&mask=circle)](https://github.com/phish108)[![2611549](https://images.weserv.nl/?url=https://avatars.githubuserconte
nt.com/u/2611549?v=4&w=50&h=50&mask=circle)](https://github.com/endrjuskr)[![8232503](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/8232503?v=4&w=50&h=50&mask=circle)](https://github.com/sjauld)[![118945041](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/118945041?v=4&w=50&h=50&mask=circle)](https://github.com/vq-ambiata)[![3807434](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/3807434?v=4&w=50&h=50&mask=circle)](https://github.com/tomsolem)[![16513382](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/16513382?v=4&w=50&h=50&mask=circle)](https://github.com/117)[![8320753](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/8320753?v=4&w=50&h=50&mask=circle)](https://github.com/lovromazgon)[![5655837](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/5655837?v=4&w=50&h=50&mask=circle)](https://github.com/gukoff)[![49961058](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/49961058?v=4&w=50&h=50&mask=circle)](https://github.com/bevans-HD)[![25625597](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/25625597?v=4&w=50&h=50&mask=circle)](https://github.com/zero-below)[![62775347](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/62775347?v=4&w=50&h=50&mask=circle)](https://github.com/okozachenko1203)[![53085803](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/53085803?v=4&w=50&h=50&mask=circle)](https://github.com/cuttingedge1109)[![5067549](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/5067549?v=4&w=50&h=50&mask=circle)](https://github.com/pellared)[![25486791](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/25486791?v=4&w=50&h=50&mask=circle)](https://github.com/pavyarov)[![995707](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/995707?v=4&w=50&h=50&mask=circle)](https://github.com/OskarStark
)[![2302957](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/2302957?v=4&w=50&h=50&mask=circle)](https://github.com/JeremyLWright)[![10090384](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/10090384?v=4&w=50&h=50&mask=circle)](https://github.com/ivanpk)[![17337515](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/17337515?v=4&w=50&h=50&mask=circle)](https://github.com/fabricepipart)[![8296645](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/8296645?v=4&w=50&h=50&mask=circle)](https://github.com/imdanielsp)[![6388483](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/6388483?v=4&w=50&h=50&mask=circle)](https://github.com/zsedem)[![69170839](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/69170839?v=4&w=50&h=50&mask=circle)](https://github.com/adam-berrio)[![282792](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/282792?v=4&w=50&h=50&mask=circle)](https://github.com/asford)[![38894122](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/38894122?v=4&w=50&h=50&mask=circle)](https://github.com/bmcconeghy)[![16698198](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/16698198?v=4&w=50&h=50&mask=circle)](https://github.com/conda-forge-admin)[![36490558](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/36490558?v=4&w=50&h=50&mask=circle)](https://github.com/regro-cf-autotick-bot)[![79913779](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/in/102928?v=4&w=50&h=50&mask=circle)](https://github.com/apps/conda-forge-curator)[![41898282](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/in/15368?v=4&w=50&h=50&mask=circle)](https://github.com/apps/github-actions)[![18567580](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/18567580?v=4&w=50&h=50&mask=circle)](https://github.com/conda-forge-linter)[![72671586](https
://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/72671586?v=4&w=50&h=50&mask=circle)](https://github.com/pheianox)[![3760025](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/3760025?v=4&w=50&h=50&mask=circle)](https://github.com/gaga5lala)[![115705553](https://images.weserv.nl/?url=https://avatars.githubusercontent.com/u/115705553?v=4&w=50&h=50&mask=circle)](https://github.com/divyank000) ## License diff --git a/boilerplate/flyte/end2end/Makefile b/boilerplate/flyte/end2end/Makefile index 98ee63ae7a..983b6e22d9 100644 --- a/boilerplate/flyte/end2end/Makefile +++ b/boilerplate/flyte/end2end/Makefile @@ -7,8 +7,12 @@ end2end_execute: export FLYTESNACKS_PRIORITIES ?= P0 end2end_execute: export FLYTESNACKS_VERSION ?= $(shell curl --silent "https://api.github.com/repos/flyteorg/flytesnacks/releases/latest" | jq -r .tag_name) end2end_execute: - ./boilerplate/flyte/end2end/end2end.sh ./boilerplate/flyte/end2end/functional-test-config.yaml --return_non_zero_on_failure - + pytest ./boilerplate/flyte/end2end/test_run.py \ + --flytesnacks_release_tag=$(FLYTESNACKS_VERSION) \ + --priorities=$(FLYTESNACKS_PRIORITIES) \ + --config_file=./boilerplate/flyte/end2end/functional-test-config.yaml \ + --return_non_zero_on_failure + .PHONY: k8s_integration_execute k8s_integration_execute: echo "pass" diff --git a/boilerplate/flyte/end2end/conftest.py b/boilerplate/flyte/end2end/conftest.py new file mode 100644 index 0000000000..d77fad05d9 --- /dev/null +++ b/boilerplate/flyte/end2end/conftest.py @@ -0,0 +1,47 @@ +import pytest + +def pytest_addoption(parser): + parser.addoption("--flytesnacks_release_tag", required=True) + parser.addoption("--priorities", required=True) + parser.addoption("--config_file", required=True) + parser.addoption( + "--return_non_zero_on_failure", + action="store_true", + default=False, + help="Return a non-zero exit status if any workflow fails", + ) + parser.addoption( + "--terminate_workflow_on_failure", + 
action="store_true", + default=False, + help="Abort failing workflows upon exit", + ) + parser.addoption( + "--test_project_name", + default="flytesnacks", + help="Name of project to run functional tests on" + ) + parser.addoption( + "--test_project_domain", + default="development", + help="Name of domain in project to run functional tests on" + ) + parser.addoption( + "--cluster_pool_name", + required=False, + type=str, + default=None, + ) + +@pytest.fixture +def setup_flytesnacks_env(pytestconfig): + return { + "flytesnacks_release_tag": pytestconfig.getoption("--flytesnacks_release_tag"), + "priorities": pytestconfig.getoption("--priorities"), + "config_file": pytestconfig.getoption("--config_file"), + "return_non_zero_on_failure": pytestconfig.getoption("--return_non_zero_on_failure"), + "terminate_workflow_on_failure": pytestconfig.getoption("--terminate_workflow_on_failure"), + "test_project_name": pytestconfig.getoption("--test_project_name"), + "test_project_domain": pytestconfig.getoption("--test_project_domain"), + "cluster_pool_name": pytestconfig.getoption("--cluster_pool_name"), + } diff --git a/boilerplate/flyte/end2end/end2end.sh b/boilerplate/flyte/end2end/end2end.sh deleted file mode 100755 index 5dd825c1a0..0000000000 --- a/boilerplate/flyte/end2end/end2end.sh +++ /dev/null @@ -1,12 +0,0 @@ -#!/usr/bin/env bash - -# WARNING: THIS FILE IS MANAGED IN THE 'BOILERPLATE' REPO AND COPIED TO OTHER REPOSITORIES. 
-# ONLY EDIT THIS FILE FROM WITHIN THE 'FLYTEORG/BOILERPLATE' REPOSITORY: -# -# TO OPT OUT OF UPDATES, SEE https://github.com/flyteorg/boilerplate/blob/master/Readme.rst -set -eu - -CONFIG_FILE=$1; shift -EXTRA_FLAGS=( "$@" ) - -python ./boilerplate/flyte/end2end/run-tests.py $FLYTESNACKS_VERSION $FLYTESNACKS_PRIORITIES $CONFIG_FILE ${EXTRA_FLAGS[@]} diff --git a/boilerplate/flyte/end2end/run-tests.py b/boilerplate/flyte/end2end/test_run.py similarity index 76% rename from boilerplate/flyte/end2end/run-tests.py rename to boilerplate/flyte/end2end/test_run.py index f6558247a6..b300ee974a 100644 --- a/boilerplate/flyte/end2end/run-tests.py +++ b/boilerplate/flyte/end2end/test_run.py @@ -5,7 +5,7 @@ import traceback from typing import Dict, List, Optional -import click +import pytest import requests from flytekit.configuration import Config from flytekit.models.core.execution import WorkflowExecutionPhase @@ -15,7 +15,6 @@ WAIT_TIME = 10 MAX_ATTEMPTS = 200 - def execute_workflow( remote: FlyteRemote, version, @@ -27,7 +26,6 @@ def execute_workflow( wf = remote.fetch_workflow(name=workflow_name, version=version) return remote.execute(wf, inputs=inputs, wait=False, cluster_pool=cluster_pool_name) - def executions_finished( executions_by_wfgroup: Dict[str, List[FlyteWorkflowExecution]] ) -> bool: @@ -36,7 +34,6 @@ def executions_finished( return False return True - def sync_executions( remote: FlyteRemote, executions_by_wfgroup: Dict[str, List[FlyteWorkflowExecution]] ): @@ -50,13 +47,11 @@ def sync_executions( print("GOT TO THE EXCEPT") print("COUNT THIS!") - def report_executions(executions_by_wfgroup: Dict[str, List[FlyteWorkflowExecution]]): for executions in executions_by_wfgroup.values(): for execution in executions: print(execution) - def schedule_workflow_groups( tag: str, workflow_groups: List[str], @@ -65,10 +60,6 @@ def schedule_workflow_groups( parsed_manifest: List[dict], cluster_pool_name: Optional[str] = None, ) -> Dict[str, bool]: - """ - Schedule 
workflows executions for all workflow groups and return True if all executions succeed, otherwise - return False. - """ executions_by_wfgroup = {} # Schedule executions for each workflow group, for wf_group in workflow_groups: @@ -120,7 +111,6 @@ def schedule_workflow_groups( results[wf_group] = len(non_succeeded_executions) == 0 return results - def valid(workflow_group, parsed_manifest): """ Return True if a workflow group is contained in parsed_manifest, @@ -128,22 +118,25 @@ def valid(workflow_group, parsed_manifest): """ return workflow_group in set(wf_group["name"] for wf_group in parsed_manifest) +def test_run(setup_flytesnacks_env): + + env = setup_flytesnacks_env + + flytesnacks_release_tag = env["flytesnacks_release_tag"] + priorities = env["priorities"] + config_file_path = env["config_file"] + terminate_workflow_on_failure = env["terminate_workflow_on_failure"] + test_project_name = env["test_project_name"] + test_project_domain = env["test_project_domain"] + cluster_pool_name = env["cluster_pool_name"] + return_non_zero_on_failure = env["return_non_zero_on_failure"] -def run( - flytesnacks_release_tag: str, - priorities: List[str], - config_file_path, - terminate_workflow_on_failure: bool, - test_project_name: str, - test_project_domain: str, - cluster_pool_name: Optional[str] = None, -) -> List[Dict[str, str]]: remote = FlyteRemote( Config.auto(config_file=config_file_path), test_project_name, test_project_domain, ) - + # For a given release tag and priority, this function filters the workflow groups from the flytesnacks # manifest file. For example, for the release tag "v0.2.224" and the priority "P0" it returns [ "core" ]. 
manifest_url = ( @@ -210,75 +203,15 @@ def run( "color": background_color, } results.append(result) - return results - - -@click.command() -@click.argument("flytesnacks_release_tag") -@click.argument("priorities") -@click.argument("config_file") -@click.option( - "--return_non_zero_on_failure", - default=False, - is_flag=True, - help="Return a non-zero exit status if any workflow fails", -) -@click.option( - "--terminate_workflow_on_failure", - default=False, - is_flag=True, - help="Abort failing workflows upon exit", -) -@click.option( - "--test_project_name", - default="flytesnacks", - type=str, - is_flag=False, - help="Name of project to run functional tests on", -) -@click.option( - "--test_project_domain", - default="development", - type=str, - is_flag=False, - help="Name of domain in project to run functional tests on", -) -@click.argument( - "cluster_pool_name", - required=False, - type=str, - default=None, -) -def cli( - flytesnacks_release_tag, - priorities, - config_file, - return_non_zero_on_failure, - terminate_workflow_on_failure, - test_project_name, - test_project_domain, - cluster_pool_name, -): - print(f"return_non_zero_on_failure={return_non_zero_on_failure}") - results = run( - flytesnacks_release_tag, - priorities, - config_file, - terminate_workflow_on_failure, - test_project_name, - test_project_domain, - cluster_pool_name, - ) - # Write a json object in its own line describing the result of this run to stdout print(f"Result of run:\n{json.dumps(results)}") - # Return a non-zero exit code if core fails if return_non_zero_on_failure: - for result in results: - if result["status"] not in ("passing", "coming soon"): - sys.exit(1) - - -if __name__ == "__main__": - cli() + fail_results = [result for result in results if result["status"] not in ("passing", "coming soon")] + if fail_results: + fail_msgs = [ + f"Workflow '{r['label']}' failed with status '{r['status']}'" for r in fail_results + ] + pytest.fail("\n".join(fail_msgs)) + + assert results 
== [{"label": "core", "status": "passing", "color": "green"}] diff --git a/boilerplate/flyte/golang_support_tools/go.mod b/boilerplate/flyte/golang_support_tools/go.mod index c7676dd907..469b42d079 100644 --- a/boilerplate/flyte/golang_support_tools/go.mod +++ b/boilerplate/flyte/golang_support_tools/go.mod @@ -1,12 +1,12 @@ module github.com/flyteorg/boilerplate -go 1.22 +go 1.22.1 require ( github.com/EngHabu/mockery v0.0.0-20220916190332-dde70e38baba github.com/alvaroloes/enumer v1.1.2 github.com/flyteorg/flyte/flytestdlib v1.11.0 - github.com/golangci/golangci-lint v1.53.3 + github.com/golangci/golangci-lint v1.61.0 github.com/pseudomuto/protoc-gen-doc v1.4.1 github.com/vektra/mockery/v2 v2.40.3 ) @@ -14,191 +14,198 @@ require ( require ( 4d63.com/gocheckcompilerdirectives v1.2.1 // indirect 4d63.com/gochecknoglobals v0.2.1 // indirect - cloud.google.com/go v0.112.0 // indirect - cloud.google.com/go/compute v1.23.3 // indirect - cloud.google.com/go/compute/metadata v0.2.3 // indirect - cloud.google.com/go/iam v1.1.5 // indirect - cloud.google.com/go/storage v1.36.0 // indirect - github.com/4meepo/tagalign v1.2.2 // indirect - github.com/Abirdcfly/dupword v0.0.11 // indirect - github.com/Antonboom/errname v0.1.10 // indirect - github.com/Antonboom/nilnil v0.1.5 // indirect + cloud.google.com/go v0.115.1 // indirect + cloud.google.com/go/auth v0.9.3 // indirect + cloud.google.com/go/auth/oauth2adapt v0.2.4 // indirect + cloud.google.com/go/compute/metadata v0.5.0 // indirect + cloud.google.com/go/iam v1.2.0 // indirect + cloud.google.com/go/storage v1.43.0 // indirect + github.com/4meepo/tagalign v1.3.4 // indirect + github.com/Abirdcfly/dupword v0.1.1 // indirect + github.com/Antonboom/errname v0.1.13 // indirect + github.com/Antonboom/nilnil v0.1.9 // indirect + github.com/Antonboom/testifylint v1.4.3 // indirect github.com/Azure/azure-sdk-for-go/sdk/azcore v1.13.0 // indirect github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.7.0 // indirect 
github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0 // indirect github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.4.0 // indirect github.com/AzureAD/microsoft-authentication-library-for-go v1.2.2 // indirect - github.com/BurntSushi/toml v1.3.2 // indirect + github.com/BurntSushi/toml v1.4.1-0.20240526193622-a339e1f7089c // indirect + github.com/Crocmagnon/fatcontext v0.5.2 // indirect github.com/Djarvur/go-err113 v0.0.0-20210108212216-aea10b59be24 // indirect - github.com/GaijinEntertainment/go-exhaustruct/v2 v2.3.0 // indirect + github.com/GaijinEntertainment/go-exhaustruct/v3 v3.3.0 // indirect github.com/Masterminds/semver v1.5.0 // indirect + github.com/Masterminds/semver/v3 v3.3.0 // indirect github.com/Masterminds/sprig v2.15.0+incompatible // indirect - github.com/OpenPeeDeeP/depguard/v2 v2.1.0 // indirect - github.com/alexkohler/nakedret/v2 v2.0.2 // indirect + github.com/OpenPeeDeeP/depguard/v2 v2.2.0 // indirect + github.com/alecthomas/go-check-sumtype v0.1.4 // indirect + github.com/alexkohler/nakedret/v2 v2.0.4 // indirect github.com/alexkohler/prealloc v1.0.0 // indirect github.com/alingse/asasalint v0.0.11 // indirect github.com/aokoli/goutils v1.0.1 // indirect - github.com/ashanbrown/forbidigo v1.5.3 // indirect + github.com/ashanbrown/forbidigo v1.6.0 // indirect github.com/ashanbrown/makezero v1.1.1 // indirect github.com/aws/aws-sdk-go v1.44.2 // indirect github.com/beorn7/perks v1.0.1 // indirect github.com/bkielbasa/cyclop v1.2.1 // indirect github.com/blizzy78/varnamelen v0.8.0 // indirect - github.com/bombsimon/wsl/v3 v3.4.0 // indirect - github.com/breml/bidichk v0.2.4 // indirect - github.com/breml/errchkjson v0.3.1 // indirect - github.com/butuzov/ireturn v0.2.0 // indirect - github.com/butuzov/mirror v1.1.0 // indirect + github.com/bombsimon/wsl/v4 v4.4.1 // indirect + github.com/breml/bidichk v0.2.7 // indirect + github.com/breml/errchkjson v0.3.6 // indirect + github.com/butuzov/ireturn v0.3.0 // indirect + 
github.com/butuzov/mirror v1.2.0 // indirect + github.com/catenacyber/perfsprint v0.7.1 // indirect + github.com/ccojocar/zxcvbn-go v1.0.2 // indirect github.com/cespare/xxhash v1.1.0 // indirect - github.com/cespare/xxhash/v2 v2.2.0 // indirect + github.com/cespare/xxhash/v2 v2.3.0 // indirect github.com/charithe/durationcheck v0.0.10 // indirect - github.com/chavacava/garif v0.0.0-20230227094218-b8c73b2037b8 // indirect + github.com/chavacava/garif v0.1.0 // indirect github.com/chigopher/pathlib v0.19.1 // indirect + github.com/ckaznocha/intrange v0.2.0 // indirect github.com/coocood/freecache v1.1.1 // indirect github.com/curioswitch/go-reassign v0.2.0 // indirect - github.com/daixiang0/gci v0.10.1 // indirect + github.com/daixiang0/gci v0.13.5 // indirect github.com/davecgh/go-spew v1.1.1 // indirect - github.com/denis-tingaikin/go-header v0.4.3 // indirect + github.com/denis-tingaikin/go-header v0.5.0 // indirect github.com/emicklei/go-restful/v3 v3.9.0 // indirect github.com/envoyproxy/protoc-gen-validate v1.0.4 // indirect github.com/ernesto-jimenez/gogen v0.0.0-20180125220232-d7d4131e6607 // indirect - github.com/esimonov/ifshort v1.0.4 // indirect - github.com/ettle/strcase v0.1.1 // indirect + github.com/ettle/strcase v0.2.0 // indirect github.com/evanphx/json-patch/v5 v5.6.0 // indirect - github.com/fatih/color v1.15.0 // indirect + github.com/fatih/color v1.17.0 // indirect github.com/fatih/structtag v1.2.0 // indirect github.com/felixge/httpsnoop v1.0.4 // indirect - github.com/firefart/nonamedreturns v1.0.4 // indirect + github.com/firefart/nonamedreturns v1.0.5 // indirect github.com/flyteorg/stow v0.3.10 // indirect github.com/fsnotify/fsnotify v1.6.0 // indirect github.com/fzipp/gocyclo v0.6.0 // indirect github.com/ghodss/yaml v1.0.0 // indirect - github.com/go-critic/go-critic v0.8.1 // indirect - github.com/go-logr/logr v1.3.0 // indirect + github.com/ghostiam/protogetter v0.3.6 // indirect + github.com/go-critic/go-critic v0.11.4 // indirect + 
github.com/go-logr/logr v1.4.2 // indirect github.com/go-logr/stdr v1.2.2 // indirect github.com/go-openapi/jsonpointer v0.19.6 // indirect github.com/go-openapi/jsonreference v0.20.2 // indirect github.com/go-openapi/swag v0.22.3 // indirect github.com/go-toolsmith/astcast v1.1.0 // indirect github.com/go-toolsmith/astcopy v1.1.0 // indirect - github.com/go-toolsmith/astequal v1.1.0 // indirect + github.com/go-toolsmith/astequal v1.2.0 // indirect github.com/go-toolsmith/astfmt v1.1.0 // indirect github.com/go-toolsmith/astp v1.1.0 // indirect github.com/go-toolsmith/strparse v1.1.0 // indirect github.com/go-toolsmith/typep v1.1.0 // indirect + github.com/go-viper/mapstructure/v2 v2.1.0 // indirect github.com/go-xmlfmt/xmlfmt v1.1.2 // indirect github.com/gobwas/glob v0.2.3 // indirect - github.com/gofrs/flock v0.8.1 // indirect + github.com/gofrs/flock v0.12.1 // indirect github.com/gogo/protobuf v1.3.2 // indirect github.com/golang-jwt/jwt/v5 v5.2.1 // indirect github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect github.com/golang/protobuf v1.5.4 // indirect - github.com/golangci/check v0.0.0-20180506172741-cfe4005ccda2 // indirect github.com/golangci/dupl v0.0.0-20180902072040-3e9179ac440a // indirect - github.com/golangci/go-misc v0.0.0-20220329215616-d24fe342adfe // indirect - github.com/golangci/gofmt v0.0.0-20220901101216-f2edd75033f2 // indirect - github.com/golangci/lint-1 v0.0.0-20191013205115-297bf364a8e0 // indirect - github.com/golangci/maligned v0.0.0-20180506175553-b1d89398deca // indirect - github.com/golangci/misspell v0.4.0 // indirect - github.com/golangci/revgrep v0.0.0-20220804021717-745bb2f7c2e6 // indirect - github.com/golangci/unconvert v0.0.0-20180507085042-28b1c447d1f4 // indirect + github.com/golangci/gofmt v0.0.0-20240816233607-d8596aa466a9 // indirect + github.com/golangci/misspell v0.6.0 // indirect + github.com/golangci/modinfo v0.3.4 // indirect + github.com/golangci/plugin-module-register v0.1.1 // indirect + 
github.com/golangci/revgrep v0.5.3 // indirect + github.com/golangci/unconvert v0.0.0-20240309020433-c5143eacb3ed // indirect github.com/google/gnostic-models v0.6.8 // indirect github.com/google/go-cmp v0.6.0 // indirect github.com/google/gofuzz v1.2.0 // indirect - github.com/google/s2a-go v0.1.7 // indirect + github.com/google/s2a-go v0.1.8 // indirect github.com/google/uuid v1.6.0 // indirect - github.com/googleapis/enterprise-certificate-proxy v0.3.2 // indirect - github.com/googleapis/gax-go/v2 v2.12.0 // indirect - github.com/gordonklaus/ineffassign v0.0.0-20230610083614-0e73809eb601 // indirect + github.com/googleapis/enterprise-certificate-proxy v0.3.3 // indirect + github.com/googleapis/gax-go/v2 v2.13.0 // indirect + github.com/gordonklaus/ineffassign v0.1.0 // indirect github.com/gostaticanalysis/analysisutil v0.7.1 // indirect github.com/gostaticanalysis/comment v1.4.2 // indirect github.com/gostaticanalysis/forcetypeassert v0.1.0 // indirect github.com/gostaticanalysis/nilerr v0.1.1 // indirect - github.com/hashicorp/errwrap v1.1.0 // indirect - github.com/hashicorp/go-multierror v1.1.1 // indirect - github.com/hashicorp/go-version v1.6.0 // indirect + github.com/hashicorp/go-version v1.7.0 // indirect github.com/hashicorp/hcl v1.0.0 // indirect github.com/hexops/gotextdiff v1.0.3 // indirect github.com/huandu/xstrings v1.4.0 // indirect github.com/iancoleman/strcase v0.3.0 // indirect github.com/imdario/mergo v0.3.6 // indirect github.com/inconshreveable/mousetrap v1.1.0 // indirect - github.com/jgautheron/goconst v1.5.1 // indirect + github.com/jgautheron/goconst v1.7.1 // indirect github.com/jingyugao/rowserrcheck v1.1.1 // indirect github.com/jinzhu/copier v0.3.5 // indirect github.com/jirfag/go-printf-func-name v0.0.0-20200119135958-7558a9eaa5af // indirect + github.com/jjti/go-spancheck v0.6.2 // indirect github.com/jmespath/go-jmespath v0.4.0 // indirect github.com/josharian/intern v1.0.0 // indirect github.com/json-iterator/go v1.1.12 // 
indirect github.com/julz/importas v0.1.0 // indirect - github.com/kisielk/errcheck v1.6.3 // indirect - github.com/kisielk/gotool v1.0.0 // indirect - github.com/kkHAIKE/contextcheck v1.1.4 // indirect + github.com/karamaru-alpha/copyloopvar v1.1.0 // indirect + github.com/kisielk/errcheck v1.7.0 // indirect + github.com/kkHAIKE/contextcheck v1.1.5 // indirect github.com/kulti/thelper v0.6.3 // indirect - github.com/kunwardeep/paralleltest v1.0.7 // indirect + github.com/kunwardeep/paralleltest v1.0.10 // indirect github.com/kylelemons/godebug v1.1.0 // indirect github.com/kyoh86/exportloopref v0.1.11 // indirect - github.com/ldez/gomoddirectives v0.2.3 // indirect + github.com/lasiar/canonicalheader v1.1.1 // indirect + github.com/ldez/gomoddirectives v0.2.4 // indirect github.com/ldez/tagliatelle v0.5.0 // indirect - github.com/leonklingele/grouper v1.1.1 // indirect + github.com/leonklingele/grouper v1.1.2 // indirect github.com/lufeee/execinquery v1.2.1 // indirect + github.com/macabu/inamedparam v0.1.3 // indirect github.com/magiconair/properties v1.8.7 // indirect github.com/mailru/easyjson v0.7.7 // indirect github.com/maratori/testableexamples v1.0.0 // indirect github.com/maratori/testpackage v1.1.1 // indirect github.com/matoous/godox v0.0.0-20230222163458-006bad1f9d26 // indirect github.com/mattn/go-colorable v0.1.13 // indirect - github.com/mattn/go-isatty v0.0.17 // indirect + github.com/mattn/go-isatty v0.0.20 // indirect github.com/mattn/go-runewidth v0.0.9 // indirect github.com/matttproud/golang_protobuf_extensions v1.0.4 // indirect - github.com/mbilski/exhaustivestruct v1.2.0 // indirect - github.com/mgechev/revive v1.3.2 // indirect + github.com/mgechev/revive v1.3.9 // indirect github.com/mitchellh/go-homedir v1.1.0 // indirect github.com/mitchellh/mapstructure v1.5.0 // indirect github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect github.com/modern-go/reflect2 v1.0.2 // indirect - github.com/moricho/tparallel v0.3.1 
// indirect + github.com/moricho/tparallel v0.3.2 // indirect github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect github.com/mwitkow/go-proto-validators v0.0.0-20180403085117-0950a7990007 // indirect github.com/nakabonne/nestif v0.3.1 // indirect - github.com/nbutton23/zxcvbn-go v0.0.0-20210217022336-fa2cb2858354 // indirect github.com/ncw/swift v1.0.53 // indirect - github.com/nishanths/exhaustive v0.11.0 // indirect + github.com/nishanths/exhaustive v0.12.0 // indirect github.com/nishanths/predeclared v0.2.2 // indirect - github.com/nunnatsa/ginkgolinter v0.12.1 // indirect + github.com/nunnatsa/ginkgolinter v0.16.2 // indirect github.com/olekukonko/tablewriter v0.0.5 // indirect github.com/pascaldekloe/name v0.0.0-20180628100202-0fd16699aae1 // indirect - github.com/pelletier/go-toml/v2 v2.0.6 // indirect + github.com/pelletier/go-toml/v2 v2.2.3 // indirect github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c // indirect github.com/pkg/errors v0.9.1 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect - github.com/polyfloyd/go-errorlint v1.4.2 // indirect + github.com/polyfloyd/go-errorlint v1.6.0 // indirect github.com/prometheus/client_golang v1.16.0 // indirect github.com/prometheus/client_model v0.4.0 // indirect github.com/prometheus/common v0.44.0 // indirect github.com/prometheus/procfs v0.10.1 // indirect github.com/pseudomuto/protokit v0.2.0 // indirect - github.com/quasilyte/go-ruleguard v0.3.19 // indirect + github.com/quasilyte/go-ruleguard v0.4.3-0.20240823090925-0fe6f58b47b1 // indirect + github.com/quasilyte/go-ruleguard/dsl v0.3.22 // indirect github.com/quasilyte/gogrep v0.5.0 // indirect github.com/quasilyte/regex/syntax v0.0.0-20210819130434-b3f0c404a727 // indirect github.com/quasilyte/stdinfo v0.0.0-20220114132959-f7386bf02567 // indirect github.com/rs/zerolog v1.29.0 // indirect - github.com/ryancurrah/gomodguard v1.3.0 // indirect - github.com/ryanrolds/sqlclosecheck v0.4.0 // indirect + 
github.com/ryancurrah/gomodguard v1.3.5 // indirect + github.com/ryanrolds/sqlclosecheck v0.5.1 // indirect github.com/sanposhiho/wastedassign/v2 v2.0.7 // indirect + github.com/santhosh-tekuri/jsonschema/v5 v5.3.1 // indirect github.com/sashamelentyev/interfacebloat v1.1.0 // indirect - github.com/sashamelentyev/usestdlibvars v1.23.0 // indirect - github.com/securego/gosec/v2 v2.16.0 // indirect + github.com/sashamelentyev/usestdlibvars v1.27.0 // indirect + github.com/securego/gosec/v2 v2.21.2 // indirect github.com/shazow/go-diff v0.0.0-20160112020656-b6b7b6733b8c // indirect github.com/sirupsen/logrus v1.9.3 // indirect github.com/sivchari/containedctx v1.0.3 // indirect - github.com/sivchari/nosnakecase v1.7.0 // indirect - github.com/sivchari/tenv v1.7.1 // indirect + github.com/sivchari/tenv v1.10.0 // indirect github.com/sonatard/noctx v0.0.2 // indirect github.com/sourcegraph/go-diff v0.7.0 // indirect - github.com/spf13/afero v1.10.0 // indirect + github.com/spf13/afero v1.11.0 // indirect github.com/spf13/cast v1.5.0 // indirect - github.com/spf13/cobra v1.7.0 // indirect + github.com/spf13/cobra v1.8.1 // indirect github.com/spf13/jwalterweatherman v1.1.0 // indirect github.com/spf13/pflag v1.0.5 // indirect github.com/spf13/viper v1.15.0 // indirect @@ -207,67 +214,65 @@ require ( github.com/stretchr/objx v0.5.2 // indirect github.com/stretchr/testify v1.9.0 // indirect github.com/subosito/gotenv v1.4.2 // indirect - github.com/t-yuki/gocover-cobertura v0.0.0-20180217150009-aaee18c8195c // indirect github.com/tdakkota/asciicheck v0.2.0 // indirect - github.com/tetafro/godot v1.4.11 // indirect + github.com/tetafro/godot v1.4.17 // indirect github.com/timakin/bodyclose v0.0.0-20230421092635-574207250966 // indirect github.com/timonwong/loggercheck v0.9.4 // indirect - github.com/tomarrell/wrapcheck/v2 v2.8.1 // indirect + github.com/tomarrell/wrapcheck/v2 v2.9.0 // indirect github.com/tommy-muehle/go-mnd/v2 v2.5.1 // indirect - 
github.com/ultraware/funlen v0.0.3 // indirect - github.com/ultraware/whitespace v0.0.5 // indirect - github.com/uudashr/gocognit v1.0.6 // indirect - github.com/xen0n/gosmopolitan v1.2.1 // indirect + github.com/ultraware/funlen v0.1.0 // indirect + github.com/ultraware/whitespace v0.1.1 // indirect + github.com/uudashr/gocognit v1.1.3 // indirect + github.com/xen0n/gosmopolitan v1.2.2 // indirect github.com/yagipy/maintidx v1.0.0 // indirect - github.com/yeya24/promlinter v0.2.0 // indirect - github.com/ykadowak/zerologlint v0.1.2 // indirect - gitlab.com/bosi/decorder v0.2.3 // indirect + github.com/yeya24/promlinter v0.3.0 // indirect + github.com/ykadowak/zerologlint v0.1.5 // indirect + gitlab.com/bosi/decorder v0.4.2 // indirect + go-simpler.org/musttag v0.12.2 // indirect + go-simpler.org/sloglint v0.7.2 // indirect go.opencensus.io v0.24.0 // indirect - go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.46.1 // indirect - go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.46.1 // indirect - go.opentelemetry.io/otel v1.21.0 // indirect + go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.54.0 // indirect + go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.54.0 // indirect + go.opentelemetry.io/otel v1.29.0 // indirect go.opentelemetry.io/otel/exporters/jaeger v1.17.0 // indirect go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.19.0 // indirect - go.opentelemetry.io/otel/metric v1.21.0 // indirect - go.opentelemetry.io/otel/sdk v1.21.0 // indirect - go.opentelemetry.io/otel/trace v1.21.0 // indirect - go.tmz.dev/musttag v0.7.0 // indirect + go.opentelemetry.io/otel/metric v1.29.0 // indirect + go.opentelemetry.io/otel/sdk v1.28.0 // indirect + go.opentelemetry.io/otel/trace v1.29.0 // indirect + go.uber.org/automaxprocs v1.5.3 // indirect go.uber.org/multierr v1.11.0 // indirect go.uber.org/zap v1.25.0 // indirect - golang.org/x/crypto v0.25.0 // indirect - 
golang.org/x/exp v0.0.0-20230510235704-dd950f8aeaea // indirect - golang.org/x/exp/typeparams v0.0.0-20230224173230-c95f2b4c22f2 // indirect - golang.org/x/mod v0.17.0 // indirect - golang.org/x/net v0.27.0 // indirect - golang.org/x/oauth2 v0.16.0 // indirect - golang.org/x/sync v0.7.0 // indirect - golang.org/x/sys v0.22.0 // indirect - golang.org/x/term v0.22.0 // indirect - golang.org/x/text v0.16.0 // indirect - golang.org/x/time v0.5.0 // indirect - golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d // indirect - google.golang.org/api v0.155.0 // indirect - google.golang.org/appengine v1.6.8 // indirect - google.golang.org/genproto v0.0.0-20240123012728-ef4313101c80 // indirect - google.golang.org/genproto/googleapis/api v0.0.0-20240123012728-ef4313101c80 // indirect - google.golang.org/genproto/googleapis/rpc v0.0.0-20240123012728-ef4313101c80 // indirect - google.golang.org/grpc v1.62.1 // indirect - google.golang.org/protobuf v1.33.0 // indirect + golang.org/x/crypto v0.27.0 // indirect + golang.org/x/exp v0.0.0-20240904232852-e7e105dedf7e // indirect + golang.org/x/exp/typeparams v0.0.0-20240314144324-c7f7c6466f7f // indirect + golang.org/x/mod v0.21.0 // indirect + golang.org/x/net v0.28.0 // indirect + golang.org/x/oauth2 v0.22.0 // indirect + golang.org/x/sync v0.8.0 // indirect + golang.org/x/sys v0.25.0 // indirect + golang.org/x/term v0.24.0 // indirect + golang.org/x/text v0.18.0 // indirect + golang.org/x/time v0.6.0 // indirect + golang.org/x/tools v0.24.0 // indirect + google.golang.org/api v0.196.0 // indirect + google.golang.org/genproto v0.0.0-20240903143218-8af14fe29dc1 // indirect + google.golang.org/genproto/googleapis/api v0.0.0-20240827150818-7e3bb234dfed // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20240903143218-8af14fe29dc1 // indirect + google.golang.org/grpc v1.66.0 // indirect + google.golang.org/protobuf v1.34.2 // indirect gopkg.in/inf.v0 v0.9.1 // indirect gopkg.in/ini.v1 v1.67.0 // indirect 
gopkg.in/yaml.v2 v2.4.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect - honnef.co/go/tools v0.4.3 // indirect + honnef.co/go/tools v0.5.1 // indirect k8s.io/api v0.28.2 // indirect k8s.io/apimachinery v0.28.2 // indirect k8s.io/client-go v0.28.1 // indirect k8s.io/klog/v2 v2.100.1 // indirect k8s.io/kube-openapi v0.0.0-20230717233707-2695361300d9 // indirect k8s.io/utils v0.0.0-20230406110748-d93618cff8a2 // indirect - mvdan.cc/gofumpt v0.5.0 // indirect - mvdan.cc/interfacer v0.0.0-20180901003855-c20040233aed // indirect - mvdan.cc/lint v0.0.0-20170908181259-adc824a0674b // indirect - mvdan.cc/unparam v0.0.0-20221223090309-7455f1af531d // indirect + mvdan.cc/gofumpt v0.7.0 // indirect + mvdan.cc/unparam v0.0.0-20240528143540-8a5130ca722f // indirect sigs.k8s.io/controller-runtime v0.0.0-00010101000000-000000000000 // indirect sigs.k8s.io/json v0.0.0-20221116044647-bc3834ca7abd // indirect sigs.k8s.io/structured-merge-diff/v4 v4.2.3 // indirect diff --git a/boilerplate/flyte/golang_support_tools/go.sum b/boilerplate/flyte/golang_support_tools/go.sum index 6aa6b8d969..fc017b6f44 100644 --- a/boilerplate/flyte/golang_support_tools/go.sum +++ b/boilerplate/flyte/golang_support_tools/go.sum @@ -3,61 +3,30 @@ 4d63.com/gochecknoglobals v0.2.1 h1:1eiorGsgHOFOuoOiJDy2psSrQbRdIHrlge0IJIkUgDc= 4d63.com/gochecknoglobals v0.2.1/go.mod h1:KRE8wtJB3CXCsb1xy421JfTHIIbmT3U5ruxw2Qu8fSU= cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= -cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= -cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= -cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU= -cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= -cloud.google.com/go v0.44.3/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= -cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc= 
-cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0= -cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To= -cloud.google.com/go v0.52.0/go.mod h1:pXajvRH/6o3+F9jDHZWQ5PbGhn+o8w9qiu/CffaVdO4= -cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M= -cloud.google.com/go v0.54.0/go.mod h1:1rq2OEkV3YMf6n/9ZvGWI3GWw0VoqH/1x2nd8Is/bPc= -cloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKVk= -cloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs= -cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOYc= -cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY= -cloud.google.com/go v0.72.0/go.mod h1:M+5Vjvlc2wnp6tjzE102Dw08nGShTscUx2nZMufOKPI= -cloud.google.com/go v0.74.0/go.mod h1:VV1xSbzvo+9QJOxLDaJfTjx5e+MePCpCWwvftOeQmWk= -cloud.google.com/go v0.75.0/go.mod h1:VGuuCn7PG0dwsd5XPVm2Mm3wlh3EL55/79EKB6hlPTY= -cloud.google.com/go v0.112.0 h1:tpFCD7hpHFlQ8yPwT3x+QeXqc2T6+n6T+hmABHfDUSM= -cloud.google.com/go v0.112.0/go.mod h1:3jEEVwZ/MHU4djK5t5RHuKOA/GbLddgTdVubX1qnPD4= -cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= -cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= -cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= -cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg= -cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc= -cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= -cloud.google.com/go/compute v1.23.3 h1:6sVlXXBmbd7jNX0Ipq0trII3e4n1/MsADLK6a+aiVlk= -cloud.google.com/go/compute v1.23.3/go.mod h1:VCgBUoMnIVIR0CscqQiPJLAG25E3ZRZMzcFZeQ+h8CI= -cloud.google.com/go/compute/metadata v0.2.3 h1:mg4jlk7mCAj6xXp9UJ4fjI9VUI5rubuGBW5aJ7UnBMY= 
-cloud.google.com/go/compute/metadata v0.2.3/go.mod h1:VAV5nSsACxMJvgaAuX6Pk2AawlZn8kiOGuCv6gTkwuA= -cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= -cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= -cloud.google.com/go/iam v1.1.5 h1:1jTsCu4bcsNsE4iiqNT5SHwrDRCfRmIaaaVFhRveTJI= -cloud.google.com/go/iam v1.1.5/go.mod h1:rB6P/Ic3mykPbFio+vo7403drjlgvoWfYpJhMXEbzv8= -cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= -cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= -cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= -cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU= -cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw= -cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos= -cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= -cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= -cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= -cloud.google.com/go/storage v1.14.0/go.mod h1:GrKmX003DSIwi9o29oFT7YDnHYwZoctc3fOKtUw0Xmo= -cloud.google.com/go/storage v1.36.0 h1:P0mOkAcaJxhCTvAkMhxMfrTKiNcub4YmmPBtlhAyTr8= -cloud.google.com/go/storage v1.36.0/go.mod h1:M6M/3V/D3KpzMTJyPOR/HU6n2Si5QdaXYEsng2xgOs8= -dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= -github.com/4meepo/tagalign v1.2.2 h1:kQeUTkFTaBRtd/7jm8OKJl9iHk0gAO+TDFPHGSna0aw= -github.com/4meepo/tagalign v1.2.2/go.mod h1:Q9c1rYMZJc9dPRkbQPpcBNCLEmY2njbAsXhQOZFE2dE= -github.com/Abirdcfly/dupword v0.0.11 h1:z6v8rMETchZXUIuHxYNmlUAuKuB21PeaSymTed16wgU= -github.com/Abirdcfly/dupword v0.0.11/go.mod 
h1:wH8mVGuf3CP5fsBTkfWwwwKTjDnVVCxtU8d8rgeVYXA= -github.com/Antonboom/errname v0.1.10 h1:RZ7cYo/GuZqjr1nuJLNe8ZH+a+Jd9DaZzttWzak9Bls= -github.com/Antonboom/errname v0.1.10/go.mod h1:xLeiCIrvVNpUtsN0wxAh05bNIZpqE22/qDMnTBTttiA= -github.com/Antonboom/nilnil v0.1.5 h1:X2JAdEVcbPaOom2TUa1FxZ3uyuUlex0XMLGYMemu6l0= -github.com/Antonboom/nilnil v0.1.5/go.mod h1:I24toVuBKhfP5teihGWctrRiPbRKHwZIFOvc6v3HZXk= +cloud.google.com/go v0.115.1 h1:Jo0SM9cQnSkYfp44+v+NQXHpcHqlnRJk2qxh6yvxxxQ= +cloud.google.com/go v0.115.1/go.mod h1:DuujITeaufu3gL68/lOFIirVNJwQeyf5UXyi+Wbgknc= +cloud.google.com/go/auth v0.9.3 h1:VOEUIAADkkLtyfr3BLa3R8Ed/j6w1jTBmARx+wb5w5U= +cloud.google.com/go/auth v0.9.3/go.mod h1:7z6VY+7h3KUdRov5F1i8NDP5ZzWKYmEPO842BgCsmTk= +cloud.google.com/go/auth/oauth2adapt v0.2.4 h1:0GWE/FUsXhf6C+jAkWgYm7X9tK8cuEIfy19DBn6B6bY= +cloud.google.com/go/auth/oauth2adapt v0.2.4/go.mod h1:jC/jOpwFP6JBxhB3P5Rr0a9HLMC/Pe3eaL4NmdvqPtc= +cloud.google.com/go/compute/metadata v0.5.0 h1:Zr0eK8JbFv6+Wi4ilXAR8FJ3wyNdpxHKJNPos6LTZOY= +cloud.google.com/go/compute/metadata v0.5.0/go.mod h1:aHnloV2TPI38yx4s9+wAZhHykWvVCfu7hQbF+9CWoiY= +cloud.google.com/go/iam v1.2.0 h1:kZKMKVNk/IsSSc/udOb83K0hL/Yh/Gcqpz+oAkoIFN8= +cloud.google.com/go/iam v1.2.0/go.mod h1:zITGuWgsLZxd8OwAlX+eMFgZDXzBm7icj1PVTYG766Q= +cloud.google.com/go/longrunning v0.6.0 h1:mM1ZmaNsQsnb+5n1DNPeL0KwQd9jQRqSqSDEkBZr+aI= +cloud.google.com/go/longrunning v0.6.0/go.mod h1:uHzSZqW89h7/pasCWNYdUpwGz3PcVWhrWupreVPYLts= +cloud.google.com/go/storage v1.43.0 h1:CcxnSohZwizt4LCzQHWvBf1/kvtHUn7gk9QERXPyXFs= +cloud.google.com/go/storage v1.43.0/go.mod h1:ajvxEa7WmZS1PxvKRq4bq0tFT3vMd502JwstCcYv0Q0= +github.com/4meepo/tagalign v1.3.4 h1:P51VcvBnf04YkHzjfclN6BbsopfJR5rxs1n+5zHt+w8= +github.com/4meepo/tagalign v1.3.4/go.mod h1:M+pnkHH2vG8+qhE5bVc/zeP7HS/j910Fwa9TUSyZVI0= +github.com/Abirdcfly/dupword v0.1.1 h1:Bsxe0fIw6OwBtXMIncaTxCLHYO5BB+3mcsR5E8VXloY= +github.com/Abirdcfly/dupword v0.1.1/go.mod h1:B49AcJdTYYkpd4HjgAcutNGG9HZ2JWwKunH9Y2BA6sM= 
+github.com/Antonboom/errname v0.1.13 h1:JHICqsewj/fNckzrfVSe+T33svwQxmjC+1ntDsHOVvM= +github.com/Antonboom/errname v0.1.13/go.mod h1:uWyefRYRN54lBg6HseYCFhs6Qjcy41Y3Jl/dVhA87Ns= +github.com/Antonboom/nilnil v0.1.9 h1:eKFMejSxPSA9eLSensFmjW2XTgTwJMjZ8hUHtV4s/SQ= +github.com/Antonboom/nilnil v0.1.9/go.mod h1:iGe2rYwCq5/Me1khrysB4nwI7swQvjclR8/YRPl5ihQ= +github.com/Antonboom/testifylint v1.4.3 h1:ohMt6AHuHgttaQ1xb6SSnxCeK4/rnK7KKzbvs7DmEck= +github.com/Antonboom/testifylint v1.4.3/go.mod h1:+8Q9+AOLsz5ZiQiiYujJKs9mNz398+M6UgslP4qgJLA= github.com/Azure/azure-sdk-for-go/sdk/azcore v1.13.0 h1:GJHeeA2N7xrG3q30L2UXDyuWRzDM900/65j70wcM4Ww= github.com/Azure/azure-sdk-for-go/sdk/azcore v1.13.0/go.mod h1:l38EPgmsp71HHLq9j7De57JcKOWPyhrsW1Awm1JS6K0= github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.7.0 h1:tfLQ34V6F7tVSwoTf/4lH5sE0o6eCJuNDTmH09nDpbc= @@ -71,31 +40,35 @@ github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.4.0/go.mod h1:WCPBHsOXfB github.com/AzureAD/microsoft-authentication-library-for-go v1.2.2 h1:XHOnouVk1mxXfQidrMEnLlPk9UMeRtyBTnEFtxkV0kU= github.com/AzureAD/microsoft-authentication-library-for-go v1.2.2/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= -github.com/BurntSushi/toml v1.3.2 h1:o7IhLm0Msx3BaB+n3Ag7L8EVlByGnpq14C4YWiu/gL8= -github.com/BurntSushi/toml v1.3.2/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ= -github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= +github.com/BurntSushi/toml v1.4.1-0.20240526193622-a339e1f7089c h1:pxW6RcqyfI9/kWtOwnv/G+AzdKuy2ZrqINhenH4HyNs= +github.com/BurntSushi/toml v1.4.1-0.20240526193622-a339e1f7089c/go.mod h1:ukJfTF/6rtPPRCnwkur4qwRxa8vTRFBF0uk2lLoLwho= +github.com/Crocmagnon/fatcontext v0.5.2 h1:vhSEg8Gqng8awhPju2w7MKHqMlg4/NI+gSDHtR3xgwA= +github.com/Crocmagnon/fatcontext v0.5.2/go.mod h1:87XhRMaInHP44Q7Tlc7jkgKKB7kZAOPiDkFMdKCC+74= 
github.com/Djarvur/go-err113 v0.0.0-20210108212216-aea10b59be24 h1:sHglBQTwgx+rWPdisA5ynNEsoARbiCBOyGcJM4/OzsM= github.com/Djarvur/go-err113 v0.0.0-20210108212216-aea10b59be24/go.mod h1:4UJr5HIiMZrwgkSPdsjy2uOQExX/WEILpIrO9UPGuXs= github.com/EngHabu/mockery v0.0.0-20220916190332-dde70e38baba h1:HDBbUo0odjuCCtStDS//vNd3CeP1GdjQVhFmSZLnFwU= github.com/EngHabu/mockery v0.0.0-20220916190332-dde70e38baba/go.mod h1:DjqxgJ6VUERvvVE41d4Rrn72K29MXwk9ziY18bi36BU= -github.com/GaijinEntertainment/go-exhaustruct/v2 v2.3.0 h1:+r1rSv4gvYn0wmRjC8X7IAzX8QezqtFV9m0MUHFJgts= -github.com/GaijinEntertainment/go-exhaustruct/v2 v2.3.0/go.mod h1:b3g59n2Y+T5xmcxJL+UEG2f8cQploZm1mR/v6BW0mU0= +github.com/GaijinEntertainment/go-exhaustruct/v3 v3.3.0 h1:/fTUt5vmbkAcMBt4YQiuC23cV0kEsN1MVMNqeOW43cU= +github.com/GaijinEntertainment/go-exhaustruct/v3 v3.3.0/go.mod h1:ONJg5sxcbsdQQ4pOW8TGdTidT2TMAUy/2Xhr8mrYaao= github.com/Masterminds/semver v1.4.2/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF078ddwwvV3Y= github.com/Masterminds/semver v1.5.0 h1:H65muMkzWKEuNDnfl9d70GUjFniHKHRbFPGBuZ3QEww= github.com/Masterminds/semver v1.5.0/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF078ddwwvV3Y= +github.com/Masterminds/semver/v3 v3.3.0 h1:B8LGeaivUe71a5qox1ICM/JLl0NqZSW5CHyL+hmvYS0= +github.com/Masterminds/semver/v3 v3.3.0/go.mod h1:4V+yj/TJE1HU9XfppCwVMZq3I84lprf4nC11bSS5beM= github.com/Masterminds/sprig v2.15.0+incompatible h1:0gSxPGWS9PAr7U2NsQ2YQg6juRDINkUyuvbb4b2Xm8w= github.com/Masterminds/sprig v2.15.0+incompatible/go.mod h1:y6hNFY5UBTIWBxnzTeuNhlNS5hqE0NB0E6fgfo2Br3o= github.com/OneOfOne/xxhash v1.2.2 h1:KMrpdQIwFcEqXDklaen+P1axHaj9BSKzvpUUfnHldSE= github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= -github.com/OpenPeeDeeP/depguard/v2 v2.1.0 h1:aQl70G173h/GZYhWf36aE5H0KaujXfVMnn/f1kSDVYY= -github.com/OpenPeeDeeP/depguard/v2 v2.1.0/go.mod h1:PUBgk35fX4i7JDmwzlJwJ+GMe6NfO1723wmJMgPThNQ= -github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod 
h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= -github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= -github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= -github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= -github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho= -github.com/alexkohler/nakedret/v2 v2.0.2 h1:qnXuZNvv3/AxkAb22q/sEsEpcA99YxLFACDtEw9TPxE= -github.com/alexkohler/nakedret/v2 v2.0.2/go.mod h1:2b8Gkk0GsOrqQv/gPWjNLDSKwG8I5moSXG1K4VIBcTQ= +github.com/OpenPeeDeeP/depguard/v2 v2.2.0 h1:vDfG60vDtIuf0MEOhmLlLLSzqaRM8EMcgJPdp74zmpA= +github.com/OpenPeeDeeP/depguard/v2 v2.2.0/go.mod h1:CIzddKRvLBC4Au5aYP/i3nyaWQ+ClszLIuVocRiCYFQ= +github.com/alecthomas/assert/v2 v2.2.2 h1:Z/iVC0xZfWTaFNE6bA3z07T86hd45Xe2eLt6WVy2bbk= +github.com/alecthomas/assert/v2 v2.2.2/go.mod h1:pXcQ2Asjp247dahGEmsZ6ru0UVwnkhktn7S0bBDLxvQ= +github.com/alecthomas/go-check-sumtype v0.1.4 h1:WCvlB3l5Vq5dZQTFmodqL2g68uHiSwwlWcT5a2FGK0c= +github.com/alecthomas/go-check-sumtype v0.1.4/go.mod h1:WyYPfhfkdhyrdaligV6svFopZV8Lqdzn5pyVBaV6jhQ= +github.com/alecthomas/repr v0.2.0 h1:HAzS41CIzNW5syS8Mf9UwXhNH1J9aix/BvDRf1Ml2Yk= +github.com/alecthomas/repr v0.2.0/go.mod h1:Fr0507jx4eOXV7AlPV6AVZLYrLIuIeSOWtW57eE/O/4= +github.com/alexkohler/nakedret/v2 v2.0.4 h1:yZuKmjqGi0pSmjGpOC016LtPJysIL0WEUiaXW5SUnNg= +github.com/alexkohler/nakedret/v2 v2.0.4/go.mod h1:bF5i0zF2Wo2o4X4USt9ntUWve6JbFv02Ff4vlkmS/VU= github.com/alexkohler/prealloc v1.0.0 h1:Hbq0/3fJPQhNkN0dR95AVrr6R7tou91y0uHG5pOcUuw= github.com/alexkohler/prealloc v1.0.0/go.mod h1:VetnK3dIgFBBKmg0YnD9F9x6Icjd+9cvfHR56wJVlKE= github.com/alingse/asasalint v0.0.11 h1:SFwnQXJ49Kx/1GghOFz1XGqHYKp21Kq1nHad/0WQRnw= @@ -104,98 +77,89 @@ github.com/alvaroloes/enumer v1.1.2 
h1:5khqHB33TZy1GWCO/lZwcroBFh7u+0j40T83VUbfA github.com/alvaroloes/enumer v1.1.2/go.mod h1:FxrjvuXoDAx9isTJrv4c+T410zFi0DtXIT0m65DJ+Wo= github.com/aokoli/goutils v1.0.1 h1:7fpzNGoJ3VA8qcrm++XEE1QUe0mIwNeLa02Nwq7RDkg= github.com/aokoli/goutils v1.0.1/go.mod h1:SijmP0QR8LtwsmDs8Yii5Z/S4trXFGFC2oO5g9DP+DQ= -github.com/ashanbrown/forbidigo v1.5.3 h1:jfg+fkm/snMx+V9FBwsl1d340BV/99kZGv5jN9hBoXk= -github.com/ashanbrown/forbidigo v1.5.3/go.mod h1:Y8j9jy9ZYAEHXdu723cUlraTqbzjKF1MUyfOKL+AjcU= +github.com/ashanbrown/forbidigo v1.6.0 h1:D3aewfM37Yb3pxHujIPSpTf6oQk9sc9WZi8gerOIVIY= +github.com/ashanbrown/forbidigo v1.6.0/go.mod h1:Y8j9jy9ZYAEHXdu723cUlraTqbzjKF1MUyfOKL+AjcU= github.com/ashanbrown/makezero v1.1.1 h1:iCQ87C0V0vSyO+M9E/FZYbu65auqH0lnsOkf5FcB28s= github.com/ashanbrown/makezero v1.1.1/go.mod h1:i1bJLCRSCHOcOa9Y6MyF2FTfMZMFdHvxKHxgO5Z1axI= github.com/aws/aws-sdk-go v1.44.2 h1:5VBk5r06bgxgRKVaUtm1/4NT/rtrnH2E4cnAYv5zgQc= github.com/aws/aws-sdk-go v1.44.2/go.mod h1:y4AeaBuwd2Lk+GepC1E9v0qOiTws0MIWAX4oIKwKHZo= github.com/benbjohnson/clock v1.3.0 h1:ip6w0uFQkncKQ979AypyG0ER7mqUSBdKLOgAle/AT8A= github.com/benbjohnson/clock v1.3.0/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA= -github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= -github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= github.com/bkielbasa/cyclop v1.2.1 h1:AeF71HZDob1P2/pRm1so9cd1alZnrpyc4q2uP2l0gJY= github.com/bkielbasa/cyclop v1.2.1/go.mod h1:K/dT/M0FPAiYjBgQGau7tz+3TMh4FWAEqlMhzFWCrgM= github.com/blizzy78/varnamelen v0.8.0 h1:oqSblyuQvFsW1hbBHh1zfwrKe3kcSj0rnXkKzsQ089M= github.com/blizzy78/varnamelen v0.8.0/go.mod h1:V9TzQZ4fLJ1DSrjVDfl89H7aMnTvKkApdHeyESmyR7k= -github.com/bombsimon/wsl/v3 v3.4.0 h1:RkSxjT3tmlptwfgEgTgU+KYKLI35p/tviNXNXiL2aNU= 
-github.com/bombsimon/wsl/v3 v3.4.0/go.mod h1:KkIB+TXkqy6MvK9BDZVbZxKNYsE1/oLRJbIFtf14qqo= -github.com/breml/bidichk v0.2.4 h1:i3yedFWWQ7YzjdZJHnPo9d/xURinSq3OM+gyM43K4/8= -github.com/breml/bidichk v0.2.4/go.mod h1:7Zk0kRFt1LIZxtQdl9W9JwGAcLTTkOs+tN7wuEYGJ3s= -github.com/breml/errchkjson v0.3.1 h1:hlIeXuspTyt8Y/UmP5qy1JocGNR00KQHgfaNtRAjoxQ= -github.com/breml/errchkjson v0.3.1/go.mod h1:XroxrzKjdiutFyW3nWhw34VGg7kiMsDQox73yWCGI2U= -github.com/butuzov/ireturn v0.2.0 h1:kCHi+YzC150GE98WFuZQu9yrTn6GEydO2AuPLbTgnO4= -github.com/butuzov/ireturn v0.2.0/go.mod h1:Wh6Zl3IMtTpaIKbmwzqi6olnM9ptYQxxVacMsOEFPoc= -github.com/butuzov/mirror v1.1.0 h1:ZqX54gBVMXu78QLoiqdwpl2mgmoOJTk7s4p4o+0avZI= -github.com/butuzov/mirror v1.1.0/go.mod h1:8Q0BdQU6rC6WILDiBM60DBfvV78OLJmMmixe7GF45AE= +github.com/bombsimon/wsl/v4 v4.4.1 h1:jfUaCkN+aUpobrMO24zwyAMwMAV5eSziCkOKEauOLdw= +github.com/bombsimon/wsl/v4 v4.4.1/go.mod h1:Xu/kDxGZTofQcDGCtQe9KCzhHphIe0fDuyWTxER9Feo= +github.com/breml/bidichk v0.2.7 h1:dAkKQPLl/Qrk7hnP6P+E0xOodrq8Us7+U0o4UBOAlQY= +github.com/breml/bidichk v0.2.7/go.mod h1:YodjipAGI9fGcYM7II6wFvGhdMYsC5pHDlGzqvEW3tQ= +github.com/breml/errchkjson v0.3.6 h1:VLhVkqSBH96AvXEyclMR37rZslRrY2kcyq+31HCsVrA= +github.com/breml/errchkjson v0.3.6/go.mod h1:jhSDoFheAF2RSDOlCfhHO9KqhZgAYLyvHe7bRCX8f/U= +github.com/butuzov/ireturn v0.3.0 h1:hTjMqWw3y5JC3kpnC5vXmFJAWI/m31jaCYQqzkS6PL0= +github.com/butuzov/ireturn v0.3.0/go.mod h1:A09nIiwiqzN/IoVo9ogpa0Hzi9fex1kd9PSD6edP5ZA= +github.com/butuzov/mirror v1.2.0 h1:9YVK1qIjNspaqWutSv8gsge2e/Xpq1eqEkslEUHy5cs= +github.com/butuzov/mirror v1.2.0/go.mod h1:DqZZDtzm42wIAIyHXeN8W/qb1EPlb9Qn/if9icBOpdQ= +github.com/catenacyber/perfsprint v0.7.1 h1:PGW5G/Kxn+YrN04cRAZKC+ZuvlVwolYMrIyyTJ/rMmc= +github.com/catenacyber/perfsprint v0.7.1/go.mod h1:/wclWYompEyjUD2FuIIDVKNkqz7IgBIWXIH3V0Zol50= +github.com/ccojocar/zxcvbn-go v1.0.2 h1:na/czXU8RrhXO4EZme6eQJLR4PzcGsahsBOAwU6I3Vg= +github.com/ccojocar/zxcvbn-go v1.0.2/go.mod 
h1:g1qkXtUSvHP8lhHp5GrSmTz6uWALGRMQdw6Qnz/hi60= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= github.com/cespare/xxhash v1.1.0 h1:a6HrQnmkObjyL+Gs60czilIUGqrzKutQD6XZog3p+ko= github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= -github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= -github.com/cespare/xxhash/v2 v2.1.2/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= -github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj44= -github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs= +github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/charithe/durationcheck v0.0.10 h1:wgw73BiocdBDQPik+zcEoBG/ob8uyBHf2iyoHGPf5w4= github.com/charithe/durationcheck v0.0.10/go.mod h1:bCWXb7gYRysD1CU3C+u4ceO49LoGOY1C1L6uouGNreQ= -github.com/chavacava/garif v0.0.0-20230227094218-b8c73b2037b8 h1:W9o46d2kbNL06lq7UNDPV0zYLzkrde/bjIqO02eoll0= -github.com/chavacava/garif v0.0.0-20230227094218-b8c73b2037b8/go.mod h1:gakxgyXaaPkxvLw1XQxNGK4I37ys9iBRzNUx/B7pUCo= +github.com/chavacava/garif v0.1.0 h1:2JHa3hbYf5D9dsgseMKAmc/MZ109otzgNFk5s87H9Pc= +github.com/chavacava/garif v0.1.0/go.mod h1:XMyYCkEL58DF0oyW4qDjjnPWONs2HBqYKI+UIPD+Gww= github.com/cheekybits/is v0.0.0-20150225183255-68e9c0620927 h1:SKI1/fuSdodxmNNyVBR8d7X/HuLnRpvvFO0AgyQk764= github.com/cheekybits/is v0.0.0-20150225183255-68e9c0620927/go.mod h1:h/aW8ynjgkuj+NQRlZcDbAbM1ORAbXjXX77sX7T289U= github.com/chigopher/pathlib v0.19.1 h1:RoLlUJc0CqBGwq239cilyhxPNLXTK+HXoASGyGznx5A= github.com/chigopher/pathlib v0.19.1/go.mod h1:tzC1dZLW8o33UQpWkNkhvPwL5n4yyFRFm/jL1YGWFvY= -github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= -github.com/chzyer/readline 
v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= -github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= +github.com/ckaznocha/intrange v0.2.0 h1:FykcZuJ8BD7oX93YbO1UY9oZtkRbp+1/kJcDjkefYLs= +github.com/ckaznocha/intrange v0.2.0/go.mod h1:r5I7nUlAAG56xmkOpw4XVr16BXhwYTUdcuRFeevn1oE= github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= -github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= -github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= -github.com/cncf/xds/go v0.0.0-20231128003011-0fa0005c9caa h1:jQCWAUqqlij9Pgj2i/PB79y4KOPYVyFYdROxgaCwdTQ= -github.com/cncf/xds/go v0.0.0-20231128003011-0fa0005c9caa/go.mod h1:x/1Gn8zydmfq8dk6e9PdstVsDgu9RuyIIJqAaF//0IM= github.com/coocood/freecache v1.1.1 h1:uukNF7QKCZEdZ9gAV7WQzvh0SbjwdMF6m3x3rxEkaPc= github.com/coocood/freecache v1.1.1/go.mod h1:OKrEjkGVoxZhyWAJoeFi5BMLUJm2Tit0kpGkIr7NGYY= github.com/coreos/go-systemd/v22 v22.3.3-0.20220203105225-a9a7ef127534/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= -github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= +github.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/curioswitch/go-reassign v0.2.0 h1:G9UZyOcpk/d7Gd6mqYgd8XYWFMw/znxwGDUstnC9DIo= github.com/curioswitch/go-reassign v0.2.0/go.mod h1:x6OpXuWvgfQaMGks2BZybTngWjT84hqJfKoO8Tt/Roc= -github.com/daixiang0/gci v0.10.1 h1:eheNA3ljF6SxnPD/vE4lCBusVHmV3Rs3dkKvFrJ7MR0= -github.com/daixiang0/gci v0.10.1/go.mod h1:xtHP9N7AHdNvtRNfcx9gwTDfw7FRJx4bZUsiEfiNNAI= +github.com/daixiang0/gci 
v0.13.5 h1:kThgmH1yBmZSBCh1EJVxQ7JsHpm5Oms0AMed/0LaH4c= +github.com/daixiang0/gci v0.13.5/go.mod h1:12etP2OniiIdP4q+kjUGrC/rUagga7ODbqsom5Eo5Yk= github.com/davecgh/go-spew v0.0.0-20161028175848-04cdfd42973b/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/denis-tingaikin/go-header v0.4.3 h1:tEaZKAlqql6SKCY++utLmkPLd6K8IBM20Ha7UVm+mtU= -github.com/denis-tingaikin/go-header v0.4.3/go.mod h1:0wOCWuN71D5qIgE2nz9KrKmuYBAC2Mra5RassOIQ2/c= +github.com/denis-tingaikin/go-header v0.5.0 h1:SRdnP5ZKvcO9KKRP1KJrhFR3RrlGuD+42t4429eC9k8= +github.com/denis-tingaikin/go-header v0.5.0/go.mod h1:mMenU5bWrok6Wl2UsZjy+1okegmwQ3UgWl4V1D8gjlY= github.com/emicklei/go-restful/v3 v3.9.0 h1:XwGDlfxEnQZzuopoqxwSEllNcCOM9DhhFyhFIIGKwxE= github.com/emicklei/go-restful/v3 v3.9.0/go.mod h1:6n3XBCmQQb25CM2LCACGz8ukIrRry+4bhvbpWn3mrbc= github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= -github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5ynNVH9qI8YYLbd1fK2po= -github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= github.com/envoyproxy/protoc-gen-validate v0.3.0-java/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= github.com/envoyproxy/protoc-gen-validate v1.0.4 h1:gVPz/FMfvh57HdSJQyvBtF00j8JU4zdyUgIUNhlgg0A= 
github.com/envoyproxy/protoc-gen-validate v1.0.4/go.mod h1:qys6tmnRsYrQqIhm2bvKZH4Blx/1gTIZ2UKVY1M+Yew= github.com/ernesto-jimenez/gogen v0.0.0-20180125220232-d7d4131e6607 h1:cTavhURetDkezJCvxFggiyLeP40Mrk/TtVg2+ycw1Es= github.com/ernesto-jimenez/gogen v0.0.0-20180125220232-d7d4131e6607/go.mod h1:Cg4fM0vhYWOZdgM7RIOSTRNIc8/VT7CXClC3Ni86lu4= -github.com/esimonov/ifshort v1.0.4 h1:6SID4yGWfRae/M7hkVDVVyppy8q/v9OuxNdmjLQStBA= -github.com/esimonov/ifshort v1.0.4/go.mod h1:Pe8zjlRrJ80+q2CxHLfEOfTwxCZ4O+MuhcHcfgNWTk0= -github.com/ettle/strcase v0.1.1 h1:htFueZyVeE1XNnMEfbqp5r67qAN/4r6ya1ysq8Q+Zcw= -github.com/ettle/strcase v0.1.1/go.mod h1:hzDLsPC7/lwKyBOywSHEP89nt2pDgdy+No1NBA9o9VY= +github.com/ettle/strcase v0.2.0 h1:fGNiVF21fHXpX1niBgk0aROov1LagYsOwV/xqKDKR/Q= +github.com/ettle/strcase v0.2.0/go.mod h1:DajmHElDSaX76ITe3/VHVyMin4LWSJN5Z909Wp+ED1A= github.com/evanphx/json-patch/v5 v5.6.0 h1:b91NhWfaz02IuVxO9faSllyAtNXHMPkC5J8sJCLunww= github.com/evanphx/json-patch/v5 v5.6.0/go.mod h1:G79N1coSVB93tBe7j6PhzjmR3/2VvlbKOFpnXhI9Bw4= -github.com/fatih/color v1.15.0 h1:kOqh6YHBtK8aywxGerMG2Eq3H6Qgoqeo13Bk2Mv/nBs= -github.com/fatih/color v1.15.0/go.mod h1:0h5ZqXfHYED7Bhv2ZJamyIOUej9KtShiJESRwBDUSsw= +github.com/fatih/color v1.17.0 h1:GlRw1BRJxkpqUCBKzKOw098ed57fEsKeNjpTe3cSjK4= +github.com/fatih/color v1.17.0/go.mod h1:YZ7TlrGPkiz6ku9fK3TLD/pl3CpsiFyu8N92HLgmosI= github.com/fatih/structtag v1.2.0 h1:/OdNE99OxoI/PqaW/SuSK9uxxT3f/tcSZgon/ssNSx4= github.com/fatih/structtag v1.2.0/go.mod h1:mBJUNpUnHmRKrKlQQlmCrh5PuhftFbNv8Ys4/aAZl94= github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= -github.com/firefart/nonamedreturns v1.0.4 h1:abzI1p7mAEPYuR4A+VLKn4eNDOycjYo2phmY9sfv40Y= -github.com/firefart/nonamedreturns v1.0.4/go.mod h1:TDhe/tjI1BXo48CmYbUduTV7BdIga8MAO/xbKdcVsGI= +github.com/firefart/nonamedreturns v1.0.5 h1:tM+Me2ZaXs8tfdDw3X6DOX++wMCOqzYUho6tUTYIdRA= 
+github.com/firefart/nonamedreturns v1.0.5/go.mod h1:gHJjDqhGM4WyPt639SOZs+G89Ko7QKH5R5BhnO6xJhw= github.com/flyteorg/flyte/flytestdlib v1.11.0 h1:DxM/sf6H0ong8LIjgh0YwXK+abnGV8kWVi6EgfVCkO8= github.com/flyteorg/flyte/flytestdlib v1.11.0/go.mod h1:AmgNCq/tGEDwVfilW1nFtgPQn8vQ9gcDu6SNwz1YY+M= github.com/flyteorg/protoc-gen-doc v1.4.2 h1:Otw0F+RHaPQ8XlpzhLLgjsCMcrAIcMO01Zh+ALe3rrE= @@ -210,21 +174,14 @@ github.com/fzipp/gocyclo v0.6.0 h1:lsblElZG7d3ALtGMx9fmxeTKZaLLpU8mET09yN4BBLo= github.com/fzipp/gocyclo v0.6.0/go.mod h1:rXPyn8fnlpa0R2csP/31uerbiVBugk5whMdlyaLkLoA= github.com/ghodss/yaml v1.0.0 h1:wQHKEahhL6wmXdzwWG11gIVCkOv05bNOh+Rxn0yngAk= github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= -github.com/go-critic/go-critic v0.8.1 h1:16omCF1gN3gTzt4j4J6fKI/HnRojhEp+Eks6EuKw3vw= -github.com/go-critic/go-critic v0.8.1/go.mod h1:kpzXl09SIJX1cr9TB/g/sAG+eFEl7ZS9f9cqvZtyNl0= -github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= -github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= -github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= -github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= -github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= -github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY= -github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= -github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= -github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A= +github.com/ghostiam/protogetter v0.3.6 h1:R7qEWaSgFCsy20yYHNIJsU9ZOb8TziSRRxuAOTVKeOk= +github.com/ghostiam/protogetter v0.3.6/go.mod h1:7lpeDnEJ1ZjL/YtyoN99ljO4z0pd3H0d18/t2dPBxHw= 
+github.com/go-critic/go-critic v0.11.4 h1:O7kGOCx0NDIni4czrkRIXTnit0mkyKOCePh3My6OyEU= +github.com/go-critic/go-critic v0.11.4/go.mod h1:2QAdo4iuLik5S9YG0rT4wcZ8QxwHYkrr6/2MWAiv/vc= github.com/go-logr/logr v1.2.0/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= -github.com/go-logr/logr v1.3.0 h1:2y3SDp0ZXuc6/cjLSZ+Q3ir+QB9T/iG5yYRXqsagWSY= -github.com/go-logr/logr v1.3.0/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= +github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY= +github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= github.com/go-logr/zapr v1.2.4 h1:QHVo+6stLbfJmYGkQ7uGHUCu5hnAFAj6mDe6Ea0SeOo= @@ -235,16 +192,19 @@ github.com/go-openapi/jsonreference v0.20.2 h1:3sVjiK66+uXK/6oQ8xgcRKcFgQ5KXa2Kv github.com/go-openapi/jsonreference v0.20.2/go.mod h1:Bl1zwGIM8/wsvqjsOQLJ/SH+En5Ap4rVB5KVcIDZG2k= github.com/go-openapi/swag v0.22.3 h1:yMBqmnQ0gyZvEb/+KzuWZOXgllrXT4SADYbvDaXHv/g= github.com/go-openapi/swag v0.22.3/go.mod h1:UzaqsxGiab7freDnrUUra0MwWfN/q7tE4j+VcZ0yl14= -github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= +github.com/go-quicktest/qt v1.101.0 h1:O1K29Txy5P2OK0dGo59b7b0LR6wKfIhttaAhHUyn7eI= +github.com/go-quicktest/qt v1.101.0/go.mod h1:14Bz/f7NwaXPtdYEgzsx46kqSxVwTbzVZsDC26tQJow= github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572 h1:tfuBGBXKqDEevZMzYi5KSi8KkcZtzBcTgAUUtapy0OI= -github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572/go.mod h1:9Pwr4B2jHnOSGXyyzV8ROjYa2ojvAY6HCGYYfMoC3Ls= +github.com/go-task/slim-sprig/v3 v3.0.0 h1:sUs3vkvUymDpBKi3qH1YSqBQk9+9D/8M2mN1vB6EwHI= +github.com/go-task/slim-sprig/v3 v3.0.0/go.mod h1:W848ghGpv3Qj3dhTPRyJypKRiqCdHZiAzKg9hl15HA8= 
github.com/go-toolsmith/astcast v1.1.0 h1:+JN9xZV1A+Re+95pgnMgDboWNVnIMMQXwfBwLRPgSC8= github.com/go-toolsmith/astcast v1.1.0/go.mod h1:qdcuFWeGGS2xX5bLM/c3U9lewg7+Zu4mr+xPwZIB4ZU= github.com/go-toolsmith/astcopy v1.1.0 h1:YGwBN0WM+ekI/6SS6+52zLDEf8Yvp3n2seZITCUBt5s= github.com/go-toolsmith/astcopy v1.1.0/go.mod h1:hXM6gan18VA1T/daUEHCFcYiW8Ai1tIwIzHY6srfEAw= github.com/go-toolsmith/astequal v1.0.3/go.mod h1:9Ai4UglvtR+4up+bAD4+hCj7iTo4m/OXVTSLnCyTAx4= -github.com/go-toolsmith/astequal v1.1.0 h1:kHKm1AWqClYn15R0K1KKE4RG614D46n+nqUQ06E1dTw= github.com/go-toolsmith/astequal v1.1.0/go.mod h1:sedf7VIdCL22LD8qIvv7Nn9MuWJruQA/ysswh64lffQ= +github.com/go-toolsmith/astequal v1.2.0 h1:3Fs3CYZ1k9Vo4FzFhwwewC3CHISHDnVUPC4x0bI2+Cw= +github.com/go-toolsmith/astequal v1.2.0/go.mod h1:c8NZ3+kSFtFY/8lPso4v8LuJjdJiUFVnSuU3s0qrrDY= github.com/go-toolsmith/astfmt v1.1.0 h1:iJVPDPp6/7AaeLJEruMsBUlOYCmvg0MoCfJprsOmcco= github.com/go-toolsmith/astfmt v1.1.0/go.mod h1:OrcLlRwu0CuiIBp/8b5PYF9ktGVZUjlNMV634mhwuQ4= github.com/go-toolsmith/astp v1.1.0 h1:dXPuCl6u2llURjdPLLDxJeZInAeZ0/eZwFJmqZMnpQA= @@ -256,37 +216,27 @@ github.com/go-toolsmith/strparse v1.1.0 h1:GAioeZUK9TGxnLS+qfdqNbA4z0SSm5zVNtCQi github.com/go-toolsmith/strparse v1.1.0/go.mod h1:7ksGy58fsaQkGQlY8WVoBFNyEPMGuJin1rfoPS4lBSQ= github.com/go-toolsmith/typep v1.1.0 h1:fIRYDyF+JywLfqzyhdiHzRop/GQDxxNhLGQ6gFUNHus= github.com/go-toolsmith/typep v1.1.0/go.mod h1:fVIw+7zjdsMxDA3ITWnH1yOiw1rnTQKCsF/sk2H/qig= +github.com/go-viper/mapstructure/v2 v2.1.0 h1:gHnMa2Y/pIxElCH2GlZZ1lZSsn6XMtufpGyP1XxdC/w= +github.com/go-viper/mapstructure/v2 v2.1.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM= github.com/go-xmlfmt/xmlfmt v1.1.2 h1:Nea7b4icn8s57fTx1M5AI4qQT5HEM3rVUO8MuE6g80U= github.com/go-xmlfmt/xmlfmt v1.1.2/go.mod h1:aUCEOzzezBEjDBbFBoSiya/gduyIiWYRP6CnSFIV8AM= github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y= github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8= 
github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= -github.com/gofrs/flock v0.8.1 h1:+gYjHKf32LDeiEEFhQaotPbLuUXjY5ZqxKgXy7n59aw= -github.com/gofrs/flock v0.8.1/go.mod h1:F1TvTiK9OcQqauNUHlbJvyl9Qa1QvF/gOUDKA14jxHU= +github.com/gofrs/flock v0.12.1 h1:MTLVXXHf8ekldpJk3AKicLij9MdwOWkZ+a/jHHZby9E= +github.com/gofrs/flock v0.12.1/go.mod h1:9zxTsyu5xtJ9DK+1tFZyibEV7y3uwDxPPfbxeeHCoD0= github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= github.com/golang-jwt/jwt/v5 v5.2.1 h1:OuVbFODueb089Lh128TAcimifWaLhJwVflnrgM17wHk= github.com/golang-jwt/jwt/v5 v5.2.1/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= -github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= -github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE= github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= -github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= -github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= -github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= -github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= 
-github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= -github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4= github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= -github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= -github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk= github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= @@ -295,45 +245,35 @@ github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvq github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= -github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= -github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek= github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= -github.com/golangci/check v0.0.0-20180506172741-cfe4005ccda2 h1:23T5iq8rbUYlhpt5DB4XJkc6BU31uODLD1o1gKvZmD0= -github.com/golangci/check v0.0.0-20180506172741-cfe4005ccda2/go.mod h1:k9Qvh+8juN+UKMCS/3jFtGICgW8O96FVaZsaxdzDkR4= github.com/golangci/dupl 
v0.0.0-20180902072040-3e9179ac440a h1:w8hkcTqaFpzKqonE9uMCefW1WDie15eSP/4MssdenaM= github.com/golangci/dupl v0.0.0-20180902072040-3e9179ac440a/go.mod h1:ryS0uhF+x9jgbj/N71xsEqODy9BN81/GonCZiOzirOk= -github.com/golangci/go-misc v0.0.0-20220329215616-d24fe342adfe h1:6RGUuS7EGotKx6J5HIP8ZtyMdiDscjMLfRBSPuzVVeo= -github.com/golangci/go-misc v0.0.0-20220329215616-d24fe342adfe/go.mod h1:gjqyPShc/m8pEMpk0a3SeagVb0kaqvhscv+i9jI5ZhQ= -github.com/golangci/gofmt v0.0.0-20220901101216-f2edd75033f2 h1:amWTbTGqOZ71ruzrdA+Nx5WA3tV1N0goTspwmKCQvBY= -github.com/golangci/gofmt v0.0.0-20220901101216-f2edd75033f2/go.mod h1:9wOXstvyDRshQ9LggQuzBCGysxs3b6Uo/1MvYCR2NMs= -github.com/golangci/golangci-lint v1.53.3 h1:CUcRafczT4t1F+mvdkUm6KuOpxUZTl0yWN/rSU6sSMo= -github.com/golangci/golangci-lint v1.53.3/go.mod h1:W4Gg3ONq6p3Jl+0s/h9Gr0j7yEgHJWWZO2bHl2tBUXM= -github.com/golangci/lint-1 v0.0.0-20191013205115-297bf364a8e0 h1:MfyDlzVjl1hoaPzPD4Gpb/QgoRfSBR0jdhwGyAWwMSA= -github.com/golangci/lint-1 v0.0.0-20191013205115-297bf364a8e0/go.mod h1:66R6K6P6VWk9I95jvqGxkqJxVWGFy9XlDwLwVz1RCFg= -github.com/golangci/maligned v0.0.0-20180506175553-b1d89398deca h1:kNY3/svz5T29MYHubXix4aDDuE3RWHkPvopM/EDv/MA= -github.com/golangci/maligned v0.0.0-20180506175553-b1d89398deca/go.mod h1:tvlJhZqDe4LMs4ZHD0oMUlt9G2LWuDGoisJTBzLMV9o= -github.com/golangci/misspell v0.4.0 h1:KtVB/hTK4bbL/S6bs64rYyk8adjmh1BygbBiaAiX+a0= -github.com/golangci/misspell v0.4.0/go.mod h1:W6O/bwV6lGDxUCChm2ykw9NQdd5bYd1Xkjo88UcWyJc= -github.com/golangci/revgrep v0.0.0-20220804021717-745bb2f7c2e6 h1:DIPQnGy2Gv2FSA4B/hh8Q7xx3B7AIDk3DAMeHclH1vQ= -github.com/golangci/revgrep v0.0.0-20220804021717-745bb2f7c2e6/go.mod h1:0AKcRCkMoKvUvlf89F6O7H2LYdhr1zBh736mBItOdRs= -github.com/golangci/unconvert v0.0.0-20180507085042-28b1c447d1f4 h1:zwtduBRr5SSWhqsYNgcuWO2kFlpdOZbP0+yRjmvPGys= -github.com/golangci/unconvert v0.0.0-20180507085042-28b1c447d1f4/go.mod h1:Izgrg8RkN3rCIMLGE9CyYmU9pY2Jer6DgANEnZ/L/cQ= -github.com/google/btree 
v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= -github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/golangci/gofmt v0.0.0-20240816233607-d8596aa466a9 h1:/1322Qns6BtQxUZDTAT4SdcoxknUki7IAoK4SAXr8ME= +github.com/golangci/gofmt v0.0.0-20240816233607-d8596aa466a9/go.mod h1:Oesb/0uFAyWoaw1U1qS5zyjCg5NP9C9iwjnI4tIsXEE= +github.com/golangci/golangci-lint v1.61.0 h1:VvbOLaRVWmyxCnUIMTbf1kDsaJbTzH20FAMXTAlQGu8= +github.com/golangci/golangci-lint v1.61.0/go.mod h1:e4lztIrJJgLPhWvFPDkhiMwEFRrWlmFbrZea3FsJyN8= +github.com/golangci/misspell v0.6.0 h1:JCle2HUTNWirNlDIAUO44hUsKhOFqGPoC4LZxlaSXDs= +github.com/golangci/misspell v0.6.0/go.mod h1:keMNyY6R9isGaSAu+4Q8NMBwMPkh15Gtc8UCVoDtAWo= +github.com/golangci/modinfo v0.3.4 h1:oU5huX3fbxqQXdfspamej74DFX0kyGLkw1ppvXoJ8GA= +github.com/golangci/modinfo v0.3.4/go.mod h1:wytF1M5xl9u0ij8YSvhkEVPP3M5Mc7XLl1pxH3B2aUM= +github.com/golangci/plugin-module-register v0.1.1 h1:TCmesur25LnyJkpsVrupv1Cdzo+2f7zX0H6Jkw1Ol6c= +github.com/golangci/plugin-module-register v0.1.1/go.mod h1:TTpqoB6KkwOJMV8u7+NyXMrkwwESJLOkfl9TxR1DGFc= +github.com/golangci/revgrep v0.5.3 h1:3tL7c1XBMtWHHqVpS5ChmiAAoe4PF/d5+ULzV9sLAzs= +github.com/golangci/revgrep v0.5.3/go.mod h1:U4R/s9dlXZsg8uJmaR1GrloUr14D7qDl8gi2iPXJH8k= +github.com/golangci/unconvert v0.0.0-20240309020433-c5143eacb3ed h1:IURFTjxeTfNFP0hTEi1YKjB/ub8zkpaOqFFMApi2EAs= +github.com/golangci/unconvert v0.0.0-20240309020433-c5143eacb3ed/go.mod h1:XLXN8bNw4CGRPaqgl3bv/lhz7bsGPh4/xSaMTbo2vkQ= github.com/google/gnostic-models v0.6.8 h1:yo/ABAfM5IMRsS1VnXjTBvUb61tFIHozhlYvRgGre9I= github.com/google/gnostic-models v0.6.8/go.mod h1:5n7qKqH0f5wFt+aWF8CW6pZLLNOfYuF5OpfBSENuI8U= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.3.1/go.mod 
h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= @@ -341,40 +281,22 @@ github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeN github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/gofuzz v1.2.0 h1:xRy4A+RhZaiKjJ1bPfwQ8sedCA+YS2YcCHW6ec7JMi0= github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= -github.com/google/martian v2.1.0+incompatible h1:/CP5g8u/VJHijgedC/Legn3BAbAaWPgecwXBIDzw5no= -github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= -github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= -github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= -github.com/google/martian/v3 v3.3.2 h1:IqNFLAmvJOgVlpdEBiQbDc2EwKW77amAycfTuWKdfvw= -github.com/google/martian/v3 v3.3.2/go.mod h1:oBOf6HBosgwRXnUGWUB05QECsc6uvmMiJ3+6W4l/CUk= -github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod 
h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= -github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= -github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20201218002935-b9804c9f04c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20210720184732-4bb14d4b1be1 h1:K6RDEckDVWvDI9JAJYCmNdQXq6neHJOYx3V6jnqNEec= -github.com/google/pprof v0.0.0-20210720184732-4bb14d4b1be1/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= -github.com/google/s2a-go v0.1.7 h1:60BLSyTrOV4/haCDW4zb1guZItoSq8foHCXrAnjBo/o= -github.com/google/s2a-go v0.1.7/go.mod h1:50CgR4k1jNlWBu4UfS4AcfhVe1r6pdZPygJ3R8F0Qdw= +github.com/google/martian/v3 v3.3.3 h1:DIhPTQrbPkgs2yJYdXU/eNACCG5DVQjySNRNlflZ9Fc= +github.com/google/martian/v3 v3.3.3/go.mod h1:iEPrYcgCF7jA9OtScMFQyAlZZ4YXTKEtJ1E6RWzmBA0= +github.com/google/pprof v0.0.0-20240827171923-fa2c70bbbfe5 h1:5iH8iuqE5apketRbSFBy+X1V0o+l+8NF1avt4HWl7cA= +github.com/google/pprof v0.0.0-20240827171923-fa2c70bbbfe5/go.mod h1:vavhavw2zAxS5dIdcRluK6cSGGPlZynqzFM8NdvU144= +github.com/google/s2a-go v0.1.8 
h1:zZDs9gcbt9ZPLV0ndSyQk6Kacx2g/X+SKYovpnz3SMM= +github.com/google/s2a-go v0.1.8/go.mod h1:6iNWHTpQ+nfNRN5E00MSdfDwVesa8hhS32PhPO8deJA= github.com/google/uuid v0.0.0-20161128191214-064e2069ce9c/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/googleapis/enterprise-certificate-proxy v0.3.2 h1:Vie5ybvEvT75RniqhfFxPRy3Bf7vr3h0cechB90XaQs= -github.com/googleapis/enterprise-certificate-proxy v0.3.2/go.mod h1:VLSiSSBs/ksPL8kq3OBOQ6WRI2QnaFynd1DCjZ62+V0= -github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= -github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= -github.com/googleapis/gax-go/v2 v2.12.0 h1:A+gCJKdRfqXkr+BIRGtZLibNXf0m1f9E4HG56etFpas= -github.com/googleapis/gax-go/v2 v2.12.0/go.mod h1:y+aIqrI5eb1YGMVJfuV3185Ts/D7qKpsEkdD5+I6QGU= -github.com/googleapis/google-cloud-go-testing v0.0.0-20200911160855-bcd43fbb19e8/go.mod h1:dvDLG8qkwmyD9a/MJJN3XJcT3xFxOKAvTZGvuZmac9g= -github.com/gordonklaus/ineffassign v0.0.0-20230610083614-0e73809eb601 h1:mrEEilTAUmaAORhssPPkxj84TsHrPMLBGW2Z4SoTxm8= -github.com/gordonklaus/ineffassign v0.0.0-20230610083614-0e73809eb601/go.mod h1:Qcp2HIAYhR7mNUVSIxZww3Guk4it82ghYcEXIAk+QT0= +github.com/googleapis/enterprise-certificate-proxy v0.3.3 h1:QRje2j5GZimBzlbhGA2V2QlGNgL8G6e+wGo/+/2bWI0= +github.com/googleapis/enterprise-certificate-proxy v0.3.3/go.mod h1:YKe7cfqYXjKGpGvmSg28/fFvhNzinZQm8DGnaburhGA= +github.com/googleapis/gax-go/v2 v2.13.0 h1:yitjD5f7jQHhyDsnhKEBU52NdvvdSeGzlAnDPT0hH1s= +github.com/googleapis/gax-go/v2 v2.13.0/go.mod h1:Z/fvTZXF8/uw7Xu5GuslPw+bplx6SS338j1Is2S+B7A= +github.com/gordonklaus/ineffassign v0.1.0 h1:y2Gd/9I7MdY1oEIt+n+rowjBNDcLQq3RsH5hwJd0f9s= 
+github.com/gordonklaus/ineffassign v0.1.0/go.mod h1:Qcp2HIAYhR7mNUVSIxZww3Guk4it82ghYcEXIAk+QT0= github.com/gostaticanalysis/analysisutil v0.7.1 h1:ZMCjoue3DtDWQ5WyU16YbjbQEQ3VuzwxALrpYd+HeKk= github.com/gostaticanalysis/analysisutil v0.7.1/go.mod h1:v21E3hY37WKMGSnbsw2S/ojApNWb6C1//mXO48CXbVc= github.com/gostaticanalysis/comment v1.4.1/go.mod h1:ih6ZxzTHLdadaiSnF5WY3dxUoXfXAlTaRzuaNDlSado= @@ -387,16 +309,9 @@ github.com/gostaticanalysis/nilerr v0.1.1/go.mod h1:wZYb6YI5YAxxq0i1+VJbY0s2YONW github.com/gostaticanalysis/testutil v0.3.1-0.20210208050101-bfb5c8eec0e4/go.mod h1:D+FIZ+7OahH3ePw/izIEeH5I06eKs1IKI4Xr64/Am3M= github.com/gostaticanalysis/testutil v0.4.0 h1:nhdCmubdmDF6VEatUNjgUZBJKWRqugoISdUv3PPQgHY= github.com/gostaticanalysis/testutil v0.4.0/go.mod h1:bLIoPefWXrRi/ssLFWX1dx7Repi5x3CuviD3dgAZaBU= -github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= -github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I= -github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= -github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= -github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= github.com/hashicorp/go-version v1.2.1/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= -github.com/hashicorp/go-version v1.6.0 h1:feTTfFNnjP967rlCxM/I9g701jU+RN74YKx2mOkIeek= -github.com/hashicorp/go-version v1.6.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= -github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= -github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/go-version v1.7.0 h1:5tqGy27NaOTB8yJKUZELlFAS/LTKJkrmONwQKeRZfjY= +github.com/hashicorp/go-version v1.7.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= github.com/hashicorp/hcl v1.0.0 
h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= github.com/hexops/gotextdiff v1.0.3 h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUqJM= @@ -406,52 +321,40 @@ github.com/huandu/xstrings v1.4.0 h1:D17IlohoQq4UcpqD7fDk80P7l+lwAmlFaBHgOipl2FU github.com/huandu/xstrings v1.4.0/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= github.com/iancoleman/strcase v0.3.0 h1:nTXanmYxhfFAMjZL34Ov6gkzEsSJZ5DbhxWjvSASxEI= github.com/iancoleman/strcase v0.3.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho= -github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= -github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/imdario/mergo v0.3.4/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA= github.com/imdario/mergo v0.3.6 h1:xTNEAn+kxVO7dTZGu0CegyqKZmoWFI0rF8UxjlB2d28= github.com/imdario/mergo v0.3.6/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA= github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI= -github.com/jgautheron/goconst v1.5.1 h1:HxVbL1MhydKs8R8n/HE5NPvzfaYmQJA3o879lE4+WcM= -github.com/jgautheron/goconst v1.5.1/go.mod h1:aAosetZ5zaeC/2EfMeRswtxUFBpe2Hr7HzkgX4fanO4= +github.com/jgautheron/goconst v1.7.1 h1:VpdAG7Ca7yvvJk5n8dMwQhfEZJh95kl/Hl9S1OI5Jkk= +github.com/jgautheron/goconst v1.7.1/go.mod h1:aAosetZ5zaeC/2EfMeRswtxUFBpe2Hr7HzkgX4fanO4= github.com/jingyugao/rowserrcheck v1.1.1 h1:zibz55j/MJtLsjP1OF4bSdgXxwL1b+Vn7Tjzq7gFzUs= github.com/jingyugao/rowserrcheck v1.1.1/go.mod h1:4yvlZSDb3IyDTUZJUmpZfm2Hwok+Dtp+nu2qOq+er9c= github.com/jinzhu/copier v0.3.5 h1:GlvfUwHk62RokgqVNvYsku0TATCF7bAHVwEXoBh3iJg= 
github.com/jinzhu/copier v0.3.5/go.mod h1:DfbEm0FYsaqBcKcFuvmOZb218JkPGtvSHsKg8S8hyyg= github.com/jirfag/go-printf-func-name v0.0.0-20200119135958-7558a9eaa5af h1:KA9BjwUk7KlCh6S9EAGWBt1oExIUv9WyNCiRz5amv48= github.com/jirfag/go-printf-func-name v0.0.0-20200119135958-7558a9eaa5af/go.mod h1:HEWGJkRDzjJY2sqdDwxccsGicWEf9BQOZsq2tV+xzM0= +github.com/jjti/go-spancheck v0.6.2 h1:iYtoxqPMzHUPp7St+5yA8+cONdyXD3ug6KK15n7Pklk= +github.com/jjti/go-spancheck v0.6.2/go.mod h1:+X7lvIrR5ZdUTkxFYqzJ0abr8Sb5LOo80uOhWNqIrYA= github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg= github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo= github.com/jmespath/go-jmespath/internal/testify v1.5.1 h1:shLQSRRSCCPj3f2gpwzGwWFoC7ycTf1rcQZHOlsJ6N8= github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U= github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= -github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4= -github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= -github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= -github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= -github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= -github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= -github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= -github.com/julienschmidt/httprouter v1.3.0/go.mod 
h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM= github.com/julz/importas v0.1.0 h1:F78HnrsjY3cR7j0etXy5+TU1Zuy7Xt08X/1aJnH5xXY= github.com/julz/importas v0.1.0/go.mod h1:oSFU2R4XK/P7kNBrnL/FEQlDGN1/6WoxXEjSSXO0DV0= +github.com/karamaru-alpha/copyloopvar v1.1.0 h1:x7gNyKcC2vRBO1H2Mks5u1VxQtYvFiym7fCjIP8RPos= +github.com/karamaru-alpha/copyloopvar v1.1.0/go.mod h1:u7CIfztblY0jZLOQZgH3oYsJzpC2A7S6u/lfgSXHy0k= github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= -github.com/kisielk/errcheck v1.6.3 h1:dEKh+GLHcWm2oN34nMvDzn1sqI0i0WxPvrgiJA5JuM8= -github.com/kisielk/errcheck v1.6.3/go.mod h1:nXw/i/MfnvRHqXa7XXmQMUB0oNFGuBrNI8d8NLy0LPw= -github.com/kisielk/gotool v1.0.0 h1:AV2c/EiW3KqPNT9ZKl07ehoAGi4C5/01Cfbblndcapg= +github.com/kisielk/errcheck v1.7.0 h1:+SbscKmWJ5mOK/bO1zS60F5I9WwZDWOfRsC4RwfwRV0= +github.com/kisielk/errcheck v1.7.0/go.mod h1:1kLL+jV4e+CFfueBmI1dSK2ADDyQnlrnrY/FqKluHJQ= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= -github.com/kkHAIKE/contextcheck v1.1.4 h1:B6zAaLhOEEcjvUgIYEqystmnFk1Oemn8bvJhbt0GMb8= -github.com/kkHAIKE/contextcheck v1.1.4/go.mod h1:1+i/gWqokIa+dm31mqGLZhZJ7Uh44DJGZVmr6QRBNJg= -github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= -github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= -github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= -github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= -github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kkHAIKE/contextcheck v1.1.5 h1:CdnJh63tcDe53vG+RebdpdXJTc9atMgGqdx8LXxiilg= +github.com/kkHAIKE/contextcheck v1.1.5/go.mod h1:O930cpht4xb1YQpK+1+AgoM3mFsvxr7uyFptcnWTYUA= github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= github.com/kr/pretty 
v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= @@ -461,20 +364,24 @@ github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/kulti/thelper v0.6.3 h1:ElhKf+AlItIu+xGnI990no4cE2+XaSu1ULymV2Yulxs= github.com/kulti/thelper v0.6.3/go.mod h1:DsqKShOvP40epevkFrvIwkCMNYxMeTNjdWL4dqWHZ6I= -github.com/kunwardeep/paralleltest v1.0.7 h1:2uCk94js0+nVNQoHZNLBkAR1DQJrVzw6T0RMzJn55dQ= -github.com/kunwardeep/paralleltest v1.0.7/go.mod h1:2C7s65hONVqY7Q5Efj5aLzRCNLjw2h4eMc9EcypGjcY= +github.com/kunwardeep/paralleltest v1.0.10 h1:wrodoaKYzS2mdNVnc4/w31YaXFtsc21PCTdvWJ/lDDs= +github.com/kunwardeep/paralleltest v1.0.10/go.mod h1:2C7s65hONVqY7Q5Efj5aLzRCNLjw2h4eMc9EcypGjcY= github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= github.com/kyoh86/exportloopref v0.1.11 h1:1Z0bcmTypkL3Q4k+IDHMWTcnCliEZcaPiIe0/ymEyhQ= github.com/kyoh86/exportloopref v0.1.11/go.mod h1:qkV4UF1zGl6EkF1ox8L5t9SwyeBAZ3qLMd6up458uqA= -github.com/ldez/gomoddirectives v0.2.3 h1:y7MBaisZVDYmKvt9/l1mjNCiSA1BVn34U0ObUcJwlhA= -github.com/ldez/gomoddirectives v0.2.3/go.mod h1:cpgBogWITnCfRq2qGoDkKMEVSaarhdBr6g8G04uz6d0= +github.com/lasiar/canonicalheader v1.1.1 h1:wC+dY9ZfiqiPwAexUApFush/csSPXeIi4QqyxXmng8I= +github.com/lasiar/canonicalheader v1.1.1/go.mod h1:cXkb3Dlk6XXy+8MVQnF23CYKWlyA7kfQhSw2CcZtZb0= +github.com/ldez/gomoddirectives v0.2.4 h1:j3YjBIjEBbqZ0NKtBNzr8rtMHTOrLPeiwTkfUJZ3alg= +github.com/ldez/gomoddirectives v0.2.4/go.mod h1:oWu9i62VcQDYp9EQ0ONTfqLNh+mDLWWDO+SO0qSQw5g= github.com/ldez/tagliatelle v0.5.0 h1:epgfuYt9v0CG3fms0pEgIMNPuFf/LpPIfjk4kyqSioo= github.com/ldez/tagliatelle v0.5.0/go.mod h1:rj1HmWiL1MiKQuOONhd09iySTEkUuE/8+5jtPYz9xa4= -github.com/leonklingele/grouper v1.1.1 
h1:suWXRU57D4/Enn6pXR0QVqqWWrnJ9Osrz+5rjt8ivzU= -github.com/leonklingele/grouper v1.1.1/go.mod h1:uk3I3uDfi9B6PeUjsCKi6ndcf63Uy7snXgR4yDYQVDY= +github.com/leonklingele/grouper v1.1.2 h1:o1ARBDLOmmasUaNDesWqWCIFH3u7hoFlM84YrjT3mIY= +github.com/leonklingele/grouper v1.1.2/go.mod h1:6D0M/HVkhs2yRKRFZUoGjeDy7EZTfFBE9gl4kjmIGkA= github.com/lufeee/execinquery v1.2.1 h1:hf0Ems4SHcUGBxpGN7Jz78z1ppVkP/837ZlETPCEtOM= github.com/lufeee/execinquery v1.2.1/go.mod h1:EC7DrEKView09ocscGHC+apXMIaorh4xqSxS/dy8SbM= +github.com/macabu/inamedparam v0.1.3 h1:2tk/phHkMlEL/1GNe/Yf6kkR/hkcUdAEY3L0hjYV1Mk= +github.com/macabu/inamedparam v0.1.3/go.mod h1:93FLICAIk/quk7eaPPQvbzihUdn/QkGDwIZEoLtpH6I= github.com/magiconair/properties v1.8.7 h1:IeQXZAiQcpL9mgcAe1Nu6cX9LLw6ExEHKjN0VQdvPDY= github.com/magiconair/properties v1.8.7/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0= github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0= @@ -492,17 +399,14 @@ github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxec github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= -github.com/mattn/go-isatty v0.0.17 h1:BTarxUcIeDqL27Mc+vyvdWYSL28zpIhv3RoTdsLMPng= -github.com/mattn/go-isatty v0.0.17/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= +github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= +github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= github.com/mattn/go-runewidth v0.0.9 h1:Lm995f3rfxdpd6TSmuVCHVb/QhupuXlYr8sCI/QdE+0= github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI= -github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= 
github.com/matttproud/golang_protobuf_extensions v1.0.4 h1:mmDVorXM7PCGKw94cs5zkfA9PSy5pEvNWRP0ET0TIVo= github.com/matttproud/golang_protobuf_extensions v1.0.4/go.mod h1:BSXmuO+STAnVfrANrmjBb36TMTDstsz7MSK+HVaYKv4= -github.com/mbilski/exhaustivestruct v1.2.0 h1:wCBmUnSYufAHO6J4AVWY6ff+oxWxsVFrwgOdMUQePUo= -github.com/mbilski/exhaustivestruct v1.2.0/go.mod h1:OeTBVxQWoEmB2J2JCHmXWPJ0aksxSUOUy+nvtVEfzXc= -github.com/mgechev/revive v1.3.2 h1:Wb8NQKBaALBJ3xrrj4zpwJwqwNA6nDpyJSEQWcCka6U= -github.com/mgechev/revive v1.3.2/go.mod h1:UCLtc7o5vg5aXCwdUTU1kEBQ1v+YXPAkYDIDXbrs5I0= +github.com/mgechev/revive v1.3.9 h1:18Y3R4a2USSBF+QZKFQwVkBROUda7uoBlkEuBD+YD1A= +github.com/mgechev/revive v1.3.9/go.mod h1:+uxEIr5UH0TjXWHTno3xh4u7eg6jDpXKzQccA9UGhHU= github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y= github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= @@ -510,140 +414,116 @@ github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RR github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= -github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= -github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= -github.com/moricho/tparallel v0.3.1 h1:fQKD4U1wRMAYNngDonW5XupoB/ZGJHdpzrWqgyg9krA= -github.com/moricho/tparallel 
v0.3.1/go.mod h1:leENX2cUv7Sv2qDgdi0D0fCftN8fRC67Bcn8pqzeYNI= +github.com/moricho/tparallel v0.3.2 h1:odr8aZVFA3NZrNybggMkYO3rgPRcqjeQUlBBFVxKHTI= +github.com/moricho/tparallel v0.3.2/go.mod h1:OQ+K3b4Ln3l2TZveGCywybl68glfLEwFGqvnjok8b+U= github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA= github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ= -github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= -github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= github.com/mwitkow/go-proto-validators v0.0.0-20180403085117-0950a7990007 h1:28i1IjGcx8AofiB4N3q5Yls55VEaitzuEPkFJEVgGkA= github.com/mwitkow/go-proto-validators v0.0.0-20180403085117-0950a7990007/go.mod h1:m2XC9Qq0AlmmVksL6FktJCdTYyLk7V3fKyp0sl1yWQo= github.com/nakabonne/nestif v0.3.1 h1:wm28nZjhQY5HyYPx+weN3Q65k6ilSBxDb8v5S81B81U= github.com/nakabonne/nestif v0.3.1/go.mod h1:9EtoZochLn5iUprVDmDjqGKPofoUEBL8U4Ngq6aY7OE= -github.com/nbutton23/zxcvbn-go v0.0.0-20210217022336-fa2cb2858354 h1:4kuARK6Y6FxaNu/BnU2OAaLF86eTVhP2hjTB6iMvItA= -github.com/nbutton23/zxcvbn-go v0.0.0-20210217022336-fa2cb2858354/go.mod h1:KSVJerMDfblTH7p5MZaTt+8zaT2iEk3AkVb9PQdZuE8= github.com/ncw/swift v1.0.53 h1:luHjjTNtekIEvHg5KdAFIBaH7bWfNkefwFnpDffSIks= github.com/ncw/swift v1.0.53/go.mod h1:23YIA4yWVnGwv2dQlN4bB7egfYX6YLn0Yo/S6zZO/ZM= -github.com/nishanths/exhaustive v0.11.0 h1:T3I8nUGhl/Cwu5Z2hfc92l0e04D2GEW6e0l8pzda2l0= -github.com/nishanths/exhaustive v0.11.0/go.mod h1:RqwDsZ1xY0dNdqHho2z6X+bgzizwbLYOWnZbbl2wLB4= +github.com/nishanths/exhaustive v0.12.0 h1:vIY9sALmw6T/yxiASewa4TQcFsVYZQQRUQJhKRf3Swg= +github.com/nishanths/exhaustive v0.12.0/go.mod h1:mEZ95wPIZW+x8kC4TgC+9YCUgiST7ecevsVDTgc2obs= github.com/nishanths/predeclared v0.2.2 h1:V2EPdZPliZymNAn79T8RkNApBjMmVKh5XRpLm/w98Vk= 
github.com/nishanths/predeclared v0.2.2/go.mod h1:RROzoN6TnGQupbC+lqggsOlcgysk3LMK/HI84Mp280c= -github.com/nunnatsa/ginkgolinter v0.12.1 h1:vwOqb5Nu05OikTXqhvLdHCGcx5uthIYIl0t79UVrERQ= -github.com/nunnatsa/ginkgolinter v0.12.1/go.mod h1:AK8Ab1PypVrcGUusuKD8RDcl2KgsIwvNaaxAlyHSzso= +github.com/nunnatsa/ginkgolinter v0.16.2 h1:8iLqHIZvN4fTLDC0Ke9tbSZVcyVHoBs0HIbnVSxfHJk= +github.com/nunnatsa/ginkgolinter v0.16.2/go.mod h1:4tWRinDN1FeJgU+iJANW/kz7xKN5nYRAOfJDQUS9dOQ= github.com/olekukonko/tablewriter v0.0.5 h1:P2Ga83D34wi1o9J6Wh1mRuqd4mF/x/lgBS7N7AbDhec= github.com/olekukonko/tablewriter v0.0.5/go.mod h1:hPp6KlRPjbx+hW8ykQs1w3UBbZlj6HuIJcUGPhkA7kY= -github.com/onsi/ginkgo/v2 v2.11.0 h1:WgqUCUt/lT6yXoQ8Wef0fsNn5cAuMK7+KT9UFRz2tcU= -github.com/onsi/ginkgo/v2 v2.11.0/go.mod h1:ZhrRA5XmEE3x3rhlzamx/JJvujdZoJ2uvgI7kR0iZvM= -github.com/onsi/gomega v1.27.10 h1:naR28SdDFlqrG6kScpT8VWpu1xWY5nJRCF3XaYyBjhI= -github.com/onsi/gomega v1.27.10/go.mod h1:RsS8tutOdbdgzbPtzzATp12yT7kM5I5aElG3evPbQ0M= -github.com/otiai10/copy v1.2.0 h1:HvG945u96iNadPoG2/Ja2+AUJeW5YuFQMixq9yirC+k= +github.com/onsi/ginkgo/v2 v2.20.2 h1:7NVCeyIWROIAheY21RLS+3j2bb52W0W82tkberYytp4= +github.com/onsi/ginkgo/v2 v2.20.2/go.mod h1:K9gyxPIlb+aIvnZ8bd9Ak+YP18w3APlR+5coaZoE2ag= +github.com/onsi/gomega v1.34.2 h1:pNCwDkzrsv7MS9kpaQvVb1aVLahQXyJ/Tv5oAZMI3i8= +github.com/onsi/gomega v1.34.2/go.mod h1:v1xfxRgk0KIsG+QOdm7p8UosrOzPYRo60fd3B/1Dukc= github.com/otiai10/copy v1.2.0/go.mod h1:rrF5dJ5F0t/EWSYODDu4j9/vEeYHMkc8jt0zJChqQWw= +github.com/otiai10/copy v1.14.0 h1:dCI/t1iTdYGtkvCuBG2BgR6KZa83PTclw4U5n2wAllU= +github.com/otiai10/copy v1.14.0/go.mod h1:ECfuL02W+/FkTWZWgQqXPWZgW9oeKCSQ5qVfSc4qc4w= github.com/otiai10/curr v0.0.0-20150429015615-9b4961190c95/go.mod h1:9qAhocn7zKJG+0mI8eUu6xqkFDYS2kb2saOteoSB3cE= github.com/otiai10/curr v1.0.0/go.mod h1:LskTG5wDwr8Rs+nNQ+1LlxRjAtTZZjtJW4rMXl6j4vs= github.com/otiai10/mint v1.3.0/go.mod h1:F5AjcsTsWUqX+Na9fpHb52P8pcRX2CI6A3ctIT91xUo= github.com/otiai10/mint v1.3.1/go.mod 
h1:/yxELlJQ0ufhjUwhshSj+wFjZ78CnZ48/1wtmBH1OTc= github.com/pascaldekloe/name v0.0.0-20180628100202-0fd16699aae1 h1:/I3lTljEEDNYLho3/FUB7iD/oc2cEFgVmbHzV+O0PtU= github.com/pascaldekloe/name v0.0.0-20180628100202-0fd16699aae1/go.mod h1:eD5JxqMiuNYyFNmyY9rkJ/slN8y59oEu4Ei7F8OoKWQ= -github.com/pelletier/go-toml/v2 v2.0.6 h1:nrzqCb7j9cDFj2coyLNLaZuJTLjWjlaz6nvTvIwycIU= -github.com/pelletier/go-toml/v2 v2.0.6/go.mod h1:eumQOmlWiOPt5WriQQqoM5y18pDHwha2N+QD+EUNTek= +github.com/pelletier/go-toml/v2 v2.2.3 h1:YmeHyLY8mFWbdkNWwpr+qIL2bEqT0o95WSdkNHvL12M= +github.com/pelletier/go-toml/v2 v2.2.3/go.mod h1:MfCQTFTvCcUyyvvwm1+G6H/jORL20Xlb6rzQu9GuUkc= github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c h1:+mdjkGKdHQG3305AYmdv1U2eRNDiU2ErMBj1gwrq8eQ= github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c/go.mod h1:7rwL4CYBLnjLxUqIJNnCWiEdr3bn6IUYi15bNlnbCCU= -github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= -github.com/pkg/sftp v1.13.1/go.mod h1:3HaPG6Dq1ILlpPZRO0HVMrsydcdLt6HRDccSgb87qRg= github.com/pmezard/go-difflib v0.0.0-20151028094244-d8ed2627bdf0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/polyfloyd/go-errorlint v1.4.2 h1:CU+O4181IxFDdPH6t/HT7IiDj1I7zxNi1RIUxYwn8d0= -github.com/polyfloyd/go-errorlint v1.4.2/go.mod h1:k6fU/+fQe38ednoZS51T7gSIGQW1y94d6TkSr35OzH8= -github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= -github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo= -github.com/prometheus/client_golang 
v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP4j5+D6mVACh+pe2M= -github.com/prometheus/client_golang v1.11.0/go.mod h1:Z6t4BnS23TR94PD6BsDNk8yVqroYurpAkEiz0P2BEV0= -github.com/prometheus/client_golang v1.12.1/go.mod h1:3Z9XVyYiZYEO+YQWt3RD2R3jrbd179Rt297l4aS6nDY= +github.com/polyfloyd/go-errorlint v1.6.0 h1:tftWV9DE7txiFzPpztTAwyoRLKNj9gpVm2cg8/OwcYY= +github.com/polyfloyd/go-errorlint v1.6.0/go.mod h1:HR7u8wuP1kb1NeN1zqTd1ZMlqUKPPHF+Id4vIPvDqVw= +github.com/prashantv/gostub v1.1.0 h1:BTyx3RfQjRHnUWaGF9oQos79AlQ5k8WNktv7VGvVH4g= +github.com/prashantv/gostub v1.1.0/go.mod h1:A5zLQHz7ieHGG7is6LLXLz7I8+3LZzsrV0P1IAHhP5U= github.com/prometheus/client_golang v1.16.0 h1:yk/hx9hDbrGHovbci4BY+pRMfSuuat626eFsHb7tmT8= github.com/prometheus/client_golang v1.16.0/go.mod h1:Zsulrv/L9oM40tJ7T815tM89lFEugiJ9HzIqaAx4LKc= -github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= -github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= -github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/client_model v0.4.0 h1:5lQXD3cAg1OXBf4Wq03gTrXHeaV0TQvGfUooCfx1yqY= github.com/prometheus/client_model v0.4.0/go.mod h1:oMQmHW1/JoDwqLtg57MGgP/Fb1CJEYF2imWWhWtMkYU= -github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= -github.com/prometheus/common v0.10.0/go.mod h1:Tlit/dnDKsSWFlCLTWaA1cyBgKHSMdTB80sz/V91rCo= -github.com/prometheus/common v0.26.0/go.mod h1:M7rCNAaPfAosfx8veZJCuw84e35h3Cfd9VFqTh1DIvc= -github.com/prometheus/common v0.32.1/go.mod h1:vu+V0TpY+O6vW9J44gczi3Ap/oXXR10b+M/gUGO4Hls= github.com/prometheus/common v0.44.0 h1:+5BrQJwiBB9xsMygAB3TNvpQKOwlkc25LbISbrdOOfY= github.com/prometheus/common v0.44.0/go.mod 
h1:ofAIvZbQ1e/nugmZGz4/qCb9Ap1VoSTIO7x0VV9VvuY= -github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= -github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= -github.com/prometheus/procfs v0.1.3/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU= -github.com/prometheus/procfs v0.6.0/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA= -github.com/prometheus/procfs v0.7.3/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA= github.com/prometheus/procfs v0.10.1 h1:kYK1Va/YMlutzCGazswoHKo//tZVlFpKYh+PymziUAg= github.com/prometheus/procfs v0.10.1/go.mod h1:nwNm2aOCAYw8uTR/9bWRREkZFxAUcWzPHWJq+XBB/FM= github.com/pseudomuto/protokit v0.2.0 h1:hlnBDcy3YEDXH7kc9gV+NLaN0cDzhDvD1s7Y6FZ8RpM= github.com/pseudomuto/protokit v0.2.0/go.mod h1:2PdH30hxVHsup8KpBTOXTBeMVhJZVio3Q8ViKSAXT0Q= -github.com/quasilyte/go-ruleguard v0.3.19 h1:tfMnabXle/HzOb5Xe9CUZYWXKfkS1KwRmZyPmD9nVcc= -github.com/quasilyte/go-ruleguard v0.3.19/go.mod h1:lHSn69Scl48I7Gt9cX3VrbsZYvYiBYszZOZW4A+oTEw= +github.com/quasilyte/go-ruleguard v0.4.3-0.20240823090925-0fe6f58b47b1 h1:+Wl/0aFp0hpuHM3H//KMft64WQ1yX9LdJY64Qm/gFCo= +github.com/quasilyte/go-ruleguard v0.4.3-0.20240823090925-0fe6f58b47b1/go.mod h1:GJLgqsLeo4qgavUoL8JeGFNS7qcisx3awV/w9eWTmNI= +github.com/quasilyte/go-ruleguard/dsl v0.3.22 h1:wd8zkOhSNr+I+8Qeciml08ivDt1pSXe60+5DqOpCjPE= +github.com/quasilyte/go-ruleguard/dsl v0.3.22/go.mod h1:KeCP03KrjuSO0H1kTuZQCWlQPulDV6YMIXmpQss17rU= github.com/quasilyte/gogrep v0.5.0 h1:eTKODPXbI8ffJMN+W2aE0+oL0z/nh8/5eNdiO34SOAo= github.com/quasilyte/gogrep v0.5.0/go.mod h1:Cm9lpz9NZjEoL1tgZ2OgeUKPIxL1meE7eo60Z6Sk+Ng= github.com/quasilyte/regex/syntax v0.0.0-20210819130434-b3f0c404a727 h1:TCg2WBOl980XxGFEZSS6KlBGIV0diGdySzxATTWoqaU= github.com/quasilyte/regex/syntax v0.0.0-20210819130434-b3f0c404a727/go.mod h1:rlzQ04UMyJXu/aOvhd8qT+hvDrFpiwqp8MRXDY9szc0= github.com/quasilyte/stdinfo 
v0.0.0-20220114132959-f7386bf02567 h1:M8mH9eK4OUR4lu7Gd+PU1fV2/qnDNfzT635KRSObncs= github.com/quasilyte/stdinfo v0.0.0-20220114132959-f7386bf02567/go.mod h1:DWNGW8A4Y+GyBgPuaQJuWiy0XYftx4Xm/y5Jqk9I6VQ= -github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU5NdKM8= github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4= github.com/rs/xid v1.4.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg= github.com/rs/zerolog v1.29.0 h1:Zes4hju04hjbvkVkOhdl2HpZa+0PmVwigmo8XoORE5w= github.com/rs/zerolog v1.29.0/go.mod h1:NILgTygv/Uej1ra5XxGf82ZFSLk58MFGAUS2o6usyD0= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= -github.com/ryancurrah/gomodguard v1.3.0 h1:q15RT/pd6UggBXVBuLps8BXRvl5GPBcwVA7BJHMLuTw= -github.com/ryancurrah/gomodguard v1.3.0/go.mod h1:ggBxb3luypPEzqVtq33ee7YSN35V28XeGnid8dnni50= -github.com/ryanrolds/sqlclosecheck v0.4.0 h1:i8SX60Rppc1wRuyQjMciLqIzV3xnoHB7/tXbr6RGYNI= -github.com/ryanrolds/sqlclosecheck v0.4.0/go.mod h1:TBRRjzL31JONc9i4XMinicuo+s+E8yKZ5FN8X3G6CKQ= +github.com/ryancurrah/gomodguard v1.3.5 h1:cShyguSwUEeC0jS7ylOiG/idnd1TpJ1LfHGpV3oJmPU= +github.com/ryancurrah/gomodguard v1.3.5/go.mod h1:MXlEPQRxgfPQa62O8wzK3Ozbkv9Rkqr+wKjSxTdsNJE= +github.com/ryanrolds/sqlclosecheck v0.5.1 h1:dibWW826u0P8jNLsLN+En7+RqWWTYrjCB9fJfSfdyCU= +github.com/ryanrolds/sqlclosecheck v0.5.1/go.mod h1:2g3dUjoS6AL4huFdv6wn55WpLIDjY7ZgUR4J8HOO/XQ= github.com/sanposhiho/wastedassign/v2 v2.0.7 h1:J+6nrY4VW+gC9xFzUc+XjPD3g3wF3je/NsJFwFK7Uxc= github.com/sanposhiho/wastedassign/v2 v2.0.7/go.mod h1:KyZ0MWTwxxBmfwn33zh3k1dmsbF2ud9pAAGfoLfjhtI= +github.com/santhosh-tekuri/jsonschema/v5 v5.3.1 h1:lZUw3E0/J3roVtGQ+SCrUrg3ON6NgVqpn3+iol9aGu4= +github.com/santhosh-tekuri/jsonschema/v5 v5.3.1/go.mod h1:uToXkOrWAZ6/Oc07xWQrPOhJotwFIyu2bBVN41fcDUY= 
github.com/sashamelentyev/interfacebloat v1.1.0 h1:xdRdJp0irL086OyW1H/RTZTr1h/tMEOsumirXcOJqAw= github.com/sashamelentyev/interfacebloat v1.1.0/go.mod h1:+Y9yU5YdTkrNvoX0xHc84dxiN1iBi9+G8zZIhPVoNjQ= -github.com/sashamelentyev/usestdlibvars v1.23.0 h1:01h+/2Kd+NblNItNeux0veSL5cBF1jbEOPrEhDzGYq0= -github.com/sashamelentyev/usestdlibvars v1.23.0/go.mod h1:YPwr/Y1LATzHI93CqoPUN/2BzGQ/6N/cl/KwgR0B/aU= -github.com/securego/gosec/v2 v2.16.0 h1:Pi0JKoasQQ3NnoRao/ww/N/XdynIB9NRYYZT5CyOs5U= -github.com/securego/gosec/v2 v2.16.0/go.mod h1:xvLcVZqUfo4aAQu56TNv7/Ltz6emAOQAEsrZrt7uGlI= +github.com/sashamelentyev/usestdlibvars v1.27.0 h1:t/3jZpSXtRPRf2xr0m63i32ZrusyurIGT9E5wAvXQnI= +github.com/sashamelentyev/usestdlibvars v1.27.0/go.mod h1:9nl0jgOfHKWNFS43Ojw0i7aRoS4j6EBye3YBhmAIRF8= +github.com/securego/gosec/v2 v2.21.2 h1:deZp5zmYf3TWwU7A7cR2+SolbTpZ3HQiwFqnzQyEl3M= +github.com/securego/gosec/v2 v2.21.2/go.mod h1:au33kg78rNseF5PwPnTWhuYBFf534bvJRvOrgZ/bFzU= github.com/shazow/go-diff v0.0.0-20160112020656-b6b7b6733b8c h1:W65qqJCIOVP4jpqPQ0YvHYKwcMEMVWIzWC5iNQQfBTU= github.com/shazow/go-diff v0.0.0-20160112020656-b6b7b6733b8c/go.mod h1:/PevMnwAxekIXwN8qQyfc5gl2NlkB3CQlkizAbOkeBs= github.com/shurcooL/go v0.0.0-20180423040247-9e1955d9fb6e/go.mod h1:TDJrrUr11Vxrven61rcy3hJMUqaf/CLWYhHNPmT14Lk= github.com/shurcooL/go-goon v0.0.0-20170922171312-37c2f522c041/go.mod h1:N5mDOmsrJOB+vfqUK+7DmDyjhSLIIBnXo9lvZJj3MWQ= -github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= -github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= -github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88= github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= github.com/sivchari/containedctx v1.0.3 h1:x+etemjbsh2fB5ewm5FeLNi5bUjK0V8n0RB+Wwfd0XE= github.com/sivchari/containedctx v1.0.3/go.mod 
h1:c1RDvCbnJLtH4lLcYD/GqwiBSSf4F5Qk0xld2rBqzJ4= -github.com/sivchari/nosnakecase v1.7.0 h1:7QkpWIRMe8x25gckkFd2A5Pi6Ymo0qgr4JrhGt95do8= -github.com/sivchari/nosnakecase v1.7.0/go.mod h1:CwDzrzPea40/GB6uynrNLiorAlgFRvRbFSgJx2Gs+QY= -github.com/sivchari/tenv v1.7.1 h1:PSpuD4bu6fSmtWMxSGWcvqUUgIn7k3yOJhOIzVWn8Ak= -github.com/sivchari/tenv v1.7.1/go.mod h1:64yStXKSOxDfX47NlhVwND4dHwfZDdbp2Lyl018Icvg= +github.com/sivchari/tenv v1.10.0 h1:g/hzMA+dBCKqGXgW8AV/1xIWhAvDrx0zFKNR48NFMg0= +github.com/sivchari/tenv v1.10.0/go.mod h1:tdY24masnVoZFxYrHv/nD6Tc8FbkEtAQEEziXpyMgqY= github.com/sonatard/noctx v0.0.2 h1:L7Dz4De2zDQhW8S0t+KUjY0MAQJd6SgVwhzNIc4ok00= github.com/sonatard/noctx v0.0.2/go.mod h1:kzFz+CzWSjQ2OzIm46uJZoXuBpa2+0y3T36U18dWqIo= github.com/sourcegraph/go-diff v0.7.0 h1:9uLlrd5T46OXs5qpp8L/MTltk0zikUGi0sNNyCpA8G0= github.com/sourcegraph/go-diff v0.7.0/go.mod h1:iBszgVvyxdc8SFZ7gm69go2KDdt3ag071iBaWPF6cjs= github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72 h1:qLC7fQah7D6K1B0ujays3HV9gkFtllcxhzImRR7ArPQ= github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= -github.com/spf13/afero v1.10.0 h1:EaGW2JJh15aKOejeuJ+wpFSHnbd7GE6Wvp3TsNhb6LY= -github.com/spf13/afero v1.10.0/go.mod h1:UBogFpq8E9Hx+xc5CNTTEpTnuHVmXDwZcZcE1eb/UhQ= +github.com/spf13/afero v1.11.0 h1:WJQKhtpdm3v2IzqG8VMqrr6Rf3UYpEF239Jy9wNepM8= +github.com/spf13/afero v1.11.0/go.mod h1:GH9Y3pIexgf1MTIWtNGyogA5MwRIDXGUr+hbWNoBjkY= github.com/spf13/cast v1.5.0 h1:rj3WzYc11XZaIZMPKmwP96zkFEnnAmV8s6XbB2aY32w= github.com/spf13/cast v1.5.0/go.mod h1:SpXXQ5YoyJw6s3/6cMTQuxvgRl3PCJiyaX9p6b155UU= -github.com/spf13/cobra v1.7.0 h1:hyqWnYt1ZQShIddO5kBpj3vu05/++x6tJ6dg8EC572I= -github.com/spf13/cobra v1.7.0/go.mod h1:uLxZILRyS/50WlhOIKD7W6V5bgeIt+4sICxh6uRMrb0= +github.com/spf13/cobra v1.8.1 h1:e5/vxKd/rZsfSJMUX1agtjeTDf+qv1/JdBF8gg5k9ZM= +github.com/spf13/cobra v1.8.1/go.mod h1:wHxEcudfqmLYa8iTfL+OuZPbBZkmvliBWKIezN3kD9Y= 
github.com/spf13/jwalterweatherman v1.1.0 h1:ue6voC5bR5F8YxI5S67j9i582FU4Qvo2bmqnqMYADFk= github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo= github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= @@ -655,13 +535,11 @@ github.com/ssgreg/nlreturn/v2 v2.2.1/go.mod h1:E/iiPB78hV7Szg2YfRgyIrk1AD6JVMTRk github.com/stbenjam/no-sprintf-host-port v0.1.1 h1:tYugd/yrm1O0dV+ThCbaKZh195Dfm07ysF0U6JQXczc= github.com/stbenjam/no-sprintf-host-port v0.1.1/go.mod h1:TLhvtIvONRzdmkFiio4O8LHsN9N74I+PhRquPsxpL0I= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY= github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= github.com/stretchr/testify v0.0.0-20170130113145-4d4bfba8f1d1/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= -github.com/stretchr/testify v1.1.4/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= @@ -670,45 +548,43 @@ github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/ github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= -github.com/stretchr/testify v1.8.2/go.mod 
h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/subosito/gotenv v1.4.2 h1:X1TuBLAMDFbaTAChgCBLu3DU3UPyELpnF2jjJ2cz/S8= github.com/subosito/gotenv v1.4.2/go.mod h1:ayKnFf/c6rvx/2iiLrJUk1e6plDbT3edrFNGqEflhK0= -github.com/t-yuki/gocover-cobertura v0.0.0-20180217150009-aaee18c8195c h1:+aPplBwWcHBo6q9xrfWdMrT9o4kltkmmvpemgIjep/8= -github.com/t-yuki/gocover-cobertura v0.0.0-20180217150009-aaee18c8195c/go.mod h1:SbErYREK7xXdsRiigaQiQkI9McGRzYMvlKYaP3Nimdk= github.com/tdakkota/asciicheck v0.2.0 h1:o8jvnUANo0qXtnslk2d3nMKTFNlOnJjRrNcj0j9qkHM= github.com/tdakkota/asciicheck v0.2.0/go.mod h1:Qb7Y9EgjCLJGup51gDHFzbI08/gbGhL/UVhYIPWG2rg= github.com/tenntenn/modver v1.0.1 h1:2klLppGhDgzJrScMpkj9Ujy3rXPUspSjAcev9tSEBgA= github.com/tenntenn/modver v1.0.1/go.mod h1:bePIyQPb7UeioSRkw3Q0XeMhYZSMx9B8ePqg6SAMGH0= github.com/tenntenn/text/transform v0.0.0-20200319021203-7eef512accb3 h1:f+jULpRQGxTSkNYKJ51yaw6ChIqO+Je8UqsTKN/cDag= github.com/tenntenn/text/transform v0.0.0-20200319021203-7eef512accb3/go.mod h1:ON8b8w4BN/kE1EOhwT0o+d62W65a6aPw1nouo9LMgyY= -github.com/tetafro/godot v1.4.11 h1:BVoBIqAf/2QdbFmSwAWnaIqDivZdOV0ZRwEm6jivLKw= -github.com/tetafro/godot v1.4.11/go.mod h1:LR3CJpxDVGlYOWn3ZZg1PgNZdTUvzsZWu8xaEohUpn8= +github.com/tetafro/godot v1.4.17 h1:pGzu+Ye7ZUEFx7LHU0dAKmCOXWsPjl7qA6iMGndsjPs= +github.com/tetafro/godot v1.4.17/go.mod h1:2oVxTBSftRTh4+MVfUaUXR6bn2GDXCaMcOG4Dk3rfio= github.com/timakin/bodyclose v0.0.0-20230421092635-574207250966 h1:quvGphlmUVU+nhpFa4gg4yJyTRJ13reZMDHrKwYw53M= github.com/timakin/bodyclose v0.0.0-20230421092635-574207250966/go.mod h1:27bSVNWSBOHm+qRp1T9qzaIpsWEP6TbUnei/43HK+PQ= github.com/timonwong/loggercheck v0.9.4 h1:HKKhqrjcVj8sxL7K77beXh0adEm6DLjV/QOGeMXEVi4= 
github.com/timonwong/loggercheck v0.9.4/go.mod h1:caz4zlPcgvpEkXgVnAJGowHAMW2NwHaNlpS8xDbVhTg= -github.com/tomarrell/wrapcheck/v2 v2.8.1 h1:HxSqDSN0sAt0yJYsrcYVoEeyM4aI9yAm3KQpIXDJRhQ= -github.com/tomarrell/wrapcheck/v2 v2.8.1/go.mod h1:/n2Q3NZ4XFT50ho6Hbxg+RV1uyo2Uow/Vdm9NQcl5SE= +github.com/tomarrell/wrapcheck/v2 v2.9.0 h1:801U2YCAjLhdN8zhZ/7tdjB3EnAoRlJHt/s+9hijLQ4= +github.com/tomarrell/wrapcheck/v2 v2.9.0/go.mod h1:g9vNIyhb5/9TQgumxQyOEqDHsmGYcGsVMOx/xGkqdMo= github.com/tommy-muehle/go-mnd/v2 v2.5.1 h1:NowYhSdyE/1zwK9QCLeRb6USWdoif80Ie+v+yU8u1Zw= github.com/tommy-muehle/go-mnd/v2 v2.5.1/go.mod h1:WsUAkMJMYww6l/ufffCD3m+P7LEvr8TnZn9lwVDlgzw= -github.com/ultraware/funlen v0.0.3 h1:5ylVWm8wsNwH5aWo9438pwvsK0QiqVuUrt9bn7S/iLA= -github.com/ultraware/funlen v0.0.3/go.mod h1:Dp4UiAus7Wdb9KUZsYWZEWiRzGuM2kXM1lPbfaF6xhA= -github.com/ultraware/whitespace v0.0.5 h1:hh+/cpIcopyMYbZNVov9iSxvJU3OYQg78Sfaqzi/CzI= -github.com/ultraware/whitespace v0.0.5/go.mod h1:aVMh/gQve5Maj9hQ/hg+F75lr/X5A89uZnzAmWSineA= -github.com/uudashr/gocognit v1.0.6 h1:2Cgi6MweCsdB6kpcVQp7EW4U23iBFQWfTXiWlyp842Y= -github.com/uudashr/gocognit v1.0.6/go.mod h1:nAIUuVBnYU7pcninia3BHOvQkpQCeO76Uscky5BOwcY= +github.com/ultraware/funlen v0.1.0 h1:BuqclbkY6pO+cvxoq7OsktIXZpgBSkYTQtmwhAK81vI= +github.com/ultraware/funlen v0.1.0/go.mod h1:XJqmOQja6DpxarLj6Jj1U7JuoS8PvL4nEqDaQhy22p4= +github.com/ultraware/whitespace v0.1.1 h1:bTPOGejYFulW3PkcrqkeQwOd6NKOOXvmGD9bo/Gk8VQ= +github.com/ultraware/whitespace v0.1.1/go.mod h1:XcP1RLD81eV4BW8UhQlpaR+SDc2givTvyI8a586WjW8= +github.com/uudashr/gocognit v1.1.3 h1:l+a111VcDbKfynh+airAy/DJQKaXh2m9vkoysMPSZyM= +github.com/uudashr/gocognit v1.1.3/go.mod h1:aKH8/e8xbTRBwjbCkwZ8qt4l2EpKXl31KMHgSS+lZ2U= github.com/vektra/mockery/v2 v2.40.3 h1:IZ2lydSDFsY0khnEsbSu13VLcqSsa6UYSS/8F+uOJmo= github.com/vektra/mockery/v2 v2.40.3/go.mod h1:KYBZF/7sqOa86BaOZPYsoCZWEWLS90a5oBLg2pVudxY= -github.com/xen0n/gosmopolitan v1.2.1 h1:3pttnTuFumELBRSh+KQs1zcz4fN6Zy7aB0xlnQSn1Iw= 
-github.com/xen0n/gosmopolitan v1.2.1/go.mod h1:JsHq/Brs1o050OOdmzHeOr0N7OtlnKRAGAsElF8xBQA= +github.com/xen0n/gosmopolitan v1.2.2 h1:/p2KTnMzwRexIW8GlKawsTWOxn7UHA+jCMF/V8HHtvU= +github.com/xen0n/gosmopolitan v1.2.2/go.mod h1:7XX7Mj61uLYrj0qmeN0zi7XDon9JRAEhYQqAPLVNTeg= github.com/yagipy/maintidx v1.0.0 h1:h5NvIsCz+nRDapQ0exNv4aJ0yXSI0420omVANTv3GJM= github.com/yagipy/maintidx v1.0.0/go.mod h1:0qNf/I/CCZXSMhsRsrEPDZ+DkekpKLXAJfsTACwgXLk= -github.com/yeya24/promlinter v0.2.0 h1:xFKDQ82orCU5jQujdaD8stOHiv8UN68BSdn2a8u8Y3o= -github.com/yeya24/promlinter v0.2.0/go.mod h1:u54lkmBOZrpEbQQ6gox2zWKKLKu2SGe+2KOiextY+IA= -github.com/ykadowak/zerologlint v0.1.2 h1:Um4P5RMmelfjQqQJKtE8ZW+dLZrXrENeIzWWKw800U4= -github.com/ykadowak/zerologlint v0.1.2/go.mod h1:KaUskqF3e/v59oPmdq1U1DnKcuHokl2/K1U4pmIELKg= +github.com/yeya24/promlinter v0.3.0 h1:JVDbMp08lVCP7Y6NP3qHroGAO6z2yGKQtS5JsjqtoFs= +github.com/yeya24/promlinter v0.3.0/go.mod h1:cDfJQQYv9uYciW60QT0eeHlFodotkYZlL+YcPQN+mW4= +github.com/ykadowak/zerologlint v0.1.5 h1:Gy/fMz1dFQN9JZTPjv1hxEk+sRWm05row04Yoolgdiw= +github.com/ykadowak/zerologlint v0.1.5/go.mod h1:KaUskqF3e/v59oPmdq1U1DnKcuHokl2/K1U4pmIELKg= github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= @@ -716,36 +592,34 @@ github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9dec github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= github.com/yuin/goldmark v1.4.1/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= -gitlab.com/bosi/decorder v0.2.3 h1:gX4/RgK16ijY8V+BRQHAySfQAb354T7/xQpDB2n10P0= -gitlab.com/bosi/decorder v0.2.3/go.mod h1:9K1RB5+VPNQYtXtTDAzd2OEftsZb1oV0IrJrzChSdGE= -go-simpler.org/assert v0.5.0 
h1:+5L/lajuQtzmbtEfh69sr5cRf2/xZzyJhFjoOz/PPqs= -go-simpler.org/assert v0.5.0/go.mod h1:74Eqh5eI6vCK6Y5l3PI8ZYFXG4Sa+tkr70OIPJAUr28= -go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= -go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= -go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= -go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= -go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= -go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= +gitlab.com/bosi/decorder v0.4.2 h1:qbQaV3zgwnBZ4zPMhGLW4KZe7A7NwxEhJx39R3shffo= +gitlab.com/bosi/decorder v0.4.2/go.mod h1:muuhHoaJkA9QLcYHq4Mj8FJUwDZ+EirSHRiaTcTf6T8= +go-simpler.org/assert v0.9.0 h1:PfpmcSvL7yAnWyChSjOz6Sp6m9j5lyK8Ok9pEL31YkQ= +go-simpler.org/assert v0.9.0/go.mod h1:74Eqh5eI6vCK6Y5l3PI8ZYFXG4Sa+tkr70OIPJAUr28= +go-simpler.org/musttag v0.12.2 h1:J7lRc2ysXOq7eM8rwaTYnNrHd5JwjppzB6mScysB2Cs= +go-simpler.org/musttag v0.12.2/go.mod h1:uN1DVIasMTQKk6XSik7yrJoEysGtR2GRqvWnI9S7TYM= +go-simpler.org/sloglint v0.7.2 h1:Wc9Em/Zeuu7JYpl+oKoYOsQSy2X560aVueCW/m6IijY= +go-simpler.org/sloglint v0.7.2/go.mod h1:US+9C80ppl7VsThQclkM7BkCHQAzuz8kHLsW3ppuluo= go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0= go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo= -go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.46.1 h1:SpGay3w+nEwMpfVnbqOLH5gY52/foP8RE8UzTZ1pdSE= -go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.46.1/go.mod h1:4UoMYEZOC0yN/sPGH76KPkkU7zgiEWYWL9vwmbnTJPE= -go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.46.1 h1:aFJWCqJMNjENlcleuuOkGAPH82y0yULBScfXcIEdS24= -go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.46.1/go.mod h1:sEGXWArGqc3tVa+ekntsN65DmVbVeW+7lTKTjZF3/Fo= -go.opentelemetry.io/otel v1.21.0 
h1:hzLeKBZEL7Okw2mGzZ0cc4k/A7Fta0uoPgaJCr8fsFc= -go.opentelemetry.io/otel v1.21.0/go.mod h1:QZzNPQPm1zLX4gZK4cMi+71eaorMSGT3A4znnUvNNEo= +go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.54.0 h1:r6I7RJCN86bpD/FQwedZ0vSixDpwuWREjW9oRMsmqDc= +go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.54.0/go.mod h1:B9yO6b04uB80CzjedvewuqDhxJxi11s7/GtiGa8bAjI= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.54.0 h1:TT4fX+nBOA/+LUkobKGW1ydGcn+G3vRw9+g5HwCphpk= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.54.0/go.mod h1:L7UH0GbB0p47T4Rri3uHjbpCFYrVrwc1I25QhNPiGK8= +go.opentelemetry.io/otel v1.29.0 h1:PdomN/Al4q/lN6iBJEN3AwPvUiHPMlt93c8bqTG5Llw= +go.opentelemetry.io/otel v1.29.0/go.mod h1:N/WtXPs1CNCUEx+Agz5uouwCba+i+bJGFicT8SR4NP8= go.opentelemetry.io/otel/exporters/jaeger v1.17.0 h1:D7UpUy2Xc2wsi1Ras6V40q806WM07rqoCWzXu7Sqy+4= go.opentelemetry.io/otel/exporters/jaeger v1.17.0/go.mod h1:nPCqOnEH9rNLKqH/+rrUjiMzHJdV1BlpKcTwRTyKkKI= go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.19.0 h1:Nw7Dv4lwvGrI68+wULbcq7su9K2cebeCUrDjVrUJHxM= go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.19.0/go.mod h1:1MsF6Y7gTqosgoZvHlzcaaM8DIMNZgJh87ykokoNH7Y= -go.opentelemetry.io/otel/metric v1.21.0 h1:tlYWfeo+Bocx5kLEloTjbcDwBuELRrIFxwdQ36PlJu4= -go.opentelemetry.io/otel/metric v1.21.0/go.mod h1:o1p3CA8nNHW8j5yuQLdc1eeqEaPfzug24uvsyIEJRWM= -go.opentelemetry.io/otel/sdk v1.21.0 h1:FTt8qirL1EysG6sTQRZ5TokkU8d0ugCj8htOgThZXQ8= -go.opentelemetry.io/otel/sdk v1.21.0/go.mod h1:Nna6Yv7PWTdgJHVRD9hIYywQBRx7pbox6nwBnZIxl/E= -go.opentelemetry.io/otel/trace v1.21.0 h1:WD9i5gzvoUPuXIXH24ZNBudiarZDKuekPqi/E8fpfLc= -go.opentelemetry.io/otel/trace v1.21.0/go.mod h1:LGbsEB0f9LGjN+OZaQQ26sohbOmiMR+BaslueVtS/qQ= -go.tmz.dev/musttag v0.7.0 h1:QfytzjTWGXZmChoX0L++7uQN+yRCPfyFm+whsM+lfGc= -go.tmz.dev/musttag v0.7.0/go.mod h1:oTFPvgOkJmp5kYL02S8+jrH0eLrBIl57rzWeA26zDEM= +go.opentelemetry.io/otel/metric 
v1.29.0 h1:vPf/HFWTNkPu1aYeIsc98l4ktOQaL6LeSoeV2g+8YLc= +go.opentelemetry.io/otel/metric v1.29.0/go.mod h1:auu/QWieFVWx+DmQOUMgj0F8LHWdgalxXqvp7BII/W8= +go.opentelemetry.io/otel/sdk v1.28.0 h1:b9d7hIry8yZsgtbmM0DKyPWMMUMlK9NEKuIG4aBqWyE= +go.opentelemetry.io/otel/sdk v1.28.0/go.mod h1:oYj7ClPUA7Iw3m+r7GeEjz0qckQRJK2B8zjcZEfu7Pg= +go.opentelemetry.io/otel/trace v1.29.0 h1:J/8ZNK4XgR7a21DZUAsbF8pZ5Jcw1VhACmnYt39JTi4= +go.opentelemetry.io/otel/trace v1.29.0/go.mod h1:eHl3w0sp3paPkYstJOmAimxhiFXPg+MMTlEh3nsQgWQ= +go.uber.org/automaxprocs v1.5.3 h1:kWazyxZUrS3Gs4qUpbwo5kEIMGe/DAvi5Z4tl2NW4j8= +go.uber.org/automaxprocs v1.5.3/go.mod h1:eRbA25aqJrxAbsLO0xy5jVwPt7FQnRgjW+efnwa1WM0= go.uber.org/goleak v1.2.1 h1:NBol2c7O1ZokfZ0LEU9K6Whx/KnwvepVetCUhtKja4A= go.uber.org/goleak v1.2.1/go.mod h1:qlT2yGI9QafXHhZZLxlSuNsMw3FFLxBr+tBRlmO1xH4= go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0= @@ -753,56 +627,25 @@ go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN8 go.uber.org/zap v1.25.0 h1:4Hvk6GtkucQ790dqmj7l1eEnRdKm3k3ZUrUMS2d5+5c= go.uber.org/zap v1.25.0/go.mod h1:JIAUzQIH94IC4fOJQm7gMmBJP5k7wQfdcnYdPoEXJYk= golang.org/x/crypto v0.0.0-20180501155221-613d6eafa307/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= -golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= -golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto 
v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/crypto v0.1.0/go.mod h1:RecgLatLF4+eUMCP1PoPZQb+cVrJcOPbHkTkbkB9sbw= -golang.org/x/crypto v0.25.0 h1:ypSNr+bnYL2YhwoMt2zPxHFmbAN1KZs/njMG3hxUp30= -golang.org/x/crypto v0.25.0/go.mod h1:T+wALwcMOSE0kXgUAnPAHqTLW+XHgcELELW8VaDgm/M= +golang.org/x/crypto v0.27.0 h1:GXm2NjJrPaiv/h1tb2UH8QfgC/hOf/+z0p6PT8o1w7A= +golang.org/x/crypto v0.27.0/go.mod h1:1Xngt8kV6Dvbssa53Ziq6Eqn0HqbZi5Z6R0ZpwQzt70= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= -golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= -golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= -golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek= -golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY= -golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= -golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= -golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= -golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= -golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= -golang.org/x/exp v0.0.0-20230510235704-dd950f8aeaea h1:vLCWI/yYrdEHyN2JzIzPO3aaQJHQdp89IZBA/+azVC4= -golang.org/x/exp v0.0.0-20230510235704-dd950f8aeaea/go.mod 
h1:V1LtkGg67GoY2N1AnLN78QLrzxkLyJw7RJb1gzOOz9w= +golang.org/x/exp v0.0.0-20240904232852-e7e105dedf7e h1:I88y4caeGeuDQxgdoFPUq097j7kNfw6uvuiNxUBfcBk= +golang.org/x/exp v0.0.0-20240904232852-e7e105dedf7e/go.mod h1:akd2r19cwCdwSwWeIdzYQGa/EZZyqcOdwWiwj5L5eKQ= golang.org/x/exp/typeparams v0.0.0-20220428152302-39d4317da171/go.mod h1:AbB0pIl9nAr9wVwH+Z2ZpaocVmF5I4GyWCDIsVjR0bk= golang.org/x/exp/typeparams v0.0.0-20230203172020-98cc5a0785f9/go.mod h1:AbB0pIl9nAr9wVwH+Z2ZpaocVmF5I4GyWCDIsVjR0bk= -golang.org/x/exp/typeparams v0.0.0-20230224173230-c95f2b4c22f2 h1:J74nGeMgeFnYQJN59eFwh06jX/V8g0lB7LWpjSLxtgU= -golang.org/x/exp/typeparams v0.0.0-20230224173230-c95f2b4c22f2/go.mod h1:AbB0pIl9nAr9wVwH+Z2ZpaocVmF5I4GyWCDIsVjR0bk= -golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= -golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= +golang.org/x/exp/typeparams v0.0.0-20240314144324-c7f7c6466f7f h1:phY1HzDcf18Aq9A8KkmRtY9WvOFIxN8wgfvy6Zm1DV8= +golang.org/x/exp/typeparams v0.0.0-20240314144324-c7f7c6466f7f/go.mod h1:AbB0pIl9nAr9wVwH+Z2ZpaocVmF5I4GyWCDIsVjR0bk= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= -golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod 
h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs= -golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= -golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= -golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= -golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= -golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= -golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= -golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY= -golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= -golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.5.1/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro= @@ -811,135 +654,63 @@ golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91 golang.org/x/mod v0.6.0/go.mod h1:4mET923SAdbXp2ki8ey+zGs1SLqsuM2Y0uvdZR/fUNI= golang.org/x/mod v0.7.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= -golang.org/x/mod v0.17.0 h1:zY54UmvipHiNd+pm+m0x9KhZ9hl1/7QNMyxXbc6ICqA= -golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= +golang.org/x/mod 
v0.21.0 h1:vvrHzRwRfVKSiLrG+d4FMl/Qi4ukBCE6kZlTUkDYRT0= +golang.org/x/mod v0.21.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= -golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net 
v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= -golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= -golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net v0.0.0-20201031054903-ff519b6c9102/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net v0.0.0-20201209123823-ac852fbbde11/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= -golang.org/x/net v0.0.0-20201224014010-6772e930b67b/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= -golang.org/x/net v0.0.0-20210525063256-abc453219eb5/go.mod 
h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= golang.org/x/net v0.1.0/go.mod h1:Cx3nUiGt4eDBEyega/BKRp+/AlGL8hYe7U9odMt2Cco= golang.org/x/net v0.2.0/go.mod h1:KqCZLdyyvdV855qA2rE3GC2aiw5xGR5TEjj8smXukLY= golang.org/x/net v0.5.0/go.mod h1:DivGGAXEgPSlEBzxGzZI+ZLohi+xUj054jfeKui00ws= golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= -golang.org/x/net v0.27.0 h1:5K3Njcw06/l2y9vpGCSdcxWOYHOUk3dVNGDXN+FvAys= -golang.org/x/net v0.27.0/go.mod h1:dDi0PyhWNoiUOrAS8uXv/vnScO4wnHQO4mj9fn/RytE= +golang.org/x/net v0.28.0 h1:a9JDOJc5GMUJ0+UDqmLT86WiEy7iWyIhz8gz8E4e5hE= +golang.org/x/net v0.28.0/go.mod h1:yqtgsTWOOnlGLG9GFRrK3++bGOUEkNBoHZc8MEDWPNg= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= -golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/oauth2 v0.0.0-20200902213428-5d25da1a8d43/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20201109201403-9fd604954f58/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20201208152858-08078c50e5b5/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= 
-golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.16.0 h1:aDkGMBSYxElaoP81NpoUoz2oo2R2wHdZpGToUxfyQrQ= -golang.org/x/oauth2 v0.16.0/go.mod h1:hqZ+0LWXsiVoZpeld6jVt06P3adbS2Uu911W1SsJv2o= +golang.org/x/oauth2 v0.22.0 h1:BzDx2FehcG7jJwgWLELCdmLuxk2i+x9UDpSiss2u0ZA= +golang.org/x/oauth2 v0.22.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190412183630-56d357773e84/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.1.0/go.mod 
h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.7.0 h1:YsImfSBoP9QPYL0xyKJPq0gcaJdG3rInoqxTWbfQu9M= -golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sync v0.8.0 h1:3NFvSEYkUoMifnESzZl15y791HH1qU2xm6eCJU5ZPXQ= +golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys 
v0.0.0-20200106162015-b016eb3dc98e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200615200032-f1bc736245b1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200625212154-ddb9806d33ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210225134936-a50acf3fe073/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210423185535-09eb48e85fd7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210603081109-ebe580a85c40/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211019181941-9d821ace8654/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211105183446-c75c47738b0c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220114195835-da31bd327af9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys 
v0.0.0-20220702020025-31831981b65f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= @@ -948,93 +719,46 @@ golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.4.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.22.0 h1:RI27ohtqKCnwULzJLqkv897zojh5/DwS/ENaMzUOaWI= -golang.org/x/sys v0.22.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.25.0 h1:r+8e+loiHxRqhXVl6ML1nO3l1+oFoWbnlu2Ehimmi34= +golang.org/x/sys v0.25.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.1.0/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.2.0/go.mod h1:TVmDHMZPmdnySmBfhjOoOdhjzdE1h4u1VwSiw2l1Nuc= golang.org/x/term v0.4.0/go.mod h1:9P2UbLfCdcvo3p/nzKvsmas4TnlujnuoV9hGgYzW1lQ= golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= -golang.org/x/term v0.22.0 h1:BbsgPEJULsl2fV/AT3v15Mjva5yXKQDyKf+TbDz7QJk= -golang.org/x/term v0.22.0/go.mod h1:F3qCibpT5AMpCRfhfT53vVJwhLtIVHhB9XDjfFvnMI4= -golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/term v0.24.0 
h1:Mh5cbb+Zk2hqqXNO7S1iTjEphVL+jb8ZWaqh/g+JWkM= +golang.org/x/term v0.24.0/go.mod h1:lOBK/LVxemqiMij05LGJ0tzNr8xlmwBRJ81PX6wVLH8= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= -golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ= golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.6.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= -golang.org/x/text v0.16.0 h1:a94ExnEXNtEwYLGJSIUxnWoxoRz/ZcCsV63ROupILh4= -golang.org/x/text v0.16.0/go.mod h1:GhwF1Be+LQoKShO3cGOHzqOgRrGaYc9AvblQOmPVHnI= -golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.5.0 h1:o7cqy6amK/52YcAKIPlM3a+Fpj35zvRj2TP+e1xFSfk= -golang.org/x/time v0.5.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM= +golang.org/x/text v0.18.0 h1:XvMDiNzPAl0jr17s6W9lcaIhGUfUORdGCNsuLmPG224= +golang.org/x/text v0.18.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY= +golang.org/x/time v0.6.0 h1:eTDhh4ZXt5Qf0augr54TN6suAUudPcawVZeIAPU7D4U= +golang.org/x/time v0.6.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM= 
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190321232350-e250d351ecad/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= -golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190524210228-3d17549cdc6b/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= -golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= -golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= -golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= -golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20190910044552-dd2b5c81c578/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools 
v0.0.0-20191108193012-7d206e10da11/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200224181240-023911ca70b2/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200227222343-706bc42d1f0d/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= -golang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= golang.org/x/tools 
v0.0.0-20200324003944-a576cf524670/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= golang.org/x/tools v0.0.0-20200329025819-fd4102a86c65/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= -golang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= -golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200724022722-7017fd6b1305/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= -golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= -golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools v0.0.0-20200820010801-b793a1359eac/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= -golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= -golang.org/x/tools v0.0.0-20200904185747-39188db58858/go.mod h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE= -golang.org/x/tools v0.0.0-20201001104356-43ebab892c4c/go.mod h1:z6u4i615ZeAfBE4XtMziQW1fSVJXACjjbWkB/mvPzlU= golang.org/x/tools v0.0.0-20201023174141-c8cfbd0f21e6/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20201110124207-079ba7bd75cd/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools 
v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20210108195828-e2f9c7f1fc8e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= golang.org/x/tools v0.1.1-0.20210205202024-ef80cdb6ec6d/go.mod h1:9bzcO0MWcOuT0tm1iBGzDVPshzfwoVvREIui8C+MHqU= golang.org/x/tools v0.1.1-0.20210302220138-2ac05c832e1a/go.mod h1:9bzcO0MWcOuT0tm1iBGzDVPshzfwoVvREIui8C+MHqU= @@ -1042,113 +766,40 @@ golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.9/go.mod h1:nABZi5QlRsZVlzPpHl034qft6wpY4eDcsTt5AaioBiU= golang.org/x/tools v0.1.10/go.mod h1:Uh6Zz+xoGYZom868N8YTex3t7RhtHDBrE8Gzo9bV56E= -golang.org/x/tools v0.1.11/go.mod h1:SgwaegtQh8clINPpECJMqnxLv9I09HLqnW3RMqW0CA4= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= golang.org/x/tools v0.2.0/go.mod h1:y4OqIKeOV/fWJetJ8bXPU1sEVniLMIyDAZWeHdV+NTA= golang.org/x/tools v0.3.0/go.mod h1:/rWhSS2+zyEVwoJf8YAX6L2f0ntZ7Kn/mGgAWcipA5k= golang.org/x/tools v0.5.0/go.mod h1:N+Kgy78s5I24c24dU8OfWNEotWjutIs8SnJvn5IDq+k= golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= -golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d h1:vU5i/LfpvrRCpgM/VPfJLg5KjxD3E+hfT1SH+d9zLwg= -golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk= +golang.org/x/tools v0.24.0 h1:J1shsA93PJUEVaUSaay7UXAyE8aimq3GW0pjlolpa24= +golang.org/x/tools v0.24.0/go.mod h1:YhNqVBIfWHdzvTLs0d8LCuMhkKUgSUKldakyV7W/WDQ= golang.org/x/xerrors 
v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2 h1:H2TDz8ibqkAF6YGhCdN3jS9O0/s90v0rJh3X/OLHEUk= -golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2/go.mod h1:K8+ghG5WaK9qNqU5K3HdILfMLy1f3aNYFI/wnl100a8= gomodules.xyz/jsonpatch/v2 v2.4.0 h1:Ci3iUJyx9UeRx7CeFN8ARgGbkESwJK+KB9lLcWxY/Zw= gomodules.xyz/jsonpatch/v2 v2.4.0/go.mod h1:AH3dM2RI6uoBZxn3LVrfvJ3E0/9dG4cSrbuBJT4moAY= -google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= -google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= -google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= -google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= -google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= -google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= -google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= -google.golang.org/api v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.18.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.19.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.20.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.22.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.24.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= -google.golang.org/api v0.28.0/go.mod 
h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= -google.golang.org/api v0.29.0/go.mod h1:Lcubydp8VUV7KeIHD9z2Bys/sm/vGKnG1UHuDBSrHWM= -google.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz5138Fc= -google.golang.org/api v0.35.0/go.mod h1:/XrVsuzM0rZmrsbjJutiuftIzeuTQcEeaYcSk/mQ1dg= -google.golang.org/api v0.36.0/go.mod h1:+z5ficQTmoYpPn8LCUNVpK5I7hwkpjbcgqA7I34qYtE= -google.golang.org/api v0.40.0/go.mod h1:fYKFpnQN0DsDSKRVRcQSDQNtqWPfM9i+zNPxepjRCQ8= -google.golang.org/api v0.155.0 h1:vBmGhCYs0djJttDNynWo44zosHlPvHmA0XiN2zP2DtA= -google.golang.org/api v0.155.0/go.mod h1:GI5qK5f40kCpHfPn6+YzGAByIKWv8ujFnmoWm7Igduk= +google.golang.org/api v0.196.0 h1:k/RafYqebaIJBO3+SMnfEGtFVlvp5vSgqTUF54UN/zg= +google.golang.org/api v0.196.0/go.mod h1:g9IL21uGkYgvQ5BZg6BAtoGJQIm8r6EgaAbpNey5wBE= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= -google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= -google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= -google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= -google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= -google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= -google.golang.org/appengine v1.6.8 h1:IhEN5q69dyKagZPYMSdIjS2HqprW324FRQZJcGqPAsM= -google.golang.org/appengine v1.6.8/go.mod h1:1jJ3jBArFh5pcgW8gCtRJnepW8FzD1V44FJffLiz/Ds= google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= google.golang.org/genproto v0.0.0-20181107211654-5fc9ac540362/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= -google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= 
-google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= -google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8= -google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20200115191322-ca5a22157cba/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20200122232147-0452cf42e150/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20200204135345-fa8e72b47b90/go.mod h1:GmwEX6Z4W5gMy59cAlVYjN9JhxgbQH6Gn+gFDQe2lzA= -google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200228133532-8c2c7df3a383/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200305110556-506484158171/go.mod 
h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U= google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= -google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA= -google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20200904004341-0bd0a958aa1d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20201109203340-2640f1f9cdfb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20201201144952-b05cb90ed32e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20201210142538-e3217bee35cc/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20210108203827-ffc7fda8c3d7/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto 
v0.0.0-20210226172003-ab064af71705/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20240123012728-ef4313101c80 h1:KAeGQVN3M9nD0/bQXnr/ClcEMJ968gUXJQ9pwfSynuQ= -google.golang.org/genproto v0.0.0-20240123012728-ef4313101c80/go.mod h1:cc8bqMqtv9gMOr0zHg2Vzff5ULhhL2IXP4sbcn32Dro= -google.golang.org/genproto/googleapis/api v0.0.0-20240123012728-ef4313101c80 h1:Lj5rbfG876hIAYFjqiJnPHfhXbv+nzTWfm04Fg/XSVU= -google.golang.org/genproto/googleapis/api v0.0.0-20240123012728-ef4313101c80/go.mod h1:4jWUdICTdgc3Ibxmr8nAJiiLHwQBY0UI0XZcEMaFKaA= -google.golang.org/genproto/googleapis/rpc v0.0.0-20240123012728-ef4313101c80 h1:AjyfHzEPEFp/NpvfN5g+KDla3EMojjhRVZc1i7cj+oM= -google.golang.org/genproto/googleapis/rpc v0.0.0-20240123012728-ef4313101c80/go.mod h1:PAREbraiVEVGVdTZsVWjSbbTtSyGbAgIIvni8a8CD5s= +google.golang.org/genproto v0.0.0-20240903143218-8af14fe29dc1 h1:BulPr26Jqjnd4eYDVe+YvyR7Yc2vJGkO5/0UxD0/jZU= +google.golang.org/genproto v0.0.0-20240903143218-8af14fe29dc1/go.mod h1:hL97c3SYopEHblzpxRL4lSs523++l8DYxGM1FQiYmb4= +google.golang.org/genproto/googleapis/api v0.0.0-20240827150818-7e3bb234dfed h1:3RgNmBoI9MZhsj3QxC+AP/qQhNwpCLOvYDYYsFrhFt0= +google.golang.org/genproto/googleapis/api v0.0.0-20240827150818-7e3bb234dfed/go.mod h1:OCdP9MfskevB/rbYvHTsXTtKC+3bHWajPdoKgjcYkfo= +google.golang.org/genproto/googleapis/rpc v0.0.0-20240903143218-8af14fe29dc1 h1:pPJltXNxVzT4pK9yD8vR9X75DaWYYmLGMsEvBfFQZzQ= +google.golang.org/genproto/googleapis/rpc v0.0.0-20240903143218-8af14fe29dc1/go.mod h1:UqMtugtsSgubUsoxbuAoiCXvqvErP7Gf0so0mK9tHxU= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= -google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= -google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= google.golang.org/grpc v1.25.1/go.mod 
h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= -google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= -google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= -google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60= -google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= -google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= -google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= -google.golang.org/grpc v1.31.1/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= -google.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8= -google.golang.org/grpc v1.35.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= -google.golang.org/grpc v1.62.1 h1:B4n+nfKzOICUXMgyrNd19h/I9oH0L1pizfk1d4zSgTk= -google.golang.org/grpc v1.62.1/go.mod h1:IWTG0VlJLCh1SkC58F7np9ka9mx/WNkjl4PGJaiq+QE= +google.golang.org/grpc v1.66.0 h1:DibZuoBznOxbDQxRINckZcUvnCEvrW9pcWIE2yF9r1c= +google.golang.org/grpc v1.66.0/go.mod h1:s3/l6xSSCURdVfAnL+TqCNMyTDAGN6+lZeVxnZR128Y= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= @@ -1157,43 +808,27 @@ google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzi google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= google.golang.org/protobuf 
v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= -google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4= google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= -google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= -google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= -google.golang.org/protobuf v1.33.0 h1:uNO2rsAINq/JlFpSdYEKIZ0uKD/R9cpdv0T+yoGwGmI= -google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= -gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= +google.golang.org/protobuf v1.34.2 h1:6xV6lTsCfpGD21XK49h7MhtcApnLqkfYgPcdHftf6hg= +google.golang.org/protobuf v1.34.2/go.mod h1:qYOHts0dSfpeUzUFpOMr/WGzszTmLH+DiWniOlNbLDw= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= -gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= gopkg.in/inf.v0 v0.9.1 h1:73M5CoZyi3ZLMOyDlQh031Cx6N9NDJ2Vvfl76EDAgDc= gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw= gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA= gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= -gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 
v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= -honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= -honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= -honnef.co/go/tools v0.4.3 h1:o/n5/K5gXqk8Gozvs2cnL0F2S1/g1vcGCAx2vETjITw= -honnef.co/go/tools v0.4.3/go.mod h1:36ZgoUOrqOk1GxwHhyryEkq8FQWkUO2xGuSMhUCcdvA= +honnef.co/go/tools v0.5.1 h1:4bH5o3b5ZULQ4UrBmP+63W9r7qIkqJClEA9ko5YKx+I= +honnef.co/go/tools v0.5.1/go.mod h1:e9irvo83WDG9/irijV44wr3tbhcFeRnfpVlRqVwpzMs= k8s.io/api v0.28.2 h1:9mpl5mOb6vXZvqbQmankOfPIGiudghwCoLl1EYfUZbw= k8s.io/api v0.28.2/go.mod h1:RVnJBsjU8tcMq7C3iaRSGMeaKt2TWEUXcpIt/90fjEg= k8s.io/apiextensions-apiserver v0.28.0 h1:CszgmBL8CizEnj4sj7/PtLGey6Na3YgWyGCPONv7E9E= @@ -1210,17 +845,10 @@ 
k8s.io/kube-openapi v0.0.0-20230717233707-2695361300d9 h1:LyMgNKD2P8Wn1iAwQU5Ohx k8s.io/kube-openapi v0.0.0-20230717233707-2695361300d9/go.mod h1:wZK2AVp1uHCp4VamDVgBP2COHZjqD1T68Rf0CM3YjSM= k8s.io/utils v0.0.0-20230406110748-d93618cff8a2 h1:qY1Ad8PODbnymg2pRbkyMT/ylpTrCM8P2RJ0yroCyIk= k8s.io/utils v0.0.0-20230406110748-d93618cff8a2/go.mod h1:OLgZIPagt7ERELqWJFomSt595RzquPNLL48iOWgYOg0= -mvdan.cc/gofumpt v0.5.0 h1:0EQ+Z56k8tXjj/6TQD25BFNKQXpCvT0rnansIc7Ug5E= -mvdan.cc/gofumpt v0.5.0/go.mod h1:HBeVDtMKRZpXyxFciAirzdKklDlGu8aAy1wEbH5Y9js= -mvdan.cc/interfacer v0.0.0-20180901003855-c20040233aed h1:WX1yoOaKQfddO/mLzdV4wptyWgoH/6hwLs7QHTixo0I= -mvdan.cc/interfacer v0.0.0-20180901003855-c20040233aed/go.mod h1:Xkxe497xwlCKkIaQYRfC7CSLworTXY9RMqwhhCm+8Nc= -mvdan.cc/lint v0.0.0-20170908181259-adc824a0674b h1:DxJ5nJdkhDlLok9K6qO+5290kphDJbHOQO1DFFFTeBo= -mvdan.cc/lint v0.0.0-20170908181259-adc824a0674b/go.mod h1:2odslEg/xrtNQqCYg2/jCoyKnw3vv5biOc3JnIcYfL4= -mvdan.cc/unparam v0.0.0-20221223090309-7455f1af531d h1:3rvTIIM22r9pvXk+q3swxUQAQOxksVMGK7sml4nG57w= -mvdan.cc/unparam v0.0.0-20221223090309-7455f1af531d/go.mod h1:IeHQjmn6TOD+e4Z3RFiZMMsLVL+A96Nvptar8Fj71is= -rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= -rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= -rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= +mvdan.cc/gofumpt v0.7.0 h1:bg91ttqXmi9y2xawvkuMXyvAA/1ZGJqYAEGjXuP0JXU= +mvdan.cc/gofumpt v0.7.0/go.mod h1:txVFJy/Sc/mvaycET54pV8SW8gWxTlUuGHVEcncmNUo= +mvdan.cc/unparam v0.0.0-20240528143540-8a5130ca722f h1:lMpcwN6GxNbWtbpI1+xzFLSW8XzX0u72NttUGVFjO3U= +mvdan.cc/unparam v0.0.0-20240528143540-8a5130ca722f/go.mod h1:RSLa7mKKCNeTTMHBw5Hsy2rfJmd6O2ivt9Dw9ZqCQpQ= sigs.k8s.io/controller-runtime v0.16.2 h1:mwXAVuEk3EQf478PQwQ48zGOXvW27UJc8NHktQVuIPU= sigs.k8s.io/controller-runtime v0.16.2/go.mod h1:vpMu3LpI5sYWtujJOa2uPK61nB5rbwlN7BAB8aSLvGU= sigs.k8s.io/json 
v0.0.0-20221116044647-bc3834ca7abd h1:EDPBXCAspyGV4jQlpZSudPeMmr1bNJefnuqLsRAsHZo= diff --git a/boilerplate/flyte/golang_test_targets/Makefile b/boilerplate/flyte/golang_test_targets/Makefile index c02409a318..6492014917 100644 --- a/boilerplate/flyte/golang_test_targets/Makefile +++ b/boilerplate/flyte/golang_test_targets/Makefile @@ -15,7 +15,7 @@ generate: download_tooling #generate go code .PHONY: lint lint: download_tooling #lints the package for common code smells - GL_DEBUG=linters_output,env golangci-lint run $(LINT_FLAGS) --deadline=5m --exclude deprecated -v + GL_DEBUG=linters_output,env golangci-lint run $(LINT_FLAGS) --timeout=5m --exclude deprecated -v .PHONY: lint-fix lint-fix: LINT_FLAGS=--fix diff --git a/charts/flyte-binary/README.md b/charts/flyte-binary/README.md index e7df1018db..932933993e 100644 --- a/charts/flyte-binary/README.md +++ b/charts/flyte-binary/README.md @@ -41,7 +41,7 @@ Chart for basic single Flyte executable deployment | configuration.auth.oidc.clientId | string | `""` | | | configuration.auth.oidc.clientSecret | string | `""` | | | configuration.co-pilot.image.repository | string | `"cr.flyte.org/flyteorg/flytecopilot"` | | -| configuration.co-pilot.image.tag | string | `"v1.13.2"` | | +| configuration.co-pilot.image.tag | string | `"v1.14.1"` | | | configuration.database.dbname | string | `"flyte"` | | | configuration.database.host | string | `"127.0.0.1"` | | | configuration.database.options | string | `"sslmode=disable"` | | @@ -63,6 +63,8 @@ Chart for basic single Flyte executable deployment | configuration.logging.plugins.kubernetes.templateUri | string | `""` | | | configuration.logging.plugins.stackdriver.enabled | bool | `false` | | | configuration.logging.plugins.stackdriver.templateUri | string | `""` | | +| configuration.propeller.createCRDs | bool | `true` | | +| configuration.propeller.literalOffloadingConfigEnabled | bool | `true` | | | configuration.storage.metadataContainer | string | 
`"my-organization-flyte-container"` | | | configuration.storage.provider | string | `"s3"` | | | configuration.storage.providerConfig.azure.account | string | `"storage-account-name"` | | diff --git a/charts/flyte-binary/templates/configmap.yaml b/charts/flyte-binary/templates/configmap.yaml index 255da9fdf3..face6608f1 100644 --- a/charts/flyte-binary/templates/configmap.yaml +++ b/charts/flyte-binary/templates/configmap.yaml @@ -40,7 +40,11 @@ data: show-source: true level: {{ default 1 .Values.configuration.logging.level }} propeller: - create-flyteworkflow-crd: true + create-flyteworkflow-crd: {{ .Values.configuration.propeller.createCRDs }} + {{- if .Values.configuration.propeller.literalOffloadingConfigEnabled }} + literal-offloading-config: + enabled: true + {{- end}} webhook: certDir: /var/run/flyte/certs localCert: true diff --git a/charts/flyte-binary/templates/crds/flyteworkflow.yaml b/charts/flyte-binary/templates/crds/flyteworkflow.yaml new file mode 100644 index 0000000000..3e5167d6ac --- /dev/null +++ b/charts/flyte-binary/templates/crds/flyteworkflow.yaml @@ -0,0 +1,32 @@ +{{- if not .Values.configuration.propeller.createCRDs }} +{{- if $.Capabilities.APIVersions.Has "apiextensions.k8s.io/v1/CustomResourceDefinition" }} +apiVersion: apiextensions.k8s.io/v1 +{{- else }} +apiVersion: apiextensions.k8s.io/v1beta1 +{{- end }} +kind: CustomResourceDefinition +metadata: + name: flyteworkflows.flyte.lyft.com +spec: + group: flyte.lyft.com + names: + kind: FlyteWorkflow + plural: flyteworkflows + shortNames: + - fly + singular: flyteworkflow + scope: Namespaced +{{- if $.Capabilities.APIVersions.Has "apiextensions.k8s.io/v1/CustomResourceDefinition" }} + versions: + - name: v1alpha1 + served: true + storage: true + schema: + openAPIV3Schema: + type: object + x-kubernetes-preserve-unknown-fields: true + properties: +{{- else }} + version: v1alpha1 +{{- end }} +{{- end }} diff --git a/charts/flyte-binary/values.yaml b/charts/flyte-binary/values.yaml index 
eee01d16c6..304afd7d4d 100644 --- a/charts/flyte-binary/values.yaml +++ b/charts/flyte-binary/values.yaml @@ -167,7 +167,7 @@ configuration: # repository CoPilot sidecar image repository repository: cr.flyte.org/flyteorg/flytecopilot # FLYTECOPILOT_IMAGE # tag CoPilot sidecar image tag - tag: v1.13.2 # FLYTECOPILOT_TAG + tag: v1.14.1 # FLYTECOPILOT_TAG # agentService Flyte Agent configuration agentService: defaultAgent: @@ -176,6 +176,13 @@ configuration: timeouts: GetTask: 10s defaultTimeout: 10s + # propeller Specify configuration for Flyte Propeller + propeller: + # createCRDs If true, Propeller will install CRDs at runtime, if false, CRDs will be installed during helm install + createCRDs: true + # enableOffloading If true, big literals are offloaded to blob store + literalOffloadingConfigEnabled: true + # externalConfigMap Specify an existing, external ConfigMap to use as configuration for Flyte # If set, no Flyte configuration will be generated by this chart externalConfigMap: "" diff --git a/charts/flyte-core/README.md b/charts/flyte-core/README.md index 6aed892810..a2fbd73276 100644 --- a/charts/flyte-core/README.md +++ b/charts/flyte-core/README.md @@ -110,11 +110,11 @@ helm install gateway bitnami/contour -n flyte | configmap.clusters.clusterConfigs | list | `[]` | | | configmap.clusters.labelClusterMap | object | `{}` | | | configmap.console | object | `{"BASE_URL":"/console","CONFIG_DIR":"/etc/flyte/config"}` | Configuration for Flyte console UI | -| configmap.copilot | object | `{"plugins":{"k8s":{"co-pilot":{"image":"cr.flyte.org/flyteorg/flytecopilot:v1.13.2","name":"flyte-copilot-","start-timeout":"30s"}}}}` | Copilot configuration | -| configmap.copilot.plugins.k8s.co-pilot | object | `{"image":"cr.flyte.org/flyteorg/flytecopilot:v1.13.2","name":"flyte-copilot-","start-timeout":"30s"}` | Structure documented [here](https://pkg.go.dev/github.com/lyft/flyteplugins@v0.5.28/go/tasks/pluginmachinery/flytek8s/config#FlyteCoPilotConfig) | -| 
configmap.core | object | `{"manager":{"pod-application":"flytepropeller","pod-template-container-name":"flytepropeller","pod-template-name":"flytepropeller-template"},"propeller":{"downstream-eval-duration":"30s","enable-admin-launcher":true,"leader-election":{"enabled":true,"lease-duration":"15s","lock-config-map":{"name":"propeller-leader","namespace":"flyte"},"renew-deadline":"10s","retry-period":"2s"},"limit-namespace":"all","max-workflow-retries":30,"metadata-prefix":"metadata/propeller","metrics-prefix":"flyte","prof-port":10254,"queue":{"batch-size":-1,"batching-interval":"2s","queue":{"base-delay":"5s","capacity":1000,"max-delay":"120s","rate":100,"type":"maxof"},"sub-queue":{"capacity":100,"rate":10,"type":"bucket"},"type":"batch"},"rawoutput-prefix":"s3://my-s3-bucket/","workers":4,"workflow-reeval-duration":"30s"},"webhook":{"certDir":"/etc/webhook/certs","serviceName":"flyte-pod-webhook"}}` | Core propeller configuration | +| configmap.copilot | object | `{"plugins":{"k8s":{"co-pilot":{"image":"cr.flyte.org/flyteorg/flytecopilot:v1.14.1","name":"flyte-copilot-","start-timeout":"30s"}}}}` | Copilot configuration | +| configmap.copilot.plugins.k8s.co-pilot | object | `{"image":"cr.flyte.org/flyteorg/flytecopilot:v1.14.1","name":"flyte-copilot-","start-timeout":"30s"}` | Structure documented [here](https://pkg.go.dev/github.com/lyft/flyteplugins@v0.5.28/go/tasks/pluginmachinery/flytek8s/config#FlyteCoPilotConfig) | +| configmap.core | object | 
`{"manager":{"pod-application":"flytepropeller","pod-template-container-name":"flytepropeller","pod-template-name":"flytepropeller-template"},"propeller":{"downstream-eval-duration":"30s","enable-admin-launcher":true,"leader-election":{"enabled":true,"lease-duration":"15s","lock-config-map":{"name":"propeller-leader","namespace":"flyte"},"renew-deadline":"10s","retry-period":"2s"},"limit-namespace":"all","literal-offloading-config":{"enabled":true},"max-workflow-retries":30,"metadata-prefix":"metadata/propeller","metrics-prefix":"flyte","prof-port":10254,"queue":{"batch-size":-1,"batching-interval":"2s","queue":{"base-delay":"5s","capacity":1000,"max-delay":"120s","rate":100,"type":"maxof"},"sub-queue":{"capacity":100,"rate":10,"type":"bucket"},"type":"batch"},"rawoutput-prefix":"s3://my-s3-bucket/","workers":4,"workflow-reeval-duration":"30s"},"webhook":{"certDir":"/etc/webhook/certs","serviceName":"flyte-pod-webhook"}}` | Core propeller configuration | | configmap.core.manager | object | `{"pod-application":"flytepropeller","pod-template-container-name":"flytepropeller","pod-template-name":"flytepropeller-template"}` | follows the structure specified [here](https://pkg.go.dev/github.com/flyteorg/flytepropeller/manager/config#Config). 
| -| configmap.core.propeller | object | `{"downstream-eval-duration":"30s","enable-admin-launcher":true,"leader-election":{"enabled":true,"lease-duration":"15s","lock-config-map":{"name":"propeller-leader","namespace":"flyte"},"renew-deadline":"10s","retry-period":"2s"},"limit-namespace":"all","max-workflow-retries":30,"metadata-prefix":"metadata/propeller","metrics-prefix":"flyte","prof-port":10254,"queue":{"batch-size":-1,"batching-interval":"2s","queue":{"base-delay":"5s","capacity":1000,"max-delay":"120s","rate":100,"type":"maxof"},"sub-queue":{"capacity":100,"rate":10,"type":"bucket"},"type":"batch"},"rawoutput-prefix":"s3://my-s3-bucket/","workers":4,"workflow-reeval-duration":"30s"}` | follows the structure specified [here](https://pkg.go.dev/github.com/flyteorg/flytepropeller/pkg/controller/config). | +| configmap.core.propeller | object | `{"downstream-eval-duration":"30s","enable-admin-launcher":true,"leader-election":{"enabled":true,"lease-duration":"15s","lock-config-map":{"name":"propeller-leader","namespace":"flyte"},"renew-deadline":"10s","retry-period":"2s"},"limit-namespace":"all","literal-offloading-config":{"enabled":true},"max-workflow-retries":30,"metadata-prefix":"metadata/propeller","metrics-prefix":"flyte","prof-port":10254,"queue":{"batch-size":-1,"batching-interval":"2s","queue":{"base-delay":"5s","capacity":1000,"max-delay":"120s","rate":100,"type":"maxof"},"sub-queue":{"capacity":100,"rate":10,"type":"bucket"},"type":"batch"},"rawoutput-prefix":"s3://my-s3-bucket/","workers":4,"workflow-reeval-duration":"30s"}` | follows the structure specified [here](https://pkg.go.dev/github.com/flyteorg/flytepropeller/pkg/controller/config). 
| | configmap.datacatalogServer | object | `{"application":{"grpcPort":8089,"grpcServerReflection":true,"httpPort":8080},"datacatalog":{"heartbeat-grace-period-multiplier":3,"max-reservation-heartbeat":"30s","metrics-scope":"datacatalog","profiler-port":10254,"storage-prefix":"metadata/datacatalog"}}` | Datacatalog server config | | configmap.domain | object | `{"domains":[{"id":"development","name":"development"},{"id":"staging","name":"staging"},{"id":"production","name":"production"}]}` | Domains configuration for Flyte projects. This enables the specified number of domains across all projects in Flyte. | | configmap.enabled_plugins.tasks | object | `{"task-plugins":{"default-for-task-types":{"container":"container","container_array":"k8s-array","sidecar":"sidecar"},"enabled-plugins":["container","sidecar","k8s-array","agent-service","echo"]}}` | Tasks specific configuration [structure](https://pkg.go.dev/github.com/flyteorg/flytepropeller/pkg/controller/nodes/task/config#GetConfig) | @@ -145,7 +145,7 @@ helm install gateway bitnami/contour -n flyte | datacatalog.extraArgs | object | `{}` | Appends extra command line arguments to the main command | | datacatalog.image.pullPolicy | string | `"IfNotPresent"` | Docker image pull policy | | datacatalog.image.repository | string | `"cr.flyte.org/flyteorg/datacatalog"` | Docker image for Datacatalog deployment | -| datacatalog.image.tag | string | `"v1.13.2"` | Docker image tag | +| datacatalog.image.tag | string | `"v1.14.1"` | Docker image tag | | datacatalog.nodeSelector | object | `{}` | nodeSelector for Datacatalog deployment | | datacatalog.podAnnotations | object | `{}` | Annotations for Datacatalog pods | | datacatalog.podEnv | object | `{}` | Additional Datacatalog container environment variables | @@ -178,10 +178,11 @@ helm install gateway bitnami/contour -n flyte | flyteadmin.configPath | string | `"/etc/flyte/config/*.yaml"` | Default regex string for searching configuration files | | flyteadmin.enabled | 
bool | `true` | | | flyteadmin.env | list | `[]` | Additional flyteadmin container environment variables e.g. SendGrid's API key - name: SENDGRID_API_KEY value: "" e.g. secret environment variable (you can combine it with .additionalVolumes): - name: SENDGRID_API_KEY valueFrom: secretKeyRef: name: sendgrid-secret key: api_key | +| flyteadmin.envFrom | list | `[]` | Additional flyteadmin environment variables from a reference (ie: Secret or ConfigMap) | | flyteadmin.extraArgs | object | `{}` | Appends extra command line arguments to the serve command | | flyteadmin.image.pullPolicy | string | `"IfNotPresent"` | | | flyteadmin.image.repository | string | `"cr.flyte.org/flyteorg/flyteadmin"` | Docker image for Flyteadmin deployment | -| flyteadmin.image.tag | string | `"v1.13.2"` | | +| flyteadmin.image.tag | string | `"v1.14.1"` | | | flyteadmin.initialProjects | list | `["flytesnacks","flytetester","flyteexamples"]` | Initial projects to create | | flyteadmin.nodeSelector | object | `{}` | nodeSelector for Flyteadmin deployment | | flyteadmin.podAnnotations | object | `{}` | Annotations for Flyteadmin pods | @@ -191,7 +192,7 @@ helm install gateway bitnami/contour -n flyte | flyteadmin.resources | object | `{"limits":{"cpu":"250m","ephemeral-storage":"100Mi","memory":"500Mi"},"requests":{"cpu":"10m","ephemeral-storage":"50Mi","memory":"50Mi"}}` | Default resources requests and limits for Flyteadmin deployment | | flyteadmin.secrets | object | `{}` | | | flyteadmin.securityContext | object | `{"fsGroup":65534,"fsGroupChangePolicy":"Always","runAsNonRoot":true,"runAsUser":1001,"seLinuxOptions":{"type":"spc_t"}}` | Sets securityContext for flyteadmin pod(s). 
| -| flyteadmin.service | object | `{"additionalPorts":[],"annotations":{"projectcontour.io/upstream-protocol.h2c":"grpc"},"loadBalancerSourceRanges":[],"type":"ClusterIP"}` | Service settings for Flyteadmin | +| flyteadmin.service | object | `{"additionalPorts":[],"annotations":{"projectcontour.io/upstream-protocol.h2c":"grpc"},"appProtocols":{"enabled":false},"loadBalancerSourceRanges":[],"type":"ClusterIP"}` | Service settings for Flyteadmin | | flyteadmin.service.additionalPorts | list | `[]` | Appends additional ports to the service spec. | | flyteadmin.serviceAccount | object | `{"alwaysCreate":false,"annotations":{},"clusterRole":{"apiGroups":["","flyte.lyft.com","rbac.authorization.k8s.io"],"resources":["configmaps","flyteworkflows","namespaces","pods","resourcequotas","roles","rolebindings","secrets","services","serviceaccounts","spark-role","limitranges"],"verbs":["*"]},"create":true,"createClusterRole":true,"imagePullSecrets":[]}` | Configuration for service accounts for FlyteAdmin | | flyteadmin.serviceAccount.alwaysCreate | bool | `false` | Should a service account always be created for flyteadmin even without an actual flyteadmin deployment running (e.g. 
for multi-cluster setups) | @@ -222,7 +223,7 @@ helm install gateway bitnami/contour -n flyte | flyteconsole.ga.tracking_id | string | `"G-0QW4DJWJ20"` | | | flyteconsole.image.pullPolicy | string | `"IfNotPresent"` | | | flyteconsole.image.repository | string | `"cr.flyte.org/flyteorg/flyteconsole"` | Docker image for Flyteconsole deployment | -| flyteconsole.image.tag | string | `"v1.17.1"` | | +| flyteconsole.image.tag | string | `"v1.19.0"` | | | flyteconsole.imagePullSecrets | list | `[]` | ImagePullSecrets to assign to the Flyteconsole deployment | | flyteconsole.livenessProbe | object | `{}` | | | flyteconsole.nodeSelector | object | `{}` | nodeSelector for Flyteconsole deployment | @@ -234,7 +235,7 @@ helm install gateway bitnami/contour -n flyte | flyteconsole.replicaCount | int | `1` | Replicas count for Flyteconsole deployment | | flyteconsole.resources | object | `{"limits":{"cpu":"500m","memory":"250Mi"},"requests":{"cpu":"10m","memory":"50Mi"}}` | Default resources requests and limits for Flyteconsole deployment | | flyteconsole.securityContext | object | `{"fsGroupChangePolicy":"OnRootMismatch","runAsNonRoot":true,"runAsUser":1000,"seLinuxOptions":{"type":"spc_t"}}` | Sets securityContext for flyteconsole pod(s). 
| -| flyteconsole.service | object | `{"annotations":{},"type":"ClusterIP"}` | Service settings for Flyteconsole | +| flyteconsole.service | object | `{"annotations":{},"appProtocols":{"enabled":false},"type":"ClusterIP"}` | Service settings for Flyteconsole | | flyteconsole.serviceMonitor | object | `{"enabled":false,"interval":"60s","labels":{},"scrapeTimeout":"30s"}` | Settings for flyteconsole service monitor | | flyteconsole.serviceMonitor.enabled | bool | `false` | If enabled create the flyteconsole service monitor | | flyteconsole.serviceMonitor.interval | string | `"60s"` | Sets the interval at which metrics will be scraped by prometheus | @@ -252,7 +253,7 @@ helm install gateway bitnami/contour -n flyte | flytepropeller.extraArgs | object | `{}` | Appends extra command line arguments to the main command | | flytepropeller.image.pullPolicy | string | `"IfNotPresent"` | | | flytepropeller.image.repository | string | `"cr.flyte.org/flyteorg/flytepropeller"` | Docker image for Flytepropeller deployment | -| flytepropeller.image.tag | string | `"v1.13.2"` | | +| flytepropeller.image.tag | string | `"v1.14.1"` | | | flytepropeller.manager | bool | `false` | | | flytepropeller.nodeSelector | object | `{}` | nodeSelector for Flytepropeller deployment | | flytepropeller.podAnnotations | object | `{}` | Annotations for Flytepropeller pods | @@ -284,7 +285,7 @@ helm install gateway bitnami/contour -n flyte | flytescheduler.configPath | string | `"/etc/flyte/config/*.yaml"` | Default regex string for searching configuration files | | flytescheduler.image.pullPolicy | string | `"IfNotPresent"` | Docker image pull policy | | flytescheduler.image.repository | string | `"cr.flyte.org/flyteorg/flytescheduler"` | Docker image for Flytescheduler deployment | -| flytescheduler.image.tag | string | `"v1.13.2"` | Docker image tag | +| flytescheduler.image.tag | string | `"v1.14.1"` | Docker image tag | | flytescheduler.nodeSelector | object | `{}` | nodeSelector for 
Flytescheduler deployment | | flytescheduler.podAnnotations | object | `{}` | Annotations for Flytescheduler pods | | flytescheduler.podEnv | object | `{}` | Additional Flytescheduler container environment variables | diff --git a/charts/flyte-core/templates/admin/deployment.yaml b/charts/flyte-core/templates/admin/deployment.yaml index 23ea9966df..cc6877a793 100755 --- a/charts/flyte-core/templates/admin/deployment.yaml +++ b/charts/flyte-core/templates/admin/deployment.yaml @@ -46,7 +46,13 @@ spec: {{- with .Values.flyteadmin.env -}} {{- tpl (toYaml .) $ | nindent 12 }} {{- end }} + {{- end }} + {{- if .Values.flyteadmin.envFrom }} + envFrom: + {{- with .Values.flyteadmin.envFrom -}} + {{- tpl (toYaml .) $ | nindent 12 }} {{- end }} + {{- end }} {{- if .Values.flyteadmin.initialProjects }} - command: - flyteadmin diff --git a/charts/flyte-core/templates/admin/service.yaml b/charts/flyte-core/templates/admin/service.yaml index 9974fcdc4d..26e85c97f2 100644 --- a/charts/flyte-core/templates/admin/service.yaml +++ b/charts/flyte-core/templates/admin/service.yaml @@ -20,22 +20,30 @@ spec: - name: http port: 80 protocol: TCP + {{- if .Values.flyteadmin.service.appProtocols.enabled }} appProtocol: TCP + {{- end }} targetPort: 8088 - name: grpc port: 81 protocol: TCP # intentionally set to TCP instead of grpc + {{- if .Values.flyteadmin.service.appProtocols.enabled }} appProtocol: TCP + {{- end }} targetPort: 8089 - name: redoc protocol: TCP + {{- if .Values.flyteadmin.service.appProtocols.enabled }} appProtocol: TCP + {{- end }} port: 87 targetPort: 8087 - name: http-metrics protocol: TCP + {{- if .Values.flyteadmin.service.appProtocols.enabled }} appProtocol: TCP + {{- end }} port: 10254 {{- with .Values.flyteadmin.service.additionalPorts -}} {{ tpl (toYaml .) 
$ | nindent 4 }} diff --git a/charts/flyte-core/templates/console/service.yaml b/charts/flyte-core/templates/console/service.yaml index 7760cb6fcc..756fa0c7c9 100644 --- a/charts/flyte-core/templates/console/service.yaml +++ b/charts/flyte-core/templates/console/service.yaml @@ -16,7 +16,9 @@ spec: - name: http port: 80 protocol: TCP + {{- if .Values.flyteconsole.service.appProtocols.enabled }} appProtocol: TCP + {{- end }} targetPort: 8080 {{- if .Values.flyteconsole.serviceMonitor.enabled }} - name: http-metrics diff --git a/charts/flyte-core/values.yaml b/charts/flyte-core/values.yaml index 9faaed731a..4bc380c82d 100755 --- a/charts/flyte-core/values.yaml +++ b/charts/flyte-core/values.yaml @@ -16,7 +16,7 @@ flyteadmin: image: # -- Docker image for Flyteadmin deployment repository: cr.flyte.org/flyteorg/flyteadmin # FLYTEADMIN_IMAGE - tag: v1.13.2 # FLYTEADMIN_TAG + tag: v1.14.1 # FLYTEADMIN_TAG pullPolicy: IfNotPresent # -- Additional flyteadmin container environment variables # @@ -31,6 +31,8 @@ flyteadmin: # name: sendgrid-secret # key: api_key env: [] + # -- Additional flyteadmin environment variables from a reference (ie: Secret or ConfigMap) + envFrom: [] # -- Default resources requests and limits for Flyteadmin deployment resources: limits: @@ -50,6 +52,8 @@ flyteadmin: - flyteexamples # -- Service settings for Flyteadmin service: + appProtocols: + enabled: false annotations: projectcontour.io/upstream-protocol.h2c: grpc type: ClusterIP @@ -144,7 +148,7 @@ flytescheduler: # -- Docker image for Flytescheduler deployment repository: cr.flyte.org/flyteorg/flytescheduler # FLYTESCHEDULER_IMAGE # -- Docker image tag - tag: v1.13.2 # FLYTESCHEDULER_TAG + tag: v1.14.1 # FLYTESCHEDULER_TAG # -- Docker image pull policy pullPolicy: IfNotPresent # -- Default resources requests and limits for Flytescheduler deployment @@ -210,7 +214,7 @@ datacatalog: # -- Docker image for Datacatalog deployment repository: cr.flyte.org/flyteorg/datacatalog # DATACATALOG_IMAGE # -- 
Docker image tag - tag: v1.13.2 # DATACATALOG_TAG + tag: v1.14.1 # DATACATALOG_TAG # -- Docker image pull policy pullPolicy: IfNotPresent # -- Default resources requests and limits for Datacatalog deployment @@ -307,7 +311,7 @@ flytepropeller: image: # -- Docker image for Flytepropeller deployment repository: cr.flyte.org/flyteorg/flytepropeller # FLYTEPROPELLER_IMAGE - tag: v1.13.2 # FLYTEPROPELLER_TAG + tag: v1.14.1 # FLYTEPROPELLER_TAG pullPolicy: IfNotPresent # -- Default resources requests and limits for Flytepropeller deployment resources: @@ -395,7 +399,7 @@ flyteconsole: image: # -- Docker image for Flyteconsole deployment repository: cr.flyte.org/flyteorg/flyteconsole # FLYTECONSOLE_IMAGE - tag: v1.17.1 # FLYTECONSOLE_TAG + tag: v1.19.0 # FLYTECONSOLE_TAG pullPolicy: IfNotPresent # -- Default resources requests and limits for Flyteconsole deployment resources: @@ -407,6 +411,8 @@ flyteconsole: memory: 50Mi # -- Service settings for Flyteconsole service: + appProtocols: + enabled: false annotations: {} type: ClusterIP # -- Annotations for Flyteconsole pods @@ -801,7 +807,7 @@ configmap: # -- Structure documented [here](https://pkg.go.dev/github.com/lyft/flyteplugins@v0.5.28/go/tasks/pluginmachinery/flytek8s/config#FlyteCoPilotConfig) co-pilot: name: flyte-copilot- - image: cr.flyte.org/flyteorg/flytecopilot:v1.13.2 # FLYTECOPILOT_IMAGE + image: cr.flyte.org/flyteorg/flytecopilot:v1.14.1 # FLYTECOPILOT_IMAGE start-timeout: 30s # -- Core propeller configuration @@ -845,6 +851,8 @@ configmap: type: bucket rate: 10 capacity: 100 + literal-offloading-config: + enabled: true webhook: certDir: /etc/webhook/certs serviceName: flyte-pod-webhook diff --git a/charts/flyte-sandbox/README.md b/charts/flyte-sandbox/README.md index 7820ed2768..c316199b57 100644 --- a/charts/flyte-sandbox/README.md +++ b/charts/flyte-sandbox/README.md @@ -30,6 +30,8 @@ A Helm chart for the Flyte local sandbox | 
flyte-binary.configuration.inline.plugins.k8s.default-env-vars[0].FLYTE_AWS_ENDPOINT | string | `"http://{{ printf \"%s-minio\" .Release.Name | trunc 63 | trimSuffix \"-\" }}.{{ .Release.Namespace }}:9000"` | | | flyte-binary.configuration.inline.plugins.k8s.default-env-vars[1].FLYTE_AWS_ACCESS_KEY_ID | string | `"minio"` | | | flyte-binary.configuration.inline.plugins.k8s.default-env-vars[2].FLYTE_AWS_SECRET_ACCESS_KEY | string | `"miniostorage"` | | +| flyte-binary.configuration.inline.plugins.k8s.default-env-vars[3].FLYTE_PLATFORM_URL | string | `"{{ printf \"%s-grpc\" .Release.Name }}.{{ .Release.Namespace }}:8089"` | | +| flyte-binary.configuration.inline.plugins.k8s.default-env-vars[4].FLYTE_PLATFORM_INSECURE | bool | `true` | | | flyte-binary.configuration.inline.storage.signedURL.stowConfigOverride.endpoint | string | `"http://localhost:30002"` | | | flyte-binary.configuration.inline.task_resources.defaults.cpu | string | `"500m"` | | | flyte-binary.configuration.inline.task_resources.defaults.ephemeralStorage | int | `0` | | diff --git a/charts/flyte-sandbox/values.yaml b/charts/flyte-sandbox/values.yaml index 9743bcab33..314c8f8bb2 100644 --- a/charts/flyte-sandbox/values.yaml +++ b/charts/flyte-sandbox/values.yaml @@ -57,6 +57,8 @@ flyte-binary: - FLYTE_AWS_ENDPOINT: http://{{ printf "%s-minio" .Release.Name | trunc 63 | trimSuffix "-" }}.{{ .Release.Namespace }}:9000 - FLYTE_AWS_ACCESS_KEY_ID: minio - FLYTE_AWS_SECRET_ACCESS_KEY: miniostorage + - FLYTE_PLATFORM_URL: '{{ printf "%s-grpc" .Release.Name }}.{{ .Release.Namespace }}:8089' + - FLYTE_PLATFORM_INSECURE: True inlineConfigMap: '{{ include "flyte-sandbox.configuration.inlineConfigMap" . }}' clusterResourceTemplates: inlineConfigMap: '{{ include "flyte-sandbox.clusterResourceTemplates.inlineConfigMap" . 
}}' diff --git a/charts/flyte/README.md b/charts/flyte/README.md index 944c624ab6..52925587fc 100644 --- a/charts/flyte/README.md +++ b/charts/flyte/README.md @@ -71,7 +71,7 @@ helm upgrade -f values-sandbox.yaml flyte . | contour.tolerations | list | `[]` | tolerations for Contour deployment | | daskoperator | object | `{"enabled":false}` | Optional: Dask Plugin using the Dask Operator | | daskoperator.enabled | bool | `false` | - enable or disable the dask operator deployment installation | -| flyte | object | `{"cluster_resource_manager":{"config":{"cluster_resources":{"customData":[{"production":[{"projectQuotaCpu":{"value":"5"}},{"projectQuotaMemory":{"value":"4000Mi"}}]},{"staging":[{"projectQuotaCpu":{"value":"2"}},{"projectQuotaMemory":{"value":"3000Mi"}}]},{"development":[{"projectQuotaCpu":{"value":"4"}},{"projectQuotaMemory":{"value":"3000Mi"}}]}],"refresh":"5m","refreshInterval":"5m","standaloneDeployment":false,"templatePath":"/etc/flyte/clusterresource/templates"}},"enabled":true,"service_account_name":"flyteadmin","templates":[{"key":"aa_namespace","value":"apiVersion: v1\nkind: Namespace\nmetadata:\n name: {{ namespace }}\nspec:\n finalizers:\n - kubernetes\n"},{"key":"ab_project_resource_quota","value":"apiVersion: v1\nkind: ResourceQuota\nmetadata:\n name: project-quota\n namespace: {{ namespace }}\nspec:\n hard:\n limits.cpu: {{ projectQuotaCpu }}\n limits.memory: {{ projectQuotaMemory 
}}\n"}]},"common":{"databaseSecret":{"name":"","secretManifest":{}},"flyteNamespaceTemplate":{"enabled":false},"ingress":{"albSSLRedirect":false,"annotations":{"nginx.ingress.kubernetes.io/app-root":"/console"},"enabled":true,"host":"","separateGrpcIngress":false,"separateGrpcIngressAnnotations":{"nginx.ingress.kubernetes.io/backend-protocol":"GRPC"},"tls":{"enabled":false},"webpackHMR":true}},"configmap":{"adminServer":{"auth":{"appAuth":{"thirdPartyConfig":{"flyteClient":{"clientId":"flytectl","redirectUri":"http://localhost:53593/callback","scopes":["offline","all"]}}},"authorizedUris":["https://localhost:30081","http://flyteadmin:80","http://flyteadmin.flyte.svc.cluster.local:80"],"userAuth":{"openId":{"baseUrl":"https://accounts.google.com","clientId":"657465813211-6eog7ek7li5k7i7fvgv2921075063hpe.apps.googleusercontent.com","scopes":["profile","openid"]}}},"flyteadmin":{"eventVersion":2,"metadataStoragePrefix":["metadata","admin"],"metricsScope":"flyte:","profilerPort":10254,"roleNameKey":"iam.amazonaws.com/role","testing":{"host":"http://flyteadmin"}},"server":{"grpc":{"port":8089},"httpPort":8088,"security":{"allowCors":true,"allowedHeaders":["Content-Type","flyte-authorization"],"allowedOrigins":["*"],"secure":false,"useAuth":false}}},"catalog":{"catalog-cache":{"endpoint":"datacatalog:89","insecure":true,"type":"datacatalog"}},"console":{"BASE_URL":"/console","CONFIG_DIR":"/etc/flyte/config"},"copilot":{"plugins":{"k8s":{"co-pilot":{"image":"cr.flyte.org/flyteorg/flytecopilot:v1.13.2","name":"flyte-copilot-","start-timeout":"30s"}}}},"core":{"propeller":{"downstream-eval-duration":"30s","enable-admin-launcher":true,"leader-election":{"enabled":true,"lease-duration":"15s","lock-config-map":{"name":"propeller-leader","namespace":"flyte"},"renew-deadline":"10s","retry-period":"2s"},"limit-namespace":"all","max-workflow-retries":30,"metadata-prefix":"metadata/propeller","metrics-prefix":"flyte","prof-port":10254,"queue":{"batch-size":-1,"batching-interval":"2s
","queue":{"base-delay":"5s","capacity":1000,"max-delay":"120s","rate":100,"type":"maxof"},"sub-queue":{"capacity":100,"rate":10,"type":"bucket"},"type":"batch"},"rawoutput-prefix":"s3://my-s3-bucket/","workers":4,"workflow-reeval-duration":"30s"},"webhook":{"certDir":"/etc/webhook/certs","serviceName":"flyte-pod-webhook"}},"datacatalogServer":{"application":{"grpcPort":8089,"grpcServerReflection":true,"httpPort":8080},"datacatalog":{"metrics-scope":"datacatalog","profiler-port":10254,"storage-prefix":"metadata/datacatalog"}},"domain":{"domains":[{"id":"development","name":"development"},{"id":"staging","name":"staging"},{"id":"production","name":"production"}]},"enabled_plugins":{"tasks":{"task-plugins":{"default-for-task-types":{"container":"container","container_array":"k8s-array","sensor":"agent-service","sidecar":"sidecar"},"enabled-plugins":["container","sidecar","k8s-array","agent-service","echo"]}}},"k8s":{"plugins":{"k8s":{"default-cpus":"100m","default-env-from-configmaps":[],"default-env-from-secrets":[],"default-env-vars":[{"FLYTE_AWS_ENDPOINT":"http://minio.flyte:9000"},{"FLYTE_AWS_ACCESS_KEY_ID":"minio"},{"FLYTE_AWS_SECRET_ACCESS_KEY":"miniostorage"}],"default-memory":"200Mi"}}},"logger":{"logger":{"level":5,"show-source":true}},"remoteData":{"remoteData":{"region":"us-east-1","scheme":"local","signedUrls":{"durationMinutes":3}}},"resource_manager":{"propeller":{"resourcemanager":{"redis":null,"type":"noop"}}},"task_logs":{"plugins":{"logs":{"cloudwatch-enabled":false,"kubernetes-enabled":true,"kubernetes-template-uri":"http://localhost:30082/#/log/{{ \"{{\" }} .namespace {{ \"}}\" }}/{{ \"{{\" }} .podName {{ \"}}\" }}/pod?namespace={{ \"{{\" }} .namespace {{ \"}}\" 
}}"}}},"task_resource_defaults":{"task_resources":{"defaults":{"cpu":"100m","memory":"200Mi","storage":"5Mi"},"limits":{"cpu":2,"gpu":1,"memory":"1Gi","storage":"20Mi"}}}},"datacatalog":{"affinity":{},"configPath":"/etc/datacatalog/config/*.yaml","image":{"pullPolicy":"IfNotPresent","repository":"cr.flyte.org/flyteorg/datacatalog","tag":"v1.13.2"},"nodeSelector":{},"podAnnotations":{},"replicaCount":1,"resources":{"limits":{"cpu":"500m","ephemeral-storage":"100Mi","memory":"500Mi"},"requests":{"cpu":"10m","ephemeral-storage":"50Mi","memory":"50Mi"}},"service":{"annotations":{"projectcontour.io/upstream-protocol.h2c":"grpc"},"type":"NodePort"},"serviceAccount":{"annotations":{},"create":true,"imagePullSecrets":[]},"tolerations":[]},"db":{"admin":{"database":{"dbname":"flyteadmin","host":"postgres","port":5432,"username":"postgres"}},"datacatalog":{"database":{"dbname":"datacatalog","host":"postgres","port":5432,"username":"postgres"}}},"deployRedoc":true,"flyteadmin":{"additionalVolumeMounts":[],"additionalVolumes":[],"affinity":{},"configPath":"/etc/flyte/config/*.yaml","env":[],"image":{"pullPolicy":"IfNotPresent","repository":"cr.flyte.org/flyteorg/flyteadmin","tag":"v1.13.2"},"initialProjects":["flytesnacks","flytetester","flyteexamples"],"nodeSelector":{},"podAnnotations":{},"replicaCount":1,"resources":{"limits":{"cpu":"250m","ephemeral-storage":"100Mi","memory":"500Mi"},"requests":{"cpu":"10m","ephemeral-storage":"50Mi","memory":"50Mi"}},"secrets":{},"service":{"annotations":{"projectcontour.io/upstream-protocol.h2c":"grpc"},"loadBalancerSourceRanges":[],"type":"ClusterIP"},"serviceAccount":{"annotations":{},"create":true,"imagePullSecrets":[]},"tolerations":[]},"flyteconsole":{"affinity":{},"ga":{"enabled":true,"tracking_id":"G-0QW4DJWJ20"},"image":{"pullPolicy":"IfNotPresent","repository":"cr.flyte.org/flyteorg/flyteconsole","tag":"v1.17.1"},"nodeSelector":{},"podAnnotations":{},"replicaCount":1,"resources":{"limits":{"cpu":"500m","memory":"275Mi"},"requests
":{"cpu":"10m","memory":"250Mi"}},"service":{"annotations":{},"type":"ClusterIP"},"tolerations":[]},"flytepropeller":{"affinity":{},"configPath":"/etc/flyte/config/*.yaml","image":{"pullPolicy":"IfNotPresent","repository":"cr.flyte.org/flyteorg/flytepropeller","tag":"v1.13.2"},"manager":false,"nodeSelector":{},"podAnnotations":{},"replicaCount":1,"resources":{"limits":{"cpu":"200m","ephemeral-storage":"100Mi","memory":"200Mi"},"requests":{"cpu":"10m","ephemeral-storage":"50Mi","memory":"50Mi"}},"serviceAccount":{"annotations":{},"create":true,"imagePullSecrets":[]},"tolerations":[]},"flytescheduler":{"affinity":{},"configPath":"/etc/flyte/config/*.yaml","image":{"pullPolicy":"IfNotPresent","repository":"cr.flyte.org/flyteorg/flytescheduler","tag":"v1.13.2"},"nodeSelector":{},"podAnnotations":{},"resources":{"limits":{"cpu":"250m","ephemeral-storage":"100Mi","memory":"500Mi"},"requests":{"cpu":"10m","ephemeral-storage":"50Mi","memory":"50Mi"}},"secrets":{},"serviceAccount":{"annotations":{},"create":true,"imagePullSecrets":[]},"tolerations":[]},"storage":{"bucketName":"my-s3-bucket","cache":{"maxSizeMBs":0,"targetGCPercent":70},"custom":{},"gcs":null,"s3":{"region":"us-east-1"},"type":"sandbox"},"webhook":{"enabled":true,"service":{"annotations":{"projectcontour.io/upstream-protocol.h2c":"grpc"},"type":"ClusterIP"},"serviceAccount":{"annotations":{},"create":true,"imagePullSecrets":[]}},"workflow_notifications":{"config":{},"enabled":false},"workflow_scheduler":{"enabled":true,"type":"native"}}` | ------------------------------------------------------------------- Core System settings This section consists of Core components of Flyte and their deployment settings. 
This includes FlyteAdmin service, Datacatalog, FlytePropeller and Flyteconsole | +| flyte | object | `{"cluster_resource_manager":{"config":{"cluster_resources":{"customData":[{"production":[{"projectQuotaCpu":{"value":"5"}},{"projectQuotaMemory":{"value":"4000Mi"}}]},{"staging":[{"projectQuotaCpu":{"value":"2"}},{"projectQuotaMemory":{"value":"3000Mi"}}]},{"development":[{"projectQuotaCpu":{"value":"4"}},{"projectQuotaMemory":{"value":"3000Mi"}}]}],"refresh":"5m","refreshInterval":"5m","standaloneDeployment":false,"templatePath":"/etc/flyte/clusterresource/templates"}},"enabled":true,"service_account_name":"flyteadmin","templates":[{"key":"aa_namespace","value":"apiVersion: v1\nkind: Namespace\nmetadata:\n name: {{ namespace }}\nspec:\n finalizers:\n - kubernetes\n"},{"key":"ab_project_resource_quota","value":"apiVersion: v1\nkind: ResourceQuota\nmetadata:\n name: project-quota\n namespace: {{ namespace }}\nspec:\n hard:\n limits.cpu: {{ projectQuotaCpu }}\n limits.memory: {{ projectQuotaMemory 
}}\n"}]},"common":{"databaseSecret":{"name":"","secretManifest":{}},"flyteNamespaceTemplate":{"enabled":false},"ingress":{"albSSLRedirect":false,"annotations":{"nginx.ingress.kubernetes.io/app-root":"/console"},"enabled":true,"host":"","separateGrpcIngress":false,"separateGrpcIngressAnnotations":{"nginx.ingress.kubernetes.io/backend-protocol":"GRPC"},"tls":{"enabled":false},"webpackHMR":true}},"configmap":{"adminServer":{"auth":{"appAuth":{"thirdPartyConfig":{"flyteClient":{"clientId":"flytectl","redirectUri":"http://localhost:53593/callback","scopes":["offline","all"]}}},"authorizedUris":["https://localhost:30081","http://flyteadmin:80","http://flyteadmin.flyte.svc.cluster.local:80"],"userAuth":{"openId":{"baseUrl":"https://accounts.google.com","clientId":"657465813211-6eog7ek7li5k7i7fvgv2921075063hpe.apps.googleusercontent.com","scopes":["profile","openid"]}}},"flyteadmin":{"eventVersion":2,"metadataStoragePrefix":["metadata","admin"],"metricsScope":"flyte:","profilerPort":10254,"roleNameKey":"iam.amazonaws.com/role","testing":{"host":"http://flyteadmin"}},"server":{"grpc":{"port":8089},"httpPort":8088,"security":{"allowCors":true,"allowedHeaders":["Content-Type","flyte-authorization"],"allowedOrigins":["*"],"secure":false,"useAuth":false}}},"catalog":{"catalog-cache":{"endpoint":"datacatalog:89","insecure":true,"type":"datacatalog"}},"console":{"BASE_URL":"/console","CONFIG_DIR":"/etc/flyte/config"},"copilot":{"plugins":{"k8s":{"co-pilot":{"image":"cr.flyte.org/flyteorg/flytecopilot:v1.14.1","name":"flyte-copilot-","start-timeout":"30s"}}}},"core":{"propeller":{"downstream-eval-duration":"30s","enable-admin-launcher":true,"leader-election":{"enabled":true,"lease-duration":"15s","lock-config-map":{"name":"propeller-leader","namespace":"flyte"},"renew-deadline":"10s","retry-period":"2s"},"limit-namespace":"all","max-workflow-retries":30,"metadata-prefix":"metadata/propeller","metrics-prefix":"flyte","prof-port":10254,"queue":{"batch-size":-1,"batching-interval":"2s
","queue":{"base-delay":"5s","capacity":1000,"max-delay":"120s","rate":100,"type":"maxof"},"sub-queue":{"capacity":100,"rate":10,"type":"bucket"},"type":"batch"},"rawoutput-prefix":"s3://my-s3-bucket/","workers":4,"workflow-reeval-duration":"30s"},"webhook":{"certDir":"/etc/webhook/certs","serviceName":"flyte-pod-webhook"}},"datacatalogServer":{"application":{"grpcPort":8089,"grpcServerReflection":true,"httpPort":8080},"datacatalog":{"metrics-scope":"datacatalog","profiler-port":10254,"storage-prefix":"metadata/datacatalog"}},"domain":{"domains":[{"id":"development","name":"development"},{"id":"staging","name":"staging"},{"id":"production","name":"production"}]},"enabled_plugins":{"tasks":{"task-plugins":{"default-for-task-types":{"container":"container","container_array":"k8s-array","sensor":"agent-service","sidecar":"sidecar"},"enabled-plugins":["container","sidecar","k8s-array","agent-service","echo"]}}},"k8s":{"plugins":{"k8s":{"default-cpus":"100m","default-env-from-configmaps":[],"default-env-from-secrets":[],"default-env-vars":[{"FLYTE_AWS_ENDPOINT":"http://minio.flyte:9000"},{"FLYTE_AWS_ACCESS_KEY_ID":"minio"},{"FLYTE_AWS_SECRET_ACCESS_KEY":"miniostorage"}],"default-memory":"200Mi"}}},"logger":{"logger":{"level":5,"show-source":true}},"remoteData":{"remoteData":{"region":"us-east-1","scheme":"local","signedUrls":{"durationMinutes":3}}},"resource_manager":{"propeller":{"resourcemanager":{"redis":null,"type":"noop"}}},"task_logs":{"plugins":{"logs":{"cloudwatch-enabled":false,"kubernetes-enabled":true,"kubernetes-template-uri":"http://localhost:30082/#/log/{{ \"{{\" }} .namespace {{ \"}}\" }}/{{ \"{{\" }} .podName {{ \"}}\" }}/pod?namespace={{ \"{{\" }} .namespace {{ \"}}\" 
}}"}}},"task_resource_defaults":{"task_resources":{"defaults":{"cpu":"100m","memory":"200Mi","storage":"5Mi"},"limits":{"cpu":2,"gpu":1,"memory":"1Gi","storage":"20Mi"}}}},"datacatalog":{"affinity":{},"configPath":"/etc/datacatalog/config/*.yaml","image":{"pullPolicy":"IfNotPresent","repository":"cr.flyte.org/flyteorg/datacatalog","tag":"v1.14.1"},"nodeSelector":{},"podAnnotations":{},"replicaCount":1,"resources":{"limits":{"cpu":"500m","ephemeral-storage":"100Mi","memory":"500Mi"},"requests":{"cpu":"10m","ephemeral-storage":"50Mi","memory":"50Mi"}},"service":{"annotations":{"projectcontour.io/upstream-protocol.h2c":"grpc"},"type":"NodePort"},"serviceAccount":{"annotations":{},"create":true,"imagePullSecrets":[]},"tolerations":[]},"db":{"admin":{"database":{"dbname":"flyteadmin","host":"postgres","port":5432,"username":"postgres"}},"datacatalog":{"database":{"dbname":"datacatalog","host":"postgres","port":5432,"username":"postgres"}}},"deployRedoc":true,"flyteadmin":{"additionalVolumeMounts":[],"additionalVolumes":[],"affinity":{},"configPath":"/etc/flyte/config/*.yaml","env":[],"image":{"pullPolicy":"IfNotPresent","repository":"cr.flyte.org/flyteorg/flyteadmin","tag":"v1.14.1"},"initialProjects":["flytesnacks","flytetester","flyteexamples"],"nodeSelector":{},"podAnnotations":{},"replicaCount":1,"resources":{"limits":{"cpu":"250m","ephemeral-storage":"100Mi","memory":"500Mi"},"requests":{"cpu":"10m","ephemeral-storage":"50Mi","memory":"50Mi"}},"secrets":{},"service":{"annotations":{"projectcontour.io/upstream-protocol.h2c":"grpc"},"loadBalancerSourceRanges":[],"type":"ClusterIP"},"serviceAccount":{"annotations":{},"create":true,"imagePullSecrets":[]},"tolerations":[]},"flyteconsole":{"affinity":{},"ga":{"enabled":true,"tracking_id":"G-0QW4DJWJ20"},"image":{"pullPolicy":"IfNotPresent","repository":"cr.flyte.org/flyteorg/flyteconsole","tag":"v1.19.0"},"nodeSelector":{},"podAnnotations":{},"replicaCount":1,"resources":{"limits":{"cpu":"500m","memory":"275Mi"},"requests
":{"cpu":"10m","memory":"250Mi"}},"service":{"annotations":{},"type":"ClusterIP"},"tolerations":[]},"flytepropeller":{"affinity":{},"configPath":"/etc/flyte/config/*.yaml","image":{"pullPolicy":"IfNotPresent","repository":"cr.flyte.org/flyteorg/flytepropeller","tag":"v1.14.1"},"manager":false,"nodeSelector":{},"podAnnotations":{},"replicaCount":1,"resources":{"limits":{"cpu":"200m","ephemeral-storage":"100Mi","memory":"200Mi"},"requests":{"cpu":"10m","ephemeral-storage":"50Mi","memory":"50Mi"}},"serviceAccount":{"annotations":{},"create":true,"imagePullSecrets":[]},"tolerations":[]},"flytescheduler":{"affinity":{},"configPath":"/etc/flyte/config/*.yaml","image":{"pullPolicy":"IfNotPresent","repository":"cr.flyte.org/flyteorg/flytescheduler","tag":"v1.14.1"},"nodeSelector":{},"podAnnotations":{},"resources":{"limits":{"cpu":"250m","ephemeral-storage":"100Mi","memory":"500Mi"},"requests":{"cpu":"10m","ephemeral-storage":"50Mi","memory":"50Mi"}},"secrets":{},"serviceAccount":{"annotations":{},"create":true,"imagePullSecrets":[]},"tolerations":[]},"storage":{"bucketName":"my-s3-bucket","cache":{"maxSizeMBs":0,"targetGCPercent":70},"custom":{},"gcs":null,"s3":{"region":"us-east-1"},"type":"sandbox"},"webhook":{"enabled":true,"service":{"annotations":{"projectcontour.io/upstream-protocol.h2c":"grpc"},"type":"ClusterIP"},"serviceAccount":{"annotations":{},"create":true,"imagePullSecrets":[]}},"workflow_notifications":{"config":{},"enabled":false},"workflow_scheduler":{"enabled":true,"type":"native"}}` | ------------------------------------------------------------------- Core System settings This section consists of Core components of Flyte and their deployment settings. 
This includes FlyteAdmin service, Datacatalog, FlytePropeller and Flyteconsole | | flyte.cluster_resource_manager | object | `{"config":{"cluster_resources":{"customData":[{"production":[{"projectQuotaCpu":{"value":"5"}},{"projectQuotaMemory":{"value":"4000Mi"}}]},{"staging":[{"projectQuotaCpu":{"value":"2"}},{"projectQuotaMemory":{"value":"3000Mi"}}]},{"development":[{"projectQuotaCpu":{"value":"4"}},{"projectQuotaMemory":{"value":"3000Mi"}}]}],"refresh":"5m","refreshInterval":"5m","standaloneDeployment":false,"templatePath":"/etc/flyte/clusterresource/templates"}},"enabled":true,"service_account_name":"flyteadmin","templates":[{"key":"aa_namespace","value":"apiVersion: v1\nkind: Namespace\nmetadata:\n name: {{ namespace }}\nspec:\n finalizers:\n - kubernetes\n"},{"key":"ab_project_resource_quota","value":"apiVersion: v1\nkind: ResourceQuota\nmetadata:\n name: project-quota\n namespace: {{ namespace }}\nspec:\n hard:\n limits.cpu: {{ projectQuotaCpu }}\n limits.memory: {{ projectQuotaMemory }}\n"}]}` | Configuration for the Cluster resource manager component. This is an optional component, that enables automatic cluster configuration. This is useful to set default quotas, manage namespaces etc that map to a project/domain | | flyte.cluster_resource_manager.config.cluster_resources | object | `{"customData":[{"production":[{"projectQuotaCpu":{"value":"5"}},{"projectQuotaMemory":{"value":"4000Mi"}}]},{"staging":[{"projectQuotaCpu":{"value":"2"}},{"projectQuotaMemory":{"value":"3000Mi"}}]},{"development":[{"projectQuotaCpu":{"value":"4"}},{"projectQuotaMemory":{"value":"3000Mi"}}]}],"refresh":"5m","refreshInterval":"5m","standaloneDeployment":false,"templatePath":"/etc/flyte/clusterresource/templates"}` | ClusterResource parameters Refer to the [structure](https://pkg.go.dev/github.com/lyft/flyteadmin@v0.3.37/pkg/runtime/interfaces#ClusterResourceConfig) to customize. 
| | flyte.cluster_resource_manager.config.cluster_resources.standaloneDeployment | bool | `false` | Starts the cluster resource manager in standalone mode with requisite auth credentials to call flyteadmin service endpoints | @@ -91,15 +91,15 @@ helm upgrade -f values-sandbox.yaml flyte . | flyte.common.ingress.separateGrpcIngressAnnotations | object | `{"nginx.ingress.kubernetes.io/backend-protocol":"GRPC"}` | - Extra Ingress annotations applied only to the GRPC ingress. Only makes sense if `separateGrpcIngress` is enabled. | | flyte.common.ingress.tls | object | `{"enabled":false}` | - TLS Settings | | flyte.common.ingress.webpackHMR | bool | `true` | - Enable or disable HMR route to flyteconsole. This is useful only for frontend development. | -| flyte.configmap | object | `{"adminServer":{"auth":{"appAuth":{"thirdPartyConfig":{"flyteClient":{"clientId":"flytectl","redirectUri":"http://localhost:53593/callback","scopes":["offline","all"]}}},"authorizedUris":["https://localhost:30081","http://flyteadmin:80","http://flyteadmin.flyte.svc.cluster.local:80"],"userAuth":{"openId":{"baseUrl":"https://accounts.google.com","clientId":"657465813211-6eog7ek7li5k7i7fvgv2921075063hpe.apps.googleusercontent.com","scopes":["profile","openid"]}}},"flyteadmin":{"eventVersion":2,"metadataStoragePrefix":["metadata","admin"],"metricsScope":"flyte:","profilerPort":10254,"roleNameKey":"iam.amazonaws.com/role","testing":{"host":"http://flyteadmin"}},"server":{"grpc":{"port":8089},"httpPort":8088,"security":{"allowCors":true,"allowedHeaders":["Content-Type","flyte-authorization"],"allowedOrigins":["*"],"secure":false,"useAuth":false}}},"catalog":{"catalog-cache":{"endpoint":"datacatalog:89","insecure":true,"type":"datacatalog"}},"console":{"BASE_URL":"/console","CONFIG_DIR":"/etc/flyte/config"},"copilot":{"plugins":{"k8s":{"co-pilot":{"image":"cr.flyte.org/flyteorg/flytecopilot:v1.13.2","name":"flyte-copilot-","start-timeout":"30s"}}}},"core":{"propeller":{"downstream-eval-duration":"30
s","enable-admin-launcher":true,"leader-election":{"enabled":true,"lease-duration":"15s","lock-config-map":{"name":"propeller-leader","namespace":"flyte"},"renew-deadline":"10s","retry-period":"2s"},"limit-namespace":"all","max-workflow-retries":30,"metadata-prefix":"metadata/propeller","metrics-prefix":"flyte","prof-port":10254,"queue":{"batch-size":-1,"batching-interval":"2s","queue":{"base-delay":"5s","capacity":1000,"max-delay":"120s","rate":100,"type":"maxof"},"sub-queue":{"capacity":100,"rate":10,"type":"bucket"},"type":"batch"},"rawoutput-prefix":"s3://my-s3-bucket/","workers":4,"workflow-reeval-duration":"30s"},"webhook":{"certDir":"/etc/webhook/certs","serviceName":"flyte-pod-webhook"}},"datacatalogServer":{"application":{"grpcPort":8089,"grpcServerReflection":true,"httpPort":8080},"datacatalog":{"metrics-scope":"datacatalog","profiler-port":10254,"storage-prefix":"metadata/datacatalog"}},"domain":{"domains":[{"id":"development","name":"development"},{"id":"staging","name":"staging"},{"id":"production","name":"production"}]},"enabled_plugins":{"tasks":{"task-plugins":{"default-for-task-types":{"container":"container","container_array":"k8s-array","sensor":"agent-service","sidecar":"sidecar"},"enabled-plugins":["container","sidecar","k8s-array","agent-service","echo"]}}},"k8s":{"plugins":{"k8s":{"default-cpus":"100m","default-env-from-configmaps":[],"default-env-from-secrets":[],"default-env-vars":[{"FLYTE_AWS_ENDPOINT":"http://minio.flyte:9000"},{"FLYTE_AWS_ACCESS_KEY_ID":"minio"},{"FLYTE_AWS_SECRET_ACCESS_KEY":"miniostorage"}],"default-memory":"200Mi"}}},"logger":{"logger":{"level":5,"show-source":true}},"remoteData":{"remoteData":{"region":"us-east-1","scheme":"local","signedUrls":{"durationMinutes":3}}},"resource_manager":{"propeller":{"resourcemanager":{"redis":null,"type":"noop"}}},"task_logs":{"plugins":{"logs":{"cloudwatch-enabled":false,"kubernetes-enabled":true,"kubernetes-template-uri":"http://localhost:30082/#/log/{{ \"{{\" }} .namespace {{ 
\"}}\" }}/{{ \"{{\" }} .podName {{ \"}}\" }}/pod?namespace={{ \"{{\" }} .namespace {{ \"}}\" }}"}}},"task_resource_defaults":{"task_resources":{"defaults":{"cpu":"100m","memory":"200Mi","storage":"5Mi"},"limits":{"cpu":2,"gpu":1,"memory":"1Gi","storage":"20Mi"}}}}` | ----------------------------------------------------------------- CONFIGMAPS SETTINGS | +| flyte.configmap | object | `{"adminServer":{"auth":{"appAuth":{"thirdPartyConfig":{"flyteClient":{"clientId":"flytectl","redirectUri":"http://localhost:53593/callback","scopes":["offline","all"]}}},"authorizedUris":["https://localhost:30081","http://flyteadmin:80","http://flyteadmin.flyte.svc.cluster.local:80"],"userAuth":{"openId":{"baseUrl":"https://accounts.google.com","clientId":"657465813211-6eog7ek7li5k7i7fvgv2921075063hpe.apps.googleusercontent.com","scopes":["profile","openid"]}}},"flyteadmin":{"eventVersion":2,"metadataStoragePrefix":["metadata","admin"],"metricsScope":"flyte:","profilerPort":10254,"roleNameKey":"iam.amazonaws.com/role","testing":{"host":"http://flyteadmin"}},"server":{"grpc":{"port":8089},"httpPort":8088,"security":{"allowCors":true,"allowedHeaders":["Content-Type","flyte-authorization"],"allowedOrigins":["*"],"secure":false,"useAuth":false}}},"catalog":{"catalog-cache":{"endpoint":"datacatalog:89","insecure":true,"type":"datacatalog"}},"console":{"BASE_URL":"/console","CONFIG_DIR":"/etc/flyte/config"},"copilot":{"plugins":{"k8s":{"co-pilot":{"image":"cr.flyte.org/flyteorg/flytecopilot:v1.14.1","name":"flyte-copilot-","start-timeout":"30s"}}}},"core":{"propeller":{"downstream-eval-duration":"30s","enable-admin-launcher":true,"leader-election":{"enabled":true,"lease-duration":"15s","lock-config-map":{"name":"propeller-leader","namespace":"flyte"},"renew-deadline":"10s","retry-period":"2s"},"limit-namespace":"all","max-workflow-retries":30,"metadata-prefix":"metadata/propeller","metrics-prefix":"flyte","prof-port":10254,"queue":{"batch-size":-1,"batching-interval":"2s","queue":{"base-delay
":"5s","capacity":1000,"max-delay":"120s","rate":100,"type":"maxof"},"sub-queue":{"capacity":100,"rate":10,"type":"bucket"},"type":"batch"},"rawoutput-prefix":"s3://my-s3-bucket/","workers":4,"workflow-reeval-duration":"30s"},"webhook":{"certDir":"/etc/webhook/certs","serviceName":"flyte-pod-webhook"}},"datacatalogServer":{"application":{"grpcPort":8089,"grpcServerReflection":true,"httpPort":8080},"datacatalog":{"metrics-scope":"datacatalog","profiler-port":10254,"storage-prefix":"metadata/datacatalog"}},"domain":{"domains":[{"id":"development","name":"development"},{"id":"staging","name":"staging"},{"id":"production","name":"production"}]},"enabled_plugins":{"tasks":{"task-plugins":{"default-for-task-types":{"container":"container","container_array":"k8s-array","sensor":"agent-service","sidecar":"sidecar"},"enabled-plugins":["container","sidecar","k8s-array","agent-service","echo"]}}},"k8s":{"plugins":{"k8s":{"default-cpus":"100m","default-env-from-configmaps":[],"default-env-from-secrets":[],"default-env-vars":[{"FLYTE_AWS_ENDPOINT":"http://minio.flyte:9000"},{"FLYTE_AWS_ACCESS_KEY_ID":"minio"},{"FLYTE_AWS_SECRET_ACCESS_KEY":"miniostorage"}],"default-memory":"200Mi"}}},"logger":{"logger":{"level":5,"show-source":true}},"remoteData":{"remoteData":{"region":"us-east-1","scheme":"local","signedUrls":{"durationMinutes":3}}},"resource_manager":{"propeller":{"resourcemanager":{"redis":null,"type":"noop"}}},"task_logs":{"plugins":{"logs":{"cloudwatch-enabled":false,"kubernetes-enabled":true,"kubernetes-template-uri":"http://localhost:30082/#/log/{{ \"{{\" }} .namespace {{ \"}}\" }}/{{ \"{{\" }} .podName {{ \"}}\" }}/pod?namespace={{ \"{{\" }} .namespace {{ \"}}\" }}"}}},"task_resource_defaults":{"task_resources":{"defaults":{"cpu":"100m","memory":"200Mi","storage":"5Mi"},"limits":{"cpu":2,"gpu":1,"memory":"1Gi","storage":"20Mi"}}}}` | ----------------------------------------------------------------- CONFIGMAPS SETTINGS | | flyte.configmap.adminServer | object | 
`{"auth":{"appAuth":{"thirdPartyConfig":{"flyteClient":{"clientId":"flytectl","redirectUri":"http://localhost:53593/callback","scopes":["offline","all"]}}},"authorizedUris":["https://localhost:30081","http://flyteadmin:80","http://flyteadmin.flyte.svc.cluster.local:80"],"userAuth":{"openId":{"baseUrl":"https://accounts.google.com","clientId":"657465813211-6eog7ek7li5k7i7fvgv2921075063hpe.apps.googleusercontent.com","scopes":["profile","openid"]}}},"flyteadmin":{"eventVersion":2,"metadataStoragePrefix":["metadata","admin"],"metricsScope":"flyte:","profilerPort":10254,"roleNameKey":"iam.amazonaws.com/role","testing":{"host":"http://flyteadmin"}},"server":{"grpc":{"port":8089},"httpPort":8088,"security":{"allowCors":true,"allowedHeaders":["Content-Type","flyte-authorization"],"allowedOrigins":["*"],"secure":false,"useAuth":false}}}` | FlyteAdmin server configuration | | flyte.configmap.adminServer.auth | object | `{"appAuth":{"thirdPartyConfig":{"flyteClient":{"clientId":"flytectl","redirectUri":"http://localhost:53593/callback","scopes":["offline","all"]}}},"authorizedUris":["https://localhost:30081","http://flyteadmin:80","http://flyteadmin.flyte.svc.cluster.local:80"],"userAuth":{"openId":{"baseUrl":"https://accounts.google.com","clientId":"657465813211-6eog7ek7li5k7i7fvgv2921075063hpe.apps.googleusercontent.com","scopes":["profile","openid"]}}}` | Authentication configuration | | flyte.configmap.adminServer.server.security.secure | bool | `false` | Controls whether to serve requests over SSL/TLS. | | flyte.configmap.adminServer.server.security.useAuth | bool | `false` | Controls whether to enforce authentication. Follow the guide in https://docs.flyte.org/ on how to setup authentication. 
| | flyte.configmap.catalog | object | `{"catalog-cache":{"endpoint":"datacatalog:89","insecure":true,"type":"datacatalog"}}` | Catalog Client configuration [structure](https://pkg.go.dev/github.com/flyteorg/flytepropeller/pkg/controller/nodes/task/catalog#Config) Additional advanced Catalog configuration [here](https://pkg.go.dev/github.com/lyft/flyteplugins/go/tasks/pluginmachinery/catalog#Config) | | flyte.configmap.console | object | `{"BASE_URL":"/console","CONFIG_DIR":"/etc/flyte/config"}` | Configuration for Flyte console UI | -| flyte.configmap.copilot | object | `{"plugins":{"k8s":{"co-pilot":{"image":"cr.flyte.org/flyteorg/flytecopilot:v1.13.2","name":"flyte-copilot-","start-timeout":"30s"}}}}` | Copilot configuration | -| flyte.configmap.copilot.plugins.k8s.co-pilot | object | `{"image":"cr.flyte.org/flyteorg/flytecopilot:v1.13.2","name":"flyte-copilot-","start-timeout":"30s"}` | Structure documented [here](https://pkg.go.dev/github.com/lyft/flyteplugins@v0.5.28/go/tasks/pluginmachinery/flytek8s/config#FlyteCoPilotConfig) | +| flyte.configmap.copilot | object | `{"plugins":{"k8s":{"co-pilot":{"image":"cr.flyte.org/flyteorg/flytecopilot:v1.14.1","name":"flyte-copilot-","start-timeout":"30s"}}}}` | Copilot configuration | +| flyte.configmap.copilot.plugins.k8s.co-pilot | object | `{"image":"cr.flyte.org/flyteorg/flytecopilot:v1.14.1","name":"flyte-copilot-","start-timeout":"30s"}` | Structure documented [here](https://pkg.go.dev/github.com/lyft/flyteplugins@v0.5.28/go/tasks/pluginmachinery/flytek8s/config#FlyteCoPilotConfig) | | flyte.configmap.core | object | 
`{"propeller":{"downstream-eval-duration":"30s","enable-admin-launcher":true,"leader-election":{"enabled":true,"lease-duration":"15s","lock-config-map":{"name":"propeller-leader","namespace":"flyte"},"renew-deadline":"10s","retry-period":"2s"},"limit-namespace":"all","max-workflow-retries":30,"metadata-prefix":"metadata/propeller","metrics-prefix":"flyte","prof-port":10254,"queue":{"batch-size":-1,"batching-interval":"2s","queue":{"base-delay":"5s","capacity":1000,"max-delay":"120s","rate":100,"type":"maxof"},"sub-queue":{"capacity":100,"rate":10,"type":"bucket"},"type":"batch"},"rawoutput-prefix":"s3://my-s3-bucket/","workers":4,"workflow-reeval-duration":"30s"},"webhook":{"certDir":"/etc/webhook/certs","serviceName":"flyte-pod-webhook"}}` | Core propeller configuration | | flyte.configmap.core.propeller | object | `{"downstream-eval-duration":"30s","enable-admin-launcher":true,"leader-election":{"enabled":true,"lease-duration":"15s","lock-config-map":{"name":"propeller-leader","namespace":"flyte"},"renew-deadline":"10s","retry-period":"2s"},"limit-namespace":"all","max-workflow-retries":30,"metadata-prefix":"metadata/propeller","metrics-prefix":"flyte","prof-port":10254,"queue":{"batch-size":-1,"batching-interval":"2s","queue":{"base-delay":"5s","capacity":1000,"max-delay":"120s","rate":100,"type":"maxof"},"sub-queue":{"capacity":100,"rate":10,"type":"bucket"},"type":"batch"},"rawoutput-prefix":"s3://my-s3-bucket/","workers":4,"workflow-reeval-duration":"30s"}` | follows the structure specified [here](https://pkg.go.dev/github.com/flyteorg/flytepropeller/pkg/controller/config). | | flyte.configmap.datacatalogServer | object | `{"application":{"grpcPort":8089,"grpcServerReflection":true,"httpPort":8080},"datacatalog":{"metrics-scope":"datacatalog","profiler-port":10254,"storage-prefix":"metadata/datacatalog"}}` | Datacatalog server config | @@ -120,7 +120,7 @@ helm upgrade -f values-sandbox.yaml flyte . 
| flyte.datacatalog.configPath | string | `"/etc/datacatalog/config/*.yaml"` | Default regex string for searching configuration files | | flyte.datacatalog.image.pullPolicy | string | `"IfNotPresent"` | Docker image pull policy | | flyte.datacatalog.image.repository | string | `"cr.flyte.org/flyteorg/datacatalog"` | Docker image for Datacatalog deployment | -| flyte.datacatalog.image.tag | string | `"v1.13.2"` | Docker image tag | +| flyte.datacatalog.image.tag | string | `"v1.14.1"` | Docker image tag | | flyte.datacatalog.nodeSelector | object | `{}` | nodeSelector for Datacatalog deployment | | flyte.datacatalog.podAnnotations | object | `{}` | Annotations for Datacatalog pods | | flyte.datacatalog.replicaCount | int | `1` | Replicas count for Datacatalog deployment | @@ -136,7 +136,7 @@ helm upgrade -f values-sandbox.yaml flyte . | flyte.flyteadmin.env | list | `[]` | Additional flyteadmin container environment variables e.g. SendGrid's API key - name: SENDGRID_API_KEY value: "" e.g. secret environment variable (you can combine it with .additionalVolumes): - name: SENDGRID_API_KEY valueFrom: secretKeyRef: name: sendgrid-secret key: api_key | | flyte.flyteadmin.image.pullPolicy | string | `"IfNotPresent"` | Docker image pull policy | | flyte.flyteadmin.image.repository | string | `"cr.flyte.org/flyteorg/flyteadmin"` | Docker image for Flyteadmin deployment | -| flyte.flyteadmin.image.tag | string | `"v1.13.2"` | Docker image tag | +| flyte.flyteadmin.image.tag | string | `"v1.14.1"` | Docker image tag | | flyte.flyteadmin.initialProjects | list | `["flytesnacks","flytetester","flyteexamples"]` | Initial projects to create | | flyte.flyteadmin.nodeSelector | object | `{}` | nodeSelector for Flyteadmin deployment | | flyte.flyteadmin.podAnnotations | object | `{}` | Annotations for Flyteadmin pods | @@ -151,7 +151,7 @@ helm upgrade -f values-sandbox.yaml flyte . 
| flyte.flyteconsole.affinity | object | `{}` | affinity for Flyteconsole deployment | | flyte.flyteconsole.image.pullPolicy | string | `"IfNotPresent"` | Docker image pull policy | | flyte.flyteconsole.image.repository | string | `"cr.flyte.org/flyteorg/flyteconsole"` | Docker image for Flyteconsole deployment | -| flyte.flyteconsole.image.tag | string | `"v1.17.1"` | Docker image tag | +| flyte.flyteconsole.image.tag | string | `"v1.19.0"` | Docker image tag | | flyte.flyteconsole.nodeSelector | object | `{}` | nodeSelector for Flyteconsole deployment | | flyte.flyteconsole.podAnnotations | object | `{}` | Annotations for Flyteconsole pods | | flyte.flyteconsole.replicaCount | int | `1` | Replicas count for Flyteconsole deployment | @@ -162,7 +162,7 @@ helm upgrade -f values-sandbox.yaml flyte . | flyte.flytepropeller.configPath | string | `"/etc/flyte/config/*.yaml"` | Default regex string for searching configuration files | | flyte.flytepropeller.image.pullPolicy | string | `"IfNotPresent"` | Docker image pull policy | | flyte.flytepropeller.image.repository | string | `"cr.flyte.org/flyteorg/flytepropeller"` | Docker image for Flytepropeller deployment | -| flyte.flytepropeller.image.tag | string | `"v1.13.2"` | Docker image tag | +| flyte.flytepropeller.image.tag | string | `"v1.14.1"` | Docker image tag | | flyte.flytepropeller.nodeSelector | object | `{}` | nodeSelector for Flytepropeller deployment | | flyte.flytepropeller.podAnnotations | object | `{}` | Annotations for Flytepropeller pods | | flyte.flytepropeller.replicaCount | int | `1` | Replicas count for Flytepropeller deployment | @@ -176,7 +176,7 @@ helm upgrade -f values-sandbox.yaml flyte . 
| flyte.flytescheduler.configPath | string | `"/etc/flyte/config/*.yaml"` | Default regex string for searching configuration files | | flyte.flytescheduler.image.pullPolicy | string | `"IfNotPresent"` | Docker image pull policy | | flyte.flytescheduler.image.repository | string | `"cr.flyte.org/flyteorg/flytescheduler"` | Docker image for Flytescheduler deployment | -| flyte.flytescheduler.image.tag | string | `"v1.13.2"` | Docker image tag | +| flyte.flytescheduler.image.tag | string | `"v1.14.1"` | Docker image tag | | flyte.flytescheduler.nodeSelector | object | `{}` | nodeSelector for Flytescheduler deployment | | flyte.flytescheduler.podAnnotations | object | `{}` | Annotations for Flytescheduler pods | | flyte.flytescheduler.resources | object | `{"limits":{"cpu":"250m","ephemeral-storage":"100Mi","memory":"500Mi"},"requests":{"cpu":"10m","ephemeral-storage":"50Mi","memory":"50Mi"}}` | Default resources requests and limits for Flytescheduler deployment | diff --git a/charts/flyte/values.yaml b/charts/flyte/values.yaml index 8231f5bda1..ded750777e 100755 --- a/charts/flyte/values.yaml +++ b/charts/flyte/values.yaml @@ -15,7 +15,7 @@ flyte: # -- Docker image for Flyteadmin deployment repository: cr.flyte.org/flyteorg/flyteadmin # FLYTEADMIN_IMAGE # -- Docker image tag - tag: v1.13.2 # FLYTEADMIN_TAG + tag: v1.14.1 # FLYTEADMIN_TAG # -- Docker image pull policy pullPolicy: IfNotPresent # -- Additional flyteadmin container environment variables @@ -83,7 +83,7 @@ flyte: # -- Docker image for Flytescheduler deployment repository: cr.flyte.org/flyteorg/flytescheduler # FLYTESCHEDULER_IMAGE # -- Docker image tag - tag: v1.13.2 # FLYTESCHEDULER_TAG + tag: v1.14.1 # FLYTESCHEDULER_TAG # -- Docker image pull policy pullPolicy: IfNotPresent # -- Default resources requests and limits for Flytescheduler deployment @@ -128,7 +128,7 @@ flyte: # -- Docker image for Datacatalog deployment repository: cr.flyte.org/flyteorg/datacatalog # DATACATALOG_IMAGE # -- Docker image tag - 
tag: v1.13.2 # DATACATALOG_TAG + tag: v1.14.1 # DATACATALOG_TAG # -- Docker image pull policy pullPolicy: IfNotPresent # -- Default resources requests and limits for Datacatalog deployment @@ -177,7 +177,7 @@ flyte: # -- Docker image for Flytepropeller deployment repository: cr.flyte.org/flyteorg/flytepropeller # FLYTEPROPELLER_IMAGE # -- Docker image tag - tag: v1.13.2 # FLYTEPROPELLER_TAG + tag: v1.14.1 # FLYTEPROPELLER_TAG # -- Docker image pull policy pullPolicy: IfNotPresent # -- Default resources requests and limits for Flytepropeller deployment @@ -221,7 +221,7 @@ flyte: # -- Docker image for Flyteconsole deployment repository: cr.flyte.org/flyteorg/flyteconsole # FLYTECONSOLE_IMAGE # -- Docker image tag - tag: v1.17.1 # FLYTECONSOLE_TAG + tag: v1.19.0 # FLYTECONSOLE_TAG # -- Docker image pull policy pullPolicy: IfNotPresent # -- Default resources requests and limits for Flyteconsole deployment @@ -473,7 +473,7 @@ flyte: # -- Structure documented [here](https://pkg.go.dev/github.com/lyft/flyteplugins@v0.5.28/go/tasks/pluginmachinery/flytek8s/config#FlyteCoPilotConfig) co-pilot: name: flyte-copilot- - image: cr.flyte.org/flyteorg/flytecopilot:v1.13.2 # FLYTECOPILOT_IMAGE + image: cr.flyte.org/flyteorg/flytecopilot:v1.14.1 # FLYTECOPILOT_IMAGE start-timeout: 30s # -- Core propeller configuration diff --git a/charts/flyteagent/README.md b/charts/flyteagent/README.md index 47ca1380b3..5c93e88495 100644 --- a/charts/flyteagent/README.md +++ b/charts/flyteagent/README.md @@ -20,7 +20,7 @@ A Helm chart for Flyte agent | fullnameOverride | string | `""` | | | image.pullPolicy | string | `"IfNotPresent"` | Docker image pull policy | | image.repository | string | `"cr.flyte.org/flyteorg/flyteagent"` | Docker image for flyteagent deployment | -| image.tag | string | `"1.13.6"` | Docker image tag | +| image.tag | string | `"1.14.2"` | Docker image tag | | nameOverride | string | `""` | | | nodeSelector | object | `{}` | nodeSelector for flyteagent deployment | | 
podAnnotations | object | `{}` | Annotations for flyteagent pods | diff --git a/charts/flyteagent/values.yaml b/charts/flyteagent/values.yaml index fbc790b286..d6b1e7df28 100755 --- a/charts/flyteagent/values.yaml +++ b/charts/flyteagent/values.yaml @@ -23,7 +23,7 @@ image: # -- Docker image for flyteagent deployment repository: cr.flyte.org/flyteorg/flyteagent # FLYTEAGENT_IMAGE # -- Docker image tag - tag: 1.13.6 # FLYTEAGENT_TAG + tag: 1.14.2 # FLYTEAGENT_TAG # -- Docker image pull policy pullPolicy: IfNotPresent ports: diff --git a/datacatalog/.golangci.yml b/datacatalog/.golangci.yml index 6d13f4a3b6..77107079d0 100644 --- a/datacatalog/.golangci.yml +++ b/datacatalog/.golangci.yml @@ -1,35 +1,25 @@ -# WARNING: THIS FILE IS MANAGED IN THE 'BOILERPLATE' REPO AND COPIED TO OTHER REPOSITORIES. -# ONLY EDIT THIS FILE FROM WITHIN THE 'FLYTEORG/BOILERPLATE' REPOSITORY: -# -# TO OPT OUT OF UPDATES, SEE https://github.com/flyteorg/boilerplate/blob/master/Readme.rst - run: skip-dirs: - pkg/client - linters: disable-all: true enable: - - deadcode - errcheck - - gas + - gosec - gci - goconst - goimports - - golint - gosimple - govet - ineffassign - misspell - nakedret - staticcheck - - structcheck - typecheck - unconvert - unparam - unused - - varcheck - + - protogetter linters-settings: gci: custom-order: true @@ -38,6 +28,8 @@ linters-settings: - default - prefix(github.com/flyteorg) skip-generated: true + goconst: + ignore-tests: true issues: exclude: - copylocks diff --git a/datacatalog/pkg/manager/impl/artifact_data_store.go b/datacatalog/pkg/manager/impl/artifact_data_store.go index 5cbd3cc3e0..fa4a14c903 100644 --- a/datacatalog/pkg/manager/impl/artifact_data_store.go +++ b/datacatalog/pkg/manager/impl/artifact_data_store.go @@ -27,8 +27,8 @@ type artifactDataStore struct { } func (m *artifactDataStore) getDataLocation(ctx context.Context, artifact *datacatalog.Artifact, data *datacatalog.ArtifactData) (storage.DataReference, error) { - dataset := artifact.Dataset 
- return m.store.ConstructReference(ctx, m.storagePrefix, dataset.Project, dataset.Domain, dataset.Name, dataset.Version, artifact.Id, data.Name, artifactDataFile) + dataset := artifact.GetDataset() + return m.store.ConstructReference(ctx, m.storagePrefix, dataset.GetProject(), dataset.GetDomain(), dataset.GetName(), dataset.GetVersion(), artifact.GetId(), data.GetName(), artifactDataFile) } // Store marshalled data in data.pb under the storage prefix @@ -37,7 +37,7 @@ func (m *artifactDataStore) PutData(ctx context.Context, artifact *datacatalog.A if err != nil { return "", errors.NewDataCatalogErrorf(codes.Internal, "Unable to generate data location %s, err %v", dataLocation.String(), err) } - err = m.store.WriteProtobuf(ctx, dataLocation, storage.Options{}, data.Value) + err = m.store.WriteProtobuf(ctx, dataLocation, storage.Options{}, data.GetValue()) if err != nil { return "", errors.NewDataCatalogErrorf(codes.Internal, "Unable to store artifact data in location %s, err %v", dataLocation.String(), err) } diff --git a/datacatalog/pkg/manager/impl/artifact_manager.go b/datacatalog/pkg/manager/impl/artifact_manager.go index 40f3f40538..f32cb3f31f 100644 --- a/datacatalog/pkg/manager/impl/artifact_manager.go +++ b/datacatalog/pkg/manager/impl/artifact_manager.go @@ -58,7 +58,7 @@ func (m *artifactManager) CreateArtifact(ctx context.Context, request *datacatal timer := m.systemMetrics.createResponseTime.Start(ctx) defer timer.Stop() - artifact := request.Artifact + artifact := request.GetArtifact() err := validators.ValidateArtifact(artifact) if err != nil { logger.Warningf(ctx, "Invalid create artifact request %v, err: %v", request, err) @@ -66,8 +66,8 @@ func (m *artifactManager) CreateArtifact(ctx context.Context, request *datacatal return nil, err } - ctx = contextutils.WithProjectDomain(ctx, artifact.Dataset.Project, artifact.Dataset.Domain) - datasetKey := transformers.FromDatasetID(artifact.Dataset) + ctx = contextutils.WithProjectDomain(ctx, 
artifact.GetDataset().GetProject(), artifact.GetDataset().GetDomain()) + datasetKey := transformers.FromDatasetID(artifact.GetDataset()) // The dataset must exist for the artifact, let's verify that first dataset, err := m.repo.DatasetRepo().Get(ctx, datasetKey) @@ -80,16 +80,16 @@ func (m *artifactManager) CreateArtifact(ctx context.Context, request *datacatal // TODO: when adding a tag, need to verify one tag per partition combo // check that the artifact's partitions are the same partition values of the dataset datasetPartitionKeys := transformers.FromPartitionKeyModel(dataset.PartitionKeys) - err = validators.ValidatePartitions(datasetPartitionKeys, artifact.Partitions) + err = validators.ValidatePartitions(datasetPartitionKeys, artifact.GetPartitions()) if err != nil { - logger.Warnf(ctx, "Invalid artifact partitions %v, err: %+v", artifact.Partitions, err) + logger.Warnf(ctx, "Invalid artifact partitions %v, err: %+v", artifact.GetPartitions(), err) m.systemMetrics.createFailureCounter.Inc(ctx) return nil, err } // create Artifact Data offloaded storage files - artifactDataModels := make([]models.ArtifactData, len(request.Artifact.Data)) - for i, artifactData := range request.Artifact.Data { + artifactDataModels := make([]models.ArtifactData, len(request.GetArtifact().GetData())) + for i, artifactData := range request.GetArtifact().GetData() { dataLocation, err := m.artifactStore.PutData(ctx, artifact, artifactData) if err != nil { logger.Errorf(ctx, "Failed to store artifact data err: %v", err) @@ -97,12 +97,12 @@ func (m *artifactManager) CreateArtifact(ctx context.Context, request *datacatal return nil, err } - artifactDataModels[i].Name = artifactData.Name + artifactDataModels[i].Name = artifactData.GetName() artifactDataModels[i].Location = dataLocation.String() m.systemMetrics.createDataSuccessCounter.Inc(ctx) } - logger.Debugf(ctx, "Stored %v data for artifact %+v", len(artifactDataModels), artifact.Id) + logger.Debugf(ctx, "Stored %v data for artifact 
%+v", len(artifactDataModels), artifact.GetId()) artifactModel, err := transformers.CreateArtifactModel(request, artifactDataModels, dataset) if err != nil { @@ -114,7 +114,7 @@ func (m *artifactManager) CreateArtifact(ctx context.Context, request *datacatal err = m.repo.ArtifactRepo().Create(ctx, artifactModel) if err != nil { if errors.IsAlreadyExistsError(err) { - logger.Warnf(ctx, "Artifact already exists key: %+v, err %v", artifact.Id, err) + logger.Warnf(ctx, "Artifact already exists key: %+v, err %v", artifact.GetId(), err) m.systemMetrics.alreadyExistsCounter.Inc(ctx) } else { logger.Errorf(ctx, "Failed to create artifact %v, err: %v", artifactDataModels, err) @@ -123,7 +123,7 @@ func (m *artifactManager) CreateArtifact(ctx context.Context, request *datacatal return nil, err } - logger.Debugf(ctx, "Successfully created artifact id: %v", artifact.Id) + logger.Debugf(ctx, "Successfully created artifact id: %v", artifact.GetId()) m.systemMetrics.createSuccessCounter.Inc(ctx) return &datacatalog.CreateArtifactResponse{}, nil @@ -141,7 +141,7 @@ func (m *artifactManager) GetArtifact(ctx context.Context, request *datacatalog. return nil, err } - datasetID := request.Dataset + datasetID := request.GetDataset() artifactModel, err := m.findArtifact(ctx, datasetID, request) if err != nil { @@ -164,7 +164,7 @@ func (m *artifactManager) GetArtifact(ctx context.Context, request *datacatalog. 
} artifact.Data = artifactDataList - logger.Debugf(ctx, "Retrieved artifact dataset %v, id: %v", artifact.Dataset, artifact.Id) + logger.Debugf(ctx, "Retrieved artifact dataset %v, id: %v", artifact.GetDataset(), artifact.GetId()) m.systemMetrics.getSuccessCounter.Inc(ctx) return &datacatalog.GetArtifactResponse{ Artifact: artifact, @@ -249,7 +249,7 @@ func (m *artifactManager) ListArtifacts(ctx context.Context, request *datacatalo } // Verify the dataset exists before listing artifacts - datasetKey := transformers.FromDatasetID(request.Dataset) + datasetKey := transformers.FromDatasetID(request.GetDataset()) dataset, err := m.repo.DatasetRepo().Get(ctx, datasetKey) if err != nil { logger.Warnf(ctx, "Failed to get dataset for listing artifacts %v, err: %v", datasetKey, err) @@ -265,7 +265,7 @@ func (m *artifactManager) ListArtifacts(ctx context.Context, request *datacatalo return nil, err } - err = transformers.ApplyPagination(request.Pagination, &listInput) + err = transformers.ApplyPagination(request.GetPagination(), &listInput) if err != nil { logger.Warningf(ctx, "Invalid pagination options in list artifact request %v, err: %v", request, err) m.systemMetrics.validationErrorCounter.Inc(ctx) @@ -311,7 +311,7 @@ func (m *artifactManager) ListArtifacts(ctx context.Context, request *datacatalo // stored data will be overwritten in the underlying blob storage, no longer existing data (based on ArtifactData name) // will be deleted. 
func (m *artifactManager) UpdateArtifact(ctx context.Context, request *datacatalog.UpdateArtifactRequest) (*datacatalog.UpdateArtifactResponse, error) { - ctx = contextutils.WithProjectDomain(ctx, request.Dataset.Project, request.Dataset.Domain) + ctx = contextutils.WithProjectDomain(ctx, request.GetDataset().GetProject(), request.GetDataset().GetDomain()) timer := m.systemMetrics.updateResponseTime.Start(ctx) defer timer.Stop() @@ -333,9 +333,9 @@ func (m *artifactManager) UpdateArtifact(ctx context.Context, request *datacatal } // artifactModel needs to be updated with new SerializedMetadata - serializedMetadata, err := transformers.SerializedMetadata(request.Metadata) + serializedMetadata, err := transformers.SerializedMetadata(request.GetMetadata()) if err != nil { - logger.Errorf(ctx, "Error in transforming Metadata from request %+v, err %v", request.Metadata, err) + logger.Errorf(ctx, "Error in transforming Metadata from request %+v, err %v", request.GetMetadata(), err) m.systemMetrics.transformerErrorCounter.Inc(ctx) m.systemMetrics.updateFailureCounter.Inc(ctx) return nil, err @@ -353,9 +353,9 @@ func (m *artifactManager) UpdateArtifact(ctx context.Context, request *datacatal // overwrite existing artifact data and upload new entries, building a map of artifact data names to remove // deleted entries from the blob storage after the upload completed artifactDataNames := make(map[string]struct{}) - artifactDataModels := make([]models.ArtifactData, len(request.Data)) - for i, artifactData := range request.Data { - artifactDataNames[artifactData.Name] = struct{}{} + artifactDataModels := make([]models.ArtifactData, len(request.GetData())) + for i, artifactData := range request.GetData() { + artifactDataNames[artifactData.GetName()] = struct{}{} dataLocation, err := m.artifactStore.PutData(ctx, artifact, artifactData) if err != nil { @@ -365,7 +365,7 @@ func (m *artifactManager) UpdateArtifact(ctx context.Context, request *datacatal return nil, err } - 
artifactDataModels[i].Name = artifactData.Name + artifactDataModels[i].Name = artifactData.GetName() artifactDataModels[i].Location = dataLocation.String() m.systemMetrics.updateDataSuccessCounter.Inc(ctx) } @@ -384,7 +384,7 @@ func (m *artifactManager) UpdateArtifact(ctx context.Context, request *datacatal err = m.repo.ArtifactRepo().Update(ctx, artifactModel) if err != nil { if errors.IsDoesNotExistError(err) { - logger.Warnf(ctx, "Artifact does not exist key: %+v, err %v", artifact.Id, err) + logger.Warnf(ctx, "Artifact does not exist key: %+v, err %v", artifact.GetId(), err) m.systemMetrics.doesNotExistCounter.Inc(ctx) } else { logger.Errorf(ctx, "Failed to update artifact %v, err: %v", artifactModel, err) @@ -408,11 +408,11 @@ func (m *artifactManager) UpdateArtifact(ctx context.Context, request *datacatal m.systemMetrics.deleteDataSuccessCounter.Inc(ctx) } - logger.Debugf(ctx, "Successfully updated artifact id: %v", artifact.Id) + logger.Debugf(ctx, "Successfully updated artifact id: %v", artifact.GetId()) m.systemMetrics.updateSuccessCounter.Inc(ctx) return &datacatalog.UpdateArtifactResponse{ - ArtifactId: artifact.Id, + ArtifactId: artifact.GetId(), }, nil } diff --git a/datacatalog/pkg/manager/impl/artifact_manager_test.go b/datacatalog/pkg/manager/impl/artifact_manager_test.go index 2bf39b04d9..0df125f1ec 100644 --- a/datacatalog/pkg/manager/impl/artifact_manager_test.go +++ b/datacatalog/pkg/manager/impl/artifact_manager_test.go @@ -107,50 +107,50 @@ func newMockDataCatalogRepo() *mocks.DataCatalogRepo { } func getExpectedDatastoreLocation(ctx context.Context, store *storage.DataStore, prefix storage.DataReference, artifact *datacatalog.Artifact, idx int) (storage.DataReference, error) { - return getExpectedDatastoreLocationFromName(ctx, store, prefix, artifact, artifact.Data[idx].Name) + return getExpectedDatastoreLocationFromName(ctx, store, prefix, artifact, artifact.GetData()[idx].GetName()) } func getExpectedDatastoreLocationFromName(ctx 
context.Context, store *storage.DataStore, prefix storage.DataReference, artifact *datacatalog.Artifact, artifactDataName string) (storage.DataReference, error) { - dataset := artifact.Dataset - return store.ConstructReference(ctx, prefix, dataset.Project, dataset.Domain, dataset.Name, dataset.Version, artifact.Id, artifactDataName, artifactDataFile) + dataset := artifact.GetDataset() + return store.ConstructReference(ctx, prefix, dataset.GetProject(), dataset.GetDomain(), dataset.GetName(), dataset.GetVersion(), artifact.GetId(), artifactDataName, artifactDataFile) } func getExpectedArtifactModel(ctx context.Context, t *testing.T, datastore *storage.DataStore, artifact *datacatalog.Artifact) models.Artifact { - expectedDataset := artifact.Dataset + expectedDataset := artifact.GetDataset() - artifactData := make([]models.ArtifactData, len(artifact.Data)) + artifactData := make([]models.ArtifactData, len(artifact.GetData())) // Write sample artifact data to the expected location and see if the retrieved data matches - for i := range artifact.Data { + for i := range artifact.GetData() { testStoragePrefix, err := datastore.ConstructReference(ctx, datastore.GetBaseContainerFQN(ctx), "test") assert.NoError(t, err) dataLocation, err := getExpectedDatastoreLocation(ctx, datastore, testStoragePrefix, artifact, i) assert.NoError(t, err) - err = datastore.WriteProtobuf(ctx, dataLocation, storage.Options{}, artifact.Data[i].Value) + err = datastore.WriteProtobuf(ctx, dataLocation, storage.Options{}, artifact.GetData()[i].GetValue()) assert.NoError(t, err) - artifactData[i].Name = artifact.Data[i].Name + artifactData[i].Name = artifact.GetData()[i].GetName() artifactData[i].Location = dataLocation.String() } // construct the artifact model we will return on the queries - serializedMetadata, err := proto.Marshal(artifact.Metadata) + serializedMetadata, err := proto.Marshal(artifact.GetMetadata()) assert.NoError(t, err) datasetKey := models.DatasetKey{ - Project: 
expectedDataset.Project, - Domain: expectedDataset.Domain, - Version: expectedDataset.Version, - Name: expectedDataset.Name, - UUID: expectedDataset.UUID, + Project: expectedDataset.GetProject(), + Domain: expectedDataset.GetDomain(), + Version: expectedDataset.GetVersion(), + Name: expectedDataset.GetName(), + UUID: expectedDataset.GetUUID(), } return models.Artifact{ ArtifactKey: models.ArtifactKey{ - DatasetProject: expectedDataset.Project, - DatasetDomain: expectedDataset.Domain, - DatasetVersion: expectedDataset.Version, - DatasetName: expectedDataset.Name, - ArtifactID: artifact.Id, + DatasetProject: expectedDataset.GetProject(), + DatasetDomain: expectedDataset.GetDomain(), + DatasetVersion: expectedDataset.GetVersion(), + DatasetName: expectedDataset.GetName(), + ArtifactID: artifact.GetId(), }, - DatasetUUID: expectedDataset.UUID, + DatasetUUID: expectedDataset.GetUUID(), ArtifactData: artifactData, Dataset: models.Dataset{ DatasetKey: datasetKey, @@ -162,7 +162,7 @@ func getExpectedArtifactModel(ctx context.Context, t *testing.T, datastore *stor {Key: "key2", Value: "value2"}, }, Tags: []models.Tag{ - {TagKey: models.TagKey{TagName: "test-tag"}, DatasetUUID: expectedDataset.UUID, ArtifactID: artifact.Id}, + {TagKey: models.TagKey{TagName: "test-tag"}, DatasetUUID: expectedDataset.GetUUID(), ArtifactID: artifact.GetId()}, }, BaseModel: models.BaseModel{ CreatedAt: getTestTimestamp(), @@ -180,15 +180,15 @@ func TestCreateArtifact(t *testing.T) { expectedDataset := getTestDataset() mockDatasetModel := models.Dataset{ DatasetKey: models.DatasetKey{ - Project: expectedDataset.Id.Project, - Domain: expectedDataset.Id.Domain, - Name: expectedDataset.Id.Name, - Version: expectedDataset.Id.Version, - UUID: expectedDataset.Id.UUID, + Project: expectedDataset.GetId().GetProject(), + Domain: expectedDataset.GetId().GetDomain(), + Name: expectedDataset.GetId().GetName(), + Version: expectedDataset.GetId().GetVersion(), + UUID: expectedDataset.GetId().GetUUID(), }, 
PartitionKeys: []models.PartitionKey{ - {Name: expectedDataset.PartitionKeys[0]}, - {Name: expectedDataset.PartitionKeys[1]}, + {Name: expectedDataset.GetPartitionKeys()[0]}, + {Name: expectedDataset.GetPartitionKeys()[1]}, }, } @@ -200,30 +200,30 @@ func TestCreateArtifact(t *testing.T) { dcRepo := newMockDataCatalogRepo() dcRepo.MockDatasetRepo.On("Get", mock.Anything, mock.MatchedBy(func(dataset models.DatasetKey) bool { - return dataset.Project == expectedDataset.Id.Project && - dataset.Domain == expectedDataset.Id.Domain && - dataset.Name == expectedDataset.Id.Name && - dataset.Version == expectedDataset.Id.Version + return dataset.Project == expectedDataset.GetId().GetProject() && + dataset.Domain == expectedDataset.GetId().GetDomain() && + dataset.Name == expectedDataset.GetId().GetName() && + dataset.Version == expectedDataset.GetId().GetVersion() })).Return(mockDatasetModel, nil) dcRepo.MockArtifactRepo.On("Create", mock.MatchedBy(func(ctx context.Context) bool { return true }), mock.MatchedBy(func(artifact models.Artifact) bool { expectedArtifact := getTestArtifact() - return artifact.ArtifactID == expectedArtifact.Id && + return artifact.ArtifactID == expectedArtifact.GetId() && artifact.SerializedMetadata != nil && - len(artifact.ArtifactData) == len(expectedArtifact.Data) && - artifact.ArtifactKey.DatasetProject == expectedArtifact.Dataset.Project && - artifact.ArtifactKey.DatasetDomain == expectedArtifact.Dataset.Domain && - artifact.ArtifactKey.DatasetName == expectedArtifact.Dataset.Name && - artifact.ArtifactKey.DatasetVersion == expectedArtifact.Dataset.Version && - artifact.DatasetUUID == expectedArtifact.Dataset.UUID && - artifact.Partitions[0].Key == expectedArtifact.Partitions[0].Key && - artifact.Partitions[0].Value == expectedArtifact.Partitions[0].Value && - artifact.Partitions[0].DatasetUUID == expectedDataset.Id.UUID && - artifact.Partitions[1].Key == expectedArtifact.Partitions[1].Key && - artifact.Partitions[1].Value == 
expectedArtifact.Partitions[1].Value && - artifact.Partitions[1].DatasetUUID == expectedDataset.Id.UUID + len(artifact.ArtifactData) == len(expectedArtifact.GetData()) && + artifact.ArtifactKey.DatasetProject == expectedArtifact.GetDataset().GetProject() && + artifact.ArtifactKey.DatasetDomain == expectedArtifact.GetDataset().GetDomain() && + artifact.ArtifactKey.DatasetName == expectedArtifact.GetDataset().GetName() && + artifact.ArtifactKey.DatasetVersion == expectedArtifact.GetDataset().GetVersion() && + artifact.DatasetUUID == expectedArtifact.GetDataset().GetUUID() && + artifact.Partitions[0].Key == expectedArtifact.GetPartitions()[0].GetKey() && + artifact.Partitions[0].Value == expectedArtifact.GetPartitions()[0].GetValue() && + artifact.Partitions[0].DatasetUUID == expectedDataset.GetId().GetUUID() && + artifact.Partitions[1].Key == expectedArtifact.GetPartitions()[1].GetKey() && + artifact.Partitions[1].Value == expectedArtifact.GetPartitions()[1].GetValue() && + artifact.Partitions[1].DatasetUUID == expectedDataset.GetId().GetUUID() })).Return(nil) request := &datacatalog.CreateArtifactRequest{Artifact: getTestArtifact()} @@ -238,7 +238,7 @@ func TestCreateArtifact(t *testing.T) { var value core.Literal err = datastore.ReadProtobuf(ctx, dataRef, &value) assert.NoError(t, err) - assert.True(t, proto.Equal(&value, getTestArtifact().Data[0].Value)) + assert.True(t, proto.Equal(&value, getTestArtifact().GetData()[0].GetValue())) }) t.Run("Dataset does not exist", func(t *testing.T) { @@ -258,7 +258,7 @@ func TestCreateArtifact(t *testing.T) { request := &datacatalog.CreateArtifactRequest{ Artifact: &datacatalog.Artifact{ // missing artifact id - Dataset: getTestDataset().Id, + Dataset: getTestDataset().GetId(), }, } @@ -273,7 +273,7 @@ func TestCreateArtifact(t *testing.T) { request := &datacatalog.CreateArtifactRequest{ Artifact: &datacatalog.Artifact{ Id: "test", - Dataset: getTestDataset().Id, + Dataset: getTestDataset().GetId(), // missing artifactData }, 
} @@ -294,13 +294,13 @@ func TestCreateArtifact(t *testing.T) { mock.MatchedBy(func(ctx context.Context) bool { return true }), mock.MatchedBy(func(artifact models.Artifact) bool { expectedArtifact := getTestArtifact() - return artifact.ArtifactID == expectedArtifact.Id && + return artifact.ArtifactID == expectedArtifact.GetId() && artifact.SerializedMetadata != nil && - len(artifact.ArtifactData) == len(expectedArtifact.Data) && - artifact.ArtifactKey.DatasetProject == expectedArtifact.Dataset.Project && - artifact.ArtifactKey.DatasetDomain == expectedArtifact.Dataset.Domain && - artifact.ArtifactKey.DatasetName == expectedArtifact.Dataset.Name && - artifact.ArtifactKey.DatasetVersion == expectedArtifact.Dataset.Version + len(artifact.ArtifactData) == len(expectedArtifact.GetData()) && + artifact.ArtifactKey.DatasetProject == expectedArtifact.GetDataset().GetProject() && + artifact.ArtifactKey.DatasetDomain == expectedArtifact.GetDataset().GetDomain() && + artifact.ArtifactKey.DatasetName == expectedArtifact.GetDataset().GetName() && + artifact.ArtifactKey.DatasetVersion == expectedArtifact.GetDataset().GetVersion() })).Return(status.Error(codes.AlreadyExists, "test already exists")) request := &datacatalog.CreateArtifactRequest{Artifact: getTestArtifact()} @@ -338,10 +338,10 @@ func TestCreateArtifact(t *testing.T) { dcRepo := newMockDataCatalogRepo() mockDatasetModel := models.Dataset{ DatasetKey: models.DatasetKey{ - Project: expectedDataset.Id.Project, - Domain: expectedDataset.Id.Domain, - Name: expectedDataset.Id.Name, - Version: expectedDataset.Id.Version, + Project: expectedDataset.GetId().GetProject(), + Domain: expectedDataset.GetId().GetDomain(), + Name: expectedDataset.GetId().GetName(), + Version: expectedDataset.GetId().GetVersion(), }, } dcRepo.MockDatasetRepo.On("Get", mock.Anything, mock.Anything).Return(mockDatasetModel, nil) @@ -392,21 +392,21 @@ func TestGetArtifact(t *testing.T) { t.Run("Get by Id", func(t *testing.T) { 
dcRepo.MockArtifactRepo.On("Get", mock.Anything, mock.MatchedBy(func(artifactKey models.ArtifactKey) bool { - return artifactKey.ArtifactID == expectedArtifact.Id && - artifactKey.DatasetProject == expectedArtifact.Dataset.Project && - artifactKey.DatasetDomain == expectedArtifact.Dataset.Domain && - artifactKey.DatasetVersion == expectedArtifact.Dataset.Version && - artifactKey.DatasetName == expectedArtifact.Dataset.Name + return artifactKey.ArtifactID == expectedArtifact.GetId() && + artifactKey.DatasetProject == expectedArtifact.GetDataset().GetProject() && + artifactKey.DatasetDomain == expectedArtifact.GetDataset().GetDomain() && + artifactKey.DatasetVersion == expectedArtifact.GetDataset().GetVersion() && + artifactKey.DatasetName == expectedArtifact.GetDataset().GetName() })).Return(mockArtifactModel, nil) artifactManager := NewArtifactManager(dcRepo, datastore, testStoragePrefix, mockScope.NewTestScope()) artifactResponse, err := artifactManager.GetArtifact(ctx, &datacatalog.GetArtifactRequest{ - Dataset: getTestDataset().Id, - QueryHandle: &datacatalog.GetArtifactRequest_ArtifactId{ArtifactId: expectedArtifact.Id}, + Dataset: getTestDataset().GetId(), + QueryHandle: &datacatalog.GetArtifactRequest_ArtifactId{ArtifactId: expectedArtifact.GetId()}, }) assert.NoError(t, err) - assert.True(t, proto.Equal(expectedArtifact, artifactResponse.Artifact)) + assert.True(t, proto.Equal(expectedArtifact, artifactResponse.GetArtifact())) }) t.Run("Get by Artifact Tag", func(t *testing.T) { @@ -434,16 +434,16 @@ func TestGetArtifact(t *testing.T) { artifactManager := NewArtifactManager(dcRepo, datastore, testStoragePrefix, mockScope.NewTestScope()) artifactResponse, err := artifactManager.GetArtifact(ctx, &datacatalog.GetArtifactRequest{ - Dataset: getTestDataset().Id, + Dataset: getTestDataset().GetId(), QueryHandle: &datacatalog.GetArtifactRequest_TagName{TagName: expectedTag.TagName}, }) assert.NoError(t, err) - assert.True(t, proto.Equal(expectedArtifact, 
artifactResponse.Artifact)) + assert.True(t, proto.Equal(expectedArtifact, artifactResponse.GetArtifact())) }) t.Run("Get missing input", func(t *testing.T) { artifactManager := NewArtifactManager(dcRepo, datastore, testStoragePrefix, mockScope.NewTestScope()) - artifactResponse, err := artifactManager.GetArtifact(ctx, &datacatalog.GetArtifactRequest{Dataset: getTestDataset().Id}) + artifactResponse, err := artifactManager.GetArtifact(ctx, &datacatalog.GetArtifactRequest{Dataset: getTestDataset().GetId()}) assert.Error(t, err) assert.Nil(t, artifactResponse) responseCode := status.Code(err) @@ -454,7 +454,7 @@ func TestGetArtifact(t *testing.T) { dcRepo.MockTagRepo.On("Get", mock.Anything, mock.Anything).Return( models.Tag{}, errors.NewDataCatalogError(codes.NotFound, "tag with artifact does not exist")) artifactManager := NewArtifactManager(dcRepo, datastore, testStoragePrefix, mockScope.NewTestScope()) - artifactResponse, err := artifactManager.GetArtifact(ctx, &datacatalog.GetArtifactRequest{Dataset: getTestDataset().Id, QueryHandle: &datacatalog.GetArtifactRequest_TagName{TagName: "test"}}) + artifactResponse, err := artifactManager.GetArtifact(ctx, &datacatalog.GetArtifactRequest{Dataset: getTestDataset().GetId(), QueryHandle: &datacatalog.GetArtifactRequest_TagName{TagName: "test"}}) assert.Error(t, err) assert.Nil(t, artifactResponse) responseCode := status.Code(err) @@ -473,11 +473,11 @@ func TestListArtifact(t *testing.T) { expectedDataset := getTestDataset() mockDatasetModel := models.Dataset{ DatasetKey: models.DatasetKey{ - Project: expectedDataset.Id.Project, - Domain: expectedDataset.Id.Domain, - Name: expectedDataset.Id.Name, - Version: expectedDataset.Id.Version, - UUID: expectedDataset.Id.UUID, + Project: expectedDataset.GetId().GetProject(), + Domain: expectedDataset.GetId().GetDomain(), + Name: expectedDataset.GetId().GetName(), + Version: expectedDataset.GetId().GetVersion(), + UUID: expectedDataset.GetId().GetUUID(), }, } @@ -500,7 +500,7 @@ 
func TestListArtifact(t *testing.T) { }, } - artifactResponse, err := artifactManager.ListArtifacts(ctx, &datacatalog.ListArtifactsRequest{Dataset: getTestDataset().Id, Filter: filter}) + artifactResponse, err := artifactManager.ListArtifacts(ctx, &datacatalog.ListArtifactsRequest{Dataset: getTestDataset().GetId(), Filter: filter}) assert.Error(t, err) assert.Nil(t, artifactResponse) responseCode := status.Code(err) @@ -543,10 +543,10 @@ func TestListArtifact(t *testing.T) { dcRepo.MockDatasetRepo.On("Get", mock.Anything, mock.MatchedBy(func(dataset models.DatasetKey) bool { - return dataset.Project == expectedDataset.Id.Project && - dataset.Domain == expectedDataset.Id.Domain && - dataset.Name == expectedDataset.Id.Name && - dataset.Version == expectedDataset.Id.Version + return dataset.Project == expectedDataset.GetId().GetProject() && + dataset.Domain == expectedDataset.GetId().GetDomain() && + dataset.Name == expectedDataset.GetId().GetName() && + dataset.Version == expectedDataset.GetId().GetVersion() })).Return(mockDatasetModel, nil) mockArtifacts := []models.Artifact{ @@ -556,10 +556,10 @@ func TestListArtifact(t *testing.T) { dcRepo.MockArtifactRepo.On("List", mock.Anything, mock.MatchedBy(func(dataset models.DatasetKey) bool { - return dataset.Project == expectedDataset.Id.Project && - dataset.Domain == expectedDataset.Id.Domain && - dataset.Name == expectedDataset.Id.Name && - dataset.Version == expectedDataset.Id.Version + return dataset.Project == expectedDataset.GetId().GetProject() && + dataset.Domain == expectedDataset.GetId().GetDomain() && + dataset.Name == expectedDataset.GetId().GetName() && + dataset.Version == expectedDataset.GetId().GetVersion() }), mock.MatchedBy(func(listInput models.ListModelsInput) bool { return len(listInput.ModelFilters) == 3 && @@ -573,7 +573,7 @@ func TestListArtifact(t *testing.T) { listInput.Offset == 0 })).Return(mockArtifacts, nil) - artifactResponse, err := artifactManager.ListArtifacts(ctx, 
&datacatalog.ListArtifactsRequest{Dataset: expectedDataset.Id, Filter: filter}) + artifactResponse, err := artifactManager.ListArtifacts(ctx, &datacatalog.ListArtifactsRequest{Dataset: expectedDataset.GetId(), Filter: filter}) assert.NoError(t, err) assert.NotEmpty(t, artifactResponse) }) @@ -584,10 +584,10 @@ func TestListArtifact(t *testing.T) { dcRepo.MockDatasetRepo.On("Get", mock.Anything, mock.MatchedBy(func(dataset models.DatasetKey) bool { - return dataset.Project == expectedDataset.Id.Project && - dataset.Domain == expectedDataset.Id.Domain && - dataset.Name == expectedDataset.Id.Name && - dataset.Version == expectedDataset.Id.Version + return dataset.Project == expectedDataset.GetId().GetProject() && + dataset.Domain == expectedDataset.GetId().GetDomain() && + dataset.Name == expectedDataset.GetId().GetName() && + dataset.Version == expectedDataset.GetId().GetVersion() })).Return(mockDatasetModel, nil) mockArtifacts := []models.Artifact{ @@ -596,16 +596,16 @@ func TestListArtifact(t *testing.T) { } dcRepo.MockArtifactRepo.On("List", mock.Anything, mock.MatchedBy(func(dataset models.DatasetKey) bool { - return dataset.Project == expectedDataset.Id.Project && - dataset.Domain == expectedDataset.Id.Domain && - dataset.Name == expectedDataset.Id.Name && - dataset.Version == expectedDataset.Id.Version + return dataset.Project == expectedDataset.GetId().GetProject() && + dataset.Domain == expectedDataset.GetId().GetDomain() && + dataset.Name == expectedDataset.GetId().GetName() && + dataset.Version == expectedDataset.GetId().GetVersion() }), mock.MatchedBy(func(listInput models.ListModelsInput) bool { return len(listInput.ModelFilters) == 0 })).Return(mockArtifacts, nil) - artifactResponse, err := artifactManager.ListArtifacts(ctx, &datacatalog.ListArtifactsRequest{Dataset: expectedDataset.Id, Filter: filter}) + artifactResponse, err := artifactManager.ListArtifacts(ctx, &datacatalog.ListArtifactsRequest{Dataset: expectedDataset.GetId(), Filter: filter}) 
assert.NoError(t, err) assert.NotEmpty(t, artifactResponse) }) @@ -634,11 +634,11 @@ func TestUpdateArtifact(t *testing.T) { dcRepo.MockArtifactRepo.On("Get", mock.MatchedBy(func(ctx context.Context) bool { return true }), mock.MatchedBy(func(artifactKey models.ArtifactKey) bool { - return artifactKey.ArtifactID == expectedArtifact.Id && - artifactKey.DatasetProject == expectedArtifact.Dataset.Project && - artifactKey.DatasetDomain == expectedArtifact.Dataset.Domain && - artifactKey.DatasetName == expectedArtifact.Dataset.Name && - artifactKey.DatasetVersion == expectedArtifact.Dataset.Version + return artifactKey.ArtifactID == expectedArtifact.GetId() && + artifactKey.DatasetProject == expectedArtifact.GetDataset().GetProject() && + artifactKey.DatasetDomain == expectedArtifact.GetDataset().GetDomain() && + artifactKey.DatasetName == expectedArtifact.GetDataset().GetName() && + artifactKey.DatasetVersion == expectedArtifact.GetDataset().GetVersion() })).Return(mockArtifactModel, nil) metaData := &datacatalog.Metadata{ @@ -650,18 +650,18 @@ func TestUpdateArtifact(t *testing.T) { dcRepo.MockArtifactRepo.On("Update", mock.MatchedBy(func(ctx context.Context) bool { return true }), mock.MatchedBy(func(artifact models.Artifact) bool { - return artifact.ArtifactID == expectedArtifact.Id && - artifact.ArtifactKey.DatasetProject == expectedArtifact.Dataset.Project && - artifact.ArtifactKey.DatasetDomain == expectedArtifact.Dataset.Domain && - artifact.ArtifactKey.DatasetName == expectedArtifact.Dataset.Name && - artifact.ArtifactKey.DatasetVersion == expectedArtifact.Dataset.Version && + return artifact.ArtifactID == expectedArtifact.GetId() && + artifact.ArtifactKey.DatasetProject == expectedArtifact.GetDataset().GetProject() && + artifact.ArtifactKey.DatasetDomain == expectedArtifact.GetDataset().GetDomain() && + artifact.ArtifactKey.DatasetName == expectedArtifact.GetDataset().GetName() && + artifact.ArtifactKey.DatasetVersion == 
expectedArtifact.GetDataset().GetVersion() && reflect.DeepEqual(artifact.SerializedMetadata, serializedMetadata) })).Return(nil) request := &datacatalog.UpdateArtifactRequest{ - Dataset: expectedDataset.Id, + Dataset: expectedDataset.GetId(), QueryHandle: &datacatalog.UpdateArtifactRequest_ArtifactId{ - ArtifactId: expectedArtifact.Id, + ArtifactId: expectedArtifact.GetId(), }, Data: []*datacatalog.ArtifactData{ { @@ -682,7 +682,7 @@ func TestUpdateArtifact(t *testing.T) { artifactResponse, err := artifactManager.UpdateArtifact(ctx, request) assert.NoError(t, err) assert.NotNil(t, artifactResponse) - assert.Equal(t, expectedArtifact.Id, artifactResponse.GetArtifactId()) + assert.Equal(t, expectedArtifact.GetId(), artifactResponse.GetArtifactId()) dcRepo.MockArtifactRepo.AssertExpectations(t) // check that the datastore has the updated artifactData available @@ -724,11 +724,11 @@ func TestUpdateArtifact(t *testing.T) { dcRepo.MockArtifactRepo.On("Update", mock.MatchedBy(func(ctx context.Context) bool { return true }), mock.MatchedBy(func(artifact models.Artifact) bool { - return artifact.ArtifactID == expectedArtifact.Id && - artifact.ArtifactKey.DatasetProject == expectedArtifact.Dataset.Project && - artifact.ArtifactKey.DatasetDomain == expectedArtifact.Dataset.Domain && - artifact.ArtifactKey.DatasetName == expectedArtifact.Dataset.Name && - artifact.ArtifactKey.DatasetVersion == expectedArtifact.Dataset.Version && + return artifact.ArtifactID == expectedArtifact.GetId() && + artifact.ArtifactKey.DatasetProject == expectedArtifact.GetDataset().GetProject() && + artifact.ArtifactKey.DatasetDomain == expectedArtifact.GetDataset().GetDomain() && + artifact.ArtifactKey.DatasetName == expectedArtifact.GetDataset().GetName() && + artifact.ArtifactKey.DatasetVersion == expectedArtifact.GetDataset().GetVersion() && reflect.DeepEqual(artifact.SerializedMetadata, serializedMetadata) })).Return(nil) @@ -753,7 +753,7 @@ func TestUpdateArtifact(t *testing.T) { }, nil) request 
:= &datacatalog.UpdateArtifactRequest{ - Dataset: expectedDataset.Id, + Dataset: expectedDataset.GetId(), QueryHandle: &datacatalog.UpdateArtifactRequest_TagName{ TagName: expectedTag.TagName, }, @@ -776,7 +776,7 @@ func TestUpdateArtifact(t *testing.T) { artifactResponse, err := artifactManager.UpdateArtifact(ctx, request) assert.NoError(t, err) assert.NotNil(t, artifactResponse) - assert.Equal(t, expectedArtifact.Id, artifactResponse.GetArtifactId()) + assert.Equal(t, expectedArtifact.GetId(), artifactResponse.GetArtifactId()) dcRepo.MockArtifactRepo.AssertExpectations(t) // check that the datastore has the updated artifactData available @@ -809,14 +809,14 @@ func TestUpdateArtifact(t *testing.T) { dcRepo := newMockDataCatalogRepo() dcRepo.MockArtifactRepo.On("Get", mock.Anything, mock.Anything).Return(models.Artifact{}, repoErrors.GetMissingEntityError("Artifact", &datacatalog.Artifact{ - Dataset: expectedDataset.Id, - Id: expectedArtifact.Id, + Dataset: expectedDataset.GetId(), + Id: expectedArtifact.GetId(), })) request := &datacatalog.UpdateArtifactRequest{ - Dataset: expectedDataset.Id, + Dataset: expectedDataset.GetId(), QueryHandle: &datacatalog.UpdateArtifactRequest_ArtifactId{ - ArtifactId: expectedArtifact.Id, + ArtifactId: expectedArtifact.GetId(), }, Data: []*datacatalog.ArtifactData{ { @@ -844,7 +844,7 @@ func TestUpdateArtifact(t *testing.T) { dcRepo := newMockDataCatalogRepo() request := &datacatalog.UpdateArtifactRequest{ - Dataset: expectedDataset.Id, + Dataset: expectedDataset.GetId(), QueryHandle: &datacatalog.UpdateArtifactRequest_ArtifactId{}, Data: []*datacatalog.ArtifactData{ { @@ -872,7 +872,7 @@ func TestUpdateArtifact(t *testing.T) { dcRepo := newMockDataCatalogRepo() request := &datacatalog.UpdateArtifactRequest{ - Dataset: expectedDataset.Id, + Dataset: expectedDataset.GetId(), QueryHandle: &datacatalog.UpdateArtifactRequest_TagName{}, Data: []*datacatalog.ArtifactData{ { @@ -900,9 +900,9 @@ func TestUpdateArtifact(t *testing.T) { 
dcRepo := newMockDataCatalogRepo() request := &datacatalog.UpdateArtifactRequest{ - Dataset: expectedDataset.Id, + Dataset: expectedDataset.GetId(), QueryHandle: &datacatalog.UpdateArtifactRequest_ArtifactId{ - ArtifactId: expectedArtifact.Id, + ArtifactId: expectedArtifact.GetId(), }, Data: nil, } @@ -921,9 +921,9 @@ func TestUpdateArtifact(t *testing.T) { dcRepo := newMockDataCatalogRepo() request := &datacatalog.UpdateArtifactRequest{ - Dataset: expectedDataset.Id, + Dataset: expectedDataset.GetId(), QueryHandle: &datacatalog.UpdateArtifactRequest_ArtifactId{ - ArtifactId: expectedArtifact.Id, + ArtifactId: expectedArtifact.GetId(), }, Data: []*datacatalog.ArtifactData{}, } diff --git a/datacatalog/pkg/manager/impl/dataset_manager.go b/datacatalog/pkg/manager/impl/dataset_manager.go index 0db84d6360..8caca3f3a3 100644 --- a/datacatalog/pkg/manager/impl/dataset_manager.go +++ b/datacatalog/pkg/manager/impl/dataset_manager.go @@ -44,12 +44,12 @@ type datasetManager struct { func (dm *datasetManager) validateCreateRequest(request *datacatalog.CreateDatasetRequest) error { errorSet := make([]error, 0) - err := validators.ValidateDatasetID(request.Dataset.Id) + err := validators.ValidateDatasetID(request.GetDataset().GetId()) if err != nil { errorSet = append(errorSet, err) } - err = validators.ValidateUniquePartitionKeys(request.Dataset.PartitionKeys) + err = validators.ValidateUniquePartitionKeys(request.GetDataset().GetPartitionKeys()) if err != nil { errorSet = append(errorSet, err) } @@ -71,7 +71,7 @@ func (dm *datasetManager) CreateDataset(ctx context.Context, request *datacatalo return nil, err } - datasetModel, err := transformers.CreateDatasetModel(request.Dataset) + datasetModel, err := transformers.CreateDatasetModel(request.GetDataset()) if err != nil { logger.Errorf(ctx, "Unable to transform create dataset request %+v err: %v", request, err) dm.systemMetrics.transformerErrorCounter.Inc(ctx) @@ -81,7 +81,7 @@ func (dm *datasetManager) CreateDataset(ctx 
context.Context, request *datacatalo err = dm.repo.DatasetRepo().Create(ctx, *datasetModel) if err != nil { if errors.IsAlreadyExistsError(err) { - logger.Warnf(ctx, "Dataset already exists key: %+v, err %v", request.Dataset, err) + logger.Warnf(ctx, "Dataset already exists key: %+v, err %v", request.GetDataset(), err) dm.systemMetrics.alreadyExistsCounter.Inc(ctx) } else { logger.Errorf(ctx, "Failed to create dataset model: %+v err: %v", datasetModel, err) @@ -90,7 +90,7 @@ func (dm *datasetManager) CreateDataset(ctx context.Context, request *datacatalo return nil, err } - logger.Debugf(ctx, "Successfully created dataset %+v", request.Dataset) + logger.Debugf(ctx, "Successfully created dataset %+v", request.GetDataset()) dm.systemMetrics.createSuccessCounter.Inc(ctx) return &datacatalog.CreateDatasetResponse{}, nil } @@ -100,14 +100,14 @@ func (dm *datasetManager) GetDataset(ctx context.Context, request *datacatalog.G timer := dm.systemMetrics.getResponseTime.Start(ctx) defer timer.Stop() - err := validators.ValidateDatasetID(request.Dataset) + err := validators.ValidateDatasetID(request.GetDataset()) if err != nil { logger.Warnf(ctx, "Invalid get dataset request %+v err: %v", request, err) dm.systemMetrics.validationErrorCounter.Inc(ctx) return nil, err } - datasetKey := transformers.FromDatasetID(request.Dataset) + datasetKey := transformers.FromDatasetID(request.GetDataset()) datasetModel, err := dm.repo.DatasetRepo().Get(ctx, datasetKey) if err != nil { @@ -150,7 +150,7 @@ func (dm *datasetManager) ListDatasets(ctx context.Context, request *datacatalog return nil, err } - err = transformers.ApplyPagination(request.Pagination, &listInput) + err = transformers.ApplyPagination(request.GetPagination(), &listInput) if err != nil { logger.Warningf(ctx, "Invalid pagination options in list datasets request %v, err: %v", request, err) dm.systemMetrics.validationErrorCounter.Inc(ctx) @@ -171,7 +171,7 @@ func (dm *datasetManager) ListDatasets(ctx context.Context, request 
*datacatalog for idx, datasetModel := range datasetModels { dataset, err := transformers.FromDatasetModel(datasetModel) if err != nil { - logger.Errorf(ctx, "Unable to transform Dataset %+v err: %v", dataset.Id, err) + logger.Errorf(ctx, "Unable to transform Dataset %+v err: %v", dataset.GetId(), err) transformerErrs = append(transformerErrs, err) } diff --git a/datacatalog/pkg/manager/impl/dataset_manager_test.go b/datacatalog/pkg/manager/impl/dataset_manager_test.go index 2ebd107304..9d668fdef1 100644 --- a/datacatalog/pkg/manager/impl/dataset_manager_test.go +++ b/datacatalog/pkg/manager/impl/dataset_manager_test.go @@ -58,13 +58,13 @@ func TestCreateDataset(t *testing.T) { mock.MatchedBy(func(ctx context.Context) bool { return true }), mock.MatchedBy(func(dataset models.Dataset) bool { - return dataset.Name == expectedDataset.Id.Name && - dataset.Project == expectedDataset.Id.Project && - dataset.Domain == expectedDataset.Id.Domain && - dataset.Version == expectedDataset.Id.Version && - len(dataset.PartitionKeys) == len(expectedDataset.PartitionKeys) && - dataset.PartitionKeys[0].Name == expectedDataset.PartitionKeys[0] && - dataset.PartitionKeys[1].Name == expectedDataset.PartitionKeys[1] + return dataset.Name == expectedDataset.GetId().GetName() && + dataset.Project == expectedDataset.GetId().GetProject() && + dataset.Domain == expectedDataset.GetId().GetDomain() && + dataset.Version == expectedDataset.GetId().GetVersion() && + len(dataset.PartitionKeys) == len(expectedDataset.GetPartitionKeys()) && + dataset.PartitionKeys[0].Name == expectedDataset.GetPartitionKeys()[0] && + dataset.PartitionKeys[1].Name == expectedDataset.GetPartitionKeys()[1] })).Return(nil) request := &datacatalog.CreateDatasetRequest{Dataset: expectedDataset} datasetResponse, err := datasetManager.CreateDataset(context.Background(), request) @@ -79,10 +79,10 @@ func TestCreateDataset(t *testing.T) { mock.MatchedBy(func(ctx context.Context) bool { return true }), 
mock.MatchedBy(func(dataset models.Dataset) bool { - return dataset.Name == expectedDataset.Id.Name && - dataset.Project == expectedDataset.Id.Project && - dataset.Domain == expectedDataset.Id.Domain && - dataset.Version == expectedDataset.Id.Version && + return dataset.Name == expectedDataset.GetId().GetName() && + dataset.Project == expectedDataset.GetId().GetProject() && + dataset.Domain == expectedDataset.GetId().GetDomain() && + dataset.Version == expectedDataset.GetId().GetVersion() && len(dataset.PartitionKeys) == 0 })).Return(nil) @@ -132,7 +132,7 @@ func TestCreateDataset(t *testing.T) { t.Run("DuplicatePartition", func(t *testing.T) { dcRepo := getDataCatalogRepo() badDataset := getTestDataset() - badDataset.PartitionKeys = append(badDataset.PartitionKeys, badDataset.PartitionKeys[0]) + badDataset.PartitionKeys = append(badDataset.PartitionKeys, badDataset.GetPartitionKeys()[0]) datasetManager := NewDatasetManager(dcRepo, nil, mockScope.NewTestScope()) dcRepo.MockDatasetRepo.On("Create", @@ -162,17 +162,17 @@ func TestGetDataset(t *testing.T) { mock.MatchedBy(func(ctx context.Context) bool { return true }), mock.MatchedBy(func(datasetKey models.DatasetKey) bool { - return datasetKey.Name == expectedDataset.Id.Name && - datasetKey.Project == expectedDataset.Id.Project && - datasetKey.Domain == expectedDataset.Id.Domain && - datasetKey.Version == expectedDataset.Id.Version + return datasetKey.Name == expectedDataset.GetId().GetName() && + datasetKey.Project == expectedDataset.GetId().GetProject() && + datasetKey.Domain == expectedDataset.GetId().GetDomain() && + datasetKey.Version == expectedDataset.GetId().GetVersion() })).Return(*datasetModelResponse, nil) - request := &datacatalog.GetDatasetRequest{Dataset: getTestDataset().Id} + request := &datacatalog.GetDatasetRequest{Dataset: getTestDataset().GetId()} datasetResponse, err := datasetManager.GetDataset(context.Background(), request) assert.NoError(t, err) assert.NotNil(t, datasetResponse) - 
assert.True(t, proto.Equal(datasetResponse.Dataset, expectedDataset)) - assert.EqualValues(t, datasetResponse.Dataset.Metadata.KeyMap, expectedDataset.Metadata.KeyMap) + assert.True(t, proto.Equal(datasetResponse.GetDataset(), expectedDataset)) + assert.EqualValues(t, datasetResponse.GetDataset().GetMetadata().GetKeyMap(), expectedDataset.GetMetadata().GetKeyMap()) }) t.Run("Does not exist", func(t *testing.T) { @@ -183,12 +183,12 @@ func TestGetDataset(t *testing.T) { mock.MatchedBy(func(ctx context.Context) bool { return true }), mock.MatchedBy(func(datasetKey models.DatasetKey) bool { - return datasetKey.Name == expectedDataset.Id.Name && - datasetKey.Project == expectedDataset.Id.Project && - datasetKey.Domain == expectedDataset.Id.Domain && - datasetKey.Version == expectedDataset.Id.Version + return datasetKey.Name == expectedDataset.GetId().GetName() && + datasetKey.Project == expectedDataset.GetId().GetProject() && + datasetKey.Domain == expectedDataset.GetId().GetDomain() && + datasetKey.Version == expectedDataset.GetId().GetVersion() })).Return(models.Dataset{}, errors.NewDataCatalogError(codes.NotFound, "dataset does not exist")) - request := &datacatalog.GetDatasetRequest{Dataset: getTestDataset().Id} + request := &datacatalog.GetDatasetRequest{Dataset: getTestDataset().GetId()} _, err := datasetManager.GetDataset(context.Background(), request) assert.Error(t, err) responseCode := status.Code(err) @@ -267,7 +267,7 @@ func TestListDatasets(t *testing.T) { datasetResponse, err := datasetManager.ListDatasets(ctx, &datacatalog.ListDatasetsRequest{Filter: filter}) assert.NoError(t, err) assert.NotEmpty(t, datasetResponse) - assert.Len(t, datasetResponse.Datasets, 1) + assert.Len(t, datasetResponse.GetDatasets(), 1) }) t.Run("List Datasets with no filtering", func(t *testing.T) { @@ -286,6 +286,6 @@ func TestListDatasets(t *testing.T) { datasetResponse, err := datasetManager.ListDatasets(ctx, &datacatalog.ListDatasetsRequest{}) assert.NoError(t, err) 
assert.NotEmpty(t, datasetResponse) - assert.Len(t, datasetResponse.Datasets, 1) + assert.Len(t, datasetResponse.GetDatasets(), 1) }) } diff --git a/datacatalog/pkg/manager/impl/reservation_manager.go b/datacatalog/pkg/manager/impl/reservation_manager.go index 394ad5a55d..62dbb25668 100644 --- a/datacatalog/pkg/manager/impl/reservation_manager.go +++ b/datacatalog/pkg/manager/impl/reservation_manager.go @@ -88,7 +88,7 @@ func NewReservationManager( // Attempt to acquire a reservation for the specified artifact. If there is not active reservation, successfully // acquire it. If you are the owner of the active reservation, extend it. If another owner, return the existing reservation. func (r *reservationManager) GetOrExtendReservation(ctx context.Context, request *datacatalog.GetOrExtendReservationRequest) (*datacatalog.GetOrExtendReservationResponse, error) { - reservationID := request.ReservationId + reservationID := request.GetReservationId() // Use minimum of maxHeartbeatInterval and requested heartbeat interval heartbeatInterval := r.maxHeartbeatInterval @@ -97,7 +97,7 @@ func (r *reservationManager) GetOrExtendReservation(ctx context.Context, request heartbeatInterval = requestHeartbeatInterval.AsDuration() } - reservation, err := r.tryAcquireReservation(ctx, reservationID, request.OwnerId, heartbeatInterval) + reservation, err := r.tryAcquireReservation(ctx, reservationID, request.GetOwnerId(), heartbeatInterval) if err != nil { r.systemMetrics.acquireReservationFailure.Inc(ctx) return nil, err @@ -189,12 +189,12 @@ func (r *reservationManager) tryAcquireReservation(ctx context.Context, reservat // Release an active reservation with the specified owner. If one does not exist, gracefully return. 
func (r *reservationManager) ReleaseReservation(ctx context.Context, request *datacatalog.ReleaseReservationRequest) (*datacatalog.ReleaseReservationResponse, error) { repo := r.repo.ReservationRepo() - reservationKey := transformers.FromReservationID(request.ReservationId) + reservationKey := transformers.FromReservationID(request.GetReservationId()) - err := repo.Delete(ctx, reservationKey, request.OwnerId) + err := repo.Delete(ctx, reservationKey, request.GetOwnerId()) if err != nil { if errors.IsDoesNotExistError(err) { - logger.Warnf(ctx, "Reservation does not exist id: %+v, err %v", request.ReservationId, err) + logger.Warnf(ctx, "Reservation does not exist id: %+v, err %v", request.GetReservationId(), err) r.systemMetrics.reservationDoesNotExist.Inc(ctx) return &datacatalog.ReleaseReservationResponse{}, nil } diff --git a/datacatalog/pkg/manager/impl/reservation_manager_test.go b/datacatalog/pkg/manager/impl/reservation_manager_test.go index 0dd7408792..1281b7df89 100644 --- a/datacatalog/pkg/manager/impl/reservation_manager_test.go +++ b/datacatalog/pkg/manager/impl/reservation_manager_test.go @@ -50,10 +50,10 @@ func TestGetOrExtendReservation_CreateReservation(t *testing.T) { dcRepo.MockReservationRepo.On("Get", mock.MatchedBy(func(ctx context.Context) bool { return true }), mock.MatchedBy(func(key models.ReservationKey) bool { - return key.DatasetProject == datasetID.Project && - key.DatasetDomain == datasetID.Domain && - key.DatasetVersion == datasetID.Version && - key.DatasetName == datasetID.Name && + return key.DatasetProject == datasetID.GetProject() && + key.DatasetDomain == datasetID.GetDomain() && + key.DatasetVersion == datasetID.GetVersion() && + key.DatasetName == datasetID.GetName() && key.TagName == tagName })).Return(models.Reservation{}, errors2.NewDataCatalogErrorf(codes.NotFound, "entry not found")) @@ -62,10 +62,10 @@ func TestGetOrExtendReservation_CreateReservation(t *testing.T) { dcRepo.MockReservationRepo.On("Create", 
mock.MatchedBy(func(ctx context.Context) bool { return true }), mock.MatchedBy(func(reservation models.Reservation) bool { - return reservation.DatasetProject == datasetID.Project && - reservation.DatasetDomain == datasetID.Domain && - reservation.DatasetName == datasetID.Name && - reservation.DatasetVersion == datasetID.Version && + return reservation.DatasetProject == datasetID.GetProject() && + reservation.DatasetDomain == datasetID.GetDomain() && + reservation.DatasetName == datasetID.GetName() && + reservation.DatasetVersion == datasetID.GetVersion() && reservation.TagName == tagName && reservation.OwnerID == currentOwner && reservation.ExpiresAt == now.Add(heartbeatInterval*heartbeatGracePeriodMultiplier) @@ -86,8 +86,8 @@ func TestGetOrExtendReservation_CreateReservation(t *testing.T) { resp, err := reservationManager.GetOrExtendReservation(context.Background(), &req) assert.Nil(t, err) - assert.Equal(t, currentOwner, resp.GetReservation().OwnerId) - assert.Equal(t, heartbeatIntervalPb, resp.GetReservation().HeartbeatInterval) + assert.Equal(t, currentOwner, resp.GetReservation().GetOwnerId()) + assert.Equal(t, heartbeatIntervalPb, resp.GetReservation().GetHeartbeatInterval()) } func TestGetOrExtendReservation_MaxHeartbeatInterval(t *testing.T) { @@ -98,10 +98,10 @@ func TestGetOrExtendReservation_MaxHeartbeatInterval(t *testing.T) { dcRepo.MockReservationRepo.On("Get", mock.MatchedBy(func(ctx context.Context) bool { return true }), mock.MatchedBy(func(key models.ReservationKey) bool { - return key.DatasetProject == datasetID.Project && - key.DatasetDomain == datasetID.Domain && - key.DatasetVersion == datasetID.Version && - key.DatasetName == datasetID.Name && + return key.DatasetProject == datasetID.GetProject() && + key.DatasetDomain == datasetID.GetDomain() && + key.DatasetVersion == datasetID.GetVersion() && + key.DatasetName == datasetID.GetName() && key.TagName == tagName })).Return(models.Reservation{}, errors2.NewDataCatalogErrorf(codes.NotFound, 
"entry not found")) @@ -110,10 +110,10 @@ func TestGetOrExtendReservation_MaxHeartbeatInterval(t *testing.T) { dcRepo.MockReservationRepo.On("Create", mock.MatchedBy(func(ctx context.Context) bool { return true }), mock.MatchedBy(func(reservation models.Reservation) bool { - return reservation.DatasetProject == datasetID.Project && - reservation.DatasetDomain == datasetID.Domain && - reservation.DatasetName == datasetID.Name && - reservation.DatasetVersion == datasetID.Version && + return reservation.DatasetProject == datasetID.GetProject() && + reservation.DatasetDomain == datasetID.GetDomain() && + reservation.DatasetName == datasetID.GetName() && + reservation.DatasetVersion == datasetID.GetVersion() && reservation.TagName == tagName && reservation.OwnerID == currentOwner && reservation.ExpiresAt == now.Add(heartbeatInterval*heartbeatGracePeriodMultiplier) @@ -134,8 +134,8 @@ func TestGetOrExtendReservation_MaxHeartbeatInterval(t *testing.T) { resp, err := reservationManager.GetOrExtendReservation(context.Background(), &req) assert.Nil(t, err) - assert.Equal(t, currentOwner, resp.GetReservation().OwnerId) - assert.Equal(t, heartbeatIntervalPb, resp.GetReservation().HeartbeatInterval) + assert.Equal(t, currentOwner, resp.GetReservation().GetOwnerId()) + assert.Equal(t, heartbeatIntervalPb, resp.GetReservation().GetHeartbeatInterval()) } func TestGetOrExtendReservation_ExtendReservation(t *testing.T) { @@ -151,10 +151,10 @@ func TestGetOrExtendReservation_ExtendReservation(t *testing.T) { dcRepo.MockReservationRepo.On("Update", mock.MatchedBy(func(ctx context.Context) bool { return true }), mock.MatchedBy(func(reservation models.Reservation) bool { - return reservation.DatasetProject == datasetID.Project && - reservation.DatasetDomain == datasetID.Domain && - reservation.DatasetName == datasetID.Name && - reservation.DatasetVersion == datasetID.Version && + return reservation.DatasetProject == datasetID.GetProject() && + reservation.DatasetDomain == 
datasetID.GetDomain() && + reservation.DatasetName == datasetID.GetName() && + reservation.DatasetVersion == datasetID.GetVersion() && reservation.TagName == tagName && reservation.OwnerID == prevOwner && reservation.ExpiresAt == now.Add(heartbeatInterval*heartbeatGracePeriodMultiplier) @@ -175,7 +175,7 @@ func TestGetOrExtendReservation_ExtendReservation(t *testing.T) { resp, err := reservationManager.GetOrExtendReservation(context.Background(), &req) assert.Nil(t, err) - assert.Equal(t, prevOwner, resp.GetReservation().OwnerId) + assert.Equal(t, prevOwner, resp.GetReservation().GetOwnerId()) } func TestGetOrExtendReservation_TakeOverReservation(t *testing.T) { @@ -191,10 +191,10 @@ func TestGetOrExtendReservation_TakeOverReservation(t *testing.T) { dcRepo.MockReservationRepo.On("Update", mock.MatchedBy(func(ctx context.Context) bool { return true }), mock.MatchedBy(func(reservation models.Reservation) bool { - return reservation.DatasetProject == datasetID.Project && - reservation.DatasetDomain == datasetID.Domain && - reservation.DatasetName == datasetID.Name && - reservation.DatasetVersion == datasetID.Version && + return reservation.DatasetProject == datasetID.GetProject() && + reservation.DatasetDomain == datasetID.GetDomain() && + reservation.DatasetName == datasetID.GetName() && + reservation.DatasetVersion == datasetID.GetVersion() && reservation.TagName == tagName && reservation.OwnerID == currentOwner && reservation.ExpiresAt == now.Add(heartbeatInterval*heartbeatGracePeriodMultiplier) @@ -215,7 +215,7 @@ func TestGetOrExtendReservation_TakeOverReservation(t *testing.T) { resp, err := reservationManager.GetOrExtendReservation(context.Background(), &req) assert.Nil(t, err) - assert.Equal(t, currentOwner, resp.GetReservation().OwnerId) + assert.Equal(t, currentOwner, resp.GetReservation().GetOwnerId()) } func TestGetOrExtendReservation_ReservationExists(t *testing.T) { @@ -241,7 +241,7 @@ func TestGetOrExtendReservation_ReservationExists(t *testing.T) { 
resp, err := reservationManager.GetOrExtendReservation(context.Background(), &req) assert.Nil(t, err) - assert.Equal(t, prevOwner, resp.GetReservation().OwnerId) + assert.Equal(t, prevOwner, resp.GetReservation().GetOwnerId()) } func TestReleaseReservation(t *testing.T) { @@ -252,10 +252,10 @@ func TestReleaseReservation(t *testing.T) { dcRepo.MockReservationRepo.On("Delete", mock.MatchedBy(func(ctx context.Context) bool { return true }), mock.MatchedBy(func(reservationKey models.ReservationKey) bool { - return reservationKey.DatasetProject == datasetID.Project && - reservationKey.DatasetDomain == datasetID.Domain && - reservationKey.DatasetName == datasetID.Name && - reservationKey.DatasetVersion == datasetID.Version && + return reservationKey.DatasetProject == datasetID.GetProject() && + reservationKey.DatasetDomain == datasetID.GetDomain() && + reservationKey.DatasetName == datasetID.GetName() && + reservationKey.DatasetVersion == datasetID.GetVersion() && reservationKey.TagName == tagName }), mock.MatchedBy(func(ownerID string) bool { @@ -286,10 +286,10 @@ func TestReleaseReservation_Failure(t *testing.T) { dcRepo.MockReservationRepo.On("Delete", mock.MatchedBy(func(ctx context.Context) bool { return true }), mock.MatchedBy(func(reservationKey models.ReservationKey) bool { - return reservationKey.DatasetProject == datasetID.Project && - reservationKey.DatasetDomain == datasetID.Domain && - reservationKey.DatasetName == datasetID.Name && - reservationKey.DatasetVersion == datasetID.Version && + return reservationKey.DatasetProject == datasetID.GetProject() && + reservationKey.DatasetDomain == datasetID.GetDomain() && + reservationKey.DatasetName == datasetID.GetName() && + reservationKey.DatasetVersion == datasetID.GetVersion() && reservationKey.TagName == tagName }), mock.MatchedBy(func(ownerID string) bool { @@ -324,10 +324,10 @@ func TestReleaseReservation_GracefulFailure(t *testing.T) { dcRepo.MockReservationRepo.On("Delete", mock.MatchedBy(func(ctx 
context.Context) bool { return true }), mock.MatchedBy(func(reservationKey models.ReservationKey) bool { - return reservationKey.DatasetProject == datasetID.Project && - reservationKey.DatasetDomain == datasetID.Domain && - reservationKey.DatasetName == datasetID.Name && - reservationKey.DatasetVersion == datasetID.Version && + return reservationKey.DatasetProject == datasetID.GetProject() && + reservationKey.DatasetDomain == datasetID.GetDomain() && + reservationKey.DatasetName == datasetID.GetName() && + reservationKey.DatasetVersion == datasetID.GetVersion() && reservationKey.TagName == tagName }), mock.MatchedBy(func(ownerID string) bool { @@ -360,10 +360,10 @@ func setUpReservationRepoGet(dcRepo *mocks.DataCatalogRepo, prevExpiresAt time.T dcRepo.MockReservationRepo.On("Get", mock.MatchedBy(func(ctx context.Context) bool { return true }), mock.MatchedBy(func(key models.ReservationKey) bool { - return key.DatasetProject == datasetID.Project && - key.DatasetDomain == datasetID.Domain && - key.DatasetVersion == datasetID.Version && - key.DatasetName == datasetID.Name && + return key.DatasetProject == datasetID.GetProject() && + key.DatasetDomain == datasetID.GetDomain() && + key.DatasetVersion == datasetID.GetVersion() && + key.DatasetName == datasetID.GetName() && key.TagName == tagName })).Return( models.Reservation{ diff --git a/datacatalog/pkg/manager/impl/tag_manager.go b/datacatalog/pkg/manager/impl/tag_manager.go index 784af9164c..29280b83b4 100644 --- a/datacatalog/pkg/manager/impl/tag_manager.go +++ b/datacatalog/pkg/manager/impl/tag_manager.go @@ -37,15 +37,15 @@ func (m *tagManager) AddTag(ctx context.Context, request *datacatalog.AddTagRequ timer := m.systemMetrics.createResponseTime.Start(ctx) defer timer.Stop() - if err := validators.ValidateTag(request.Tag); err != nil { + if err := validators.ValidateTag(request.GetTag()); err != nil { logger.Warnf(ctx, "Invalid get tag request %+v err: %v", request, err) 
m.systemMetrics.validationErrorCounter.Inc(ctx) return nil, err } // verify the artifact and dataset exists before adding a tag to it - datasetID := request.Tag.Dataset - ctx = contextutils.WithProjectDomain(ctx, datasetID.Project, datasetID.Domain) + datasetID := request.GetTag().GetDataset() + ctx = contextutils.WithProjectDomain(ctx, datasetID.GetProject(), datasetID.GetDomain()) datasetKey := transformers.FromDatasetID(datasetID) dataset, err := m.repo.DatasetRepo().Get(ctx, datasetKey) @@ -54,17 +54,17 @@ func (m *tagManager) AddTag(ctx context.Context, request *datacatalog.AddTagRequ return nil, err } - artifactKey := transformers.ToArtifactKey(datasetID, request.Tag.ArtifactId) + artifactKey := transformers.ToArtifactKey(datasetID, request.GetTag().GetArtifactId()) _, err = m.repo.ArtifactRepo().Get(ctx, artifactKey) if err != nil { m.systemMetrics.addTagFailureCounter.Inc(ctx) return nil, err } - tagKey := transformers.ToTagKey(datasetID, request.Tag.Name) + tagKey := transformers.ToTagKey(datasetID, request.GetTag().GetName()) err = m.repo.TagRepo().Create(ctx, models.Tag{ TagKey: tagKey, - ArtifactID: request.Tag.ArtifactId, + ArtifactID: request.GetTag().GetArtifactId(), DatasetUUID: dataset.UUID, }) if err != nil { diff --git a/datacatalog/pkg/manager/impl/tag_manager_test.go b/datacatalog/pkg/manager/impl/tag_manager_test.go index 98e4b41dfd..e77d3abbe1 100644 --- a/datacatalog/pkg/manager/impl/tag_manager_test.go +++ b/datacatalog/pkg/manager/impl/tag_manager_test.go @@ -129,7 +129,7 @@ func TestAddTag(t *testing.T) { _, err := tagManager.AddTag(context.Background(), &datacatalog.AddTagRequest{ Tag: &datacatalog.Tag{ ArtifactId: "noArtifact", - Dataset: getTestDataset().Id, + Dataset: getTestDataset().GetId(), }, }) @@ -143,7 +143,7 @@ func TestAddTag(t *testing.T) { _, err := tagManager.AddTag(context.Background(), &datacatalog.AddTagRequest{ Tag: &datacatalog.Tag{ Name: "noArtifact", - Dataset: getTestDataset().Id, + Dataset: 
getTestDataset().GetId(), }, }) diff --git a/datacatalog/pkg/manager/impl/validators/artifact_validator.go b/datacatalog/pkg/manager/impl/validators/artifact_validator.go index d4721e1597..caf1ad3d58 100644 --- a/datacatalog/pkg/manager/impl/validators/artifact_validator.go +++ b/datacatalog/pkg/manager/impl/validators/artifact_validator.go @@ -18,10 +18,10 @@ func ValidateGetArtifactRequest(request *datacatalog.GetArtifactRequest) error { return NewMissingArgumentError(fmt.Sprintf("one of %s/%s", artifactID, tagName)) } - switch request.QueryHandle.(type) { + switch request.GetQueryHandle().(type) { case *datacatalog.GetArtifactRequest_ArtifactId: - if request.Dataset != nil { - err := ValidateDatasetID(request.Dataset) + if request.GetDataset() != nil { + err := ValidateDatasetID(request.GetDataset()) if err != nil { return err } @@ -31,7 +31,7 @@ func ValidateGetArtifactRequest(request *datacatalog.GetArtifactRequest) error { return err } case *datacatalog.GetArtifactRequest_TagName: - if err := ValidateDatasetID(request.Dataset); err != nil { + if err := ValidateDatasetID(request.GetDataset()); err != nil { return err } @@ -58,15 +58,15 @@ func ValidateArtifact(artifact *datacatalog.Artifact) error { return NewMissingArgumentError(artifactEntity) } - if err := ValidateDatasetID(artifact.Dataset); err != nil { + if err := ValidateDatasetID(artifact.GetDataset()); err != nil { return err } - if err := ValidateEmptyStringField(artifact.Id, artifactID); err != nil { + if err := ValidateEmptyStringField(artifact.GetId(), artifactID); err != nil { return err } - if err := ValidateEmptyArtifactData(artifact.Data); err != nil { + if err := ValidateEmptyArtifactData(artifact.GetData()); err != nil { return err } @@ -75,16 +75,16 @@ func ValidateArtifact(artifact *datacatalog.Artifact) error { // Validate the list request and format the request with proper defaults if not provided func ValidateListArtifactRequest(request *datacatalog.ListArtifactsRequest) error { - if 
err := ValidateDatasetID(request.Dataset); err != nil { + if err := ValidateDatasetID(request.GetDataset()); err != nil { return err } - if err := ValidateArtifactFilterTypes(request.Filter.GetFilters()); err != nil { + if err := ValidateArtifactFilterTypes(request.GetFilter().GetFilters()); err != nil { return err } - if request.Pagination != nil { - err := ValidatePagination(request.Pagination) + if request.GetPagination() != nil { + err := ValidatePagination(request.GetPagination()) if err != nil { return err } @@ -108,10 +108,10 @@ func ValidateUpdateArtifactRequest(request *datacatalog.UpdateArtifactRequest) e return NewMissingArgumentError(fmt.Sprintf("one of %s/%s", artifactID, tagName)) } - switch request.QueryHandle.(type) { + switch request.GetQueryHandle().(type) { case *datacatalog.UpdateArtifactRequest_ArtifactId: - if request.Dataset != nil { - err := ValidateDatasetID(request.Dataset) + if request.GetDataset() != nil { + err := ValidateDatasetID(request.GetDataset()) if err != nil { return err } @@ -121,7 +121,7 @@ func ValidateUpdateArtifactRequest(request *datacatalog.UpdateArtifactRequest) e return err } case *datacatalog.UpdateArtifactRequest_TagName: - if err := ValidateDatasetID(request.Dataset); err != nil { + if err := ValidateDatasetID(request.GetDataset()); err != nil { return err } @@ -132,7 +132,7 @@ func ValidateUpdateArtifactRequest(request *datacatalog.UpdateArtifactRequest) e return NewInvalidArgumentError("QueryHandle", "invalid type") } - if err := ValidateEmptyArtifactData(request.Data); err != nil { + if err := ValidateEmptyArtifactData(request.GetData()); err != nil { return err } diff --git a/datacatalog/pkg/manager/impl/validators/dataset_validator.go b/datacatalog/pkg/manager/impl/validators/dataset_validator.go index 5ab010517f..3cd60c57f9 100644 --- a/datacatalog/pkg/manager/impl/validators/dataset_validator.go +++ b/datacatalog/pkg/manager/impl/validators/dataset_validator.go @@ -18,16 +18,16 @@ func ValidateDatasetID(ds 
*datacatalog.DatasetID) error { if ds == nil { return NewMissingArgumentError(datasetEntity) } - if err := ValidateEmptyStringField(ds.Project, datasetProject); err != nil { + if err := ValidateEmptyStringField(ds.GetProject(), datasetProject); err != nil { return err } - if err := ValidateEmptyStringField(ds.Domain, datasetDomain); err != nil { + if err := ValidateEmptyStringField(ds.GetDomain(), datasetDomain); err != nil { return err } - if err := ValidateEmptyStringField(ds.Name, datasetName); err != nil { + if err := ValidateEmptyStringField(ds.GetName(), datasetName); err != nil { return err } - if err := ValidateEmptyStringField(ds.Version, datasetVersion); err != nil { + if err := ValidateEmptyStringField(ds.GetVersion(), datasetVersion); err != nil { return err } return nil @@ -35,15 +35,15 @@ func ValidateDatasetID(ds *datacatalog.DatasetID) error { // Ensure list Datasets request is properly constructed func ValidateListDatasetsRequest(request *datacatalog.ListDatasetsRequest) error { - if request.Pagination != nil { - err := ValidatePagination(request.Pagination) + if request.GetPagination() != nil { + err := ValidatePagination(request.GetPagination()) if err != nil { return err } } // Datasets cannot be filtered by tag, partitions or artifacts - for _, filter := range request.Filter.GetFilters() { + for _, filter := range request.GetFilter().GetFilters() { if filter.GetTagFilter() != nil { return NewInvalidFilterError(common.Dataset, common.Tag) } else if filter.GetPartitionFilter() != nil { diff --git a/datacatalog/pkg/manager/impl/validators/errors.go b/datacatalog/pkg/manager/impl/validators/errors.go index dae123ebfd..eec033b8f9 100644 --- a/datacatalog/pkg/manager/impl/validators/errors.go +++ b/datacatalog/pkg/manager/impl/validators/errors.go @@ -14,13 +14,13 @@ const invalidArgFormat = "invalid value for %s, value:[%s]" const invalidFilterFormat = "%s cannot be filtered by %s properties" func NewMissingArgumentError(field string) error { - 
return errors.NewDataCatalogErrorf(codes.InvalidArgument, fmt.Sprintf(missingFieldFormat, field)) + return errors.NewDataCatalogErrorf(codes.InvalidArgument, fmt.Sprintf(missingFieldFormat, field)) //nolint } func NewInvalidArgumentError(field string, value string) error { - return errors.NewDataCatalogErrorf(codes.InvalidArgument, fmt.Sprintf(invalidArgFormat, field, value)) + return errors.NewDataCatalogErrorf(codes.InvalidArgument, fmt.Sprintf(invalidArgFormat, field, value)) //nolint } func NewInvalidFilterError(entity common.Entity, propertyEntity common.Entity) error { - return errors.NewDataCatalogErrorf(codes.InvalidArgument, fmt.Sprintf(invalidFilterFormat, entity, propertyEntity)) + return errors.NewDataCatalogErrorf(codes.InvalidArgument, fmt.Sprintf(invalidFilterFormat, entity, propertyEntity)) //nolint } diff --git a/datacatalog/pkg/manager/impl/validators/pagination_validator.go b/datacatalog/pkg/manager/impl/validators/pagination_validator.go index 7f37dbe7d5..19072bec74 100644 --- a/datacatalog/pkg/manager/impl/validators/pagination_validator.go +++ b/datacatalog/pkg/manager/impl/validators/pagination_validator.go @@ -27,18 +27,18 @@ func ValidateToken(token string) error { // Validate the pagination options and set default limits func ValidatePagination(options *datacatalog.PaginationOptions) error { - err := ValidateToken(options.Token) + err := ValidateToken(options.GetToken()) if err != nil { return err } - if options.SortKey != datacatalog.PaginationOptions_CREATION_TIME { - return errors.NewDataCatalogErrorf(codes.InvalidArgument, "Invalid sort key %v", options.SortKey) + if options.GetSortKey() != datacatalog.PaginationOptions_CREATION_TIME { + return errors.NewDataCatalogErrorf(codes.InvalidArgument, "Invalid sort key %v", options.GetSortKey()) } - if options.SortOrder != datacatalog.PaginationOptions_ASCENDING && - options.SortOrder != datacatalog.PaginationOptions_DESCENDING { - return errors.NewDataCatalogErrorf(codes.InvalidArgument, 
"Invalid sort order %v", options.SortOrder) + if options.GetSortOrder() != datacatalog.PaginationOptions_ASCENDING && + options.GetSortOrder() != datacatalog.PaginationOptions_DESCENDING { + return errors.NewDataCatalogErrorf(codes.InvalidArgument, "Invalid sort order %v", options.GetSortOrder()) } return nil diff --git a/datacatalog/pkg/manager/impl/validators/partition_validator.go b/datacatalog/pkg/manager/impl/validators/partition_validator.go index 2b94e0e366..567cf300c6 100644 --- a/datacatalog/pkg/manager/impl/validators/partition_validator.go +++ b/datacatalog/pkg/manager/impl/validators/partition_validator.go @@ -39,15 +39,15 @@ func ValidatePartitions(datasetPartitionKeys []string, artifactPartitions []*dat continue } - if err := ValidateEmptyStringField(partitionKeyName, artifactPartition.Key); err != nil { + if err := ValidateEmptyStringField(partitionKeyName, artifactPartition.GetKey()); err != nil { partitionErrors = append(partitionErrors, NewMissingArgumentError(fmt.Sprintf("%v[%v]", partitionKeyName, idx))) - } else if err := ValidateEmptyStringField(partitionValueName, artifactPartition.Value); err != nil { + } else if err := ValidateEmptyStringField(partitionValueName, artifactPartition.GetValue()); err != nil { partitionErrors = append(partitionErrors, NewMissingArgumentError(fmt.Sprintf("%v[%v]", partitionValueName, idx))) } else { - _, ok := partitionKeyMatches[artifactPartition.Key] + _, ok := partitionKeyMatches[artifactPartition.GetKey()] if ok { - partitionKeyMatches[artifactPartition.Key] = true + partitionKeyMatches[artifactPartition.GetKey()] = true } else { keyMismatch = true } diff --git a/datacatalog/pkg/manager/impl/validators/tag_validator.go b/datacatalog/pkg/manager/impl/validators/tag_validator.go index 4539ad03c2..7fda9c873d 100644 --- a/datacatalog/pkg/manager/impl/validators/tag_validator.go +++ b/datacatalog/pkg/manager/impl/validators/tag_validator.go @@ -13,15 +13,15 @@ func ValidateTag(tag *datacatalog.Tag) error { if tag 
== nil { return NewMissingArgumentError(tagEntity) } - if err := ValidateDatasetID(tag.Dataset); err != nil { + if err := ValidateDatasetID(tag.GetDataset()); err != nil { return err } - if err := ValidateEmptyStringField(tag.Name, tagName); err != nil { + if err := ValidateEmptyStringField(tag.GetName(), tagName); err != nil { return err } - if err := ValidateEmptyStringField(tag.ArtifactId, artifactID); err != nil { + if err := ValidateEmptyStringField(tag.GetArtifactId(), artifactID); err != nil { return err } return nil diff --git a/datacatalog/pkg/repositories/errors/postgres.go b/datacatalog/pkg/repositories/errors/postgres.go index 2ab8a2895c..31e1c253d6 100644 --- a/datacatalog/pkg/repositories/errors/postgres.go +++ b/datacatalog/pkg/repositories/errors/postgres.go @@ -62,7 +62,7 @@ func (p *postgresErrorTransformer) ToDataCatalogError(err error) error { case undefinedTable: return catalogErrors.NewDataCatalogErrorf(codes.InvalidArgument, unsupportedTableOperation, pqError.Message) default: - return catalogErrors.NewDataCatalogErrorf(codes.Unknown, fmt.Sprintf(defaultPgError, pqError.Code, pqError.Message)) + return catalogErrors.NewDataCatalogErrorf(codes.Unknown, fmt.Sprintf(defaultPgError, pqError.Code, pqError.Message)) //nolint } } diff --git a/datacatalog/pkg/repositories/transformers/artifact.go b/datacatalog/pkg/repositories/transformers/artifact.go index 57890ef4b1..c962fd5ce1 100644 --- a/datacatalog/pkg/repositories/transformers/artifact.go +++ b/datacatalog/pkg/repositories/transformers/artifact.go @@ -18,29 +18,29 @@ func SerializedMetadata(metadata *datacatalog.Metadata) ([]byte, error) { } func CreateArtifactModel(request *datacatalog.CreateArtifactRequest, artifactData []models.ArtifactData, dataset models.Dataset) (models.Artifact, error) { - datasetID := request.Artifact.Dataset + datasetID := request.GetArtifact().GetDataset() - serializedMetadata, err := marshalMetadata(request.Artifact.Metadata) + serializedMetadata, err := 
marshalMetadata(request.GetArtifact().GetMetadata()) if err != nil { return models.Artifact{}, err } - partitions := make([]models.Partition, len(request.Artifact.Partitions)) - for i, partition := range request.Artifact.GetPartitions() { + partitions := make([]models.Partition, len(request.GetArtifact().GetPartitions())) + for i, partition := range request.GetArtifact().GetPartitions() { partitions[i] = models.Partition{ DatasetUUID: dataset.UUID, - Key: partition.Key, - Value: partition.Value, + Key: partition.GetKey(), + Value: partition.GetValue(), } } return models.Artifact{ ArtifactKey: models.ArtifactKey{ - DatasetProject: datasetID.Project, - DatasetDomain: datasetID.Domain, - DatasetName: datasetID.Name, - DatasetVersion: datasetID.Version, - ArtifactID: request.Artifact.Id, + DatasetProject: datasetID.GetProject(), + DatasetDomain: datasetID.GetDomain(), + DatasetName: datasetID.GetName(), + DatasetVersion: datasetID.GetVersion(), + ArtifactID: request.GetArtifact().GetId(), }, DatasetUUID: dataset.UUID, ArtifactData: artifactData, @@ -112,10 +112,10 @@ func ToArtifactKey(datasetID *datacatalog.DatasetID, artifactID string) models.A ArtifactID: artifactID, } if datasetID != nil { - artifactKey.DatasetProject = datasetID.Project - artifactKey.DatasetDomain = datasetID.Domain - artifactKey.DatasetName = datasetID.Name - artifactKey.DatasetVersion = datasetID.Version + artifactKey.DatasetProject = datasetID.GetProject() + artifactKey.DatasetDomain = datasetID.GetDomain() + artifactKey.DatasetName = datasetID.GetName() + artifactKey.DatasetVersion = datasetID.GetVersion() } return artifactKey } diff --git a/datacatalog/pkg/repositories/transformers/artifact_test.go b/datacatalog/pkg/repositories/transformers/artifact_test.go index 350a2396aa..5c556fcabb 100644 --- a/datacatalog/pkg/repositories/transformers/artifact_test.go +++ b/datacatalog/pkg/repositories/transformers/artifact_test.go @@ -50,11 +50,11 @@ func getTestTags() []models.Tag { func 
getDatasetModel() models.Dataset { return models.Dataset{ DatasetKey: models.DatasetKey{ - Project: datasetID.Project, - Domain: datasetID.Domain, - Name: datasetID.Name, - Version: datasetID.Version, - UUID: datasetID.UUID, + Project: datasetID.GetProject(), + Domain: datasetID.GetDomain(), + Name: datasetID.GetName(), + Version: datasetID.GetVersion(), + UUID: datasetID.GetUUID(), }, } } @@ -81,11 +81,11 @@ func TestCreateArtifactModel(t *testing.T) { artifactModel, err := CreateArtifactModel(createArtifactRequest, testArtifactData, getDatasetModel()) assert.NoError(t, err) - assert.Equal(t, artifactModel.ArtifactID, createArtifactRequest.Artifact.Id) - assert.Equal(t, artifactModel.ArtifactKey.DatasetProject, datasetID.Project) - assert.Equal(t, artifactModel.ArtifactKey.DatasetDomain, datasetID.Domain) - assert.Equal(t, artifactModel.ArtifactKey.DatasetName, datasetID.Name) - assert.Equal(t, artifactModel.ArtifactKey.DatasetVersion, datasetID.Version) + assert.Equal(t, artifactModel.ArtifactID, createArtifactRequest.GetArtifact().GetId()) + assert.Equal(t, artifactModel.ArtifactKey.DatasetProject, datasetID.GetProject()) + assert.Equal(t, artifactModel.ArtifactKey.DatasetDomain, datasetID.GetDomain()) + assert.Equal(t, artifactModel.ArtifactKey.DatasetName, datasetID.GetName()) + assert.Equal(t, artifactModel.ArtifactKey.DatasetVersion, datasetID.GetVersion()) assert.EqualValues(t, testArtifactData, artifactModel.ArtifactData) assert.EqualValues(t, getTestPartitions(), artifactModel.Partitions) } @@ -130,32 +130,32 @@ func TestFromArtifactModel(t *testing.T) { actual, err := FromArtifactModel(artifactModel) assert.NoError(t, err) - assert.Equal(t, artifactModel.ArtifactID, actual.Id) - assert.Equal(t, artifactModel.DatasetProject, actual.Dataset.Project) - assert.Equal(t, artifactModel.DatasetDomain, actual.Dataset.Domain) - assert.Equal(t, artifactModel.DatasetName, actual.Dataset.Name) - assert.Equal(t, artifactModel.DatasetVersion, actual.Dataset.Version) + 
assert.Equal(t, artifactModel.ArtifactID, actual.GetId()) + assert.Equal(t, artifactModel.DatasetProject, actual.GetDataset().GetProject()) + assert.Equal(t, artifactModel.DatasetDomain, actual.GetDataset().GetDomain()) + assert.Equal(t, artifactModel.DatasetName, actual.GetDataset().GetName()) + assert.Equal(t, artifactModel.DatasetVersion, actual.GetDataset().GetVersion()) - assert.Len(t, actual.Partitions, 2) - assert.EqualValues(t, artifactModel.Partitions[0].Key, actual.Partitions[0].Key) - assert.EqualValues(t, artifactModel.Partitions[0].Value, actual.Partitions[0].Value) - assert.EqualValues(t, artifactModel.Partitions[1].Value, actual.Partitions[1].Value) - assert.EqualValues(t, artifactModel.Partitions[1].Value, actual.Partitions[1].Value) + assert.Len(t, actual.GetPartitions(), 2) + assert.EqualValues(t, artifactModel.Partitions[0].Key, actual.GetPartitions()[0].GetKey()) + assert.EqualValues(t, artifactModel.Partitions[0].Value, actual.GetPartitions()[0].GetValue()) + assert.EqualValues(t, artifactModel.Partitions[1].Value, actual.GetPartitions()[1].GetValue()) + assert.EqualValues(t, artifactModel.Partitions[1].Value, actual.GetPartitions()[1].GetValue()) - assert.Len(t, actual.Tags, 1) - assert.EqualValues(t, artifactModel.Tags[0].TagName, actual.Tags[0].Name) + assert.Len(t, actual.GetTags(), 1) + assert.EqualValues(t, artifactModel.Tags[0].TagName, actual.GetTags()[0].GetName()) timestampProto, err := ptypes.TimestampProto(createdAt) assert.NoError(t, err) - assert.Equal(t, actual.CreatedAt, timestampProto) + assert.Equal(t, actual.GetCreatedAt(), timestampProto) } func TestToArtifactKey(t *testing.T) { artifactKey := ToArtifactKey(datasetID, "artifactID-1") - assert.Equal(t, datasetID.Project, artifactKey.DatasetProject) - assert.Equal(t, datasetID.Domain, artifactKey.DatasetDomain) - assert.Equal(t, datasetID.Name, artifactKey.DatasetName) - assert.Equal(t, datasetID.Version, artifactKey.DatasetVersion) + assert.Equal(t, datasetID.GetProject(), 
artifactKey.DatasetProject) + assert.Equal(t, datasetID.GetDomain(), artifactKey.DatasetDomain) + assert.Equal(t, datasetID.GetName(), artifactKey.DatasetName) + assert.Equal(t, datasetID.GetVersion(), artifactKey.DatasetVersion) assert.Equal(t, artifactKey.ArtifactID, "artifactID-1") } diff --git a/datacatalog/pkg/repositories/transformers/dataset.go b/datacatalog/pkg/repositories/transformers/dataset.go index 9d5cb168a4..bc8f86e4be 100644 --- a/datacatalog/pkg/repositories/transformers/dataset.go +++ b/datacatalog/pkg/repositories/transformers/dataset.go @@ -7,12 +7,12 @@ import ( // Create a dataset model from the Dataset api object. This will serialize the metadata in the dataset as part of the transform func CreateDatasetModel(dataset *datacatalog.Dataset) (*models.Dataset, error) { - serializedMetadata, err := marshalMetadata(dataset.Metadata) + serializedMetadata, err := marshalMetadata(dataset.GetMetadata()) if err != nil { return nil, err } - partitionKeys := make([]models.PartitionKey, len(dataset.PartitionKeys)) + partitionKeys := make([]models.PartitionKey, len(dataset.GetPartitionKeys())) for i, partitionKey := range dataset.GetPartitionKeys() { partitionKeys[i] = models.PartitionKey{ @@ -22,11 +22,11 @@ func CreateDatasetModel(dataset *datacatalog.Dataset) (*models.Dataset, error) { return &models.Dataset{ DatasetKey: models.DatasetKey{ - Project: dataset.Id.Project, - Domain: dataset.Id.Domain, - Name: dataset.Id.Name, - Version: dataset.Id.Version, - UUID: dataset.Id.UUID, + Project: dataset.GetId().GetProject(), + Domain: dataset.GetId().GetDomain(), + Name: dataset.GetId().GetName(), + Version: dataset.GetId().GetVersion(), + UUID: dataset.GetId().GetUUID(), }, SerializedMetadata: serializedMetadata, PartitionKeys: partitionKeys, @@ -36,11 +36,11 @@ func CreateDatasetModel(dataset *datacatalog.Dataset) (*models.Dataset, error) { // Create a dataset ID from the dataset key model func FromDatasetID(datasetID *datacatalog.DatasetID) models.DatasetKey 
{ return models.DatasetKey{ - Project: datasetID.Project, - Domain: datasetID.Domain, - Name: datasetID.Name, - Version: datasetID.Version, - UUID: datasetID.UUID, + Project: datasetID.GetProject(), + Domain: datasetID.GetDomain(), + Name: datasetID.GetName(), + Version: datasetID.GetVersion(), + UUID: datasetID.GetUUID(), } } diff --git a/datacatalog/pkg/repositories/transformers/dataset_test.go b/datacatalog/pkg/repositories/transformers/dataset_test.go index 25062cf264..39e0e7ec3d 100644 --- a/datacatalog/pkg/repositories/transformers/dataset_test.go +++ b/datacatalog/pkg/repositories/transformers/dataset_test.go @@ -25,11 +25,11 @@ var datasetID = &datacatalog.DatasetID{ } func assertDatasetIDEqualsModel(t *testing.T, idlDataset *datacatalog.DatasetID, model *models.DatasetKey) { - assert.Equal(t, idlDataset.Project, model.Project) - assert.Equal(t, idlDataset.Domain, model.Domain) - assert.Equal(t, idlDataset.Name, model.Name) - assert.Equal(t, idlDataset.Version, model.Version) - assert.Equal(t, idlDataset.UUID, model.UUID) + assert.Equal(t, idlDataset.GetProject(), model.Project) + assert.Equal(t, idlDataset.GetDomain(), model.Domain) + assert.Equal(t, idlDataset.GetName(), model.Name) + assert.Equal(t, idlDataset.GetVersion(), model.Version) + assert.Equal(t, idlDataset.GetUUID(), model.UUID) } func TestCreateDatasetModelNoParitions(t *testing.T) { @@ -40,11 +40,11 @@ func TestCreateDatasetModelNoParitions(t *testing.T) { datasetModel, err := CreateDatasetModel(dataset) assert.NoError(t, err) - assertDatasetIDEqualsModel(t, dataset.Id, &datasetModel.DatasetKey) + assertDatasetIDEqualsModel(t, dataset.GetId(), &datasetModel.DatasetKey) unmarshaledMetadata, err := unmarshalMetadata(datasetModel.SerializedMetadata) assert.NoError(t, err) - assert.EqualValues(t, unmarshaledMetadata.KeyMap, metadata.KeyMap) + assert.EqualValues(t, unmarshaledMetadata.GetKeyMap(), metadata.GetKeyMap()) assert.Len(t, datasetModel.PartitionKeys, 0) } @@ -58,15 +58,15 @@ func 
TestCreateDatasetModel(t *testing.T) { datasetModel, err := CreateDatasetModel(dataset) assert.NoError(t, err) - assertDatasetIDEqualsModel(t, dataset.Id, &datasetModel.DatasetKey) + assertDatasetIDEqualsModel(t, dataset.GetId(), &datasetModel.DatasetKey) unmarshaledMetadata, err := unmarshalMetadata(datasetModel.SerializedMetadata) assert.NoError(t, err) - assert.EqualValues(t, unmarshaledMetadata.KeyMap, metadata.KeyMap) + assert.EqualValues(t, unmarshaledMetadata.GetKeyMap(), metadata.GetKeyMap()) assert.Len(t, datasetModel.PartitionKeys, 2) - assert.Equal(t, datasetModel.PartitionKeys[0], models.PartitionKey{Name: dataset.PartitionKeys[0]}) - assert.Equal(t, datasetModel.PartitionKeys[1], models.PartitionKey{Name: dataset.PartitionKeys[1]}) + assert.Equal(t, datasetModel.PartitionKeys[0], models.PartitionKey{Name: dataset.GetPartitionKeys()[0]}) + assert.Equal(t, datasetModel.PartitionKeys[1], models.PartitionKey{Name: dataset.GetPartitionKeys()[1]}) } func TestFromDatasetID(t *testing.T) { @@ -86,9 +86,9 @@ func TestFromDatasetModelNoPartitionsOrMetadata(t *testing.T) { } dataset, err := FromDatasetModel(*datasetModel) assert.NoError(t, err) - assertDatasetIDEqualsModel(t, dataset.Id, &datasetModel.DatasetKey) - assert.Len(t, dataset.Metadata.KeyMap, 0) - assert.Len(t, dataset.PartitionKeys, 0) + assertDatasetIDEqualsModel(t, dataset.GetId(), &datasetModel.DatasetKey) + assert.Len(t, dataset.GetMetadata().GetKeyMap(), 0) + assert.Len(t, dataset.GetPartitionKeys(), 0) } func TestFromDatasetModelWithPartitions(t *testing.T) { @@ -108,8 +108,8 @@ func TestFromDatasetModelWithPartitions(t *testing.T) { } dataset, err := FromDatasetModel(*datasetModel) assert.NoError(t, err) - assertDatasetIDEqualsModel(t, dataset.Id, &datasetModel.DatasetKey) - assert.Len(t, dataset.Metadata.KeyMap, 2) - assert.EqualValues(t, dataset.Metadata.KeyMap, metadata.KeyMap) - assert.Len(t, dataset.PartitionKeys, 2) + assertDatasetIDEqualsModel(t, dataset.GetId(), 
&datasetModel.DatasetKey) + assert.Len(t, dataset.GetMetadata().GetKeyMap(), 2) + assert.EqualValues(t, dataset.GetMetadata().GetKeyMap(), metadata.GetKeyMap()) + assert.Len(t, dataset.GetPartitionKeys(), 2) } diff --git a/datacatalog/pkg/repositories/transformers/filters.go b/datacatalog/pkg/repositories/transformers/filters.go index c4ed8b6f08..0c6f083ee4 100644 --- a/datacatalog/pkg/repositories/transformers/filters.go +++ b/datacatalog/pkg/repositories/transformers/filters.go @@ -44,7 +44,7 @@ func FilterToListInput(ctx context.Context, sourceEntity common.Entity, filterEx } func constructModelFilter(ctx context.Context, singleFilter *datacatalog.SinglePropertyFilter, sourceEntity common.Entity) (models.ModelFilter, error) { - operator := comparisonOperatorMap[singleFilter.Operator] + operator := comparisonOperatorMap[singleFilter.GetOperator()] var modelFilter models.ModelFilter switch propertyFilter := singleFilter.GetPropertyFilter().(type) { @@ -53,8 +53,8 @@ func constructModelFilter(ctx context.Context, singleFilter *datacatalog.SingleP switch partitionProperty := partitionPropertyFilter.GetProperty().(type) { case *datacatalog.PartitionPropertyFilter_KeyVal: - key := partitionProperty.KeyVal.Key - value := partitionProperty.KeyVal.Value + key := partitionProperty.KeyVal.GetKey() + value := partitionProperty.KeyVal.GetValue() logger.Debugf(ctx, "Constructing partition key:[%v], val:[%v] filter", key, value) if err := validators.ValidateEmptyStringField(key, "PartitionKey"); err != nil { diff --git a/datacatalog/pkg/repositories/transformers/pagination.go b/datacatalog/pkg/repositories/transformers/pagination.go index 793779ab46..ed7a7925c1 100644 --- a/datacatalog/pkg/repositories/transformers/pagination.go +++ b/datacatalog/pkg/repositories/transformers/pagination.go @@ -23,18 +23,18 @@ func ApplyPagination(paginationOpts *datacatalog.PaginationOptions, input *model if paginationOpts != nil { // if the token is empty, that is still valid input since it 
is optional - if len(strings.Trim(paginationOpts.Token, " ")) == 0 { + if len(strings.Trim(paginationOpts.GetToken(), " ")) == 0 { offset = common.DefaultPageOffset } else { - parsedOffset, err := strconv.ParseInt(paginationOpts.Token, 10, 32) + parsedOffset, err := strconv.ParseInt(paginationOpts.GetToken(), 10, 32) if err != nil { return errors.NewDataCatalogErrorf(codes.InvalidArgument, "Invalid token %v", offset) } offset = int(parsedOffset) } - limit = int(paginationOpts.Limit) - sortKey = paginationOpts.SortKey - sortOrder = paginationOpts.SortOrder + limit = int(paginationOpts.GetLimit()) + sortKey = paginationOpts.GetSortKey() + sortOrder = paginationOpts.GetSortOrder() } input.Offset = offset diff --git a/datacatalog/pkg/repositories/transformers/reservation.go b/datacatalog/pkg/repositories/transformers/reservation.go index 2ae215be82..11edeb4f26 100644 --- a/datacatalog/pkg/repositories/transformers/reservation.go +++ b/datacatalog/pkg/repositories/transformers/reservation.go @@ -12,14 +12,14 @@ import ( ) func FromReservationID(reservationID *datacatalog.ReservationID) models.ReservationKey { - datasetID := reservationID.DatasetId + datasetID := reservationID.GetDatasetId() return models.ReservationKey{ - DatasetProject: datasetID.Project, - DatasetDomain: datasetID.Domain, - DatasetName: datasetID.Name, - DatasetVersion: datasetID.Version, - TagName: reservationID.TagName, + DatasetProject: datasetID.GetProject(), + DatasetDomain: datasetID.GetDomain(), + DatasetName: datasetID.GetName(), + DatasetVersion: datasetID.GetVersion(), + TagName: reservationID.GetTagName(), } } diff --git a/datacatalog/pkg/repositories/transformers/reservation_test.go b/datacatalog/pkg/repositories/transformers/reservation_test.go index 95ca7795ce..21b8e896fc 100644 --- a/datacatalog/pkg/repositories/transformers/reservation_test.go +++ b/datacatalog/pkg/repositories/transformers/reservation_test.go @@ -22,11 +22,11 @@ func TestFromReservationID(t *testing.T) { } 
reservationKey := FromReservationID(&reservationID) - assert.Equal(t, reservationKey.DatasetProject, reservationID.DatasetId.Project) - assert.Equal(t, reservationKey.DatasetName, reservationID.DatasetId.Name) - assert.Equal(t, reservationKey.DatasetDomain, reservationID.DatasetId.Domain) - assert.Equal(t, reservationKey.DatasetVersion, reservationID.DatasetId.Version) - assert.Equal(t, reservationKey.TagName, reservationID.TagName) + assert.Equal(t, reservationKey.DatasetProject, reservationID.GetDatasetId().GetProject()) + assert.Equal(t, reservationKey.DatasetName, reservationID.GetDatasetId().GetName()) + assert.Equal(t, reservationKey.DatasetDomain, reservationID.GetDatasetId().GetDomain()) + assert.Equal(t, reservationKey.DatasetVersion, reservationID.GetDatasetId().GetVersion()) + assert.Equal(t, reservationKey.TagName, reservationID.GetTagName()) } func TestCreateReservation(t *testing.T) { @@ -47,16 +47,16 @@ func TestCreateReservation(t *testing.T) { reservation, err := CreateReservation(&modelReservation, heartbeatInterval) assert.Equal(t, err, nil) - assert.Equal(t, reservation.ExpiresAt.AsTime(), modelReservation.ExpiresAt.UTC()) - assert.Equal(t, reservation.HeartbeatInterval.AsDuration(), heartbeatInterval) - assert.Equal(t, reservation.OwnerId, modelReservation.OwnerID) - - reservationID := reservation.ReservationId - assert.Equal(t, reservationID.TagName, modelReservation.TagName) - - datasetID := reservationID.DatasetId - assert.Equal(t, datasetID.Project, modelReservation.DatasetProject) - assert.Equal(t, datasetID.Name, modelReservation.DatasetName) - assert.Equal(t, datasetID.Domain, modelReservation.DatasetDomain) - assert.Equal(t, datasetID.Version, modelReservation.DatasetVersion) + assert.Equal(t, reservation.GetExpiresAt().AsTime(), modelReservation.ExpiresAt.UTC()) + assert.Equal(t, reservation.GetHeartbeatInterval().AsDuration(), heartbeatInterval) + assert.Equal(t, reservation.GetOwnerId(), modelReservation.OwnerID) + + reservationID := 
reservation.GetReservationId() + assert.Equal(t, reservationID.GetTagName(), modelReservation.TagName) + + datasetID := reservationID.GetDatasetId() + assert.Equal(t, datasetID.GetProject(), modelReservation.DatasetProject) + assert.Equal(t, datasetID.GetName(), modelReservation.DatasetName) + assert.Equal(t, datasetID.GetDomain(), modelReservation.DatasetDomain) + assert.Equal(t, datasetID.GetVersion(), modelReservation.DatasetVersion) } diff --git a/datacatalog/pkg/repositories/transformers/tag.go b/datacatalog/pkg/repositories/transformers/tag.go index df98e22200..7fe1f83220 100644 --- a/datacatalog/pkg/repositories/transformers/tag.go +++ b/datacatalog/pkg/repositories/transformers/tag.go @@ -7,10 +7,10 @@ import ( func ToTagKey(datasetID *datacatalog.DatasetID, tagName string) models.TagKey { return models.TagKey{ - DatasetProject: datasetID.Project, - DatasetDomain: datasetID.Domain, - DatasetName: datasetID.Name, - DatasetVersion: datasetID.Version, + DatasetProject: datasetID.GetProject(), + DatasetDomain: datasetID.GetDomain(), + DatasetName: datasetID.GetName(), + DatasetVersion: datasetID.GetVersion(), TagName: tagName, } } diff --git a/datacatalog/pkg/repositories/transformers/tag_test.go b/datacatalog/pkg/repositories/transformers/tag_test.go index c2820f6260..f77af243d6 100644 --- a/datacatalog/pkg/repositories/transformers/tag_test.go +++ b/datacatalog/pkg/repositories/transformers/tag_test.go @@ -22,10 +22,10 @@ func TestToTagKey(t *testing.T) { tagKey := ToTagKey(datasetID, tagName) assert.Equal(t, tagName, tagKey.TagName) - assert.Equal(t, datasetID.Project, tagKey.DatasetProject) - assert.Equal(t, datasetID.Domain, tagKey.DatasetDomain) - assert.Equal(t, datasetID.Name, tagKey.DatasetName) - assert.Equal(t, datasetID.Version, tagKey.DatasetVersion) + assert.Equal(t, datasetID.GetProject(), tagKey.DatasetProject) + assert.Equal(t, datasetID.GetDomain(), tagKey.DatasetDomain) + assert.Equal(t, datasetID.GetName(), tagKey.DatasetName) + 
assert.Equal(t, datasetID.GetVersion(), tagKey.DatasetVersion) } func TestFromTagModel(t *testing.T) { @@ -46,10 +46,10 @@ func TestFromTagModel(t *testing.T) { tag := FromTagModel(datasetID, tagModel) - assert.Equal(t, tag.Name, tagModel.TagName) - assert.Equal(t, datasetID.Project, tag.Dataset.Project) - assert.Equal(t, datasetID.Domain, tag.Dataset.Domain) - assert.Equal(t, datasetID.Name, tag.Dataset.Name) - assert.Equal(t, datasetID.Version, tag.Dataset.Version) - assert.Equal(t, datasetID.UUID, tag.Dataset.UUID) + assert.Equal(t, tag.GetName(), tagModel.TagName) + assert.Equal(t, datasetID.GetProject(), tag.GetDataset().GetProject()) + assert.Equal(t, datasetID.GetDomain(), tag.GetDataset().GetDomain()) + assert.Equal(t, datasetID.GetName(), tag.GetDataset().GetName()) + assert.Equal(t, datasetID.GetVersion(), tag.GetDataset().GetVersion()) + assert.Equal(t, datasetID.GetUUID(), tag.GetDataset().GetUUID()) } diff --git a/datacatalog/pkg/repositories/transformers/util_test.go b/datacatalog/pkg/repositories/transformers/util_test.go index bdbd1c642c..1d0c666e82 100644 --- a/datacatalog/pkg/repositories/transformers/util_test.go +++ b/datacatalog/pkg/repositories/transformers/util_test.go @@ -12,7 +12,7 @@ func TestMarshaling(t *testing.T) { unmarshaledMetadata, err := unmarshalMetadata(marshaledMetadata) assert.NoError(t, err) - assert.EqualValues(t, unmarshaledMetadata.KeyMap, metadata.KeyMap) + assert.EqualValues(t, unmarshaledMetadata.GetKeyMap(), metadata.GetKeyMap()) } func TestMarshalingWithNil(t *testing.T) { @@ -21,5 +21,5 @@ func TestMarshalingWithNil(t *testing.T) { var expectedKeymap map[string]string unmarshaledMetadata, err := unmarshalMetadata(marshaledMetadata) assert.NoError(t, err) - assert.EqualValues(t, expectedKeymap, unmarshaledMetadata.KeyMap) + assert.EqualValues(t, expectedKeymap, unmarshaledMetadata.GetKeyMap()) } diff --git a/deployment/agent/flyte_agent_helm_generated.yaml b/deployment/agent/flyte_agent_helm_generated.yaml index 
e90de9a379..0c6faf0e6d 100644 --- a/deployment/agent/flyte_agent_helm_generated.yaml +++ b/deployment/agent/flyte_agent_helm_generated.yaml @@ -79,7 +79,7 @@ spec: - pyflyte - serve - agent - image: "cr.flyte.org/flyteorg/flyteagent:1.13.6" + image: "cr.flyte.org/flyteorg/flyteagent:1.14.2" imagePullPolicy: "IfNotPresent" name: flyteagent volumeMounts: diff --git a/deployment/eks/flyte_aws_scheduler_helm_generated.yaml b/deployment/eks/flyte_aws_scheduler_helm_generated.yaml index d7cb3500d6..389e6ac87a 100644 --- a/deployment/eks/flyte_aws_scheduler_helm_generated.yaml +++ b/deployment/eks/flyte_aws_scheduler_helm_generated.yaml @@ -436,7 +436,7 @@ data: plugins: k8s: co-pilot: - image: cr.flyte.org/flyteorg/flytecopilot:v1.13.2 + image: cr.flyte.org/flyteorg/flytecopilot:v1.14.1 name: flyte-copilot- start-timeout: 30s core.yaml: | @@ -461,6 +461,8 @@ data: renew-deadline: 10s retry-period: 2s limit-namespace: all + literal-offloading-config: + enabled: true max-workflow-retries: 50 metadata-prefix: metadata/propeller metrics-prefix: flyte @@ -751,22 +753,18 @@ spec: - name: http port: 80 protocol: TCP - appProtocol: TCP targetPort: 8088 - name: grpc port: 81 protocol: TCP # intentionally set to TCP instead of grpc - appProtocol: TCP targetPort: 8089 - name: redoc protocol: TCP - appProtocol: TCP port: 87 targetPort: 8087 - name: http-metrics protocol: TCP - appProtocol: TCP port: 10254 selector: app.kubernetes.io/name: flyteadmin @@ -789,7 +787,6 @@ spec: - name: http port: 80 protocol: TCP - appProtocol: TCP targetPort: 8080 selector: app.kubernetes.io/name: flyteconsole @@ -882,7 +879,7 @@ spec: - /etc/flyte/config/*.yaml - migrate - run - image: "cr.flyte.org/flyteorg/flyteadmin:v1.13.2" + image: "cr.flyte.org/flyteorg/flyteadmin:v1.14.1" imagePullPolicy: "IfNotPresent" name: run-migrations securityContext: @@ -903,7 +900,7 @@ spec: - flytesnacks - flytetester - flyteexamples - image: "cr.flyte.org/flyteorg/flyteadmin:v1.13.2" + image: 
"cr.flyte.org/flyteorg/flyteadmin:v1.14.1" imagePullPolicy: "IfNotPresent" name: seed-projects securityContext: @@ -921,7 +918,7 @@ spec: - /etc/flyte/config/*.yaml - clusterresource - sync - image: "cr.flyte.org/flyteorg/flyteadmin:v1.13.2" + image: "cr.flyte.org/flyteorg/flyteadmin:v1.14.1" imagePullPolicy: "IfNotPresent" name: sync-cluster-resources securityContext: @@ -938,7 +935,7 @@ spec: - mountPath: /etc/secrets/ name: admin-secrets - name: generate-secrets - image: "cr.flyte.org/flyteorg/flyteadmin:v1.13.2" + image: "cr.flyte.org/flyteorg/flyteadmin:v1.14.1" imagePullPolicy: "IfNotPresent" command: ["/bin/sh", "-c"] args: @@ -965,7 +962,7 @@ spec: - --config - /etc/flyte/config/*.yaml - serve - image: "cr.flyte.org/flyteorg/flyteadmin:v1.13.2" + image: "cr.flyte.org/flyteorg/flyteadmin:v1.14.1" imagePullPolicy: "IfNotPresent" name: flyteadmin ports: @@ -1072,7 +1069,7 @@ spec: - /etc/flyte/config/*.yaml - clusterresource - run - image: "cr.flyte.org/flyteorg/flyteadmin:v1.13.2" + image: "cr.flyte.org/flyteorg/flyteadmin:v1.14.1" imagePullPolicy: "IfNotPresent" name: sync-cluster-resources volumeMounts: @@ -1128,7 +1125,7 @@ spec: seLinuxOptions: type: spc_t containers: - - image: "cr.flyte.org/flyteorg/flyteconsole:v1.17.1" + - image: "cr.flyte.org/flyteorg/flyteconsole:v1.19.0" imagePullPolicy: "IfNotPresent" name: flyteconsole envFrom: @@ -1202,7 +1199,7 @@ spec: - /etc/datacatalog/config/*.yaml - migrate - run - image: "cr.flyte.org/flyteorg/datacatalog:v1.13.2" + image: "cr.flyte.org/flyteorg/datacatalog:v1.14.1" imagePullPolicy: "IfNotPresent" name: run-migrations volumeMounts: @@ -1220,7 +1217,7 @@ spec: - --config - /etc/datacatalog/config/*.yaml - serve - image: "cr.flyte.org/flyteorg/datacatalog:v1.13.2" + image: "cr.flyte.org/flyteorg/datacatalog:v1.14.1" imagePullPolicy: "IfNotPresent" name: datacatalog ports: @@ -1283,7 +1280,7 @@ spec: template: metadata: annotations: - configChecksum: 
"6572aa999f8e6842b4dba120e12e6ccb8cdfa506373de2a267b62a63146ccde" + configChecksum: "d506150c845dea702ff708615390d9ed7dfac19b77a88429ed888b08fc6c9db" prometheus.io/path: "/metrics" prometheus.io/port: "10254" labels: @@ -1311,7 +1308,7 @@ spec: valueFrom: fieldRef: fieldPath: metadata.namespace - image: "cr.flyte.org/flyteorg/flytepropeller:v1.13.2" + image: "cr.flyte.org/flyteorg/flytepropeller:v1.14.1" imagePullPolicy: "IfNotPresent" name: flytepropeller ports: @@ -1365,9 +1362,9 @@ spec: labels: app: flyte-pod-webhook app.kubernetes.io/name: flyte-pod-webhook - app.kubernetes.io/version: v1.13.2 + app.kubernetes.io/version: v1.14.1 annotations: - configChecksum: "6572aa999f8e6842b4dba120e12e6ccb8cdfa506373de2a267b62a63146ccde" + configChecksum: "d506150c845dea702ff708615390d9ed7dfac19b77a88429ed888b08fc6c9db" prometheus.io/path: "/metrics" prometheus.io/port: "10254" spec: @@ -1381,7 +1378,7 @@ spec: serviceAccountName: flyte-pod-webhook initContainers: - name: generate-secrets - image: "cr.flyte.org/flyteorg/flytepropeller:v1.13.2" + image: "cr.flyte.org/flyteorg/flytepropeller:v1.14.1" imagePullPolicy: "IfNotPresent" command: - flytepropeller @@ -1408,7 +1405,7 @@ spec: mountPath: /etc/flyte/config containers: - name: webhook - image: "cr.flyte.org/flyteorg/flytepropeller:v1.13.2" + image: "cr.flyte.org/flyteorg/flytepropeller:v1.14.1" imagePullPolicy: "IfNotPresent" command: - flytepropeller diff --git a/deployment/eks/flyte_helm_controlplane_generated.yaml b/deployment/eks/flyte_helm_controlplane_generated.yaml index 60ca7d1720..7cc622b92e 100644 --- a/deployment/eks/flyte_helm_controlplane_generated.yaml +++ b/deployment/eks/flyte_helm_controlplane_generated.yaml @@ -474,22 +474,18 @@ spec: - name: http port: 80 protocol: TCP - appProtocol: TCP targetPort: 8088 - name: grpc port: 81 protocol: TCP # intentionally set to TCP instead of grpc - appProtocol: TCP targetPort: 8089 - name: redoc protocol: TCP - appProtocol: TCP port: 87 targetPort: 8087 - name: 
http-metrics protocol: TCP - appProtocol: TCP port: 10254 selector: app.kubernetes.io/name: flyteadmin @@ -512,7 +508,6 @@ spec: - name: http port: 80 protocol: TCP - appProtocol: TCP targetPort: 8080 selector: app.kubernetes.io/name: flyteconsole @@ -587,7 +582,7 @@ spec: - /etc/flyte/config/*.yaml - migrate - run - image: "cr.flyte.org/flyteorg/flyteadmin:v1.13.2" + image: "cr.flyte.org/flyteorg/flyteadmin:v1.14.1" imagePullPolicy: "IfNotPresent" name: run-migrations securityContext: @@ -608,7 +603,7 @@ spec: - flytesnacks - flytetester - flyteexamples - image: "cr.flyte.org/flyteorg/flyteadmin:v1.13.2" + image: "cr.flyte.org/flyteorg/flyteadmin:v1.14.1" imagePullPolicy: "IfNotPresent" name: seed-projects securityContext: @@ -626,7 +621,7 @@ spec: - /etc/flyte/config/*.yaml - clusterresource - sync - image: "cr.flyte.org/flyteorg/flyteadmin:v1.13.2" + image: "cr.flyte.org/flyteorg/flyteadmin:v1.14.1" imagePullPolicy: "IfNotPresent" name: sync-cluster-resources securityContext: @@ -643,7 +638,7 @@ spec: - mountPath: /etc/secrets/ name: admin-secrets - name: generate-secrets - image: "cr.flyte.org/flyteorg/flyteadmin:v1.13.2" + image: "cr.flyte.org/flyteorg/flyteadmin:v1.14.1" imagePullPolicy: "IfNotPresent" command: ["/bin/sh", "-c"] args: @@ -670,7 +665,7 @@ spec: - --config - /etc/flyte/config/*.yaml - serve - image: "cr.flyte.org/flyteorg/flyteadmin:v1.13.2" + image: "cr.flyte.org/flyteorg/flyteadmin:v1.14.1" imagePullPolicy: "IfNotPresent" name: flyteadmin ports: @@ -777,7 +772,7 @@ spec: - /etc/flyte/config/*.yaml - clusterresource - run - image: "cr.flyte.org/flyteorg/flyteadmin:v1.13.2" + image: "cr.flyte.org/flyteorg/flyteadmin:v1.14.1" imagePullPolicy: "IfNotPresent" name: sync-cluster-resources volumeMounts: @@ -833,7 +828,7 @@ spec: seLinuxOptions: type: spc_t containers: - - image: "cr.flyte.org/flyteorg/flyteconsole:v1.17.1" + - image: "cr.flyte.org/flyteorg/flyteconsole:v1.19.0" imagePullPolicy: "IfNotPresent" name: flyteconsole envFrom: @@ -907,7 
+902,7 @@ spec: - /etc/datacatalog/config/*.yaml - migrate - run - image: "cr.flyte.org/flyteorg/datacatalog:v1.13.2" + image: "cr.flyte.org/flyteorg/datacatalog:v1.14.1" imagePullPolicy: "IfNotPresent" name: run-migrations volumeMounts: @@ -925,7 +920,7 @@ spec: - --config - /etc/datacatalog/config/*.yaml - serve - image: "cr.flyte.org/flyteorg/datacatalog:v1.13.2" + image: "cr.flyte.org/flyteorg/datacatalog:v1.14.1" imagePullPolicy: "IfNotPresent" name: datacatalog ports: @@ -1008,7 +1003,7 @@ spec: - precheck - --config - /etc/flyte/config/*.yaml - image: "cr.flyte.org/flyteorg/flytescheduler:v1.13.2" + image: "cr.flyte.org/flyteorg/flytescheduler:v1.14.1" imagePullPolicy: "IfNotPresent" name: flytescheduler-check securityContext: @@ -1028,7 +1023,7 @@ spec: - run - --config - /etc/flyte/config/*.yaml - image: "cr.flyte.org/flyteorg/flytescheduler:v1.13.2" + image: "cr.flyte.org/flyteorg/flytescheduler:v1.14.1" imagePullPolicy: "IfNotPresent" name: flytescheduler ports: diff --git a/deployment/eks/flyte_helm_dataplane_generated.yaml b/deployment/eks/flyte_helm_dataplane_generated.yaml index 682d1cef01..75235afbda 100644 --- a/deployment/eks/flyte_helm_dataplane_generated.yaml +++ b/deployment/eks/flyte_helm_dataplane_generated.yaml @@ -94,7 +94,7 @@ data: plugins: k8s: co-pilot: - image: cr.flyte.org/flyteorg/flytecopilot:v1.13.2 + image: cr.flyte.org/flyteorg/flytecopilot:v1.14.1 name: flyte-copilot- start-timeout: 30s core.yaml: | @@ -119,6 +119,8 @@ data: renew-deadline: 10s retry-period: 2s limit-namespace: all + literal-offloading-config: + enabled: true max-workflow-retries: 50 metadata-prefix: metadata/propeller metrics-prefix: flyte @@ -428,7 +430,7 @@ spec: template: metadata: annotations: - configChecksum: "6572aa999f8e6842b4dba120e12e6ccb8cdfa506373de2a267b62a63146ccde" + configChecksum: "d506150c845dea702ff708615390d9ed7dfac19b77a88429ed888b08fc6c9db" prometheus.io/path: "/metrics" prometheus.io/port: "10254" labels: @@ -456,7 +458,7 @@ spec: 
valueFrom: fieldRef: fieldPath: metadata.namespace - image: "cr.flyte.org/flyteorg/flytepropeller:v1.13.2" + image: "cr.flyte.org/flyteorg/flytepropeller:v1.14.1" imagePullPolicy: "IfNotPresent" name: flytepropeller ports: @@ -510,9 +512,9 @@ spec: labels: app: flyte-pod-webhook app.kubernetes.io/name: flyte-pod-webhook - app.kubernetes.io/version: v1.13.2 + app.kubernetes.io/version: v1.14.1 annotations: - configChecksum: "6572aa999f8e6842b4dba120e12e6ccb8cdfa506373de2a267b62a63146ccde" + configChecksum: "d506150c845dea702ff708615390d9ed7dfac19b77a88429ed888b08fc6c9db" prometheus.io/path: "/metrics" prometheus.io/port: "10254" spec: @@ -526,7 +528,7 @@ spec: serviceAccountName: flyte-pod-webhook initContainers: - name: generate-secrets - image: "cr.flyte.org/flyteorg/flytepropeller:v1.13.2" + image: "cr.flyte.org/flyteorg/flytepropeller:v1.14.1" imagePullPolicy: "IfNotPresent" command: - flytepropeller @@ -553,7 +555,7 @@ spec: mountPath: /etc/flyte/config containers: - name: webhook - image: "cr.flyte.org/flyteorg/flytepropeller:v1.13.2" + image: "cr.flyte.org/flyteorg/flytepropeller:v1.14.1" imagePullPolicy: "IfNotPresent" command: - flytepropeller diff --git a/deployment/eks/flyte_helm_generated.yaml b/deployment/eks/flyte_helm_generated.yaml index 5e0ae72ec2..efaef1701c 100644 --- a/deployment/eks/flyte_helm_generated.yaml +++ b/deployment/eks/flyte_helm_generated.yaml @@ -467,7 +467,7 @@ data: plugins: k8s: co-pilot: - image: cr.flyte.org/flyteorg/flytecopilot:v1.13.2 + image: cr.flyte.org/flyteorg/flytecopilot:v1.14.1 name: flyte-copilot- start-timeout: 30s core.yaml: | @@ -492,6 +492,8 @@ data: renew-deadline: 10s retry-period: 2s limit-namespace: all + literal-offloading-config: + enabled: true max-workflow-retries: 50 metadata-prefix: metadata/propeller metrics-prefix: flyte @@ -782,22 +784,18 @@ spec: - name: http port: 80 protocol: TCP - appProtocol: TCP targetPort: 8088 - name: grpc port: 81 protocol: TCP # intentionally set to TCP instead of grpc - 
appProtocol: TCP targetPort: 8089 - name: redoc protocol: TCP - appProtocol: TCP port: 87 targetPort: 8087 - name: http-metrics protocol: TCP - appProtocol: TCP port: 10254 selector: app.kubernetes.io/name: flyteadmin @@ -820,7 +818,6 @@ spec: - name: http port: 80 protocol: TCP - appProtocol: TCP targetPort: 8080 selector: app.kubernetes.io/name: flyteconsole @@ -913,7 +910,7 @@ spec: - /etc/flyte/config/*.yaml - migrate - run - image: "cr.flyte.org/flyteorg/flyteadmin:v1.13.2" + image: "cr.flyte.org/flyteorg/flyteadmin:v1.14.1" imagePullPolicy: "IfNotPresent" name: run-migrations securityContext: @@ -934,7 +931,7 @@ spec: - flytesnacks - flytetester - flyteexamples - image: "cr.flyte.org/flyteorg/flyteadmin:v1.13.2" + image: "cr.flyte.org/flyteorg/flyteadmin:v1.14.1" imagePullPolicy: "IfNotPresent" name: seed-projects securityContext: @@ -952,7 +949,7 @@ spec: - /etc/flyte/config/*.yaml - clusterresource - sync - image: "cr.flyte.org/flyteorg/flyteadmin:v1.13.2" + image: "cr.flyte.org/flyteorg/flyteadmin:v1.14.1" imagePullPolicy: "IfNotPresent" name: sync-cluster-resources securityContext: @@ -969,7 +966,7 @@ spec: - mountPath: /etc/secrets/ name: admin-secrets - name: generate-secrets - image: "cr.flyte.org/flyteorg/flyteadmin:v1.13.2" + image: "cr.flyte.org/flyteorg/flyteadmin:v1.14.1" imagePullPolicy: "IfNotPresent" command: ["/bin/sh", "-c"] args: @@ -996,7 +993,7 @@ spec: - --config - /etc/flyte/config/*.yaml - serve - image: "cr.flyte.org/flyteorg/flyteadmin:v1.13.2" + image: "cr.flyte.org/flyteorg/flyteadmin:v1.14.1" imagePullPolicy: "IfNotPresent" name: flyteadmin ports: @@ -1103,7 +1100,7 @@ spec: - /etc/flyte/config/*.yaml - clusterresource - run - image: "cr.flyte.org/flyteorg/flyteadmin:v1.13.2" + image: "cr.flyte.org/flyteorg/flyteadmin:v1.14.1" imagePullPolicy: "IfNotPresent" name: sync-cluster-resources volumeMounts: @@ -1159,7 +1156,7 @@ spec: seLinuxOptions: type: spc_t containers: - - image: "cr.flyte.org/flyteorg/flyteconsole:v1.17.1" + - 
image: "cr.flyte.org/flyteorg/flyteconsole:v1.19.0" imagePullPolicy: "IfNotPresent" name: flyteconsole envFrom: @@ -1233,7 +1230,7 @@ spec: - /etc/datacatalog/config/*.yaml - migrate - run - image: "cr.flyte.org/flyteorg/datacatalog:v1.13.2" + image: "cr.flyte.org/flyteorg/datacatalog:v1.14.1" imagePullPolicy: "IfNotPresent" name: run-migrations volumeMounts: @@ -1251,7 +1248,7 @@ spec: - --config - /etc/datacatalog/config/*.yaml - serve - image: "cr.flyte.org/flyteorg/datacatalog:v1.13.2" + image: "cr.flyte.org/flyteorg/datacatalog:v1.14.1" imagePullPolicy: "IfNotPresent" name: datacatalog ports: @@ -1334,7 +1331,7 @@ spec: - precheck - --config - /etc/flyte/config/*.yaml - image: "cr.flyte.org/flyteorg/flytescheduler:v1.13.2" + image: "cr.flyte.org/flyteorg/flytescheduler:v1.14.1" imagePullPolicy: "IfNotPresent" name: flytescheduler-check securityContext: @@ -1354,7 +1351,7 @@ spec: - run - --config - /etc/flyte/config/*.yaml - image: "cr.flyte.org/flyteorg/flytescheduler:v1.13.2" + image: "cr.flyte.org/flyteorg/flytescheduler:v1.14.1" imagePullPolicy: "IfNotPresent" name: flytescheduler ports: @@ -1413,7 +1410,7 @@ spec: template: metadata: annotations: - configChecksum: "6572aa999f8e6842b4dba120e12e6ccb8cdfa506373de2a267b62a63146ccde" + configChecksum: "d506150c845dea702ff708615390d9ed7dfac19b77a88429ed888b08fc6c9db" prometheus.io/path: "/metrics" prometheus.io/port: "10254" labels: @@ -1441,7 +1438,7 @@ spec: valueFrom: fieldRef: fieldPath: metadata.namespace - image: "cr.flyte.org/flyteorg/flytepropeller:v1.13.2" + image: "cr.flyte.org/flyteorg/flytepropeller:v1.14.1" imagePullPolicy: "IfNotPresent" name: flytepropeller ports: @@ -1495,9 +1492,9 @@ spec: labels: app: flyte-pod-webhook app.kubernetes.io/name: flyte-pod-webhook - app.kubernetes.io/version: v1.13.2 + app.kubernetes.io/version: v1.14.1 annotations: - configChecksum: "6572aa999f8e6842b4dba120e12e6ccb8cdfa506373de2a267b62a63146ccde" + configChecksum: 
"d506150c845dea702ff708615390d9ed7dfac19b77a88429ed888b08fc6c9db" prometheus.io/path: "/metrics" prometheus.io/port: "10254" spec: @@ -1511,7 +1508,7 @@ spec: serviceAccountName: flyte-pod-webhook initContainers: - name: generate-secrets - image: "cr.flyte.org/flyteorg/flytepropeller:v1.13.2" + image: "cr.flyte.org/flyteorg/flytepropeller:v1.14.1" imagePullPolicy: "IfNotPresent" command: - flytepropeller @@ -1538,7 +1535,7 @@ spec: mountPath: /etc/flyte/config containers: - name: webhook - image: "cr.flyte.org/flyteorg/flytepropeller:v1.13.2" + image: "cr.flyte.org/flyteorg/flytepropeller:v1.14.1" imagePullPolicy: "IfNotPresent" command: - flytepropeller diff --git a/deployment/gcp/flyte_helm_controlplane_generated.yaml b/deployment/gcp/flyte_helm_controlplane_generated.yaml index 29367a5b37..6986194eaf 100644 --- a/deployment/gcp/flyte_helm_controlplane_generated.yaml +++ b/deployment/gcp/flyte_helm_controlplane_generated.yaml @@ -488,22 +488,18 @@ spec: - name: http port: 80 protocol: TCP - appProtocol: TCP targetPort: 8088 - name: grpc port: 81 protocol: TCP # intentionally set to TCP instead of grpc - appProtocol: TCP targetPort: 8089 - name: redoc protocol: TCP - appProtocol: TCP port: 87 targetPort: 8087 - name: http-metrics protocol: TCP - appProtocol: TCP port: 10254 selector: app.kubernetes.io/name: flyteadmin @@ -526,7 +522,6 @@ spec: - name: http port: 80 protocol: TCP - appProtocol: TCP targetPort: 8080 selector: app.kubernetes.io/name: flyteconsole @@ -602,7 +597,7 @@ spec: - /etc/flyte/config/*.yaml - migrate - run - image: "cr.flyte.org/flyteorg/flyteadmin:v1.13.2" + image: "cr.flyte.org/flyteorg/flyteadmin:v1.14.1" imagePullPolicy: "IfNotPresent" name: run-migrations securityContext: @@ -623,7 +618,7 @@ spec: - flytesnacks - flytetester - flyteexamples - image: "cr.flyte.org/flyteorg/flyteadmin:v1.13.2" + image: "cr.flyte.org/flyteorg/flyteadmin:v1.14.1" imagePullPolicy: "IfNotPresent" name: seed-projects securityContext: @@ -641,7 +636,7 @@ spec: - 
/etc/flyte/config/*.yaml - clusterresource - sync - image: "cr.flyte.org/flyteorg/flyteadmin:v1.13.2" + image: "cr.flyte.org/flyteorg/flyteadmin:v1.14.1" imagePullPolicy: "IfNotPresent" name: sync-cluster-resources securityContext: @@ -658,7 +653,7 @@ spec: - mountPath: /etc/secrets/ name: admin-secrets - name: generate-secrets - image: "cr.flyte.org/flyteorg/flyteadmin:v1.13.2" + image: "cr.flyte.org/flyteorg/flyteadmin:v1.14.1" imagePullPolicy: "IfNotPresent" command: ["/bin/sh", "-c"] args: @@ -685,7 +680,7 @@ spec: - --config - /etc/flyte/config/*.yaml - serve - image: "cr.flyte.org/flyteorg/flyteadmin:v1.13.2" + image: "cr.flyte.org/flyteorg/flyteadmin:v1.14.1" imagePullPolicy: "IfNotPresent" name: flyteadmin ports: @@ -792,7 +787,7 @@ spec: - /etc/flyte/config/*.yaml - clusterresource - run - image: "cr.flyte.org/flyteorg/flyteadmin:v1.13.2" + image: "cr.flyte.org/flyteorg/flyteadmin:v1.14.1" imagePullPolicy: "IfNotPresent" name: sync-cluster-resources volumeMounts: @@ -848,7 +843,7 @@ spec: seLinuxOptions: type: spc_t containers: - - image: "cr.flyte.org/flyteorg/flyteconsole:v1.17.1" + - image: "cr.flyte.org/flyteorg/flyteconsole:v1.19.0" imagePullPolicy: "IfNotPresent" name: flyteconsole envFrom: @@ -922,7 +917,7 @@ spec: - /etc/datacatalog/config/*.yaml - migrate - run - image: "cr.flyte.org/flyteorg/datacatalog:v1.13.2" + image: "cr.flyte.org/flyteorg/datacatalog:v1.14.1" imagePullPolicy: "IfNotPresent" name: run-migrations volumeMounts: @@ -940,7 +935,7 @@ spec: - --config - /etc/datacatalog/config/*.yaml - serve - image: "cr.flyte.org/flyteorg/datacatalog:v1.13.2" + image: "cr.flyte.org/flyteorg/datacatalog:v1.14.1" imagePullPolicy: "IfNotPresent" name: datacatalog ports: @@ -1023,7 +1018,7 @@ spec: - precheck - --config - /etc/flyte/config/*.yaml - image: "cr.flyte.org/flyteorg/flytescheduler:v1.13.2" + image: "cr.flyte.org/flyteorg/flytescheduler:v1.14.1" imagePullPolicy: "IfNotPresent" name: flytescheduler-check securityContext: @@ -1043,7 +1038,7 
@@ spec: - run - --config - /etc/flyte/config/*.yaml - image: "cr.flyte.org/flyteorg/flytescheduler:v1.13.2" + image: "cr.flyte.org/flyteorg/flytescheduler:v1.14.1" imagePullPolicy: "IfNotPresent" name: flytescheduler ports: diff --git a/deployment/gcp/flyte_helm_dataplane_generated.yaml b/deployment/gcp/flyte_helm_dataplane_generated.yaml index 8196b38520..53639f07e8 100644 --- a/deployment/gcp/flyte_helm_dataplane_generated.yaml +++ b/deployment/gcp/flyte_helm_dataplane_generated.yaml @@ -94,7 +94,7 @@ data: plugins: k8s: co-pilot: - image: cr.flyte.org/flyteorg/flytecopilot:v1.13.2 + image: cr.flyte.org/flyteorg/flytecopilot:v1.14.1 name: flyte-copilot- start-timeout: 30s core.yaml: | @@ -119,6 +119,8 @@ data: renew-deadline: 10s retry-period: 2s limit-namespace: all + literal-offloading-config: + enabled: true max-workflow-retries: 50 metadata-prefix: metadata/propeller metrics-prefix: flyte @@ -436,7 +438,7 @@ spec: template: metadata: annotations: - configChecksum: "8562f7f608d4936e13f6ad70c18c7c095068e742243e7f380f89694d2182110" + configChecksum: "7fe8a43c0f65a1b452812fb5a5bc74f81f40948314d2e4bcf385091330739e4" prometheus.io/path: "/metrics" prometheus.io/port: "10254" labels: @@ -463,7 +465,7 @@ spec: valueFrom: fieldRef: fieldPath: metadata.namespace - image: "cr.flyte.org/flyteorg/flytepropeller:v1.13.2" + image: "cr.flyte.org/flyteorg/flytepropeller:v1.14.1" imagePullPolicy: "IfNotPresent" name: flytepropeller ports: @@ -517,9 +519,9 @@ spec: labels: app: flyte-pod-webhook app.kubernetes.io/name: flyte-pod-webhook - app.kubernetes.io/version: v1.13.2 + app.kubernetes.io/version: v1.14.1 annotations: - configChecksum: "8562f7f608d4936e13f6ad70c18c7c095068e742243e7f380f89694d2182110" + configChecksum: "7fe8a43c0f65a1b452812fb5a5bc74f81f40948314d2e4bcf385091330739e4" prometheus.io/path: "/metrics" prometheus.io/port: "10254" spec: @@ -533,7 +535,7 @@ spec: serviceAccountName: flyte-pod-webhook initContainers: - name: generate-secrets - image: 
"cr.flyte.org/flyteorg/flytepropeller:v1.13.2" + image: "cr.flyte.org/flyteorg/flytepropeller:v1.14.1" imagePullPolicy: "IfNotPresent" command: - flytepropeller @@ -560,7 +562,7 @@ spec: mountPath: /etc/flyte/config containers: - name: webhook - image: "cr.flyte.org/flyteorg/flytepropeller:v1.13.2" + image: "cr.flyte.org/flyteorg/flytepropeller:v1.14.1" imagePullPolicy: "IfNotPresent" command: - flytepropeller diff --git a/deployment/gcp/flyte_helm_generated.yaml b/deployment/gcp/flyte_helm_generated.yaml index ce1f64c1df..8da805242b 100644 --- a/deployment/gcp/flyte_helm_generated.yaml +++ b/deployment/gcp/flyte_helm_generated.yaml @@ -480,7 +480,7 @@ data: plugins: k8s: co-pilot: - image: cr.flyte.org/flyteorg/flytecopilot:v1.13.2 + image: cr.flyte.org/flyteorg/flytecopilot:v1.14.1 name: flyte-copilot- start-timeout: 30s core.yaml: | @@ -505,6 +505,8 @@ data: renew-deadline: 10s retry-period: 2s limit-namespace: all + literal-offloading-config: + enabled: true max-workflow-retries: 50 metadata-prefix: metadata/propeller metrics-prefix: flyte @@ -804,22 +806,18 @@ spec: - name: http port: 80 protocol: TCP - appProtocol: TCP targetPort: 8088 - name: grpc port: 81 protocol: TCP # intentionally set to TCP instead of grpc - appProtocol: TCP targetPort: 8089 - name: redoc protocol: TCP - appProtocol: TCP port: 87 targetPort: 8087 - name: http-metrics protocol: TCP - appProtocol: TCP port: 10254 selector: app.kubernetes.io/name: flyteadmin @@ -842,7 +840,6 @@ spec: - name: http port: 80 protocol: TCP - appProtocol: TCP targetPort: 8080 selector: app.kubernetes.io/name: flyteconsole @@ -936,7 +933,7 @@ spec: - /etc/flyte/config/*.yaml - migrate - run - image: "cr.flyte.org/flyteorg/flyteadmin:v1.13.2" + image: "cr.flyte.org/flyteorg/flyteadmin:v1.14.1" imagePullPolicy: "IfNotPresent" name: run-migrations securityContext: @@ -957,7 +954,7 @@ spec: - flytesnacks - flytetester - flyteexamples - image: "cr.flyte.org/flyteorg/flyteadmin:v1.13.2" + image: 
"cr.flyte.org/flyteorg/flyteadmin:v1.14.1" imagePullPolicy: "IfNotPresent" name: seed-projects securityContext: @@ -975,7 +972,7 @@ spec: - /etc/flyte/config/*.yaml - clusterresource - sync - image: "cr.flyte.org/flyteorg/flyteadmin:v1.13.2" + image: "cr.flyte.org/flyteorg/flyteadmin:v1.14.1" imagePullPolicy: "IfNotPresent" name: sync-cluster-resources securityContext: @@ -992,7 +989,7 @@ spec: - mountPath: /etc/secrets/ name: admin-secrets - name: generate-secrets - image: "cr.flyte.org/flyteorg/flyteadmin:v1.13.2" + image: "cr.flyte.org/flyteorg/flyteadmin:v1.14.1" imagePullPolicy: "IfNotPresent" command: ["/bin/sh", "-c"] args: @@ -1019,7 +1016,7 @@ spec: - --config - /etc/flyte/config/*.yaml - serve - image: "cr.flyte.org/flyteorg/flyteadmin:v1.13.2" + image: "cr.flyte.org/flyteorg/flyteadmin:v1.14.1" imagePullPolicy: "IfNotPresent" name: flyteadmin ports: @@ -1126,7 +1123,7 @@ spec: - /etc/flyte/config/*.yaml - clusterresource - run - image: "cr.flyte.org/flyteorg/flyteadmin:v1.13.2" + image: "cr.flyte.org/flyteorg/flyteadmin:v1.14.1" imagePullPolicy: "IfNotPresent" name: sync-cluster-resources volumeMounts: @@ -1182,7 +1179,7 @@ spec: seLinuxOptions: type: spc_t containers: - - image: "cr.flyte.org/flyteorg/flyteconsole:v1.17.1" + - image: "cr.flyte.org/flyteorg/flyteconsole:v1.19.0" imagePullPolicy: "IfNotPresent" name: flyteconsole envFrom: @@ -1256,7 +1253,7 @@ spec: - /etc/datacatalog/config/*.yaml - migrate - run - image: "cr.flyte.org/flyteorg/datacatalog:v1.13.2" + image: "cr.flyte.org/flyteorg/datacatalog:v1.14.1" imagePullPolicy: "IfNotPresent" name: run-migrations volumeMounts: @@ -1274,7 +1271,7 @@ spec: - --config - /etc/datacatalog/config/*.yaml - serve - image: "cr.flyte.org/flyteorg/datacatalog:v1.13.2" + image: "cr.flyte.org/flyteorg/datacatalog:v1.14.1" imagePullPolicy: "IfNotPresent" name: datacatalog ports: @@ -1357,7 +1354,7 @@ spec: - precheck - --config - /etc/flyte/config/*.yaml - image: "cr.flyte.org/flyteorg/flytescheduler:v1.13.2" + 
image: "cr.flyte.org/flyteorg/flytescheduler:v1.14.1" imagePullPolicy: "IfNotPresent" name: flytescheduler-check securityContext: @@ -1377,7 +1374,7 @@ spec: - run - --config - /etc/flyte/config/*.yaml - image: "cr.flyte.org/flyteorg/flytescheduler:v1.13.2" + image: "cr.flyte.org/flyteorg/flytescheduler:v1.14.1" imagePullPolicy: "IfNotPresent" name: flytescheduler ports: @@ -1436,7 +1433,7 @@ spec: template: metadata: annotations: - configChecksum: "8562f7f608d4936e13f6ad70c18c7c095068e742243e7f380f89694d2182110" + configChecksum: "7fe8a43c0f65a1b452812fb5a5bc74f81f40948314d2e4bcf385091330739e4" prometheus.io/path: "/metrics" prometheus.io/port: "10254" labels: @@ -1463,7 +1460,7 @@ spec: valueFrom: fieldRef: fieldPath: metadata.namespace - image: "cr.flyte.org/flyteorg/flytepropeller:v1.13.2" + image: "cr.flyte.org/flyteorg/flytepropeller:v1.14.1" imagePullPolicy: "IfNotPresent" name: flytepropeller ports: @@ -1517,9 +1514,9 @@ spec: labels: app: flyte-pod-webhook app.kubernetes.io/name: flyte-pod-webhook - app.kubernetes.io/version: v1.13.2 + app.kubernetes.io/version: v1.14.1 annotations: - configChecksum: "8562f7f608d4936e13f6ad70c18c7c095068e742243e7f380f89694d2182110" + configChecksum: "7fe8a43c0f65a1b452812fb5a5bc74f81f40948314d2e4bcf385091330739e4" prometheus.io/path: "/metrics" prometheus.io/port: "10254" spec: @@ -1533,7 +1530,7 @@ spec: serviceAccountName: flyte-pod-webhook initContainers: - name: generate-secrets - image: "cr.flyte.org/flyteorg/flytepropeller:v1.13.2" + image: "cr.flyte.org/flyteorg/flytepropeller:v1.14.1" imagePullPolicy: "IfNotPresent" command: - flytepropeller @@ -1560,7 +1557,7 @@ spec: mountPath: /etc/flyte/config containers: - name: webhook - image: "cr.flyte.org/flyteorg/flytepropeller:v1.13.2" + image: "cr.flyte.org/flyteorg/flytepropeller:v1.14.1" imagePullPolicy: "IfNotPresent" command: - flytepropeller diff --git a/deployment/sandbox-binary/flyte_sandbox_binary_helm_generated.yaml 
b/deployment/sandbox-binary/flyte_sandbox_binary_helm_generated.yaml index 6fafa61550..da4eff30b0 100644 --- a/deployment/sandbox-binary/flyte_sandbox_binary_helm_generated.yaml +++ b/deployment/sandbox-binary/flyte_sandbox_binary_helm_generated.yaml @@ -79,6 +79,8 @@ data: level: 1 propeller: create-flyteworkflow-crd: true + literal-offloading-config: + enabled: true webhook: certDir: /var/run/flyte/certs localCert: true @@ -120,7 +122,7 @@ data: stackdriver-enabled: false k8s: co-pilot: - image: "cr.flyte.org/flyteorg/flytecopilot:v1.13.2" + image: "cr.flyte.org/flyteorg/flytecopilot:v1.14.1" k8s-array: logs: config: @@ -363,7 +365,7 @@ spec: app.kubernetes.io/instance: flyte app.kubernetes.io/component: flyte-binary annotations: - checksum/configuration: 886440a42b3eeec802cfe60d37885f69e35ffd83e53e625b3c877da5e8c7eb38 + checksum/configuration: c4d2a06ca3d956873404b4a1768afc7f8e23b111adac749590cda61ef1f8df3c checksum/configuration-secret: d5d93f4e67780b21593dc3799f0f6682aab0765e708e4020939975d14d44f929 checksum/cluster-resource-templates: 7dfa59f3d447e9c099b8f8ffad3af466fecbc9cf9f8c97295d9634254a55d4ae spec: diff --git a/deployment/sandbox/flyte_helm_generated.yaml b/deployment/sandbox/flyte_helm_generated.yaml index 22b4855352..10f998e10a 100644 --- a/deployment/sandbox/flyte_helm_generated.yaml +++ b/deployment/sandbox/flyte_helm_generated.yaml @@ -592,7 +592,7 @@ data: plugins: k8s: co-pilot: - image: cr.flyte.org/flyteorg/flytecopilot:v1.13.2 + image: cr.flyte.org/flyteorg/flytecopilot:v1.14.1 name: flyte-copilot- start-timeout: 30s core.yaml: | @@ -612,6 +612,8 @@ data: renew-deadline: 10s retry-period: 2s limit-namespace: all + literal-offloading-config: + enabled: true max-workflow-retries: 30 metadata-prefix: metadata/propeller metrics-prefix: flyte @@ -6145,22 +6147,18 @@ spec: - name: http port: 80 protocol: TCP - appProtocol: TCP targetPort: 8088 - name: grpc port: 81 protocol: TCP # intentionally set to TCP instead of grpc - appProtocol: TCP 
targetPort: 8089 - name: redoc protocol: TCP - appProtocol: TCP port: 87 targetPort: 8087 - name: http-metrics protocol: TCP - appProtocol: TCP port: 10254 selector: app.kubernetes.io/name: flyteadmin @@ -6183,7 +6181,6 @@ spec: - name: http port: 80 protocol: TCP - appProtocol: TCP targetPort: 8080 selector: app.kubernetes.io/name: flyteconsole @@ -6720,7 +6717,7 @@ spec: - /etc/flyte/config/*.yaml - migrate - run - image: "cr.flyte.org/flyteorg/flyteadmin:v1.13.2" + image: "cr.flyte.org/flyteorg/flyteadmin:v1.14.1" imagePullPolicy: "IfNotPresent" name: run-migrations securityContext: @@ -6740,7 +6737,7 @@ spec: - flytesnacks - flytetester - flyteexamples - image: "cr.flyte.org/flyteorg/flyteadmin:v1.13.2" + image: "cr.flyte.org/flyteorg/flyteadmin:v1.14.1" imagePullPolicy: "IfNotPresent" name: seed-projects securityContext: @@ -6757,7 +6754,7 @@ spec: - /etc/flyte/config/*.yaml - clusterresource - sync - image: "cr.flyte.org/flyteorg/flyteadmin:v1.13.2" + image: "cr.flyte.org/flyteorg/flyteadmin:v1.14.1" imagePullPolicy: "IfNotPresent" name: sync-cluster-resources securityContext: @@ -6773,7 +6770,7 @@ spec: - mountPath: /etc/secrets/ name: admin-secrets - name: generate-secrets - image: "cr.flyte.org/flyteorg/flyteadmin:v1.13.2" + image: "cr.flyte.org/flyteorg/flyteadmin:v1.14.1" imagePullPolicy: "IfNotPresent" command: ["/bin/sh", "-c"] args: @@ -6800,7 +6797,7 @@ spec: - --config - /etc/flyte/config/*.yaml - serve - image: "cr.flyte.org/flyteorg/flyteadmin:v1.13.2" + image: "cr.flyte.org/flyteorg/flyteadmin:v1.14.1" imagePullPolicy: "IfNotPresent" name: flyteadmin ports: @@ -6897,7 +6894,7 @@ spec: - /etc/flyte/config/*.yaml - clusterresource - run - image: "cr.flyte.org/flyteorg/flyteadmin:v1.13.2" + image: "cr.flyte.org/flyteorg/flyteadmin:v1.14.1" imagePullPolicy: "IfNotPresent" name: sync-cluster-resources volumeMounts: @@ -6950,7 +6947,7 @@ spec: seLinuxOptions: type: spc_t containers: - - image: "cr.flyte.org/flyteorg/flyteconsole:v1.17.1" + - image: 
"cr.flyte.org/flyteorg/flyteconsole:v1.19.0" imagePullPolicy: "IfNotPresent" name: flyteconsole envFrom: @@ -7022,7 +7019,7 @@ spec: - /etc/datacatalog/config/*.yaml - migrate - run - image: "cr.flyte.org/flyteorg/datacatalog:v1.13.2" + image: "cr.flyte.org/flyteorg/datacatalog:v1.14.1" imagePullPolicy: "IfNotPresent" name: run-migrations volumeMounts: @@ -7039,7 +7036,7 @@ spec: - --config - /etc/datacatalog/config/*.yaml - serve - image: "cr.flyte.org/flyteorg/datacatalog:v1.13.2" + image: "cr.flyte.org/flyteorg/datacatalog:v1.14.1" imagePullPolicy: "IfNotPresent" name: datacatalog ports: @@ -7112,7 +7109,7 @@ spec: - precheck - --config - /etc/flyte/config/*.yaml - image: "cr.flyte.org/flyteorg/flytescheduler:v1.13.2" + image: "cr.flyte.org/flyteorg/flytescheduler:v1.14.1" imagePullPolicy: "IfNotPresent" name: flytescheduler-check securityContext: @@ -7131,7 +7128,7 @@ spec: - run - --config - /etc/flyte/config/*.yaml - image: "cr.flyte.org/flyteorg/flytescheduler:v1.13.2" + image: "cr.flyte.org/flyteorg/flytescheduler:v1.14.1" imagePullPolicy: "IfNotPresent" name: flytescheduler ports: @@ -7187,7 +7184,7 @@ spec: template: metadata: annotations: - configChecksum: "7ab9aee83ad8109354235eee7f46c3f091d7c70cd55157a195f4997d247f933" + configChecksum: "bfdc13c324a45054cc825e6818d1b51aaf856095139d6eb6ab06408c8f0432a" prometheus.io/path: "/metrics" prometheus.io/port: "10254" labels: @@ -7214,7 +7211,7 @@ spec: valueFrom: fieldRef: fieldPath: metadata.namespace - image: "cr.flyte.org/flyteorg/flytepropeller:v1.13.2" + image: "cr.flyte.org/flyteorg/flytepropeller:v1.14.1" imagePullPolicy: "IfNotPresent" name: flytepropeller ports: @@ -7261,9 +7258,9 @@ spec: labels: app: flyte-pod-webhook app.kubernetes.io/name: flyte-pod-webhook - app.kubernetes.io/version: v1.13.2 + app.kubernetes.io/version: v1.14.1 annotations: - configChecksum: "7ab9aee83ad8109354235eee7f46c3f091d7c70cd55157a195f4997d247f933" + configChecksum: 
"bfdc13c324a45054cc825e6818d1b51aaf856095139d6eb6ab06408c8f0432a" prometheus.io/path: "/metrics" prometheus.io/port: "10254" spec: @@ -7277,7 +7274,7 @@ spec: serviceAccountName: flyte-pod-webhook initContainers: - name: generate-secrets - image: "cr.flyte.org/flyteorg/flytepropeller:v1.13.2" + image: "cr.flyte.org/flyteorg/flytepropeller:v1.14.1" imagePullPolicy: "IfNotPresent" command: - flytepropeller @@ -7304,7 +7301,7 @@ spec: mountPath: /etc/flyte/config containers: - name: webhook - image: "cr.flyte.org/flyteorg/flytepropeller:v1.13.2" + image: "cr.flyte.org/flyteorg/flytepropeller:v1.14.1" imagePullPolicy: "IfNotPresent" command: - flytepropeller diff --git a/docker/sandbox-bundled/manifests/complete-agent.yaml b/docker/sandbox-bundled/manifests/complete-agent.yaml index 028f719e71..6d2e9c4b4e 100644 --- a/docker/sandbox-bundled/manifests/complete-agent.yaml +++ b/docker/sandbox-bundled/manifests/complete-agent.yaml @@ -431,6 +431,8 @@ data: level: 5 propeller: create-flyteworkflow-crd: true + literal-offloading-config: + enabled: true webhook: certDir: /var/run/flyte/certs localCert: true @@ -473,7 +475,7 @@ data: stackdriver-enabled: false k8s: co-pilot: - image: "cr.flyte.org/flyteorg/flytecopilot:v1.13.2" + image: "cr.flyte.org/flyteorg/flytecopilot:v1.14.1" k8s-array: logs: config: @@ -517,6 +519,8 @@ data: - FLYTE_AWS_ENDPOINT: http://flyte-sandbox-minio.flyte:9000 - FLYTE_AWS_ACCESS_KEY_ID: minio - FLYTE_AWS_SECRET_ACCESS_KEY: miniostorage + - FLYTE_PLATFORM_URL: 'flyte-sandbox-grpc.flyte:8089' + - FLYTE_PLATFORM_INSECURE: true storage: signedURL: stowConfigOverride: @@ -819,7 +823,7 @@ type: Opaque --- apiVersion: v1 data: - haSharedSecret: SlI1TDFkTXBMaThuc0hlSQ== + haSharedSecret: YUNORVNVRjBKVFpMTjVzZg== proxyPassword: "" proxyUsername: "" kind: Secret @@ -1250,7 +1254,7 @@ spec: metadata: annotations: checksum/cluster-resource-templates: 6fd9b172465e3089fcc59f738b92b8dc4d8939360c19de8ee65f68b0e7422035 - checksum/configuration: 
a823eaadac5f3a4358c8acf628ebeb3719f88312af520d2c253de2579dff262d + checksum/configuration: 5a537c05dbd27a7f2884eb78f4e762205c3bcc3248ab9e509ab7074c7e5f953d checksum/configuration-secret: 09216ffaa3d29e14f88b1f30af580d02a2a5e014de4d750b7f275cc07ed4e914 labels: app.kubernetes.io/component: flyte-binary @@ -1416,7 +1420,7 @@ spec: metadata: annotations: checksum/config: 8f50e768255a87f078ba8b9879a0c174c3e045ffb46ac8723d2eedbe293c8d81 - checksum/secret: ffc8aa05a602edd8f9b1d7ef35aa1cc5e383bceb9b91307eef99e86f53e13d4e + checksum/secret: 89b23b61fb0b3e8423af547ebb08fefb82a79836f1eaaf90e838ebcb71bb2a1b labels: app: docker-registry release: flyte-sandbox @@ -1759,7 +1763,7 @@ spec: value: minio - name: FLYTE_AWS_SECRET_ACCESS_KEY value: miniostorage - image: cr.flyte.org/flyteorg/flyteagent:1.13.6 + image: cr.flyte.org/flyteorg/flyteagent:1.14.2 imagePullPolicy: IfNotPresent name: flyteagent ports: diff --git a/docker/sandbox-bundled/manifests/complete.yaml b/docker/sandbox-bundled/manifests/complete.yaml index c8b8e1c93a..9f8f7fe5cf 100644 --- a/docker/sandbox-bundled/manifests/complete.yaml +++ b/docker/sandbox-bundled/manifests/complete.yaml @@ -420,6 +420,8 @@ data: level: 5 propeller: create-flyteworkflow-crd: true + literal-offloading-config: + enabled: true webhook: certDir: /var/run/flyte/certs localCert: true @@ -462,7 +464,7 @@ data: stackdriver-enabled: false k8s: co-pilot: - image: "cr.flyte.org/flyteorg/flytecopilot:v1.13.2" + image: "cr.flyte.org/flyteorg/flytecopilot:v1.14.1" k8s-array: logs: config: @@ -499,6 +501,8 @@ data: - FLYTE_AWS_ENDPOINT: http://flyte-sandbox-minio.flyte:9000 - FLYTE_AWS_ACCESS_KEY_ID: minio - FLYTE_AWS_SECRET_ACCESS_KEY: miniostorage + - FLYTE_PLATFORM_URL: 'flyte-sandbox-grpc.flyte:8089' + - FLYTE_PLATFORM_INSECURE: true storage: signedURL: stowConfigOverride: @@ -801,7 +805,7 @@ type: Opaque --- apiVersion: v1 data: - haSharedSecret: YjdMdE9yejJzZ2xXSDFBRQ== + haSharedSecret: YmtheFJhUGp0WTh5dEo4Ug== proxyPassword: "" 
proxyUsername: "" kind: Secret @@ -1199,7 +1203,7 @@ spec: metadata: annotations: checksum/cluster-resource-templates: 6fd9b172465e3089fcc59f738b92b8dc4d8939360c19de8ee65f68b0e7422035 - checksum/configuration: c2649df6bcb523f120c73b0fdeec5d9516f555eab12e4eae78b04dea2cf2abae + checksum/configuration: 6c8ab25f10e8bdc025e51493a47ff9c9f42a46026ea70284ad804e05f5883a9a checksum/configuration-secret: 09216ffaa3d29e14f88b1f30af580d02a2a5e014de4d750b7f275cc07ed4e914 labels: app.kubernetes.io/component: flyte-binary @@ -1365,7 +1369,7 @@ spec: metadata: annotations: checksum/config: 8f50e768255a87f078ba8b9879a0c174c3e045ffb46ac8723d2eedbe293c8d81 - checksum/secret: 956ac1b58c049a630c94605eedaba7ba9de3fc01233701ef403ab4bf24fe2a7a + checksum/secret: 249d45f57d99d60d9ba687a2b5451154fa1e5a1be8c5e399d769b7e0c73c5aaa labels: app: docker-registry release: flyte-sandbox diff --git a/docker/sandbox-bundled/manifests/dev.yaml b/docker/sandbox-bundled/manifests/dev.yaml index 1038da1f64..8800e6d9ad 100644 --- a/docker/sandbox-bundled/manifests/dev.yaml +++ b/docker/sandbox-bundled/manifests/dev.yaml @@ -499,7 +499,7 @@ metadata: --- apiVersion: v1 data: - haSharedSecret: YUpzb25xNTM1eml3Rmpueg== + haSharedSecret: UU1hc3Z1dWJ0YVB4R01vZA== proxyPassword: "" proxyUsername: "" kind: Secret @@ -934,7 +934,7 @@ spec: metadata: annotations: checksum/config: 8f50e768255a87f078ba8b9879a0c174c3e045ffb46ac8723d2eedbe293c8d81 - checksum/secret: 2720f13bd64051a7acb512e59e426b9f6c5f6c3c7d1d9a3a423e2df4cf9bab46 + checksum/secret: ad2c03695a15547e840a4affe09ef070307393f9c4195ace08231ffaad4f7971 labels: app: docker-registry release: flyte-sandbox diff --git a/docker/sandbox/Dockerfile b/docker/sandbox/Dockerfile index 2a77a20472..40c3e83e83 100644 --- a/docker/sandbox/Dockerfile +++ b/docker/sandbox/Dockerfile @@ -52,13 +52,13 @@ COPY --from=base /flyteorg/ /flyteorg/ COPY docker/sandbox/flyte-entrypoint-default.sh /flyteorg/bin/flyte-entrypoint.sh ARG FLYTE_VERSION="latest" -ENV FLYTE_VERSION 
"${FLYTE_VERSION}" +ENV FLYTE_VERSION="${FLYTE_VERSION}" ARG FLYTE_TEST="release" -ENV FLYTE_TEST "${FLYTE_TEST}" +ENV FLYTE_TEST="${FLYTE_TEST}" # Update PATH variable -ENV PATH "/flyteorg/bin:${PATH}" +ENV PATH="/flyteorg/bin:${PATH}" # Declare volumes for k3s VOLUME /var/lib/kubelet diff --git a/docs/README.md b/docs/README.md index dc5cb7046c..576948df3a 100644 --- a/docs/README.md +++ b/docs/README.md @@ -24,6 +24,11 @@ This creates a new environment called `monodocs-env` with all the dependencies n In the `flyteorg/flyte` root directory make sure you have activated the `monodocs-env` (or whatever you called it) environment and do: +```bash +# need to set this to a fake value to build the docs locally +$ export DOCSEARCH_API_KEY=fake-api-key +``` + ```bash $ make docs ``` diff --git a/docs/_static/custom.js b/docs/_static/custom.js index 573b21e6fd..dd58574656 100644 --- a/docs/_static/custom.js +++ b/docs/_static/custom.js @@ -1,10 +1,17 @@ -window.addEventListener("DOMContentLoaded", function() { - // Select all elements with class "external" - var externalLinks = document.querySelectorAll("a.external"); +window.addEventListener("DOMContentLoaded", function () { + // Select all elements with class "external" + var externalLinks = document.querySelectorAll("a.external"); - // Loop through each element with class "external" - externalLinks.forEach(function(link) { - // Set the target attribute to "_blank" - link.setAttribute("target", "_blank"); - }); + // Loop through each element with class "external" + externalLinks.forEach(function (link) { + // Set the target attribute to "_blank" + link.setAttribute("target", "_blank"); + }); + + // Remove the default search dialog if it exists (on CMD + K) + // This collides with Algolia's search dialog + const searchDialog = document.getElementById("pst-search-dialog"); + if (searchDialog) { + searchDialog.remove(); + } }); diff --git a/docs/community/contribute/contribute_code.rst 
b/docs/community/contribute/contribute_code.rst index c0cae7dade..21b05e20a2 100644 --- a/docs/community/contribute/contribute_code.rst +++ b/docs/community/contribute/contribute_code.rst @@ -25,12 +25,38 @@ To understand how the below components interact with each other, refer to :ref:` * - **Purpose**: Deployment, Documentation, and Issues * - **Languages**: RST -To build the Flyte docs locally you will need the following prerequisites: +In the ``flyteorg/flyte`` root directory you can run ``make dev-docs`` to build the documentation locally. The generated documentation will be in the ``docs/_build/html`` directory. +**Setup process** + +1. First you need to make sure you can run linux/amd64 container +2. Run the following commands to build the documentation and serve it locally + +.. prompt:: bash $ + + make dev-docs + python -m http.server --directory docs/_build/html + +3. Go to http://localhost:8000 to see the documentation. + +**Supported environment variables of** ``make dev-docs`` + +* ``DEV_DOCS_WATCH``: If set, the docs will be built and served using `sphinx-autobuild `__ for live updates. +* ``FLYTEKIT_LOCAL_PATH``: If set, the local path to flytekit will be used instead of the source code from the ``flyteorg/flytekit repo``. +* ``FLYTECTL_LOCAL_PATH``: If set, the local path to flytectl will be used instead of the source code from the ``flyteorg/flytectl repo``. +* ``FLYTESNACKS_LOCAL_PATH``: If set, the local path to flytesnacks will be used instead of the source code from the ``flyteorg/flytesnacks`` repo. + +For example, to use the local flytekit source code instead of the source code from the ``flyteorg/flytekit`` repo, run ``export FLYTEKIT_LOCAL_PATH=/path/to/flytekit`` before running ``make dev-docs``. + +**Alternative conda setup steps** + +* Install ``conda``. + * We recommend Miniconda installed with an `official installer `__. * Install `conda-lock `__. 
-* In the ``flyteorg/flyte`` root directory you can run: - * ``make dev-docs`` to build the documentation locally. The build will be in the ``docs/_build/html`` directory. See `the script `__ for additional environment variables that can be set. - * For example, to use the local flytekit source code instead of the source code from the flyteorg/flytekit repo, run ``export FLYTEKIT_LOCAL_PATH=/path/to/flytekit`` before running ``make dev-docs``. +* In the ``flyteorg/flyte`` root directory run: + * ``conda-lock install --name monodocs-env monodocs-environment.lock.yaml`` + * ``conda activate monodocs-env`` + * ``pip install ./flyteidl`` ``flyteidl`` ************ @@ -267,7 +293,7 @@ that integrates all Flyte components into a single binary. # Step 4: Running the single binary. # The POD_NAMESPACE environment variable is necessary for the webhook to function correctly. # You may encounter an error due to `ERROR: duplicate key value violates unique constraint`. Running the command again will solve the problem. - POD_NAMESPACE=flyte ./flyte start --config flyte-single-binary-local.yaml + POD_NAMESPACE=flyte flyte start --config flyte-single-binary-local.yaml # All logs from flyteadmin, flyteplugins, flytepropeller, etc. will appear in the terminal. @@ -301,7 +327,7 @@ The following instructions provide guidance on how to build single binary with y # Step 3: Now, you can build the single binary. Go back to Flyte directory. make go-tidy make compile - POD_NAMESPACE=flyte ./flyte start --config flyte-single-binary-local.yaml + POD_NAMESPACE=flyte flyte start --config flyte-single-binary-local.yaml **5. Test by running a hello world workflow.** @@ -403,7 +429,7 @@ If not, we can start backends with a single command. Before running your workflow in the sandbox, make sure you're able to successfully run it locally. To deploy the workflow in the sandbox, you'll need to build a Flytekit image. 
Create a Dockerfile in your Flytekit directory with the minimum required configuration to run a task, as shown below. -If your task requires additional components, such as plugins, you may find it useful to refer to the construction of the `officail flitekit image `__ +If your task requires additional components, such as plugins, you may find it useful to refer to the construction of the `official flytekit image `__ .. code:: Dockerfile diff --git a/docs/community/contribute/contribute_docs.md b/docs/community/contribute/contribute_docs.md index f97152032b..7c1d47ffc8 100644 --- a/docs/community/contribute/contribute_docs.md +++ b/docs/community/contribute/contribute_docs.md @@ -82,14 +82,14 @@ The following are some tips to include various content: * **Source code references (Embedded format)**
`.rst` example: ```{code-block} - .. rli:: https://raw.githubusercontent.com/flyteorg/// + .. literalinclude:: /examples/ :lines: - ``` `.md` example: ````{code-block} - ```{rli} https://raw.githubusercontent.com/flyteorg/// - lines: - + ```{literalinclude} /examples/ + :lines: - ``` ```` diff --git a/docs/community/troubleshoot.rst b/docs/community/troubleshoot.rst index 692e3c2aed..2a1b620515 100644 --- a/docs/community/troubleshoot.rst +++ b/docs/community/troubleshoot.rst @@ -176,3 +176,34 @@ Please add ``spark`` to the list of `enabled-plugins` in the config yaml file. F default-for-task-types: - container: container - container_array: K8S-ARRAY + +``authentication handshake failed: x509: "Kubernetes Ingress Controller Fake Certificate" certificate is not trusted"`` when deploying flyte-core to your own kubernetes cluster +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +This issue is caused by TLS being disabled in your Kubernetes cluster. You can resolve the problem by following these steps: + +- Enable ``tls`` in the ``values.yaml`` ingress configuration of flyte-core in order to expose gRPC service at 443 port: + +.. code-block:: yaml + + ingress: + host: + separateGrpcIngress: true + separateGrpcIngressAnnotations: + : "grpc" + annotations: + : "/console" + : "/console" + : haproxy + tls: + enabled: true # enable tls + +- Disable ``insecure`` in your ``flytectl`` client ``config.yaml``: + +.. 
code-block:: yaml + + admin: + endpoint: dns:///example.com + authType: Pkce + insecure: false # disable insecure in flytectl + insecureSkipVerify: true diff --git a/docs/conf.py b/docs/conf.py index 316acc60be..14fbdf35a1 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -37,7 +37,7 @@ # The short X.Y version version = "" # The full version, including alpha/beta/rc tags -release = "1.13.2" +release = "1.14.1" # -- General configuration --------------------------------------------------- @@ -343,6 +343,22 @@ "flytesnacks/README.md", "flytekit/**/README.md", "flytekit/_templates/**", + "examples/advanced_composition/**", + "examples/basics/**", + "examples/customizing_dependencies/**", + "examples/data_types_and_io/**", + "examples/development_lifecycle/**", + "examples/extending/**", + "examples/productionizing/**", + "examples/testing/**", + "flytesnacks/examples/advanced_composition/*.md", + "flytesnacks/examples/basics/*.md", + "flytesnacks/examples/customizing_dependencies/*.md", + "flytesnacks/examples/data_types_and_io/*.md", + "flytesnacks/examples/development_lifecycle/*.md", + "flytesnacks/examples/extending/*.md", + "flytesnacks/examples/productionizing/*.md", + "flytesnacks/examples/testing/*.md", "api/flytectl/index.rst", "protos/boilerplate/**", "protos/tmp/**", @@ -622,14 +638,6 @@ "flytesnacks/_build", "flytesnacks/_tags", "flytesnacks/index.md", - "examples/advanced_composition", - "examples/basics", - "examples/customizing_dependencies", - "examples/data_types_and_io", - "examples/development_lifecycle", - "examples/extending", - "examples/productionizing", - "examples/testing" ] ], "local": flytesnacks_local_path is not None, @@ -690,6 +698,15 @@ # Disable warnings from tensorflow os.environ["TF_CPP_MIN_LOG_LEVEL"] = "3" +# Define the canonical URL if you are using a custom domain on Read the Docs +html_baseurl = os.environ.get("READTHEDOCS_CANONICAL_URL", "") + +# Tell Jinja2 templates the build is running on Read the Docs +if 
os.environ.get("READTHEDOCS", "") == "True": + if "html_context" not in globals(): + html_context = {} + html_context["READTHEDOCS"] = True + class CustomWarningSuppressor(logging.Filter): """Filter logs by `suppress_warnings`.""" diff --git a/docs/deployment/configuration/generated/datacatalog_config.rst b/docs/deployment/configuration/generated/datacatalog_config.rst index e60af73564..52fe7109cc 100644 --- a/docs/deployment/configuration/generated/datacatalog_config.rst +++ b/docs/deployment/configuration/generated/datacatalog_config.rst @@ -224,6 +224,7 @@ postgres (`database.PostgresConfig`_) password: postgres passwordPath: "" port: 30001 + readReplicaHost: localhost username: postgres @@ -265,6 +266,18 @@ The host name of the database server localhost +readReplicaHost (string) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +The host name of the read replica database server + +**Default Value**: + +.. code-block:: yaml + + localhost + + port (int) """""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" diff --git a/docs/deployment/configuration/generated/flyteadmin_config.rst b/docs/deployment/configuration/generated/flyteadmin_config.rst index c8bbe4bd32..c7cf72dbff 100644 --- a/docs/deployment/configuration/generated/flyteadmin_config.rst +++ b/docs/deployment/configuration/generated/flyteadmin_config.rst @@ -143,6 +143,18 @@ Max number of gRPC retries "4" +maxMessageSizeBytes (int) +------------------------------------------------------------------------------------------------------------------------ + +The max size in bytes for incoming gRPC messages + +**Default Value**: + +.. code-block:: yaml + + "0" + + authType (uint8) ------------------------------------------------------------------------------------------------------------------------ @@ -1389,6 +1401,18 @@ kafka (`interfaces.KafkaConfig`_) .. 
code-block:: yaml brokers: null + saslConfig: + enabled: false + handshake: false + mechanism: "" + password: "" + passwordPath: "" + user: "" + tlsConfig: + certPath: "" + enabled: false + insecureSkipVerify: false + keyPath: "" version: "" @@ -1399,6 +1423,7 @@ eventsPublisher (`interfaces.EventsPublisherConfig`_) .. code-block:: yaml + enrichAllWorkflowEventTypes: false eventTypes: null topicName: "" @@ -1469,6 +1494,16 @@ eventTypes ([]string) null +enrichAllWorkflowEventTypes (bool) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +**Default Value**: + +.. code-block:: yaml + + "false" + + interfaces.GCPConfig ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -1505,6 +1540,140 @@ brokers ([]string) null +saslConfig (`interfaces.SASLConfig`_) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +**Default Value**: + +.. code-block:: yaml + + enabled: false + handshake: false + mechanism: "" + password: "" + passwordPath: "" + user: "" + + +tlsConfig (`interfaces.TLSConfig`_) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +**Default Value**: + +.. code-block:: yaml + + certPath: "" + enabled: false + insecureSkipVerify: false + keyPath: "" + + +interfaces.SASLConfig +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +enabled (bool) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +**Default Value**: + +.. code-block:: yaml + + "false" + + +user (string) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +**Default Value**: + +.. 
code-block:: yaml + + "" + + +password (string) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +**Default Value**: + +.. code-block:: yaml + + "" + + +passwordPath (string) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +**Default Value**: + +.. code-block:: yaml + + "" + + +handshake (bool) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +**Default Value**: + +.. code-block:: yaml + + "false" + + +mechanism (string) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +**Default Value**: + +.. code-block:: yaml + + "" + + +interfaces.TLSConfig +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +enabled (bool) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +**Default Value**: + +.. code-block:: yaml + + "false" + + +insecureSkipVerify (bool) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +**Default Value**: + +.. code-block:: yaml + + "false" + + +certPath (string) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +**Default Value**: + +.. code-block:: yaml + + "" + + +keyPath (string) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +**Default Value**: + +.. 
code-block:: yaml + + "" + + Section: cluster_resources ======================================================================================================================== @@ -1751,6 +1920,7 @@ postgres (`database.PostgresConfig`_) password: postgres passwordPath: "" port: 30001 + readReplicaHost: localhost username: postgres @@ -1779,6 +1949,18 @@ The host name of the database server localhost +readReplicaHost (string) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +The host name of the read replica database server + +**Default Value**: + +.. code-block:: yaml + + localhost + + port (int) """""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" @@ -1951,6 +2133,7 @@ eventsPublisher (`interfaces.EventsPublisherConfig`_) .. code-block:: yaml + enrichAllWorkflowEventTypes: false eventTypes: null topicName: "" @@ -2769,6 +2952,7 @@ k8s (`config.K8sPluginConfig`_) .. code-block:: yaml + add-tolerations-for-extended-resources: [] co-pilot: cpu: 500m default-input-path: /var/flyte/inputs @@ -2799,6 +2983,7 @@ k8s (`config.K8sPluginConfig`_) default-security-context: null default-tolerations: null delete-resource-on-finalize: false + enable-distributed-error-aggregation: false enable-host-networking-pod: null gpu-device-node-label: k8s.amazonaws.com/accelerator gpu-partition-size-node-label: k8s.amazonaws.com/gpu-partition-size @@ -3314,6 +3499,30 @@ Number of retries for exponential backoff when updating a resource. "5" +add-tolerations-for-extended-resources ([]string) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +Name of the extended resources for which tolerations should be added. + +**Default Value**: + +.. 
code-block:: yaml + + [] + + +enable-distributed-error-aggregation (bool) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +If true, will aggregate errors of different worker pods for distributed tasks. + +**Default Value**: + +.. code-block:: yaml + + "false" + + config.FlyteCoPilotConfig ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -3956,6 +4165,7 @@ Configuration for array nodes default-parallelism-behavior: unlimited event-version: 0 + use-map-plugin-logs: false literal-offloading-config (`config.LiteralOffloadingConfig`_) @@ -3971,7 +4181,7 @@ config used for literal offloading. max-size-in-mb-for-offloading: 1000 min-size-in-mb-for-offloading: 10 supported-sdk-versions: - FLYTE_SDK: 1.13.5 + FLYTE_SDK: 1.13.14 config.ArrayNodeConfig @@ -4001,6 +4211,18 @@ Default parallelism behavior for array nodes unlimited +use-map-plugin-logs (bool) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +Override subNode log links with those configured for the map plugin logs + +**Default Value**: + +.. code-block:: yaml + + "false" + + config.CompositeQueueConfig ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -4316,7 +4538,7 @@ Maps flytekit and union SDK names to minimum supported version that can handle r .. 
code-block:: yaml - FLYTE_SDK: 1.13.5 + FLYTE_SDK: 1.13.14 min-size-in-mb-for-offloading (int64) @@ -5014,6 +5236,7 @@ security (`config.ServerSecurityOptions`_) allowedOrigins: - '*' auditAccess: false + insecureCookieHeader: false secure: false ssl: certificateFile: "" @@ -5314,6 +5537,16 @@ useAuth (bool) "false" +insecureCookieHeader (bool) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +**Default Value**: + +.. code-block:: yaml + + "false" + + auditAccess (bool) """""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" diff --git a/docs/deployment/configuration/generated/flytepropeller_config.rst b/docs/deployment/configuration/generated/flytepropeller_config.rst index fc78d202ca..5d428c65e3 100644 --- a/docs/deployment/configuration/generated/flytepropeller_config.rst +++ b/docs/deployment/configuration/generated/flytepropeller_config.rst @@ -113,6 +113,18 @@ Max number of gRPC retries "4" +maxMessageSizeBytes (int) +------------------------------------------------------------------------------------------------------------------------ + +The max size in bytes for incoming gRPC messages + +**Default Value**: + +.. code-block:: yaml + + "0" + + authType (uint8) ------------------------------------------------------------------------------------------------------------------------ @@ -1114,6 +1126,30 @@ catalogcache (`catalog.Config`_) workers: 10 +dask (`dask.Config`_) +------------------------------------------------------------------------------------------------------------------------ + +**Default Value**: + +.. 
code-block:: yaml + + logs: + cloudwatch-enabled: false + cloudwatch-log-group: "" + cloudwatch-region: "" + cloudwatch-template-uri: "" + dynamic-log-links: null + gcp-project: "" + kubernetes-enabled: true + kubernetes-template-uri: http://localhost:30082/#!/log/{{ .namespace }}/{{ .podName + }}/pod?namespace={{ .namespace }} + kubernetes-url: "" + stackdriver-enabled: false + stackdriver-logresourcename: "" + stackdriver-template-uri: "" + templates: null + + databricks (`databricks.Config`_) ------------------------------------------------------------------------------------------------------------------------ @@ -1164,6 +1200,7 @@ k8s (`config.K8sPluginConfig`_) .. code-block:: yaml + add-tolerations-for-extended-resources: [] co-pilot: cpu: 500m default-input-path: /var/flyte/inputs @@ -1194,6 +1231,7 @@ k8s (`config.K8sPluginConfig`_) default-security-context: null default-tolerations: null delete-resource-on-finalize: false + enable-distributed-error-aggregation: false enable-host-networking-pod: null gpu-device-node-label: k8s.amazonaws.com/accelerator gpu-partition-size-node-label: k8s.amazonaws.com/gpu-partition-size @@ -1364,7 +1402,7 @@ ray (`ray.Config`_) enabled: false endpoint: "" name: "" - serviceAccount: default + serviceAccount: "" serviceType: NodePort shutdownAfterJobFinishes: true ttlSecondsAfterFinished: 3600 @@ -2738,6 +2776,30 @@ Number of retries for exponential backoff when updating a resource. "5" +add-tolerations-for-extended-resources ([]string) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +Name of the extended resources for which tolerations should be added. + +**Default Value**: + +.. 
code-block:: yaml + + [] + + +enable-distributed-error-aggregation (bool) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +If true, will aggregate errors of different worker pods for distributed tasks. + +**Default Value**: + +.. code-block:: yaml + + "false" + + config.FlyteCoPilotConfig ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -2938,6 +3000,188 @@ scale (int32) "0" +dask.Config +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +logs (`logs.LogConfig (logs)`_) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +**Default Value**: + +.. code-block:: yaml + + cloudwatch-enabled: false + cloudwatch-log-group: "" + cloudwatch-region: "" + cloudwatch-template-uri: "" + dynamic-log-links: null + gcp-project: "" + kubernetes-enabled: true + kubernetes-template-uri: http://localhost:30082/#!/log/{{ .namespace }}/{{ .podName + }}/pod?namespace={{ .namespace }} + kubernetes-url: "" + stackdriver-enabled: false + stackdriver-logresourcename: "" + stackdriver-template-uri: "" + templates: null + + +logs.LogConfig (logs) +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +cloudwatch-enabled (bool) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +Enable Cloudwatch Logging + +**Default Value**: + +.. code-block:: yaml + + "false" + + +cloudwatch-region (string) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +AWS region in which Cloudwatch logs are stored. + +**Default Value**: + +.. 
code-block:: yaml + + "" + + +cloudwatch-log-group (string) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +Log group to which streams are associated. + +**Default Value**: + +.. code-block:: yaml + + "" + + +cloudwatch-template-uri (string) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +Template Uri to use when building cloudwatch log links + +**Default Value**: + +.. code-block:: yaml + + "" + + +kubernetes-enabled (bool) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +Enable Kubernetes Logging + +**Default Value**: + +.. code-block:: yaml + + "true" + + +kubernetes-url (string) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +Console URL for Kubernetes logs + +**Default Value**: + +.. code-block:: yaml + + "" + + +kubernetes-template-uri (string) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +Template Uri to use when building kubernetes log links + +**Default Value**: + +.. code-block:: yaml + + http://localhost:30082/#!/log/{{ .namespace }}/{{ .podName }}/pod?namespace={{ .namespace + }} + + +stackdriver-enabled (bool) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +Enable Log-links to stackdriver + +**Default Value**: + +.. code-block:: yaml + + "false" + + +gcp-project (string) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +Name of the project in GCP + +**Default Value**: + +.. 
code-block:: yaml + + "" + + +stackdriver-logresourcename (string) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +Name of the logresource in stackdriver + +**Default Value**: + +.. code-block:: yaml + + "" + + +stackdriver-template-uri (string) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +Template Uri to use when building stackdriver log links + +**Default Value**: + +.. code-block:: yaml + + "" + + +dynamic-log-links (map[string]tasklog.TemplateLogPlugin) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +**Default Value**: + +.. code-block:: yaml + + null + + +templates ([]tasklog.TemplateLogPlugin) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +**Default Value**: + +.. code-block:: yaml + + null + + databricks.Config ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -3276,7 +3520,7 @@ Certificate path k8s.LogConfig ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -config (`logs.LogConfig (config)`_) +config (`logs.LogConfig`_) """""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" Defines the log config for k8s logs. @@ -3301,162 +3545,6 @@ Defines the log config for k8s logs. templates: null -logs.LogConfig (config) -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -cloudwatch-enabled (bool) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Enable Cloudwatch Logging - -**Default Value**: - -.. 
code-block:: yaml - - "false" - - -cloudwatch-region (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -AWS region in which Cloudwatch logs are stored. - -**Default Value**: - -.. code-block:: yaml - - "" - - -cloudwatch-log-group (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Log group to which streams are associated. - -**Default Value**: - -.. code-block:: yaml - - "" - - -cloudwatch-template-uri (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Template Uri to use when building cloudwatch log links - -**Default Value**: - -.. code-block:: yaml - - "" - - -kubernetes-enabled (bool) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Enable Kubernetes Logging - -**Default Value**: - -.. code-block:: yaml - - "true" - - -kubernetes-url (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Console URL for Kubernetes logs - -**Default Value**: - -.. code-block:: yaml - - "" - - -kubernetes-template-uri (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Template Uri to use when building kubernetes log links - -**Default Value**: - -.. code-block:: yaml - - http://localhost:30082/#!/log/{{ .namespace }}/{{ .podName }}/pod?namespace={{ .namespace - }} - - -stackdriver-enabled (bool) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Enable Log-links to stackdriver - -**Default Value**: - -.. 
code-block:: yaml - - "false" - - -gcp-project (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Name of the project in GCP - -**Default Value**: - -.. code-block:: yaml - - "" - - -stackdriver-logresourcename (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Name of the logresource in stackdriver - -**Default Value**: - -.. code-block:: yaml - - "" - - -stackdriver-template-uri (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Template Uri to use when building stackdriver log links - -**Default Value**: - -.. code-block:: yaml - - "" - - -dynamic-log-links (map[string]tasklog.TemplateLogPlugin) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -templates ([]tasklog.TemplateLogPlugin) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - k8s.ResourceConfig ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -3800,7 +3888,7 @@ The k8s service account to run as .. code-block:: yaml - default + "" ray.DefaultConfig @@ -4588,6 +4676,7 @@ Configuration for array nodes default-parallelism-behavior: unlimited event-version: 0 + use-map-plugin-logs: false literal-offloading-config (`config.LiteralOffloadingConfig`_) @@ -4603,7 +4692,7 @@ config used for literal offloading. 
max-size-in-mb-for-offloading: 1000 min-size-in-mb-for-offloading: 10 supported-sdk-versions: - FLYTE_SDK: 1.13.5 + FLYTE_SDK: 1.13.14 admin-launcher (`launchplan.AdminConfig`_) @@ -4614,6 +4703,7 @@ admin-launcher (`launchplan.AdminConfig`_) .. code-block:: yaml burst: 10 + cache-resync-duration: 30s cacheSize: 10000 tps: 100 workers: 10 @@ -4673,6 +4763,18 @@ Default parallelism behavior for array nodes unlimited +use-map-plugin-logs (bool) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +Override subNode log links with those configured for the map plugin logs + +**Default Value**: + +.. code-block:: yaml + + "false" + + config.CompositeQueueConfig ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -5094,7 +5196,7 @@ Maps flytekit and union SDK names to minimum supported version that can handle r .. code-block:: yaml - FLYTE_SDK: 1.13.5 + FLYTE_SDK: 1.13.14 min-size-in-mb-for-offloading (int64) @@ -5301,6 +5403,18 @@ Number of parallel workers to work on the queue. "10" +cache-resync-duration (`config.Duration`_) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +Frequency of re-syncing launchplans within the auto refresh cache. + +**Default Value**: + +.. 
code-block:: yaml + + 30s + + workflowstore.Config ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/docs/deployment/configuration/generated/scheduler_config.rst b/docs/deployment/configuration/generated/scheduler_config.rst index 8904155e7c..ad79695906 100644 --- a/docs/deployment/configuration/generated/scheduler_config.rst +++ b/docs/deployment/configuration/generated/scheduler_config.rst @@ -143,6 +143,18 @@ Max number of gRPC retries "4" +maxMessageSizeBytes (int) +------------------------------------------------------------------------------------------------------------------------ + +The max size in bytes for incoming gRPC messages + +**Default Value**: + +.. code-block:: yaml + + "0" + + authType (uint8) ------------------------------------------------------------------------------------------------------------------------ @@ -1389,6 +1401,18 @@ kafka (`interfaces.KafkaConfig`_) .. code-block:: yaml brokers: null + saslConfig: + enabled: false + handshake: false + mechanism: "" + password: "" + passwordPath: "" + user: "" + tlsConfig: + certPath: "" + enabled: false + insecureSkipVerify: false + keyPath: "" version: "" @@ -1399,6 +1423,7 @@ eventsPublisher (`interfaces.EventsPublisherConfig`_) .. code-block:: yaml + enrichAllWorkflowEventTypes: false eventTypes: null topicName: "" @@ -1469,6 +1494,16 @@ eventTypes ([]string) null +enrichAllWorkflowEventTypes (bool) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +**Default Value**: + +.. 
code-block:: yaml + + "false" + + interfaces.GCPConfig ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -1505,6 +1540,140 @@ brokers ([]string) null +saslConfig (`interfaces.SASLConfig`_) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +**Default Value**: + +.. code-block:: yaml + + enabled: false + handshake: false + mechanism: "" + password: "" + passwordPath: "" + user: "" + + +tlsConfig (`interfaces.TLSConfig`_) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +**Default Value**: + +.. code-block:: yaml + + certPath: "" + enabled: false + insecureSkipVerify: false + keyPath: "" + + +interfaces.SASLConfig +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +enabled (bool) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +**Default Value**: + +.. code-block:: yaml + + "false" + + +user (string) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +**Default Value**: + +.. code-block:: yaml + + "" + + +password (string) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +**Default Value**: + +.. code-block:: yaml + + "" + + +passwordPath (string) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +**Default Value**: + +.. code-block:: yaml + + "" + + +handshake (bool) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +**Default Value**: + +.. 
code-block:: yaml + + "false" + + +mechanism (string) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +**Default Value**: + +.. code-block:: yaml + + "" + + +interfaces.TLSConfig +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +enabled (bool) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +**Default Value**: + +.. code-block:: yaml + + "false" + + +insecureSkipVerify (bool) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +**Default Value**: + +.. code-block:: yaml + + "false" + + +certPath (string) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +**Default Value**: + +.. code-block:: yaml + + "" + + +keyPath (string) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +**Default Value**: + +.. code-block:: yaml + + "" + + Section: cluster_resources ======================================================================================================================== @@ -1751,6 +1920,7 @@ postgres (`database.PostgresConfig`_) password: postgres passwordPath: "" port: 30001 + readReplicaHost: localhost username: postgres @@ -1779,6 +1949,18 @@ The host name of the database server localhost +readReplicaHost (string) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +The host name of the read replica database server + +**Default Value**: + +.. 
code-block:: yaml + + localhost + + port (int) """""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" @@ -1951,6 +2133,7 @@ eventsPublisher (`interfaces.EventsPublisherConfig`_) .. code-block:: yaml + enrichAllWorkflowEventTypes: false eventTypes: null topicName: "" @@ -2769,6 +2952,7 @@ k8s (`config.K8sPluginConfig`_) .. code-block:: yaml + add-tolerations-for-extended-resources: [] co-pilot: cpu: 500m default-input-path: /var/flyte/inputs @@ -2799,6 +2983,7 @@ k8s (`config.K8sPluginConfig`_) default-security-context: null default-tolerations: null delete-resource-on-finalize: false + enable-distributed-error-aggregation: false enable-host-networking-pod: null gpu-device-node-label: k8s.amazonaws.com/accelerator gpu-partition-size-node-label: k8s.amazonaws.com/gpu-partition-size @@ -3314,6 +3499,30 @@ Number of retries for exponential backoff when updating a resource. "5" +add-tolerations-for-extended-resources ([]string) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +Name of the extended resources for which tolerations should be added. + +**Default Value**: + +.. code-block:: yaml + + [] + + +enable-distributed-error-aggregation (bool) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +If true, will aggregate errors of different worker pods for distributed tasks. + +**Default Value**: + +.. code-block:: yaml + + "false" + + config.FlyteCoPilotConfig ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -3956,6 +4165,7 @@ Configuration for array nodes default-parallelism-behavior: unlimited event-version: 0 + use-map-plugin-logs: false literal-offloading-config (`config.LiteralOffloadingConfig`_) @@ -3971,7 +4181,7 @@ config used for literal offloading. 
max-size-in-mb-for-offloading: 1000 min-size-in-mb-for-offloading: 10 supported-sdk-versions: - FLYTE_SDK: 1.13.5 + FLYTE_SDK: 1.13.14 config.ArrayNodeConfig @@ -4001,6 +4211,18 @@ Default parallelism behavior for array nodes unlimited +use-map-plugin-logs (bool) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +Override subNode log links with those configured for the map plugin logs + +**Default Value**: + +.. code-block:: yaml + + "false" + + config.CompositeQueueConfig ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -4316,7 +4538,7 @@ Maps flytekit and union SDK names to minimum supported version that can handle r .. code-block:: yaml - FLYTE_SDK: 1.13.5 + FLYTE_SDK: 1.13.14 min-size-in-mb-for-offloading (int64) @@ -5014,6 +5236,7 @@ security (`config.ServerSecurityOptions`_) allowedOrigins: - '*' auditAccess: false + insecureCookieHeader: false secure: false ssl: certificateFile: "" @@ -5314,6 +5537,16 @@ useAuth (bool) "false" +insecureCookieHeader (bool) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +**Default Value**: + +.. code-block:: yaml + + "false" + + auditAccess (bool) """""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" diff --git a/docs/deployment/deployment/cloud_simple.rst b/docs/deployment/deployment/cloud_simple.rst index 94833a0db4..e6a6802758 100644 --- a/docs/deployment/deployment/cloud_simple.rst +++ b/docs/deployment/deployment/cloud_simple.rst @@ -29,9 +29,11 @@ these prerequisites. .. note:: - `Union.AI `__ plans to open-source a reference - implementation of these requirements for the major cloud providers in early - 2023. 
+ + `Union.ai `__ maintains a `set of Terraform scripts `__ that automate the configuration + of prerequisites and Flyte installation on AWS, GCP, or Azure. + + A community-maintained guide to manually prepare an EKS environment and deploy Flyte is available `here `__ *************** Installation diff --git a/docs/user_guide/advanced_composition/chaining_flyte_entities.md b/docs/user_guide/advanced_composition/chaining_flyte_entities.md index 4fb12a4149..3a6f85a725 100644 --- a/docs/user_guide/advanced_composition/chaining_flyte_entities.md +++ b/docs/user_guide/advanced_composition/chaining_flyte_entities.md @@ -17,7 +17,7 @@ To clone and run the example code on this page, see the [Flytesnacks repo][flyte Let's establish a sequence where `t1()` occurs after `t0()`, and `t2()` follows `t1()`. -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/advanced_composition/advanced_composition/chain_entities.py +```{literalinclude} /examples/advanced_composition/advanced_composition/chain_entities.py :caption: advanced_composition/chain_entities.py :lines: 1-30 ``` @@ -27,7 +27,7 @@ Let's establish a sequence where `t1()` occurs after `t0()`, and `t2()` follows Just like tasks, you can chain {ref}`subworkflows `. 
-```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/advanced_composition/advanced_composition/chain_entities.py +```{literalinclude} /examples/advanced_composition/advanced_composition/chain_entities.py :caption: advanced_composition/chain_entities.py :lines: 34-49 ``` diff --git a/docs/user_guide/advanced_composition/conditionals.md b/docs/user_guide/advanced_composition/conditionals.md index 27fb05357b..84d21bb300 100644 --- a/docs/user_guide/advanced_composition/conditionals.md +++ b/docs/user_guide/advanced_composition/conditionals.md @@ -18,7 +18,7 @@ To clone and run the example code on this page, see the [Flytesnacks repo][flyte To begin, import the necessary libraries. -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/656e63d1c8dded3e9e7161c7af6425e9fcd43f56/examples/advanced_composition/advanced_composition/conditional.py +```{literalinclude} /examples/advanced_composition/advanced_composition/conditional.py :caption: advanced_composition/conditional.py :lines: 1-4 ``` @@ -29,7 +29,7 @@ In this example, we introduce two tasks, `calculate_circle_circumference` and `calculate_circle_area`. The workflow dynamically chooses between these tasks based on whether the input falls within the fraction range (0-1) or not. -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/656e63d1c8dded3e9e7161c7af6425e9fcd43f56/examples/advanced_composition/advanced_composition/conditional.py +```{literalinclude} /examples/advanced_composition/advanced_composition/conditional.py :caption: advanced_composition/conditional.py :lines: 12-38 ``` @@ -40,7 +40,7 @@ We establish an `if` condition with multiple branches, which will result in a fa It's important to note that any `conditional` statement in Flyte is expected to be complete, meaning that all possible branches must be accounted for. 
-```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/656e63d1c8dded3e9e7161c7af6425e9fcd43f56/examples/advanced_composition/advanced_composition/conditional.py +```{literalinclude} /examples/advanced_composition/advanced_composition/conditional.py :caption: advanced_composition/conditional.py :pyobject: shape_properties_with_multiple_branches ``` @@ -55,7 +55,7 @@ a convention also observed in other libraries. ## Consuming the output of a conditional Here, we write a task that consumes the output returned by a `conditional`. -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/656e63d1c8dded3e9e7161c7af6425e9fcd43f56/examples/advanced_composition/advanced_composition/conditional.py +```{literalinclude} /examples/advanced_composition/advanced_composition/conditional.py :caption: advanced_composition/conditional.py :lines: 67-85 ``` @@ -66,7 +66,7 @@ You can check if a boolean returned from the previous task is `True`, but unary operations are not supported directly. Instead, use the `is_true`, `is_false` and `is_none` methods on the result. -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/656e63d1c8dded3e9e7161c7af6425e9fcd43f56/examples/advanced_composition/advanced_composition/conditional.py +```{literalinclude} /examples/advanced_composition/advanced_composition/conditional.py :caption: advanced_composition/conditional.py :lines: 93-123 ``` @@ -79,7 +79,7 @@ Inputs and outputs are automatically encapsulated in a special object known as { ## Using boolean workflow inputs in a conditional You can directly pass a boolean to a workflow. 
-```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/656e63d1c8dded3e9e7161c7af6425e9fcd43f56/examples/advanced_composition/advanced_composition/conditional.py +```{literalinclude} /examples/advanced_composition/advanced_composition/conditional.py :caption: advanced_composition/conditional.py :pyobject: boolean_input_wf ``` @@ -92,7 +92,7 @@ This special object enables it to exhibit additional behavior. You can run the workflows locally as follows: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/656e63d1c8dded3e9e7161c7af6425e9fcd43f56/examples/advanced_composition/advanced_composition/conditional.py +```{literalinclude} /examples/advanced_composition/advanced_composition/conditional.py :caption: advanced_composition/conditional.py :lines: 133-139 ``` @@ -102,7 +102,7 @@ You can run the workflows locally as follows: You can nest conditional sections arbitrarily inside other conditional sections. However, these nested sections can only be in the `then` part of a `conditional` block. -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/656e63d1c8dded3e9e7161c7af6425e9fcd43f56/examples/advanced_composition/advanced_composition/conditional.py +```{literalinclude} /examples/advanced_composition/advanced_composition/conditional.py :caption: advanced_composition/conditional.py :lines: 146-168 ``` @@ -112,14 +112,14 @@ However, these nested sections can only be in the `then` part of a `conditional` Let's write a fun workflow that triggers the `calculate_circle_circumference` task in the event of a "heads" outcome, and alternatively, runs the `calculate_circle_area` task in the event of a "tail" outcome. 
-```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/656e63d1c8dded3e9e7161c7af6425e9fcd43f56/examples/advanced_composition/advanced_composition/conditional.py +```{literalinclude} /examples/advanced_composition/advanced_composition/conditional.py :caption: advanced_composition/conditional.py :pyobject: consume_task_output ``` You can run the workflow locally as follows: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/656e63d1c8dded3e9e7161c7af6425e9fcd43f56/examples/advanced_composition/advanced_composition/conditional.py +```{literalinclude} /examples/advanced_composition/advanced_composition/conditional.py :caption: advanced_composition/conditional.py :lines: 216-225 ``` @@ -138,7 +138,7 @@ task-plugins: ``` ::: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/656e63d1c8dded3e9e7161c7af6425e9fcd43f56/examples/advanced_composition/advanced_composition/conditional.py +```{literalinclude} /examples/advanced_composition/advanced_composition/conditional.py :caption: advanced_composition/conditional.py :lines: 200-212 ``` diff --git a/docs/user_guide/advanced_composition/decorating_tasks.md b/docs/user_guide/advanced_composition/decorating_tasks.md index 6b39c9f363..7165703798 100644 --- a/docs/user_guide/advanced_composition/decorating_tasks.md +++ b/docs/user_guide/advanced_composition/decorating_tasks.md @@ -17,14 +17,14 @@ To clone and run the example code on this page, see the [Flytesnacks repo][flyte To begin, import the required dependencies. -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/advanced_composition/advanced_composition/decorating_tasks.py +```{literalinclude} /examples/advanced_composition/advanced_composition/decorating_tasks.py :caption: advanced_composition/decorating_tasks.py :lines: 1-4 ``` Create a logger to monitor the execution's progress. 
-```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/advanced_composition/advanced_composition/decorating_tasks.py +```{literalinclude} /examples/advanced_composition/advanced_composition/decorating_tasks.py :caption: advanced_composition/decorating_tasks.py :lines: 7 ``` @@ -33,7 +33,7 @@ Create a logger to monitor the execution's progress. We define a decorator that logs the input and output details for a decorated task. -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/advanced_composition/advanced_composition/decorating_tasks.py +```{literalinclude} /examples/advanced_composition/advanced_composition/decorating_tasks.py :caption: advanced_composition/decorating_tasks.py :pyobject: log_io ``` @@ -44,7 +44,7 @@ We create a task named `t1` that is decorated with `log_io`. The order of invoking the decorators is important. `@task` should always be the outer-most decorator. ::: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/advanced_composition/advanced_composition/decorating_tasks.py +```{literalinclude} /examples/advanced_composition/advanced_composition/decorating_tasks.py :caption: advanced_composition/decorating_tasks.py :pyobject: t1 ``` @@ -58,7 +58,7 @@ You can also stack multiple decorators on top of each other as long as `@task` i We define a decorator that verifies if the output from the decorated function is a positive number before it's returned. If this assumption is violated, it raises a `ValueError` exception. 
-```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/advanced_composition/advanced_composition/decorating_tasks.py +```{literalinclude} /examples/advanced_composition/advanced_composition/decorating_tasks.py :caption: advanced_composition/decorating_tasks.py :pyobject: validate_output ``` @@ -69,14 +69,14 @@ The output of the `validate_output` task uses {py:func}`~functools.partial` to i We define a function that uses both the logging and validator decorators. -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/advanced_composition/advanced_composition/decorating_tasks.py +```{literalinclude} /examples/advanced_composition/advanced_composition/decorating_tasks.py :caption: advanced_composition/decorating_tasks.py :pyobject: t2 ``` Finally, we compose a workflow that calls `t1` and `t2`. -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/advanced_composition/advanced_composition/decorating_tasks.py +```{literalinclude} /examples/advanced_composition/advanced_composition/decorating_tasks.py :caption: advanced_composition/decorating_tasks.py :lines: 53-59 ``` diff --git a/docs/user_guide/advanced_composition/decorating_workflows.md b/docs/user_guide/advanced_composition/decorating_workflows.md index 751cd6a95c..ee5f02c8a3 100644 --- a/docs/user_guide/advanced_composition/decorating_workflows.md +++ b/docs/user_guide/advanced_composition/decorating_workflows.md @@ -23,7 +23,7 @@ To clone and run the example code on this page, see the [Flytesnacks repo][flyte To begin, import the necessary libraries. 
-```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/advanced_composition/advanced_composition/decorating_workflows.py +```{literalinclude} /examples/advanced_composition/advanced_composition/decorating_workflows.py :caption: advanced_composition/decorating_workflows.py :lines: 1-6 ``` @@ -32,7 +32,7 @@ Let's define the tasks we need for setup and teardown. In this example, we use t {py:class}`unittest.mock.MagicMock` class to create a fake external service that we want to initialize at the beginning of our workflow and finish at the end. -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/advanced_composition/advanced_composition/decorating_workflows.py +```{literalinclude} /examples/advanced_composition/advanced_composition/decorating_workflows.py :caption: advanced_composition/decorating_workflows.py :lines: 9-21 ``` @@ -45,7 +45,7 @@ external service and Flyte. We create a decorator that we want to use to wrap our workflow function. -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/advanced_composition/advanced_composition/decorating_workflows.py +```{literalinclude} /examples/advanced_composition/advanced_composition/decorating_workflows.py :caption: advanced_composition/decorating_workflows.py :pyobject: setup_teardown ``` @@ -66,14 +66,14 @@ There are a few key pieces to note in the `setup_teardown` decorator above: We define two tasks that will constitute the workflow. 
-```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/advanced_composition/advanced_composition/decorating_workflows.py +```{literalinclude} /examples/advanced_composition/advanced_composition/decorating_workflows.py :caption: advanced_composition/decorating_workflows.py :lines: 63-70 ``` And then create our decorated workflow: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/advanced_composition/advanced_composition/decorating_workflows.py +```{literalinclude} /examples/advanced_composition/advanced_composition/decorating_workflows.py :caption: advanced_composition/decorating_workflows.py :lines: 74-82 ``` diff --git a/docs/user_guide/advanced_composition/dynamic_workflows.md b/docs/user_guide/advanced_composition/dynamic_workflows.md index 949d88a847..e54e83df01 100644 --- a/docs/user_guide/advanced_composition/dynamic_workflows.md +++ b/docs/user_guide/advanced_composition/dynamic_workflows.md @@ -40,28 +40,28 @@ To clone and run the example code on this page, see the [Flytesnacks repo][flyte To begin, we import the required libraries. -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/advanced_composition/advanced_composition/dynamic_workflow.py +```{literalinclude} /examples/advanced_composition/advanced_composition/dynamic_workflow.py :caption: advanced_composition/dynamic_workflow.py :lines: 1 ``` We define a task that returns the index of a character, where A-Z/a-z is equivalent to 0-25. 
-```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/advanced_composition/advanced_composition/dynamic_workflow.py +```{literalinclude} /examples/advanced_composition/advanced_composition/dynamic_workflow.py :caption: advanced_composition/dynamic_workflow.py :pyobject: return_index ``` We also create a task that prepares a list of 26 characters by populating the frequency of each character. -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/advanced_composition/advanced_composition/dynamic_workflow.py +```{literalinclude} /examples/advanced_composition/advanced_composition/dynamic_workflow.py :caption: advanced_composition/dynamic_workflow.py :pyobject: update_list ``` We define a task to calculate the number of common characters between the two strings. -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/advanced_composition/advanced_composition/dynamic_workflow.py +```{literalinclude} /examples/advanced_composition/advanced_composition/dynamic_workflow.py :caption: advanced_composition/dynamic_workflow.py :pyobject: derive_count ``` @@ -75,7 +75,7 @@ We define a dynamic workflow to accomplish the following: The looping process is contingent on the number of characters in both strings, which is unknown until runtime. -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/advanced_composition/advanced_composition/dynamic_workflow.py +```{literalinclude} /examples/advanced_composition/advanced_composition/dynamic_workflow.py :caption: advanced_composition/dynamic_workflow.py :pyobject: count_characters ``` @@ -97,14 +97,14 @@ Local execution works when a `@dynamic` decorator is used because Flytekit treat Define a workflow that triggers the dynamic workflow. 
-```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/advanced_composition/advanced_composition/dynamic_workflow.py +```{literalinclude} /examples/advanced_composition/advanced_composition/dynamic_workflow.py :caption: advanced_composition/dynamic_workflow.py :pyobject: dynamic_wf ``` You can run the workflow locally as follows: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/advanced_composition/advanced_composition/dynamic_workflow.py +```{literalinclude} /examples/advanced_composition/advanced_composition/dynamic_workflow.py :caption: advanced_composition/dynamic_workflow.py :lines: 78-79 ``` @@ -141,63 +141,9 @@ resulting in less noticeable overhead. Merge sort is a perfect example to showcase how to seamlessly achieve recursion using dynamic workflows. Flyte imposes limitations on the depth of recursion to prevent misuse and potential impacts on the overall stability of the system. -```python -from typing import Tuple - -from flytekit import conditional, dynamic, task, workflow - - -@task -def split(numbers: list[int]) -> Tuple[list[int], list[int], int, int]: - return ( - numbers[0 : int(len(numbers) / 2)], - numbers[int(len(numbers) / 2) :], - int(len(numbers) / 2), - int(len(numbers)) - int(len(numbers) / 2), - ) - - -@task -def merge(sorted_list1: list[int], sorted_list2: list[int]) -> list[int]: - result = [] - while len(sorted_list1) > 0 and len(sorted_list2) > 0: - # Compare the current element of the first array with the current element of the second array. - # If the element in the first array is smaller, append it to the result and increment the first array index. - # Otherwise, do the same with the second array. 
- if sorted_list1[0] < sorted_list2[0]: - result.append(sorted_list1.pop(0)) - else: - result.append(sorted_list2.pop(0)) - - # Extend the result with the remaining elements from both arrays - result.extend(sorted_list1) - result.extend(sorted_list2) - - return result - - -@task -def sort_locally(numbers: list[int]) -> list[int]: - return sorted(numbers) - - -@dynamic -def merge_sort_remotely(numbers: list[int], run_local_at_count: int) -> list[int]: - split1, split2, new_count1, new_count2 = split(numbers=numbers) - sorted1 = merge_sort(numbers=split1, numbers_count=new_count1, run_local_at_count=run_local_at_count) - sorted2 = merge_sort(numbers=split2, numbers_count=new_count2, run_local_at_count=run_local_at_count) - return merge(sorted_list1=sorted1, sorted_list2=sorted2) - - -@workflow -def merge_sort(numbers: list[int], numbers_count: int, run_local_at_count: int = 5) -> list[int]: - return ( - conditional("terminal_case") - .if_(numbers_count <= run_local_at_count) - .then(sort_locally(numbers=numbers)) - .else_() - .then(merge_sort_remotely(numbers=numbers, run_local_at_count=run_local_at_count)) - ) +```{literalinclude} /examples/advanced_composition/advanced_composition/dynamic_workflow.py +:caption: advanced_composition/dynamic_workflow.py +:lines: 84-134 ``` By simply adding the `@dynamic` annotation, the `merge_sort_remotely` function transforms into a plan of execution, diff --git a/docs/user_guide/advanced_composition/eager_workflows.md b/docs/user_guide/advanced_composition/eager_workflows.md index 9bf3e019c8..94b77af7b4 100644 --- a/docs/user_guide/advanced_composition/eager_workflows.md +++ b/docs/user_guide/advanced_composition/eager_workflows.md @@ -7,7 +7,7 @@ ``` ```{important} -This feature is experimental and the API is subject to breaking changes. +This feature is in beta and the API is still subject to minor changes. 
If you encounter any issues please consider submitting a [bug report](https://github.com/flyteorg/flyte/issues/new?assignees=&labels=bug%2Cuntriaged&projects=&template=bug_report.yaml&title=%5BBUG%5D+).
```

@@ -45,7 +45,7 @@ using the `@eager` decorator.
 To clone and run the example code on this page, see the [Flytesnacks repo][flytesnacks].
 ```
 
-```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/advanced_composition/advanced_composition/eager_workflows.py
+```{literalinclude} /examples/advanced_composition/advanced_composition/eager_workflows.py
 :caption: advanced_composition/eager_workflows.py
 :lines: 1-21
 ```
 
@@ -57,8 +57,7 @@ tasks, static subworkflows, and even other eager subworkflows
 in an _eager_ fashion such that we can materialize their outputs and use them inside
 the parent eager workflow itself.
 
-In the `simple_eager_workflow` function, we can see that we're `await`ing
-the output of the `add_one` task and assigning it to the `out` variable. If
+In the `simple_eager_workflow` function, we can call the `add_one` task and assign it to the `out` variable. If
 `out` is a negative integer, the workflow will return `-1`. Otherwise, it
 will double the output of `add_one` and return it.
 
@@ -67,12 +66,17 @@ the Python integer that is the result of `x + 1` and not a promise.
 
 ## How it works
 
-When you decorate a function with `@eager`, any function invoked within it
-that's decorated with `@task`, `@workflow`, or `@eager` becomes
-an [awaitable](https://docs.python.org/3/library/asyncio-task.html#awaitables)
-object within the lifetime of the parent eager workflow execution. Note that
-this happens automatically and you don't need to use the `async` keyword when
-defining a task or workflow that you want to invoke within an eager workflow.
+### Parallels to Python `asyncio`
+The eager paradigm was written around Python's native `async` functionality. 
As such, it follows the same rules and
+constructs and if you're used to working with async functions, you should be able to apply the exact same understanding to work with eager tasks.
+
+In the example above, the tasks `add_one` and `double` are normal Flyte tasks and the functions being decorated are normal Python functions. This means that the execution of the async task `simple_eager_workflow` will block on each of these functions just like Python would if these were simply just Python functions.
+
+If you want to run functions in parallel, you will need to use `async` marked tasks, just like you would in normal Python.
+
+Note that `eager` tasks share the same limitation as Python async functions. You can only call an `async` function inside another `async` function, or within a special handler (like `asyncio.run`). This means that until the `@workflow` decorator supports async workflow function definitions, which it doesn't today, you will not be able to call eager tasks or other async Python function tasks, inside workflows. This functionality is slated to be added in future releases. For the time being, you will need to run the tasks directly, either from FlyteRemote or the Flyte UI.
+
+Unlike Python async however, when an `eager` task runs `async` sub-tasks in a real backend execution (not a local execution), it is doing real, wall-clock time parallelism, not just concurrency (assuming your K8s cluster is appropriately sized).
 
 ```{important}
 With eager workflows, you basically have access to the Python `asyncio`
@@ -85,10 +89,10 @@ We're leveraging Python's native `async` capabilities in order to:
 1. Materialize the output of flyte tasks and subworkflows so you can operate
    on them without spinning up another pod and also determine the shape of
    the workflow graph in an extremely flexible manner.
-2. Provide an alternative way of achieving concurrency in Flyte. Flyte has
+2. Provide an alternative way of achieving wall-time parallelism in Flyte. 
Flyte has concurrency built into it, so all tasks/subworkflows will execute concurrently - assuming that they don't have any dependencies on each other. However, eager - workflows provide a python-native way of doing this, with the main downside + assuming that they don't have any dependencies on each other. However, `eager` + tasks provide a Python-native way of doing this, with the main downside being that you lose the benefits of statically compiled workflows such as compile-time analysis and first-class data lineage tracking. ``` @@ -116,7 +120,7 @@ One of the biggest benefits of eager workflows is that you can now materialize task and subworkflow outputs as Python values and do operations on them just like you would in any other Python function. Let's look at another example: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/advanced_composition/advanced_composition/eager_workflows.py +```{literalinclude} /examples/advanced_composition/advanced_composition/eager_workflows.py :caption: advanced_composition/eager_workflows.py :pyobject: another_eager_workflow ``` @@ -131,7 +135,7 @@ As you saw in the `simple_eager_workflow` workflow above, you can use regular Python conditionals in your eager workflows. Let's look at a more complicated example: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/advanced_composition/advanced_composition/eager_workflows.py +```{literalinclude} /examples/advanced_composition/advanced_composition/eager_workflows.py :caption: advanced_composition/eager_workflows.py :lines: 36-53 ``` @@ -142,9 +146,9 @@ to check if `out` is negative, but we're also using the `gt_100` task in the ### Loops -You can also gather the outputs of multiple tasks or subworkflows into a list: +You can also gather the outputs of multiple tasks or subworkflows into a list. 
Keep in mind that in this case, you will need to use an `async` function task, since normal tasks will block. -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/advanced_composition/advanced_composition/eager_workflows.py +```{literalinclude} /examples/advanced_composition/advanced_composition/eager_workflows.py :caption: advanced_composition/eager_workflows.py :lines: 58-69 ``` @@ -153,7 +157,7 @@ You can also gather the outputs of multiple tasks or subworkflows into a list: You can also invoke static workflows from within an eager workflow: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/advanced_composition/advanced_composition/eager_workflows.py +```{literalinclude} /examples/advanced_composition/advanced_composition/eager_workflows.py :caption: advanced_composition/eager_workflows.py :lines: 74-84 ``` @@ -162,7 +166,7 @@ You can also invoke static workflows from within an eager workflow: You can have nest eager subworkflows inside a parent eager workflow: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/advanced_composition/advanced_composition/eager_workflows.py +```{literalinclude} /examples/advanced_composition/advanced_composition/eager_workflows.py :caption: advanced_composition/eager_workflows.py :lines: 89-97 ``` @@ -171,7 +175,7 @@ You can have nest eager subworkflows inside a parent eager workflow: You can also catch exceptions in eager workflows through `EagerException`: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/advanced_composition/advanced_composition/eager_workflows.py +```{literalinclude} /examples/advanced_composition/advanced_composition/eager_workflows.py :caption: advanced_composition/eager_workflows.py :lines: 102-117 ``` @@ -195,7 +199,7 @@ and remotely. 
You can execute eager workflows locally by simply calling them like a regular
 `async` function:
 
-```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/advanced_composition/advanced_composition/eager_workflows.py
+```{literalinclude} /examples/advanced_composition/advanced_composition/eager_workflows.py
 :caption: advanced_composition/eager_workflows.py
 :lines: 123-125
 ```
 
@@ -206,60 +210,38 @@ developing your workflows and tasks.
 
 (eager_workflows_remote)=
 
-### Remote Flyte cluster execution
+### Setting up remote Flyte cluster access
 
 Under the hood, `@eager` workflows use the {py:class}`~flytekit.remote.remote.FlyteRemote`
-object to kick off task, static workflow, and eager workflow executions.
-
-In order to actually execute them on a Flyte cluster, you'll need to configure
-eager workflows with a `FlyteRemote` object and secrets configuration that
-allows you to authenticate into the cluster via a client secret key:
-
-```{code-block} python
-from flytekit.remote import FlyteRemote
-from flytekit.configuration import Config
-
-@eager(
-    remote=FlyteRemote(
-        config=Config.auto(config_file="config.yaml"),
-        default_project="flytesnacks",
-        default_domain="development",
-    ),
-    client_secret_group="",
-    client_secret_key="",
-)
-async def eager_workflow_remote(x: int) -> int:
-    ...
+object to kick off task, static workflow, and eager workflow executions. In order to create a `FlyteRemote` instance, `Config.auto()` is run and the resulting config object is passed into `FlyteRemote`.
+
+This means that you just need to ensure eager workflow pods are run with the required environment variables, including any mounted secrets for a client secret. For instance, the following three are sufficient in the most basic setup. 
+ +```{code-block} bash +FLYTE_PLATFORM_URL +FLYTE_CREDENTIALS_CLIENT_ID +FLYTE_CREDENTIALS_CLIENT_SECRET ``` -Where `config.yaml` contains a -[flytectl](https://docs.flyte.org/projects/flytectl/en/latest/#configuration)-compatible -config file and `my_client_secret_group` and `my_client_secret_key` are the -{ref}`secret group and key ` that you've configured for your Flyte -cluster to authenticate via a client key. +See the relevant authentication docs for creating client credentials if using Flyte's internal authorization server. ### Sandbox Flyte cluster execution -When using a sandbox cluster started with `flytectl demo start`, however, the -`client_secret_group` and `client_secret_key` are not required, since the -default sandbox configuration does not require key-based authentication. - -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/advanced_composition/advanced_composition/eager_workflows.py -:caption: advanced_composition/eager_workflows.py -:lines: 130-145 -``` +When using a sandbox cluster started with `flytectl demo start` no authentication is needed and eager workflows should +just work out of the box. ```{important} When executing eager workflows on a remote Flyte cluster, it will execute the -latest version of tasks, static workflows, and eager workflows that are on -the `default_project` and `default_domain` as specified in the `FlyteRemote` -object. This means that you need to pre-register all Flyte entities that are -invoked inside of the eager workflow. +same version of tasks, static workflows, and eager workflows that are on +the `project` and `domain` as the eager task itself. If an entity is not found, FlyteRemote will attempt to register +it. Please be aware of this and potential naming errors due to difference in folder paths when running in the container. 
+Future work may be done to allow execution in another project/domain, but reference entities should always be +correctly reflected and invoked. ``` ### Registering and running -Assuming that your `flytekit` code is configured correctly, you will need to +Assuming that your `flytekit` code is configured correctly, you should register all of the task and subworkflows that are used with your eager workflow with `pyflyte register`: @@ -290,7 +272,7 @@ invoked inside of it. ## Eager workflows on Flyte console -Since eager workflows are an experimental feature, there is currently no +Since eager workflows are still in beta, there is currently no first-class representation of them on Flyte Console, the UI for Flyte. When you register an eager workflow, you'll be able to see it in the task view: @@ -321,23 +303,20 @@ eager workflow: ## Limitations -As this feature is still experimental, there are a few limitations that you +As this feature is still in beta, there are a few limitations that you need to keep in mind: -- You cannot invoke {ref}`dynamic workflows `, - {ref}`map tasks `, or {ref}`launch plans ` inside an - eager workflow. - [Context managers](https://docs.python.org/3/library/contextlib.html) will only work on locally executed functions within the eager workflow, i.e. using a context manager to modify the behavior of a task or subworkflow will not work because they are executed on a completely different pod. - All exceptions raised by Flyte tasks or workflows will be caught and raised - as an {py:class}`~flytekit.experimental.EagerException` at runtime. + as an {py:class}`~flytekit.exceptions.eager.EagerException` at runtime. - All task/subworkflow outputs are materialized as Python values, which includes offloaded types like `FlyteFile`, `FlyteDirectory`, `StructuredDataset`, and `pandas.DataFrame` will be fully downloaded into the pod running the eager workflow. 
This prevents you from incrementally downloading or streaming very large datasets - in eager workflows. + in eager workflows. (Please reach out to the team if you are interested in improving this.) - Flyte entities that are invoked inside of an eager workflow must be registered under the same project and domain as the eager workflow itself. The eager workflow will execute the latest version of these entities. diff --git a/docs/user_guide/advanced_composition/intratask_checkpoints.md b/docs/user_guide/advanced_composition/intratask_checkpoints.md index d856a45714..81631e40e7 100644 --- a/docs/user_guide/advanced_composition/intratask_checkpoints.md +++ b/docs/user_guide/advanced_composition/intratask_checkpoints.md @@ -51,14 +51,14 @@ To clone and run the example code on this page, see the [Flytesnacks repo][flyte To begin, import the necessary libraries and set the number of task retries to `3`: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/advanced_composition/advanced_composition/checkpoint.py +```{literalinclude} /examples/advanced_composition/advanced_composition/checkpoint.py :caption: advanced_composition/checkpoint.py :lines: 1-4 ``` We define a task to iterate precisely `n_iterations`, checkpoint its state, and recover from simulated failures: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/advanced_composition/advanced_composition/checkpoint.py +```{literalinclude} /examples/advanced_composition/advanced_composition/checkpoint.py :caption: advanced_composition/checkpoint.py :pyobject: use_checkpoint ``` @@ -69,14 +69,14 @@ The checkpoint system offers additional APIs, documented in the code accessible Create a workflow that invokes the task: The task will automatically undergo retries in the event of a {ref}`FlyteRecoverableException `. 
-```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/advanced_composition/advanced_composition/checkpoint.py +```{literalinclude} /examples/advanced_composition/advanced_composition/checkpoint.py :caption: advanced_composition/checkpoint.py :pyobject: checkpointing_example ``` The local checkpoint is not utilized here because retries are not supported: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/advanced_composition/advanced_composition/checkpoint.py +```{literalinclude} /examples/advanced_composition/advanced_composition/checkpoint.py :caption: advanced_composition/checkpoint.py :lines: 37-42 ``` diff --git a/docs/user_guide/advanced_composition/map_tasks.md b/docs/user_guide/advanced_composition/map_tasks.md index 26330a8cd5..f73946cd89 100644 --- a/docs/user_guide/advanced_composition/map_tasks.md +++ b/docs/user_guide/advanced_composition/map_tasks.md @@ -23,14 +23,14 @@ To clone and run the example code on this page, see the [Flytesnacks repo][flyte To begin, import the required libraries: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/advanced_composition/advanced_composition/map_task.py +```{literalinclude} /examples/advanced_composition/advanced_composition/map_task.py :caption: advanced_composition/map_task.py :lines: 1 ``` Here's a simple workflow that uses {py:func}`map_task `: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/advanced_composition/advanced_composition/map_task.py +```{literalinclude} /examples/advanced_composition/advanced_composition/map_task.py :caption: advanced_composition/map_task.py :lines: 4-19 ``` @@ -82,7 +82,7 @@ When defining a map task, avoid calling other tasks in it. 
Flyte can't accuratel In this example, the map task `suboptimal_mappable_task` would not give you the best performance: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/advanced_composition/advanced_composition/map_task.py +```{literalinclude} /examples/advanced_composition/advanced_composition/map_task.py :caption: advanced_composition/map_task.py :lines: 31-40 ``` @@ -98,7 +98,7 @@ You might need to map a task with multiple inputs. For instance, consider a task that requires three inputs: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/advanced_composition/advanced_composition/map_task.py +```{literalinclude} /examples/advanced_composition/advanced_composition/map_task.py :caption: advanced_composition/map_task.py :pyobject: multi_input_task ``` @@ -107,21 +107,21 @@ You may want to map this task with only the ``quantity`` input, while keeping th Since a map task accepts only one input, you can achieve this by partially binding values to the map task. 
This can be done using the {py:func}`functools.partial` function: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/advanced_composition/advanced_composition/map_task.py +```{literalinclude} /examples/advanced_composition/advanced_composition/map_task.py :caption: advanced_composition/map_task.py :lines: 52-58 ``` Another possibility is to bind the outputs of a task to partials: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/advanced_composition/advanced_composition/map_task.py +```{literalinclude} /examples/advanced_composition/advanced_composition/map_task.py :caption: advanced_composition/map_task.py :lines: 63-72 ``` You can also provide multiple lists as input to a `map_task`: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/advanced_composition/advanced_composition/map_task.py +```{literalinclude} /examples/advanced_composition/advanced_composition/map_task.py :caption: advanced_composition/map_task.py :pyobject: map_workflow_with_lists ``` diff --git a/docs/user_guide/advanced_composition/subworkflows.md b/docs/user_guide/advanced_composition/subworkflows.md index 08a4bbb8d4..14d3cc1006 100644 --- a/docs/user_guide/advanced_composition/subworkflows.md +++ b/docs/user_guide/advanced_composition/subworkflows.md @@ -24,7 +24,7 @@ To clone and run the example code on this page, see the [Flytesnacks repo][flyte Here's an example illustrating the calculation of slope, intercept and the corresponding y-value: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/advanced_composition/advanced_composition/subworkflow.py +```{literalinclude} /examples/advanced_composition/advanced_composition/subworkflow.py :caption: advanced_composition/subworkflow.py :lines: 1-35 ``` @@ -34,7 +34,7 @@ Subsequently, the 
`regression_line_wf` triggers `slope_intercept_wf` and then co To execute the workflow locally, use the following: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/advanced_composition/advanced_composition/subworkflow.py +```{literalinclude} /examples/advanced_composition/advanced_composition/subworkflow.py :caption: advanced_composition/subworkflow.py :lines: 39-40 ``` @@ -43,14 +43,14 @@ It's possible to nest a workflow that contains a subworkflow within another work Workflows can be easily constructed from other workflows, even if they function as standalone entities. Each workflow in this module has the capability to exist and run independently: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/advanced_composition/advanced_composition/subworkflow.py +```{literalinclude} /examples/advanced_composition/advanced_composition/subworkflow.py :caption: advanced_composition/subworkflow.py :pyobject: nested_regression_line_wf ``` You can run the nested workflow locally as well: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/advanced_composition/advanced_composition/subworkflow.py +```{literalinclude} /examples/advanced_composition/advanced_composition/subworkflow.py :caption: advanced_composition/subworkflow.py :lines: 52-53 ``` @@ -71,7 +71,7 @@ external workflows may offer a way to distribute the workload of a workflow acro Here's an example that illustrates the concept of external workflows: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/advanced_composition/advanced_composition/subworkflow.py +```{literalinclude} /examples/advanced_composition/advanced_composition/subworkflow.py :caption: advanced_composition/subworkflow.py :lines: 61-71 ``` @@ -85,7 +85,7 @@ In the console screenshot above, note that 
the launch plan execution ID differs You can run a workflow containing an external workflow locally as follows: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/advanced_composition/advanced_composition/subworkflow.py +```{literalinclude} /examples/advanced_composition/advanced_composition/subworkflow.py :caption: advanced_composition/subworkflow.py :lines: 75-76 ``` diff --git a/docs/user_guide/advanced_composition/waiting_for_external_inputs.md b/docs/user_guide/advanced_composition/waiting_for_external_inputs.md index 0d3a2aae28..edeb6e2b95 100644 --- a/docs/user_guide/advanced_composition/waiting_for_external_inputs.md +++ b/docs/user_guide/advanced_composition/waiting_for_external_inputs.md @@ -43,7 +43,7 @@ your workflow to mock out the behavior of some long-running computation. To clone and run the example code on this page, see the [Flytesnacks repo][flytesnacks]. ``` -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/advanced_composition/advanced_composition/waiting_for_external_inputs.py +```{literalinclude} /examples/advanced_composition/advanced_composition/waiting_for_external_inputs.py :caption: advanced_composition/waiting_for_external_inputs.py :lines: 1-20 ``` @@ -75,7 +75,7 @@ but before publishing it you want to give it a custom title. You can achieve this by defining a `wait_for_input` node that takes a `str` input and finalizes the report: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/advanced_composition/advanced_composition/waiting_for_external_inputs.py +```{literalinclude} /examples/advanced_composition/advanced_composition/waiting_for_external_inputs.py :caption: advanced_composition/waiting_for_external_inputs.py :lines: 24-49 ``` @@ -107,7 +107,7 @@ an explicit approval signal before continuing execution. 
Going back to our report-publishing use case, suppose that we want to block the publishing of a report for some reason (e.g. if they don't appear to be valid): -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/advanced_composition/advanced_composition/waiting_for_external_inputs.py +```{literalinclude} /examples/advanced_composition/advanced_composition/waiting_for_external_inputs.py :caption: advanced_composition/waiting_for_external_inputs.py :lines: 53-64 ``` @@ -120,7 +120,7 @@ You can also use the output of the `approve` function as a promise, feeding it to a subsequent task. Let's create a version of our report-publishing workflow where the approval happens after `create_report`: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/advanced_composition/advanced_composition/waiting_for_external_inputs.py +```{literalinclude} /examples/advanced_composition/advanced_composition/waiting_for_external_inputs.py :caption: advanced_composition/waiting_for_external_inputs.py :pyobject: approval_as_promise_wf ``` @@ -133,7 +133,7 @@ useful when we combine them with other Flyte constructs, like {ref}`conditionals To illustrate this, let's extend the report-publishing use case so that we produce an "invalid report" output in case we don't approve the final report: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/advanced_composition/advanced_composition/waiting_for_external_inputs.py +```{literalinclude} /examples/advanced_composition/advanced_composition/waiting_for_external_inputs.py :caption: advanced_composition/waiting_for_external_inputs.py :lines: 88-114 ``` diff --git a/docs/user_guide/basics/documenting_workflows.md b/docs/user_guide/basics/documenting_workflows.md index 9f5e20d5fb..954bd35302 100644 --- a/docs/user_guide/basics/documenting_workflows.md +++ 
b/docs/user_guide/basics/documenting_workflows.md @@ -15,14 +15,14 @@ To clone and run the example code on this page, see the [Flytesnacks repo][flyte To begin, import the relevant libraries: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/basics/basics/documenting_workflows.py +```{literalinclude} /examples/basics/basics/documenting_workflows.py :caption: basics/documenting_workflows.py :lines: 1-3 ``` We import the `slope` and `intercept` tasks from the `workflow.py` file. -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/basics/basics/documenting_workflows.py +```{literalinclude} /examples/basics/basics/documenting_workflows.py :caption: basics/documenting_workflows.py :lines: 6 ``` @@ -35,7 +35,7 @@ The initial section of the docstring provides a concise overview of the workflow The subsequent section provides a comprehensive explanation. The last part of the docstring outlines the parameters and return type. -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/basics/basics/documenting_workflows.py +```{literalinclude} /examples/basics/basics/documenting_workflows.py :caption: basics/documenting_workflows.py :pyobject: sphinx_docstring_wf ``` @@ -49,7 +49,7 @@ The next section offers a comprehensive description. The third section of the docstring details all parameters along with their respective data types. The final section of the docstring explains the return type and its associated data type. 
-```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/basics/basics/documenting_workflows.py +```{literalinclude} /examples/basics/basics/documenting_workflows.py :caption: basics/documenting_workflows.py :pyobject: numpy_docstring_wf ``` @@ -63,7 +63,7 @@ The subsequent section of the docstring provides an extensive explanation. The third segment of the docstring outlines the parameters and return type, including their respective data types. -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/basics/basics/documenting_workflows.py +```{literalinclude} /examples/basics/basics/documenting_workflows.py :caption: basics/documenting_workflows.py :pyobject: google_docstring_wf ``` diff --git a/docs/user_guide/basics/hello_world.md b/docs/user_guide/basics/hello_world.md index a63b175fbe..d310a80273 100644 --- a/docs/user_guide/basics/hello_world.md +++ b/docs/user_guide/basics/hello_world.md @@ -17,7 +17,7 @@ To clone and run the example code on this page, see the [Flytesnacks repo][flyte To begin, import `task` and `workflow` from the `flytekit` library: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/basics/basics/hello_world.py +```{literalinclude} /examples/basics/basics/hello_world.py :caption: basics/hello_world.py :lines: 1 ``` @@ -25,7 +25,7 @@ To begin, import `task` and `workflow` from the `flytekit` library: Define a task that produces the string "Hello, World!". 
Simply using the `@task` decorator to annotate the Python function: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/basics/basics/hello_world.py +```{literalinclude} /examples/basics/basics/hello_world.py :caption: basics/hello_world.py :pyobject: say_hello ``` @@ -33,14 +33,14 @@ Simply using the `@task` decorator to annotate the Python function: You can handle the output of a task in the same way you would with a regular Python function. Store the output in a variable and use it as a return value for a Flyte workflow: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/basics/basics/hello_world.py +```{literalinclude} /examples/basics/basics/hello_world.py :caption: basics/hello_world.py :pyobject: hello_world_wf ``` Run the workflow by simply calling it like a Python function: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/basics/basics/hello_world.py +```{literalinclude} /examples/basics/basics/hello_world.py :caption: basics/hello_world.py :lines: 19-20 ``` diff --git a/docs/user_guide/basics/imperative_workflows.md b/docs/user_guide/basics/imperative_workflows.md index db0e3a5ee0..e6d189c1ba 100644 --- a/docs/user_guide/basics/imperative_workflows.md +++ b/docs/user_guide/basics/imperative_workflows.md @@ -23,28 +23,28 @@ To clone and run the example code on this page, see the [Flytesnacks repo][flyte To begin, import the necessary dependencies: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/basics/basics/imperative_workflow.py +```{literalinclude} /examples/basics/basics/imperative_workflow.py :caption: basics/imperative_workflow.py :lines: 1 ``` We import the `slope` and `intercept` tasks from the `workflow.py` file: -```{rli} 
https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/basics/basics/imperative_workflow.py +```{literalinclude} /examples/basics/basics/imperative_workflow.py :caption: basics/imperative_workflow.py :lines: 4 ``` Create an imperative workflow: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/basics/basics/imperative_workflow.py +```{literalinclude} /examples/basics/basics/imperative_workflow.py :caption: basics/imperative_workflow.py :lines: 7 ``` Add the workflow inputs to the imperative workflow: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/basics/basics/imperative_workflow.py +```{literalinclude} /examples/basics/basics/imperative_workflow.py :caption: basics/imperative_workflow.py :lines: 11-12 ``` @@ -56,21 +56,21 @@ you can create a {ref}`launch plan `. Add the tasks that need to be triggered from within the workflow: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/basics/basics/imperative_workflow.py +```{literalinclude} /examples/basics/basics/imperative_workflow.py :caption: basics/imperative_workflow.py :lines: 16-19 ``` Lastly, add the workflow output: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/basics/basics/imperative_workflow.py +```{literalinclude} /examples/basics/basics/imperative_workflow.py :caption: basics/imperative_workflow.py :lines: 23 ``` You can execute the workflow locally as follows: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/basics/basics/imperative_workflow.py +```{literalinclude} /examples/basics/basics/imperative_workflow.py :caption: basics/imperative_workflow.py :lines: 27-28 ``` diff --git a/docs/user_guide/basics/launch_plans.md 
b/docs/user_guide/basics/launch_plans.md index 63ace47a70..6f1f983ba6 100644 --- a/docs/user_guide/basics/launch_plans.md +++ b/docs/user_guide/basics/launch_plans.md @@ -27,56 +27,56 @@ To clone and run the example code on this page, see the [Flytesnacks repo][flyte To begin, import the necessary libraries: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/basics/basics/launch_plan.py +```{literalinclude} /examples/basics/basics/launch_plan.py :caption: basics/launch_plan.py :lines: 1 ``` We import the workflow from the `workflow.py` file for which we're going to create a launch plan: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/basics/basics/launch_plan.py +```{literalinclude} /examples/basics/basics/launch_plan.py :caption: basics/launch_plan.py :lines: 5 ``` Create a default launch plan with no inputs during serialization: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/basics/basics/launch_plan.py +```{literalinclude} /examples/basics/basics/launch_plan.py :caption: basics/launch_plan.py :lines: 8 ``` You can run the launch plan locally as follows: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/basics/basics/launch_plan.py +```{literalinclude} /examples/basics/basics/launch_plan.py :caption: basics/launch_plan.py :lines: 11 ``` Create a launch plan and specify the default inputs: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/basics/basics/launch_plan.py +```{literalinclude} /examples/basics/basics/launch_plan.py :caption: basics/launch_plan.py :lines: 14-16 ``` You can trigger the launch plan locally as follows: -```{rli} 
https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/basics/basics/launch_plan.py +```{literalinclude} /examples/basics/basics/launch_plan.py :caption: basics/launch_plan.py :lines: 19 ``` You can override the defaults as follows: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/basics/basics/launch_plan.py +```{literalinclude} /examples/basics/basics/launch_plan.py :caption: basics/launch_plan.py :lines: 22 ``` It's possible to lock launch plan inputs, preventing them from being overridden during execution: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/basics/basics/launch_plan.py +```{literalinclude} /examples/basics/basics/launch_plan.py :caption: basics/launch_plan.py :lines: 25-27 ``` diff --git a/docs/user_guide/basics/named_outputs.md b/docs/user_guide/basics/named_outputs.md index 00b9160997..1d274c88b6 100644 --- a/docs/user_guide/basics/named_outputs.md +++ b/docs/user_guide/basics/named_outputs.md @@ -22,21 +22,21 @@ To clone and run the example code on this page, see the [Flytesnacks repo][flyte To begin, import the required dependencies: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/basics/basics/named_outputs.py +```{literalinclude} /examples/basics/basics/named_outputs.py :caption: basics/named_outputs.py :lines: 1-3 ``` Define a `NamedTuple` and assign it as an output to a task: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/basics/basics/named_outputs.py +```{literalinclude} /examples/basics/basics/named_outputs.py :caption: basics/named_outputs.py :lines: 6-14 ``` Likewise, assign a `NamedTuple` to the output of `intercept` task: -```{rli} 
https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/basics/basics/named_outputs.py +```{literalinclude} /examples/basics/basics/named_outputs.py :caption: basics/named_outputs.py :lines: 18-26 ``` @@ -59,14 +59,14 @@ Remember that we are extracting individual task execution outputs by dereferenci This is necessary because `NamedTuple`s function as tuples and require this dereferencing: ::: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/basics/basics/named_outputs.py +```{literalinclude} /examples/basics/basics/named_outputs.py :caption: basics/named_outputs.py :lines: 32-39 ``` You can run the workflow locally as follows: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/basics/basics/named_outputs.py +```{literalinclude} /examples/basics/basics/named_outputs.py :caption: basics/named_outputs.py :lines: 43-44 ``` diff --git a/docs/user_guide/basics/shell_tasks.md b/docs/user_guide/basics/shell_tasks.md index 8680b87f5d..e1f8b23bef 100644 --- a/docs/user_guide/basics/shell_tasks.md +++ b/docs/user_guide/basics/shell_tasks.md @@ -15,7 +15,7 @@ To clone and run the example code on this page, see the [Flytesnacks repo][flyte First, import the necessary libraries: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/basics/basics/shell_task.py +```{literalinclude} /examples/basics/basics/shell_task.py :caption: basics/shell_task.py :lines: 1-8 ``` @@ -24,7 +24,7 @@ With the required imports in place, you can proceed to define a shell task. 
To create a shell task, provide a name for it, specify the bash script to be executed, and define inputs and outputs if needed: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/basics/basics/shell_task.py +```{literalinclude} /examples/basics/basics/shell_task.py :caption: basics/shell_task.py :lines: 13-55 ``` @@ -40,21 +40,21 @@ Here's a breakdown of the parameters of the `ShellTask`: We define a task to instantiate `FlyteFile` and `FlyteDirectory`. A `.gitkeep` file is created in the FlyteDirectory as a placeholder to ensure the directory exists: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/basics/basics/shell_task.py +```{literalinclude} /examples/basics/basics/shell_task.py :caption: basics/shell_task.py :pyobject: create_entities ``` We create a workflow to define the dependencies between the tasks: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/basics/basics/shell_task.py +```{literalinclude} /examples/basics/basics/shell_task.py :caption: basics/shell_task.py :pyobject: shell_task_wf ``` You can run the workflow locally: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/basics/basics/shell_task.py +```{literalinclude} /examples/basics/basics/shell_task.py :caption: basics/shell_task.py :lines: 85-86 ``` diff --git a/docs/user_guide/basics/tasks.md b/docs/user_guide/basics/tasks.md index b76e61f5dc..22d078f9f1 100644 --- a/docs/user_guide/basics/tasks.md +++ b/docs/user_guide/basics/tasks.md @@ -34,7 +34,7 @@ To clone and run the example code on this page, see the [Flytesnacks repo][flyte To begin, import `task` from the `flytekit` library: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/basics/basics/task.py 
+```{literalinclude} /examples/basics/basics/task.py :caption: basics/task.py :lines: 1 ``` @@ -45,7 +45,7 @@ Learn more about the supported types in the {ref}`type-system section = 1.11.0. ::: +### Raise User Error + +Raw containers handle errors by checking for the presence of an `_ERROR` file in the +`output_data_dir` after the container's execution. If this file exists, Flyte treats it as +a user-defined error and retries the task if `retries` parameter is set in the task +metadata. + ## Scripts The contents of each script specified in the `ContainerTask` is as follows: diff --git a/docs/user_guide/data_types_and_io/accessing_attributes.md b/docs/user_guide/data_types_and_io/accessing_attributes.md index 1eda605183..82b2345ad5 100644 --- a/docs/user_guide/data_types_and_io/accessing_attributes.md +++ b/docs/user_guide/data_types_and_io/accessing_attributes.md @@ -11,15 +11,19 @@ Note that while this functionality may appear to be the normal behavior of Pytho Consequently, accessing attributes in this manner is, in fact, a specially implemented feature. This functionality facilitates the direct passing of output attributes within workflows, enhancing the convenience of working with complex data structures. +```{important} +Flytekit version >= v1.14.0 supports Pydantic BaseModel V2, you can do attribute access on Pydantic BaseModel V2 as well. +``` + ```{note} To clone and run the example code on this page, see the [Flytesnacks repo][flytesnacks]. ``` To begin, import the required dependencies and define a common task for subsequent use: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/data_types_and_io/data_types_and_io/attribute_access.py +```{literalinclude} /examples/data_types_and_io/data_types_and_io/attribute_access.py :caption: data_types_and_io/attribute_access.py -:lines: 1-10 +:lines: 1-9 ``` ## List @@ -29,40 +33,40 @@ You can access an output list using index notation. 
Flyte currently does not support output promise access through list slicing. ::: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/data_types_and_io/data_types_and_io/attribute_access.py +```{literalinclude} /examples/data_types_and_io/data_types_and_io/attribute_access.py :caption: data_types_and_io/attribute_access.py -:lines: 14-23 +:lines: 13-22 ``` ## Dictionary Access the output dictionary by specifying the key. -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/data_types_and_io/data_types_and_io/attribute_access.py +```{literalinclude} /examples/data_types_and_io/data_types_and_io/attribute_access.py :caption: data_types_and_io/attribute_access.py -:lines: 27-35 +:lines: 26-34 ``` ## Data class Directly access an attribute of a dataclass. -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/data_types_and_io/data_types_and_io/attribute_access.py +```{literalinclude} /examples/data_types_and_io/data_types_and_io/attribute_access.py :caption: data_types_and_io/attribute_access.py -:lines: 39-53 +:lines: 38-51 ``` ## Complex type Combinations of list, dict and dataclass also work effectively. 
-```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/data_types_and_io/data_types_and_io/attribute_access.py +```{literalinclude} /examples/data_types_and_io/data_types_and_io/attribute_access.py :caption: data_types_and_io/attribute_access.py -:lines: 57-80 +:lines: 55-78 ``` You can run all the workflows locally as follows: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/data_types_and_io/data_types_and_io/attribute_access.py +```{literalinclude} /examples/data_types_and_io/data_types_and_io/attribute_access.py :caption: data_types_and_io/attribute_access.py -:lines: 84-88 +:lines: 82-86 ``` ## Failure scenario diff --git a/docs/user_guide/data_types_and_io/dataclass.md b/docs/user_guide/data_types_and_io/dataclass.md index bc7ae9a26d..462ba7da3a 100644 --- a/docs/user_guide/data_types_and_io/dataclass.md +++ b/docs/user_guide/data_types_and_io/dataclass.md @@ -11,8 +11,24 @@ When you've multiple values that you want to send across Flyte entities, you can Flytekit uses the [Mashumaro library](https://github.com/Fatal1ty/mashumaro) to serialize and deserialize dataclasses. +With the 1.14 release, `flytekit` adopted `MessagePack` as the +serialization format for dataclasses, overcoming a major limitation of serialization into a JSON string within a Protobuf `struct` datatype, like the previous versions do: + +to store `int` types, Protobuf's `struct` converts them to `float`, forcing users to write boilerplate code to work around this issue. + +:::{important} +If you're using Flytekit version < v1.11.1, you will need to add `from dataclasses_json import dataclass_json` to your imports and decorate your dataclass with `@dataclass_json`. +::: + :::{important} -If you're using Flytekit version below v1.11.1, you will need to add `from dataclasses_json import dataclass_json` to your imports and decorate your dataclass with `@dataclass_json`. 
+Flytekit version < v1.14.0 will produce protobuf `struct` literal for dataclasses. + +Flytekit version >= v1.14.0 will produce msgpack bytes literal for dataclasses. + +If you're using Flytekit version >= v1.14.0 and you want to produce protobuf `struct` literal for dataclasses, you can +set environment variable `FLYTE_USE_OLD_DC_FORMAT` to `true`. + +For more details, you can refer to the MSGPACK IDL RFC: https://github.com/flyteorg/flyte/blob/master/rfc/system/5741-binary-idl-with-message-pack.md ::: @@ -21,13 +37,13 @@ To clone and run the example code on this page, see the [Flytesnacks repo][flyte To begin, import the necessary dependencies: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/cfb5ea3b0d0502ef7df1f2e14f4a0d9b78250b6a/examples/data_types_and_io/data_types_and_io/dataclass.py +```{literalinclude} /examples/data_types_and_io/data_types_and_io/dataclass.py :caption: data_types_and_io/dataclass.py :lines: 1-9 ``` Build your custom image with ImageSpec: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/cfb5ea3b0d0502ef7df1f2e14f4a0d9b78250b6a/examples/data_types_and_io/data_types_and_io/dataclass.py +```{literalinclude} /examples/data_types_and_io/data_types_and_io/dataclass.py :caption: data_types_and_io/dataclass.py :lines: 16-19 ``` @@ -35,7 +51,7 @@ Build your custom image with ImageSpec: ## Python types We define a `dataclass` with `int`, `str` and `dict` as the data types. -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/cfb5ea3b0d0502ef7df1f2e14f4a0d9b78250b6a/examples/data_types_and_io/data_types_and_io/dataclass.py +```{literalinclude} /examples/data_types_and_io/data_types_and_io/dataclass.py :caption: data_types_and_io/dataclass.py :pyobject: Datum ``` @@ -48,7 +64,7 @@ All variables in a data class should be **annotated with their type**. Failure t Once declared, a dataclass can be returned as an output or accepted as an input.
-```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/cfb5ea3b0d0502ef7df1f2e14f4a0d9b78250b6a/examples/data_types_and_io/data_types_and_io/dataclass.py +```{literalinclude} /examples/data_types_and_io/data_types_and_io/dataclass.py :caption: data_types_and_io/dataclass.py :lines: 32-47 ``` @@ -57,7 +73,7 @@ Once declared, a dataclass can be returned as an output or accepted as an input. We also define a data class that accepts {std:ref}`StructuredDataset `, {std:ref}`FlyteFile ` and {std:ref}`FlyteDirectory `. -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/cfb5ea3b0d0502ef7df1f2e14f4a0d9b78250b6a/examples/data_types_and_io/data_types_and_io/dataclass.py +```{literalinclude} /examples/data_types_and_io/data_types_and_io/dataclass.py :caption: data_types_and_io/dataclass.py :lines: 51-88 ``` @@ -67,14 +83,14 @@ flyte file, flyte directory and structured dataset. We define a workflow that calls the tasks created above. -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/cfb5ea3b0d0502ef7df1f2e14f4a0d9b78250b6a/examples/data_types_and_io/data_types_and_io/dataclass.py +```{literalinclude} /examples/data_types_and_io/data_types_and_io/dataclass.py :caption: data_types_and_io/dataclass.py :pyobject: dataclass_wf ``` You can run the workflow locally as follows: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/cfb5ea3b0d0502ef7df1f2e14f4a0d9b78250b6a/examples/data_types_and_io/data_types_and_io/dataclass.py +```{literalinclude} /examples/data_types_and_io/data_types_and_io/dataclass.py :caption: data_types_and_io/dataclass.py :lines: 101-102 ``` diff --git a/docs/user_guide/data_types_and_io/enum_type.md b/docs/user_guide/data_types_and_io/enum_type.md index b8e9011921..f5b1873d98 100644 --- a/docs/user_guide/data_types_and_io/enum_type.md +++ b/docs/user_guide/data_types_and_io/enum_type.md @@ -22,7 +22,7 @@ To clone and run the example code on this page, see the [Flytesnacks repo][flyte To begin, import the 
dependencies: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/data_types_and_io/data_types_and_io/enum_type.py +```{literalinclude} /examples/data_types_and_io/data_types_and_io/enum_type.py :caption: data_types_and_io/enum_type.py :lines: 1-3 ``` @@ -30,14 +30,14 @@ To begin, import the dependencies: We define an enum and a simple coffee maker workflow that accepts an order and brews coffee ☕️ accordingly. The assumption is that the coffee maker only understands enum inputs: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/data_types_and_io/data_types_and_io/enum_type.py +```{literalinclude} /examples/data_types_and_io/data_types_and_io/enum_type.py :caption: data_types_and_io/enum_type.py :lines: 9-35 ``` The workflow can also accept an enum value: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/data_types_and_io/data_types_and_io/enum_type.py +```{literalinclude} /examples/data_types_and_io/data_types_and_io/enum_type.py :caption: data_types_and_io/enum_type.py :pyobject: coffee_maker_enum ``` @@ -51,7 +51,7 @@ pyflyte run \ You can run the workflows locally: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/data_types_and_io/data_types_and_io/enum_type.py +```{literalinclude} /examples/data_types_and_io/data_types_and_io/enum_type.py :caption: data_types_and_io/enum_type.py :lines: 44-46 ``` diff --git a/docs/user_guide/data_types_and_io/flytedirectory.md b/docs/user_guide/data_types_and_io/flytedirectory.md index 4ad2316ded..82cc5ab2a0 100644 --- a/docs/user_guide/data_types_and_io/flytedirectory.md +++ b/docs/user_guide/data_types_and_io/flytedirectory.md @@ -16,7 +16,7 @@ To clone and run the example code on this page, see the [Flytesnacks repo][flyte To begin, import the libraries: -```{rli} 
https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/data_types_and_io/data_types_and_io/folder.py +```{literalinclude} /examples/data_types_and_io/data_types_and_io/folder.py :caption: data_types_and_io/folder.py :lines: 1-10 ``` @@ -27,7 +27,7 @@ let's continue by considering the normalization of columns in a CSV file. The following task downloads a list of URLs pointing to CSV files and returns the folder path in a `FlyteDirectory` object. -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/data_types_and_io/data_types_and_io/folder.py +```{literalinclude} /examples/data_types_and_io/data_types_and_io/folder.py :caption: data_types_and_io/folder.py :pyobject: download_files ``` @@ -57,7 +57,7 @@ demonstrates how Flyte tasks are simply entrypoints of execution, which can them other functions and routines that are written in pure Python. ::: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/data_types_and_io/data_types_and_io/folder.py +```{literalinclude} /examples/data_types_and_io/data_types_and_io/folder.py :caption: data_types_and_io/folder.py :pyobject: normalize_columns ``` @@ -65,7 +65,7 @@ other functions and routines that are written in pure Python. We then define a task that accepts the previously downloaded folder, along with some metadata about the column names of each file in the directory and the column names that we want to normalize. -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/data_types_and_io/data_types_and_io/folder.py +```{literalinclude} /examples/data_types_and_io/data_types_and_io/folder.py :caption: data_types_and_io/folder.py :pyobject: normalize_all_files ``` @@ -74,14 +74,14 @@ Compose all of the above tasks into a workflow. 
This workflow accepts a list of URL strings pointing to a remote location containing a CSV file, a list of column names associated with each CSV file, and a list of columns that we want to normalize. -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/data_types_and_io/data_types_and_io/folder.py +```{literalinclude} /examples/data_types_and_io/data_types_and_io/folder.py :caption: data_types_and_io/folder.py :pyobject: download_and_normalize_csv_files ``` You can run the workflow locally as follows: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/data_types_and_io/data_types_and_io/folder.py +```{literalinclude} /examples/data_types_and_io/data_types_and_io/folder.py :caption: data_types_and_io/folder.py :lines: 94-114 ``` @@ -98,7 +98,7 @@ This feature is marked as experimental. We'd love feedback on the API! Here is a simple example, you can accept a `FlyteDirectory` as an input, walk through it and copy the files to another `FlyteDirectory` one by one. 
-```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/ddce0448141ea6d2cb148df52bf408874adb15ad/examples/data_types_and_io/data_types_and_io/file_streaming.py +```{literalinclude} /examples/data_types_and_io/data_types_and_io/file_streaming.py :caption: data_types_and_io/file_streaming.py :lines: 23-33 ``` diff --git a/docs/user_guide/data_types_and_io/flytefile.md b/docs/user_guide/data_types_and_io/flytefile.md index 76dc0f6be8..997dbe031a 100644 --- a/docs/user_guide/data_types_and_io/flytefile.md +++ b/docs/user_guide/data_types_and_io/flytefile.md @@ -23,7 +23,7 @@ To clone and run the example code on this page, see the [Flytesnacks repo][flyte First, import the libraries: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/data_types_and_io/data_types_and_io/file.py +```{literalinclude} /examples/data_types_and_io/data_types_and_io/file.py :caption: data_types_and_io/file.py :lines: 1-8 ``` @@ -43,7 +43,7 @@ Predefined aliases for commonly used flyte file formats are also available. You can find them [here](https://github.com/flyteorg/flytekit/blob/master/flytekit/types/file/__init__.py). ::: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/data_types_and_io/data_types_and_io/file.py +```{literalinclude} /examples/data_types_and_io/data_types_and_io/file.py :caption: data_types_and_io/file.py :pyobject: normalize_columns ``` @@ -56,16 +56,16 @@ When this task finishes, the Flytekit engine returns the `FlyteFile` instance, u Lastly, define a workflow. The `normalize_csv_files` workflow has an `output_location` argument which is passed to the `location` input of the task. If it's not an empty string, the task attempts to upload its file to that location. 
-```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/0ec8388759d34566a0ffc0c3c2d7443fd4a3a46f/examples/data_types_and_io/data_types_and_io/file.py +```{literalinclude} /examples/data_types_and_io/data_types_and_io/file.py :caption: data_types_and_io/file.py :pyobject: normalize_csv_file ``` You can run the workflow locally as follows: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/0ec8388759d34566a0ffc0c3c2d7443fd4a3a46f/examples/data_types_and_io/data_types_and_io/file.py +```{literalinclude} /examples/data_types_and_io/data_types_and_io/file.py :caption: data_types_and_io/file.py -:lines: 75-95 +:lines: 72-92 ``` You can enable type validation if you have the [python-magic](https://pypi.org/project/python-magic/) package installed. @@ -101,7 +101,7 @@ This feature is marked as experimental. We'd love feedback on the API! Here is a simple example of removing some columns from a CSV file and writing the result to a new file: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/ddce0448141ea6d2cb148df52bf408874adb15ad/examples/data_types_and_io/data_types_and_io/file_streaming.py +```{literalinclude} /examples/data_types_and_io/data_types_and_io/file_streaming.py :caption: data_types_and_io/file_streaming.py :lines: 8-20 ``` diff --git a/docs/user_guide/data_types_and_io/index.md b/docs/user_guide/data_types_and_io/index.md index 3280054696..b863aacaee 100644 --- a/docs/user_guide/data_types_and_io/index.md +++ b/docs/user_guide/data_types_and_io/index.md @@ -108,13 +108,21 @@ Here's a breakdown of these mappings: - Structured Dataset - Automatic - Use ``pandas.DataFrame`` as a type hint. Pandas column types aren't preserved. + * - ``polars.DataFrame`` + - Structured Dataset + - Automatic + - Use ``polars.DataFrame`` as a type hint. Polars column types aren't preserved. + * - ``polars.LazyFrame`` + - Structured Dataset + - Automatic + - Use ``polars.LazyFrame`` as a type hint. Polars column types aren't preserved. 
* - ``pyspark.DataFrame`` - Structured Dataset - To utilize the type, install the ``flytekitplugins-spark`` plugin. - Use ``pyspark.DataFrame`` as a type hint. * - ``pydantic.BaseModel`` - ``Map`` - - To utilize the type, install the ``flytekitplugins-pydantic`` plugin. + - To utilize the type, install the ``pydantic>2`` module. - Use ``pydantic.BaseModel`` as a type hint. * - ``torch.Tensor`` / ``torch.nn.Module`` - File @@ -144,6 +152,7 @@ flytefile flytedirectory structureddataset dataclass +pydantic_basemodel accessing_attributes pytorch_type enum_type diff --git a/docs/user_guide/data_types_and_io/pickle_type.md b/docs/user_guide/data_types_and_io/pickle_type.md index 301ff95f9f..b0c497ef35 100644 --- a/docs/user_guide/data_types_and_io/pickle_type.md +++ b/docs/user_guide/data_types_and_io/pickle_type.md @@ -27,7 +27,7 @@ This example demonstrates how you can utilize custom objects without registering To clone and run the example code on this page, see the [Flytesnacks repo][flytesnacks]. ``` -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/data_types_and_io/data_types_and_io/pickle_type.py +```{literalinclude} /examples/data_types_and_io/data_types_and_io/pickle_type.py :caption: data_types_and_io/pickle_type.py :lines: 1 ``` @@ -40,33 +40,9 @@ Alternatively, you can {ref}`turn this object into a dataclass ` for We have used a simple object here for demonstration purposes. ::: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/data_types_and_io/data_types_and_io/pickle_type.py +```{literalinclude} /examples/data_types_and_io/data_types_and_io/pickle_type.py :caption: data_types_and_io/pickle_type.py :lines: 7-26 ``` -## Batch size - -By default, if the list subtype is unrecognized, a single pickle file is generated. 
-To optimize serialization and deserialization performance for scenarios involving a large number of items -or significant list elements, you can specify a batch size. -This feature allows for the processing of each batch as a separate pickle file. -The following example demonstrates how to set the batch size. - -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/data_types_and_io/data_types_and_io/pickle_type.py -:caption: data_types_and_io/pickle_type.py -:lines: 35-58 -``` - -:::{note} -The `welcome_superheroes` task will generate two pickle files: one containing two superheroes and the other containing one superhero. -::: - -You can run the workflows locally as follows: - -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/data_types_and_io/data_types_and_io/pickle_type.py -:caption: data_types_and_io/pickle_type.py -:lines: 62-64 -``` - [flytesnacks]: https://github.com/flyteorg/flytesnacks/tree/master/examples/data_types_and_io/ diff --git a/docs/user_guide/data_types_and_io/pydantic_basemodel.md b/docs/user_guide/data_types_and_io/pydantic_basemodel.md new file mode 100644 index 0000000000..be40672534 --- /dev/null +++ b/docs/user_guide/data_types_and_io/pydantic_basemodel.md @@ -0,0 +1,103 @@ +(pydantic_basemodel)= + +# Pydantic BaseModel + +```{eval-rst} +.. tags:: Basic +``` + +`flytekit` version >=1.14 supports natively the `JSON` format that Pydantic `BaseModel` produces, enhancing the +interoperability of Pydantic BaseModels with the Flyte type system. + +:::{important} +Pydantic BaseModel V2 only works when you are using flytekit version >= v1.14.0. 
+::: + +With the 1.14 release, `flytekit` adopted `MessagePack` as the serialization format for Pydantic `BaseModel`, +overcoming a major limitation of serialization into a JSON string within a Protobuf `struct` datatype as previous versions did: + +to store `int` types, Protobuf's `struct` converts them to `float`, forcing users to write boilerplate code to work around this issue. + +:::{important} +By default, `flytekit >= 1.14` will produce `msgpack` bytes literals when serializing, preserving the types defined in your `BaseModel` class. +If you're serializing `BaseModel` using `flytekit` version >= v1.14.0 and you want to produce Protobuf `struct` literal instead, you can set environment variable `FLYTE_USE_OLD_DC_FORMAT` to `true`. + +For more details, you can refer to the MESSAGEPACK IDL RFC: https://github.com/flyteorg/flyte/blob/master/rfc/system/5741-binary-idl-with-message-pack.md +::: + +```{note} +You can put Dataclass and FlyteTypes (FlyteFile, FlyteDirectory, FlyteSchema, and StructuredDataset) in a pydantic BaseModel. +``` + +```{note} +To clone and run the example code on this page, see the [Flytesnacks repo][flytesnacks]. +``` + +To begin, import the necessary dependencies: + +```{literalinclude} /examples/data_types_and_io/data_types_and_io/pydantic_basemodel.py +:caption: data_types_and_io/pydantic_basemodel.py +:lines: 1-9 +``` + +Build your custom image with ImageSpec: +```{literalinclude} /examples/data_types_and_io/data_types_and_io/pydantic_basemodel.py +:caption: data_types_and_io/pydantic_basemodel.py +:lines: 11-14 +``` + +## Python types +We define a `pydantic basemodel` with `int`, `str` and `dict` as the data types. + +```{literalinclude} /examples/data_types_and_io/data_types_and_io/pydantic_basemodel.py +:caption: data_types_and_io/pydantic_basemodel.py +:pyobject: Datum +``` + +You can send a `pydantic basemodel` between different tasks written in various languages, and input it through the Flyte console as raw JSON. 
+ +:::{note} +All variables in a data class should be **annotated with their type**. Failure to do so will result in an error. +::: + +Once declared, a dataclass can be returned as an output or accepted as an input. + +```{literalinclude} /examples/data_types_and_io/data_types_and_io/pydantic_basemodel.py +:caption: data_types_and_io/pydantic_basemodel.py +:lines: 26-41 +``` + +## Flyte types +We also define a data class that accepts {std:ref}`StructuredDataset `, +{std:ref}`FlyteFile ` and {std:ref}`FlyteDirectory `. + +```{literalinclude} /examples/data_types_and_io/data_types_and_io/pydantic_basemodel.py +:caption: data_types_and_io/pydantic_basemodel.py +:lines: 45-86 +``` + +A data class supports the usage of data associated with Python types, data classes, +flyte file, flyte directory and structured dataset. + +We define a workflow that calls the tasks created above. + +```{literalinclude} /examples/data_types_and_io/data_types_and_io/pydantic_basemodel.py +:caption: data_types_and_io/pydantic_basemodel.py +:pyobject: basemodel_wf +``` + +You can run the workflow locally as follows: + +```{literalinclude} /examples/data_types_and_io/data_types_and_io/pydantic_basemodel.py +:caption: data_types_and_io/pydantic_basemodel.py +:lines: 99-100 +``` + +To trigger a task that accepts a dataclass as an input with `pyflyte run`, you can provide a JSON file as an input: +``` +pyflyte run \ + https://raw.githubusercontent.com/flyteorg/flytesnacks/b71e01d45037cea883883f33d8d93f258b9a5023/examples/data_types_and_io/data_types_and_io/pydantic_basemodel.py \ + basemodel_wf --x 1 --y 2 +``` + +[flytesnacks]: https://github.com/flyteorg/flytesnacks/tree/master/examples/data_types_and_io/ diff --git a/docs/user_guide/data_types_and_io/pytorch_type.md b/docs/user_guide/data_types_and_io/pytorch_type.md index 24696f6a75..685c7a38b2 100644 --- a/docs/user_guide/data_types_and_io/pytorch_type.md +++ b/docs/user_guide/data_types_and_io/pytorch_type.md @@ -18,7 +18,7 @@ At times, 
you may find the need to pass tensors and modules (models) within your To clone and run the example code on this page, see the [Flytesnacks repo][flytesnacks]. ``` -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/data_types_and_io/data_types_and_io/pytorch_type.py +```{literalinclude} /examples/data_types_and_io/data_types_and_io/pytorch_type.py :caption: data_types_and_io/pytorch_type.py :lines: 5-50 ``` @@ -36,7 +36,7 @@ According to the PyTorch [docs](https://pytorch.org/tutorials/beginner/saving_lo it's recommended to store the module's `state_dict` rather than the module itself, although the serialization should work in either case. -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/data_types_and_io/data_types_and_io/pytorch_type.py +```{literalinclude} /examples/data_types_and_io/data_types_and_io/pytorch_type.py :caption: data_types_and_io/pytorch_type.py :lines: 63-117 ``` diff --git a/docs/user_guide/data_types_and_io/structureddataset.md b/docs/user_guide/data_types_and_io/structureddataset.md index 9a82610590..caacb15b89 100644 --- a/docs/user_guide/data_types_and_io/structureddataset.md +++ b/docs/user_guide/data_types_and_io/structureddataset.md @@ -37,14 +37,14 @@ To clone and run the example code on this page, see the [Flytesnacks repo][flyte To begin, import the dependencies for the example: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/data_types_and_io/data_types_and_io/structured_dataset.py +```{literalinclude} /examples/data_types_and_io/data_types_and_io/structured_dataset.py :caption: data_types_and_io/structured_dataset.py :lines: 1-19 ``` Define a task that returns a Pandas DataFrame. 
-```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/data_types_and_io/data_types_and_io/structured_dataset.py +```{literalinclude} /examples/data_types_and_io/data_types_and_io/structured_dataset.py :caption: data_types_and_io/structured_dataset.py :pyobject: generate_pandas_df ``` @@ -66,9 +66,9 @@ you can just specify the column names and their types in the structured dataset First, initialize column types you want to extract from the `StructuredDataset`. -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/data_types_and_io/data_types_and_io/structured_dataset.py +```{literalinclude} /examples/data_types_and_io/data_types_and_io/structured_dataset.py :caption: data_types_and_io/structured_dataset.py -:lines: 31-32 +:lines: 36-37 ``` Define a task that opens a structured dataset by calling `all()`. @@ -76,9 +76,9 @@ When you invoke `all()` with ``pandas.DataFrame``, the Flyte engine downloads th Keep in mind that you can invoke ``open()`` with any dataframe type that's supported or added to structured dataset. For instance, you can use ``pa.Table`` to convert the Pandas DataFrame to a PyArrow table. -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/data_types_and_io/data_types_and_io/structured_dataset.py +```{literalinclude} /examples/data_types_and_io/data_types_and_io/structured_dataset.py :caption: data_types_and_io/structured_dataset.py -:lines: 42-52 +:lines: 47-57 ``` The code may result in runtime failures if the columns do not match. @@ -89,9 +89,9 @@ You can use a custom serialization format to serialize your dataframes. 
Here's how you can register the Pandas to CSV handler, which is already available, and enable the CSV serialization by annotating the structured dataset with the CSV format: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/data_types_and_io/data_types_and_io/structured_dataset.py +```{literalinclude} /examples/data_types_and_io/data_types_and_io/structured_dataset.py :caption: data_types_and_io/structured_dataset.py -:lines: 58-72 +:lines: 63-77 ``` ## Storage driver and location @@ -198,7 +198,7 @@ enabling the use of a 2D NumPy array as a valid type within structured datasets. Extend `StructuredDatasetEncoder` and implement the `encode` function. The `encode` function converts NumPy array to an intermediate format (parquet file format in this case). -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/data_types_and_io/data_types_and_io/structured_dataset.py +```{literalinclude} /examples/data_types_and_io/data_types_and_io/structured_dataset.py :caption: data_types_and_io/structured_dataset.py :pyobject: NumpyEncodingHandler ``` @@ -208,7 +208,7 @@ The `encode` function converts NumPy array to an intermediate format (parquet fi Extend {py:class}`StructuredDatasetDecoder` and implement the {py:meth}`~StructuredDatasetDecoder.decode` function. The {py:meth}`~StructuredDatasetDecoder.decode` function converts the parquet file to a `numpy.ndarray`. 
-```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/data_types_and_io/data_types_and_io/structured_dataset.py +```{literalinclude} /examples/data_types_and_io/data_types_and_io/structured_dataset.py :caption: data_types_and_io/structured_dataset.py :pyobject: NumpyDecodingHandler ``` @@ -218,7 +218,7 @@ The {py:meth}`~StructuredDatasetDecoder.decode` function converts the parquet fi Create a default renderer for numpy array, then Flytekit will use this renderer to display schema of NumPy array on the Flyte deck. -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/data_types_and_io/data_types_and_io/structured_dataset.py +```{literalinclude} /examples/data_types_and_io/data_types_and_io/structured_dataset.py :caption: data_types_and_io/structured_dataset.py :pyobject: NumpyRenderer ``` @@ -228,16 +228,16 @@ Specify the Python type you want to register this encoder with (`np.ndarray`), the storage engine to register this against (if not specified, it is assumed to work for all the storage backends), and the byte format, which in this case is `PARQUET`. -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/data_types_and_io/data_types_and_io/structured_dataset.py +```{literalinclude} /examples/data_types_and_io/data_types_and_io/structured_dataset.py :caption: data_types_and_io/structured_dataset.py -:lines: 128-130 +:lines: 133-135 ``` You can now use `numpy.ndarray` to deserialize the parquet file to NumPy and serialize a task's output (NumPy array) to a parquet file. 
-```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/data_types_and_io/data_types_and_io/structured_dataset.py +```{literalinclude} /examples/data_types_and_io/data_types_and_io/structured_dataset.py :caption: data_types_and_io/structured_dataset.py -:lines: 135-148 +:lines: 140-153 ``` :::{note} @@ -246,9 +246,9 @@ You can now use `numpy.ndarray` to deserialize the parquet file to NumPy and ser You can run the code locally as follows: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/data_types_and_io/data_types_and_io/structured_dataset.py +```{literalinclude} /examples/data_types_and_io/data_types_and_io/structured_dataset.py :caption: data_types_and_io/structured_dataset.py -:lines: 152-156 +:lines: 157-161 ``` ### The nested typed columns @@ -259,9 +259,9 @@ Like most storage formats (e.g. Avro, Parquet, and BigQuery), StructuredDataset Nested field StructuredDataset should be run when flytekit version > 1.11.0. ::: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/data_types_and_io/data_types_and_io/structured_dataset.py +```{literalinclude} /examples/data_types_and_io/data_types_and_io/structured_dataset.py :caption: data_types_and_io/structured_dataset.py -:lines: 158-285 +:lines: 163-290 ``` [flytesnacks]: https://github.com/flyteorg/flytesnacks/tree/master/examples/data_types_and_io/ diff --git a/docs/user_guide/data_types_and_io/tensorflow_type.md b/docs/user_guide/data_types_and_io/tensorflow_type.md index 43f620ce01..9035c71db5 100644 --- a/docs/user_guide/data_types_and_io/tensorflow_type.md +++ b/docs/user_guide/data_types_and_io/tensorflow_type.md @@ -9,7 +9,7 @@ This document outlines the TensorFlow types available in Flyte, which facilitate the integration of TensorFlow models and datasets in Flyte workflows. 
### Import necessary libraries and modules -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/9aadec205a6e208c62e29f52873fb3d675965a51/examples/data_types_and_io/data_types_and_io/tensorflow_type.py +```{literalinclude} /examples/data_types_and_io/data_types_and_io/tensorflow_type.py :caption: data_types_and_io/tensorflow_type.py :lines: 3-12 ``` @@ -30,7 +30,7 @@ The `TensorFlowModelTransformer` allows you to save a TensorFlow model to a remo ```{note} To clone and run the example code on this page, see the [Flytesnacks repo][flytesnacks]. ``` -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/9aadec205a6e208c62e29f52873fb3d675965a51/examples/data_types_and_io/data_types_and_io/tensorflow_type.py +```{literalinclude} /examples/data_types_and_io/data_types_and_io/tensorflow_type.py :caption: data_types_and_io/tensorflow_type.py :lines: 16-34 ``` @@ -47,7 +47,7 @@ Flyte supports TFRecord files through the TFRecordFile type, which can handle se ### Usage The `TensorFlowRecordFileTransformer` enables you to work with single TFRecord files, making it easy to read and write data in TensorFlow's TFRecord format. -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/9aadec205a6e208c62e29f52873fb3d675965a51/examples/data_types_and_io/data_types_and_io/tensorflow_type.py +```{literalinclude} /examples/data_types_and_io/data_types_and_io/tensorflow_type.py :caption: data_types_and_io/tensorflow_type.py :lines: 38-48 ``` @@ -66,7 +66,7 @@ Flyte supports directories containing multiple TFRecord files through the `TFRec The `TensorFlowRecordsDirTransformer` allows you to work with directories of TFRecord files, which is useful for handling large datasets that are split across multiple files. 
#### Example -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/9aadec205a6e208c62e29f52873fb3d675965a51/examples/data_types_and_io/data_types_and_io/tensorflow_type.py +```{literalinclude} /examples/data_types_and_io/data_types_and_io/tensorflow_type.py :caption: data_types_and_io/tensorflow_type.py :lines: 52-62 ``` diff --git a/docs/user_guide/development_lifecycle/cache_serializing.md b/docs/user_guide/development_lifecycle/cache_serializing.md index 1445de13cf..4ea14fb89e 100644 --- a/docs/user_guide/development_lifecycle/cache_serializing.md +++ b/docs/user_guide/development_lifecycle/cache_serializing.md @@ -17,7 +17,7 @@ To clone and run the example code on this page, see the [Flytesnacks repo][flyte For any {py:func}`flytekit.task` in Flyte, there is always one required import, which is: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/development_lifecycle/development_lifecycle/task_cache_serialize.py +```{literalinclude} /examples/development_lifecycle/development_lifecycle/task_cache_serialize.py :caption: development_lifecycle/task_cache_serialize.py :lines: 1 ``` @@ -27,7 +27,7 @@ Task cache serializing is disabled by default to avoid unexpected behavior for t This operation is only useful for cacheable tasks, where one may reuse output from a previous execution. Flyte requires implicitly enabling the `cache` parameter on all cache serializable tasks. Cache key definitions follow the same rules as non-serialized cache tasks. It is important to understand the implications of the task signature and `cache_version` parameter in defining cached results. 
-```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/development_lifecycle/development_lifecycle/task_cache_serialize.py +```{literalinclude} /examples/development_lifecycle/development_lifecycle/task_cache_serialize.py :caption: development_lifecycle/task_cache_serialize.py :pyobject: square ``` diff --git a/docs/user_guide/development_lifecycle/caching.md b/docs/user_guide/development_lifecycle/caching.md index ea6a5af574..2711aee68a 100644 --- a/docs/user_guide/development_lifecycle/caching.md +++ b/docs/user_guide/development_lifecycle/caching.md @@ -77,19 +77,19 @@ To clone and run the example code on this page, see the [Flytesnacks repo][flyte Import the necessary libraries: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/development_lifecycle/development_lifecycle/task_cache.py +```{literalinclude} /examples/development_lifecycle/development_lifecycle/task_cache.py :caption: development_lifecycle/task_cache.py :lines: 1-3 ``` For any {py:func}`flytekit.task` in Flyte, there is always one required import, which is: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/development_lifecycle/development_lifecycle/task_cache.py +```{literalinclude} /examples/development_lifecycle/development_lifecycle/task_cache.py :caption: development_lifecycle/task_cache.py :lines: 8-10 ``` -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/development_lifecycle/development_lifecycle/task_cache.py +```{literalinclude} /examples/development_lifecycle/development_lifecycle/task_cache.py :caption: development_lifecycle/task_cache.py :pyobject: square ``` @@ -150,18 +150,18 @@ The format used by the store is opaque and not meant to be inspectable. 
The default behavior displayed by Flyte's memoization feature might not match the user intuition. For example, this code makes use of pandas dataframes: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/development_lifecycle/development_lifecycle/task_cache.py +```{literalinclude} /examples/development_lifecycle/development_lifecycle/task_cache.py :caption: development_lifecycle/task_cache.py -:lines: 39-54 +:lines: 44-59 ``` If run twice with the same inputs, one would expect that `bar` would trigger a cache hit, but it turns out that's not the case because of how dataframes are represented in Flyte. However, with release 1.2.0, Flyte provides a new way to control memoization behavior of literals. This is done via a `typing.Annotated` call on the task signature. For example, in order to cache the result of calls to `bar`, you can rewrite the code above like this: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/development_lifecycle/development_lifecycle/task_cache.py +```{literalinclude} /examples/development_lifecycle/development_lifecycle/task_cache.py :caption: development_lifecycle/task_cache.py -:lines: 64-85 +:lines: 69-91 ``` Note how the output of task `foo` is annotated with an object of type `HashMethod`. Essentially, it represents a function that produces a hash that is used as part of the cache key calculation in calling the task `bar`. @@ -175,9 +175,9 @@ This feature also works in local execution. 
Here's a complete example of the feature: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/development_lifecycle/development_lifecycle/task_cache.py +```{literalinclude} /examples/development_lifecycle/development_lifecycle/task_cache.py :caption: development_lifecycle/task_cache.py -:lines: 97-134 +:lines: 103-140 ``` [flytesnacks]: https://github.com/flyteorg/flytesnacks/tree/master/examples/development_lifecycle/ diff --git a/docs/user_guide/development_lifecycle/decks.md b/docs/user_guide/development_lifecycle/decks.md index 366302d49e..68887615b3 100644 --- a/docs/user_guide/development_lifecycle/decks.md +++ b/docs/user_guide/development_lifecycle/decks.md @@ -28,7 +28,7 @@ To clone and run the example code on this page, see the [Flytesnacks repo][flyte To begin, import the dependencies: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/development_lifecycle/development_lifecycle/decks.py +```{literalinclude} /examples/development_lifecycle/development_lifecycle/decks.py :caption: development_lifecycle/decks.py :lines: 1-4 ``` @@ -39,7 +39,7 @@ We create a new deck named `pca` and render Markdown content along with a You can begin by initializing an {ref}`ImageSpec ` object to encompass all the necessary dependencies. This approach automatically triggers a Docker build, alleviating the need for you to manually create a Docker image. -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/b431ae399def3a749833fe81c2c291b016cf3213/examples/development_lifecycle/development_lifecycle/decks.py +```{literalinclude} /examples/development_lifecycle/development_lifecycle/decks.py :caption: development_lifecycle/decks.py :lines: 15-27 ``` @@ -51,7 +51,7 @@ To upload the image to the local registry in the demo cluster, indicate the regi Note the usage of `append` to append the Plotly deck to the Markdown deck. 
-```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/development_lifecycle/development_lifecycle/decks.py +```{literalinclude} /examples/development_lifecycle/development_lifecycle/decks.py :caption: development_lifecycle/decks.py :pyobject: pca_plot ``` @@ -96,7 +96,7 @@ When the task connected with a deck object is executed, these objects employ ren Creates a profile report from a Pandas DataFrame. -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/b431ae399def3a749833fe81c2c291b016cf3213/examples/development_lifecycle/development_lifecycle/decks.py +```{literalinclude} /examples/development_lifecycle/development_lifecycle/decks.py :caption: development_lifecycle/decks.py :lines: 56-63 ``` @@ -113,7 +113,7 @@ Creates a profile report from a Pandas DataFrame. Renders DataFrame as an HTML table. This renderer doesn't necessitate plugin installation since it's accessible within the flytekit library. -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/b431ae399def3a749833fe81c2c291b016cf3213/examples/development_lifecycle/development_lifecycle/decks.py +```{literalinclude} /examples/development_lifecycle/development_lifecycle/decks.py :caption: development_lifecycle/decks.py :lines: 69-76 ``` @@ -127,7 +127,7 @@ This renderer doesn't necessitate plugin installation since it's accessible with Converts a Markdown string into HTML, producing HTML as a Unicode string. -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/development_lifecycle/development_lifecycle/decks.py +```{literalinclude} /examples/development_lifecycle/development_lifecycle/decks.py :caption: development_lifecycle/decks.py :pyobject: markdown_renderer ``` @@ -147,7 +147,7 @@ The median (Q2) is indicated by a line within the box. Typically, the whiskers extend to the edges of the box, plus or minus 1.5 times the interquartile range (IQR: Q3-Q1). 
-```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/b431ae399def3a749833fe81c2c291b016cf3213/examples/development_lifecycle/development_lifecycle/decks.py +```{literalinclude} /examples/development_lifecycle/development_lifecycle/decks.py :caption: development_lifecycle/decks.py :lines: 97-103 ``` @@ -162,7 +162,7 @@ plus or minus 1.5 times the interquartile range (IQR: Q3-Q1). Converts a {ref}`FlyteFile ` or `PIL.Image.Image` object into an HTML string, where the image data is encoded as a base64 string. -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/b431ae399def3a749833fe81c2c291b016cf3213/examples/development_lifecycle/development_lifecycle/decks.py +```{literalinclude} /examples/development_lifecycle/development_lifecycle/decks.py :caption: development_lifecycle/decks.py :lines: 109-123 ``` @@ -176,7 +176,7 @@ where the image data is encoded as a base64 string. Converts a Pandas dataframe into an HTML table. -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/b431ae399def3a749833fe81c2c291b016cf3213/examples/development_lifecycle/development_lifecycle/decks.py +```{literalinclude} /examples/development_lifecycle/development_lifecycle/decks.py :caption: development_lifecycle/decks.py :lines: 127-135 ``` diff --git a/docs/user_guide/development_lifecycle/failure_node.md b/docs/user_guide/development_lifecycle/failure_node.md index 9bf7e2dd52..3e3cab7149 100644 --- a/docs/user_guide/development_lifecycle/failure_node.md +++ b/docs/user_guide/development_lifecycle/failure_node.md @@ -20,21 +20,21 @@ To address this issue, you can add a failure node into your workflow. This ensur To clone and run the example code on this page, see the [Flytesnacks repo][flytesnacks]. 
``` -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/development_lifecycle/development_lifecycle/failure_node.py +```{literalinclude} /examples/development_lifecycle/development_lifecycle/failure_node.py :caption: development_lifecycle/failure_node.py :lines: 1-6 ``` Create a task that will fail during execution: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/development_lifecycle/development_lifecycle/failure_node.py +```{literalinclude} /examples/development_lifecycle/development_lifecycle/failure_node.py :caption: development_lifecycle/failure_node.py :lines: 10-18 ``` Create a task that will be executed if any of the tasks in the workflow fail: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/development_lifecycle/development_lifecycle/failure_node.py +```{literalinclude} /examples/development_lifecycle/development_lifecycle/failure_node.py :caption: development_lifecycle/failure_node.py :pyobject: clean_up ``` @@ -45,21 +45,21 @@ Specify the `on_failure` to a cleanup task. This task will be executed if any of The input of `clean_up` should be the exact same as the input of the workflow. ::: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/development_lifecycle/development_lifecycle/failure_node.py +```{literalinclude} /examples/development_lifecycle/development_lifecycle/failure_node.py :caption: development_lifecycle/failure_node.py :pyobject: subwf ``` By setting the failure policy to `FAIL_AFTER_EXECUTABLE_NODES_COMPLETE` to ensure that the `wf1` is executed even if the subworkflow fails. 
In this case, both parent and child workflows will fail, resulting in the `clean_up` task being executed twice: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/development_lifecycle/development_lifecycle/failure_node.py +```{literalinclude} /examples/development_lifecycle/development_lifecycle/failure_node.py :caption: development_lifecycle/failure_node.py :lines: 42-53 ``` You can also set the `on_failure` to a workflow. This workflow will be executed if any of the tasks in the workflow fail: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/development_lifecycle/development_lifecycle/failure_node.py +```{literalinclude} /examples/development_lifecycle/development_lifecycle/failure_node.py :caption: development_lifecycle/failure_node.py :pyobject: wf2 ``` diff --git a/docs/user_guide/extending/custom_types.md b/docs/user_guide/extending/custom_types.md index a9670e9e8f..92a2ab5a19 100644 --- a/docs/user_guide/extending/custom_types.md +++ b/docs/user_guide/extending/custom_types.md @@ -27,7 +27,7 @@ To clone and run the example code on this page, see the [Flytesnacks repo][flyte First, we import the dependencies: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/0ec8388759d34566a0ffc0c3c2d7443fd4a3a46f/examples/extending/extending/custom_types.py +```{literalinclude} /examples/extending/extending/custom_types.py :caption: extending/custom_types.py :lines: 1-7 ``` @@ -38,7 +38,7 @@ First, we import the dependencies: Defined type here represents a list of files on the disk. We will refer to it as `MyDataset`. 
-```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/0ec8388759d34566a0ffc0c3c2d7443fd4a3a46f/examples/extending/extending/custom_types.py +```{literalinclude} /examples/extending/extending/custom_types.py :caption: extending/custom_types.py :pyobject: MyDataset ``` @@ -53,7 +53,7 @@ The `TypeTransformer` is a Generic abstract base class. The `Generic` type argum that we want to work with. In this case, it is the `MyDataset` object. ::: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/0ec8388759d34566a0ffc0c3c2d7443fd4a3a46f/examples/extending/extending/custom_types.py +```{literalinclude} /examples/extending/extending/custom_types.py :caption: extending/custom_types.py :pyobject: MyDatasetTransformer ``` @@ -61,23 +61,23 @@ that we want to work with. In this case, it is the `MyDataset` object. Before we can use MyDataset in our tasks, we need to let Flytekit know that `MyDataset` should be considered as a valid type. This is done using {py:class}`~flytekit:flytekit.extend.TypeEngine`'s `register` method. -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/0ec8388759d34566a0ffc0c3c2d7443fd4a3a46f/examples/extending/extending/custom_types.py +```{literalinclude} /examples/extending/extending/custom_types.py :caption: extending/custom_types.py -:lines: 87 +:lines: 86 ``` The new type should be ready to use! Let us write an example generator and consumer for this new datatype. -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/0ec8388759d34566a0ffc0c3c2d7443fd4a3a46f/examples/extending/extending/custom_types.py +```{literalinclude} /examples/extending/extending/custom_types.py :caption: extending/custom_types.py -:lines: 91-114 +:lines: 90-114 ``` This workflow can be executed and tested locally. Flytekit will exercise the entire path even if you run it locally. 
-```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/0ec8388759d34566a0ffc0c3c2d7443fd4a3a46f/examples/extending/extending/custom_types.py +```{literalinclude} /examples/extending/extending/custom_types.py :caption: extending/custom_types.py -:lines: 119-120 +:lines: 118-119 ``` [flytesnacks]: https://github.com/flyteorg/flytesnacks/tree/0ec8388759d34566a0ffc0c3c2d7443fd4a3a46f/examples/extending/ diff --git a/docs/user_guide/extending/user_container_task_plugins.md b/docs/user_guide/extending/user_container_task_plugins.md index 99a3adf155..444ad9d646 100644 --- a/docs/user_guide/extending/user_container_task_plugins.md +++ b/docs/user_guide/extending/user_container_task_plugins.md @@ -32,7 +32,7 @@ def wait_and_run(path: str) -> int: return do_next(path=path) ``` -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/extending/extending/user_container.py +```{literalinclude} /examples/extending/extending/user_container.py :caption: extending/user_container.py :lines: 1-6 ``` @@ -42,7 +42,7 @@ def wait_and_run(path: str) -> int: As illustrated above, to achieve this structure we need to create a class named `WaitForObjectStoreFile`, which derives from {py:class}`flytekit.PythonFunctionTask` as follows. 
-```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/extending/extending/user_container.py +```{literalinclude} /examples/extending/extending/user_container.py :caption: extending/user_container.py :pyobject: WaitForObjectStoreFile ``` @@ -68,14 +68,14 @@ Refer to the [spark plugin](https://github.com/flyteorg/flytekit/tree/master/plu ### Actual usage -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/extending/extending/user_container.py +```{literalinclude} /examples/extending/extending/user_container.py :caption: extending/user_container.py :lines: 54-69 ``` And of course, you can run the workflow locally using your own new shiny plugin! -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/extending/extending/user_container.py +```{literalinclude} /examples/extending/extending/user_container.py :caption: extending/user_container.py :lines: 73-78 ``` diff --git a/docs/user_guide/flyte_agents/developing_agents.md b/docs/user_guide/flyte_agents/developing_agents.md index f76c662fa0..a8b2050b01 100644 --- a/docs/user_guide/flyte_agents/developing_agents.md +++ b/docs/user_guide/flyte_agents/developing_agents.md @@ -7,6 +7,7 @@ jupytext: --- (developing_agents)= + # Developing agents The Flyte agent framework enables rapid agent development, since agents are decoupled from the core FlytePropeller engine. Rather than building a complete gRPC service from scratch, you can implement an agent as a Python class, easing development. Agents can be tested independently and deployed privately, making maintenance easier and giving you more flexibility and control over development. @@ -20,8 +21,9 @@ We strongly encourage you to contribute your agent to the Flyte community. To do ``` There are two types of agents: **async** and **sync**. 
-* **Async agents** enable long-running jobs that execute on an external platform over time. They communicate with external services that have asynchronous APIs that support `create`, `get`, and `delete` operations. The vast majority of agents are async agents. -* **Sync agents** enable request/response services that return immediate outputs (e.g. calling an internal API to fetch data or communicating with the OpenAI API). + +- **Async agents** enable long-running jobs that execute on an external platform over time. They communicate with external services that have asynchronous APIs that support `create`, `get`, and `delete` operations. The vast majority of agents are async agents. +- **Sync agents** enable request/response services that return immediate outputs (e.g. calling an internal API to fetch data or communicating with the OpenAI API). ```{note} @@ -41,6 +43,17 @@ To create a new async agent, extend the [`AsyncAgentBase`](https://github.com/fl - `get`: This method retrieves the job resource (jobID or output literal) associated with the task, such as a BigQuery job ID or Databricks task ID. - `delete`: Invoking this method will send a request to delete the corresponding job. +```{note} + +When users use the `create` method to create a new job, with its job ID, they can use the `get` method with job ID to +check the execution state is succeeded or not. + +Exceptional `delete` case: +If users interrupt a task while it is running, FlytePropeller will invoke the `delete` method to the corresponding +job. + +``` + ```python from typing import Optional from dataclasses import dataclass @@ -113,6 +126,7 @@ AgentRegistry.register(OpenAIAgent()) ``` #### Sensor interface specification + With the agent framework, you can easily build a custom sensor in Flyte to watch certain events or monitor the bucket in your workflow. 
To create a new sensor, extend the `[BaseSensor](https://github.com/flyteorg/flytekit/blob/master/flytekit/sensor/base_sensor.py#L43)` class and implement the `poke` method, which checks whether a specific condition is met. @@ -130,7 +144,6 @@ class FileSensor(BaseSensor): return fs.exists(path) ``` - ### 2. Test the agent You can test your agent in a {ref}`local Python environment ` or in a {ref}`local development cluster `. @@ -181,29 +194,29 @@ By default, all agent requests will be sent to the default agent service. Howeve you can route particular task requests to designated agent services by adjusting the FlytePropeller configuration. ```yaml - plugins: - agent-service: - # By default, all requests will be sent to the default agent. - defaultAgent: - endpoint: "dns:///flyteagent.flyte.svc.cluster.local:8000" - insecure: true - timeouts: - # CreateTask, GetTask and DeleteTask are for async agents. - # ExecuteTaskSync is for sync agents. - CreateTask: 5s - GetTask: 5s - DeleteTask: 5s - ExecuteTaskSync: 10s - defaultTimeout: 10s - agents: - custom_agent: - endpoint: "dns:///custom-flyteagent.flyte.svc.cluster.local:8000" - insecure: false - defaultServiceConfig: '{"loadBalancingConfig": [{"round_robin":{}}]}' - timeouts: - GetTask: 5s - defaultTimeout: 10s - agentForTaskTypes: - # It will override the default agent for custom_task, which means propeller will send the request to this agent. - - custom_task: custom_agent +plugins: + agent-service: + # By default, all requests will be sent to the default agent. + defaultAgent: + endpoint: "dns:///flyteagent.flyte.svc.cluster.local:8000" + insecure: true + timeouts: + # CreateTask, GetTask and DeleteTask are for async agents. + # ExecuteTaskSync is for sync agents. 
+ CreateTask: 5s + GetTask: 5s + DeleteTask: 5s + ExecuteTaskSync: 10s + defaultTimeout: 10s + agents: + custom_agent: + endpoint: "dns:///custom-flyteagent.flyte.svc.cluster.local:8000" + insecure: false + defaultServiceConfig: '{"loadBalancingConfig": [{"round_robin":{}}]}' + timeouts: + GetTask: 5s + defaultTimeout: 10s + agentForTaskTypes: + # It will override the default agent for custom_task, which means propeller will send the request to this agent. + - custom_task: custom_agent ``` diff --git a/docs/user_guide/getting_started_with_workflow_development/flyte_project_components.md b/docs/user_guide/getting_started_with_workflow_development/flyte_project_components.md index 47821c5201..4b6ce43d68 100644 --- a/docs/user_guide/getting_started_with_workflow_development/flyte_project_components.md +++ b/docs/user_guide/getting_started_with_workflow_development/flyte_project_components.md @@ -33,8 +33,8 @@ manage your project's Python requirements. ````{dropdown} See requirements.txt -```{rli} https://raw.githubusercontent.com/flyteorg/flytekit-python-template/main/basic-template-imagespec/%7B%7Bcookiecutter.project_name%7D%7D/requirements.txt -:caption: requirements.txt +```{code-block} bash +flytekit>=1.5.0 ``` ```` diff --git a/docs/user_guide/productionizing/customizing_task_resources.md b/docs/user_guide/productionizing/customizing_task_resources.md index 6ba07a604b..da885ec583 100644 --- a/docs/user_guide/productionizing/customizing_task_resources.md +++ b/docs/user_guide/productionizing/customizing_task_resources.md @@ -36,35 +36,35 @@ To clone and run the example code on this page, see the [Flytesnacks repo][flyte Import the dependencies: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/productionizing/productionizing/customizing_resources.py +```{literalinclude} /examples/productionizing/productionizing/customizing_resources.py :caption: productionizing/customizing_resources.py :lines: 1-3 
``` Define a task and configure the resources to be allocated to it: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/productionizing/productionizing/customizing_resources.py +```{literalinclude} /examples/productionizing/productionizing/customizing_resources.py :caption: productionizing/customizing_resources.py :pyobject: count_unique_numbers ``` Define a task that computes the square of a number: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/productionizing/productionizing/customizing_resources.py +```{literalinclude} /examples/productionizing/productionizing/customizing_resources.py :caption: productionizing/customizing_resources.py :pyobject: square ``` You can use the tasks decorated with memory and storage hints like regular tasks in a workflow. -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/productionizing/productionizing/customizing_resources.py +```{literalinclude} /examples/productionizing/productionizing/customizing_resources.py :caption: productionizing/customizing_resources.py :pyobject: my_workflow ``` You can execute the workflow locally. -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/productionizing/productionizing/customizing_resources.py +```{literalinclude} /examples/productionizing/productionizing/customizing_resources.py :caption: productionizing/customizing_resources.py :lines: 32-34 ``` @@ -82,7 +82,7 @@ Let's understand how the resources can be initialized with an example. Import the dependencies. 
-```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/productionizing/productionizing/customizing_resources.py +```{literalinclude} /examples/productionizing/productionizing/customizing_resources.py :caption: productionizing/customizing_resources.py :lines: 38-40 ``` @@ -90,28 +90,28 @@ Import the dependencies. Define a task and configure the resources to be allocated to it. You can use tasks decorated with memory and storage hints like regular tasks in a workflow. -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/productionizing/productionizing/customizing_resources.py +```{literalinclude} /examples/productionizing/productionizing/customizing_resources.py :caption: productionizing/customizing_resources.py :pyobject: count_unique_numbers ``` Define a task that computes the square of a number: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/productionizing/productionizing/customizing_resources.py +```{literalinclude} /examples/productionizing/productionizing/customizing_resources.py :caption: productionizing/customizing_resources.py :pyobject: square_1 ``` The `with_overrides` method overrides the old resource allocations: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/productionizing/productionizing/customizing_resources.py +```{literalinclude} /examples/productionizing/productionizing/customizing_resources.py :caption: productionizing/customizing_resources.py :pyobject: my_pipeline ``` You can execute the workflow locally: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/productionizing/productionizing/customizing_resources.py +```{literalinclude} /examples/productionizing/productionizing/customizing_resources.py :caption: 
productionizing/customizing_resources.py :lines: 65-67 ``` diff --git a/docs/user_guide/productionizing/reference_launch_plans.md b/docs/user_guide/productionizing/reference_launch_plans.md index bce75e4681..79b4142baf 100644 --- a/docs/user_guide/productionizing/reference_launch_plans.md +++ b/docs/user_guide/productionizing/reference_launch_plans.md @@ -16,7 +16,7 @@ Reference launch plans cannot be run locally. You must mock them out. To clone and run the example code on this page, see the [Flytesnacks repo][flytesnacks]. ``` -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/productionizing/productionizing/reference_launch_plan.py +```{literalinclude} /examples/productionizing/productionizing/reference_launch_plan.py :caption: productionizing/reference_launch_plan.py :lines: 1-36 ``` diff --git a/docs/user_guide/productionizing/reference_tasks.md b/docs/user_guide/productionizing/reference_tasks.md index d91ecd4bbc..0d611608a6 100644 --- a/docs/user_guide/productionizing/reference_tasks.md +++ b/docs/user_guide/productionizing/reference_tasks.md @@ -16,7 +16,7 @@ Reference tasks cannot be run locally. You must mock them out. To clone and run the example code on this page, see the [Flytesnacks repo][flytesnacks]. 
``` -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/productionizing/productionizing/reference_task.py +```{literalinclude} /examples/productionizing/productionizing/reference_task.py :caption: productionizing/reference_task.py :lines: 1-36 ``` diff --git a/docs/user_guide/productionizing/schedules.md b/docs/user_guide/productionizing/schedules.md index fa0a6eedb0..6d28906073 100644 --- a/docs/user_guide/productionizing/schedules.md +++ b/docs/user_guide/productionizing/schedules.md @@ -25,7 +25,7 @@ To clone and run the example code on this page, see the [Flytesnacks repo][flyte Consider the following example workflow: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/productionizing/productionizing/lp_schedules.py +```{literalinclude} /examples/productionizing/productionizing/lp_schedules.py :caption: productionizing/lp_schedules.py :lines: 1-14 ``` @@ -39,7 +39,7 @@ The `date_formatter_wf` workflow can be scheduled using either the `CronSchedule [Cron](https://en.wikipedia.org/wiki/Cron) expression strings use this {ref}`syntax `. An incorrect cron schedule expression would lead to failure in triggering the schedule. 
-```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/productionizing/productionizing/lp_schedules.py +```{literalinclude} /examples/productionizing/productionizing/lp_schedules.py :caption: productionizing/lp_schedules.py :lines: 17-29 ``` @@ -54,7 +54,7 @@ If you prefer to use an interval rather than a cron scheduler to schedule your w Here's an example: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/productionizing/productionizing/lp_schedules.py +```{literalinclude} /examples/productionizing/productionizing/lp_schedules.py :caption: productionizing/lp_schedules.py :lines: 34-57 ``` diff --git a/docs/user_guide/productionizing/secrets.md b/docs/user_guide/productionizing/secrets.md index 7eba15e653..538dc5d5ad 100644 --- a/docs/user_guide/productionizing/secrets.md +++ b/docs/user_guide/productionizing/secrets.md @@ -62,7 +62,7 @@ Once you've defined a secret on the Flyte backend, `flytekit` exposes a class called {py:class}`~flytekit.Secret`s, which allows you to request a secret from the configured secret manager: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/productionizing/productionizing/use_secrets.py +```{literalinclude} /examples/productionizing/productionizing/use_secrets.py :caption: productionizing/use_secrets.py :lines: 1-6, 49-53 ``` @@ -76,7 +76,7 @@ In the code below we specify two variables, `SECRET_GROUP` and `SECRET_NAME`, which maps onto the `user-info` secret that we created with `kubectl` above, with a key called `user_secret`. 
-```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/productionizing/productionizing/use_secrets.py +```{literalinclude} /examples/productionizing/productionizing/use_secrets.py :caption: productionizing/use_secrets.py :lines: 66-67 ``` @@ -92,7 +92,7 @@ invoking the {py:func}`flytekit.current_context` function, as shown below. At runtime, flytekit looks inside the task pod for an environment variable or a mounted file with a predefined name/path and loads the value. -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/productionizing/productionizing/use_secrets.py +```{literalinclude} /examples/productionizing/productionizing/use_secrets.py :caption: productionizing/use_secrets.py :pyobject: secret_task ``` @@ -127,14 +127,14 @@ the same secret: In this case, the secret group will be `user-info`, with three available secret keys: `user_secret`, `username`, and `password`: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/productionizing/productionizing/use_secrets.py +```{literalinclude} /examples/productionizing/productionizing/use_secrets.py :caption: productionizing/use_secrets.py :lines: 107-108 ``` The Secret structure allows passing two fields, matching the key and the group, as previously described: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/productionizing/productionizing/use_secrets.py +```{literalinclude} /examples/productionizing/productionizing/use_secrets.py :caption: productionizing/use_secrets.py :lines: 113-124 ``` @@ -155,14 +155,14 @@ In these scenarios you can specify the `mount_requirement=Secret.MountType.FILE` In the following example we force the mounting to be an environment variable: -```{rli} 
https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/productionizing/productionizing/use_secrets.py +```{literalinclude} /examples/productionizing/productionizing/use_secrets.py :caption: productionizing/use_secrets.py :lines: 139-158 ``` These tasks can be used in your workflow as usual -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/productionizing/productionizing/use_secrets.py +```{literalinclude} /examples/productionizing/productionizing/use_secrets.py :caption: productionizing/use_secrets.py :pyobject: my_secret_workflow ``` @@ -172,7 +172,7 @@ These tasks can be used in your workflow as usual The simplest way to test secret accessibility is to export the secret as an environment variable. There are some helper methods available to do so: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/productionizing/productionizing/use_secrets.py +```{literalinclude} /examples/productionizing/productionizing/use_secrets.py :caption: productionizing/use_secrets.py :lines: 172-182 ``` diff --git a/docs/user_guide/testing/mocking_tasks.md b/docs/user_guide/testing/mocking_tasks.md index b95af69b14..eb1c396075 100644 --- a/docs/user_guide/testing/mocking_tasks.md +++ b/docs/user_guide/testing/mocking_tasks.md @@ -6,42 +6,42 @@ A lot of the tasks that you write you can run locally, but some of them you will To clone and run the example code on this page, see the [Flytesnacks repo][flytesnacks]. 
``` -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/testing/testing/mocking.py +```{literalinclude} /examples/testing/testing/mocking.py :caption: testing/mocking.py :lines: 1-6 ``` This is a generic SQL task (and is by default not hooked up to any datastore nor handled by any plugin), and must be mocked: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/testing/testing/mocking.py +```{literalinclude} /examples/testing/testing/mocking.py :caption: testing/mocking.py :lines: 10-16 ``` This is a task that can run locally: -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/testing/testing/mocking.py +```{literalinclude} /examples/testing/testing/mocking.py :caption: testing/mocking.py :pyobject: t1 ``` Declare a workflow that chains these two tasks together. -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/testing/testing/mocking.py +```{literalinclude} /examples/testing/testing/mocking.py :caption: testing/mocking.py :pyobject: my_wf ``` Without a mock, calling the workflow would typically raise an exception, but with the `task_mock` construct, which returns a `MagicMock` object, we can override the return value. -```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/testing/testing/mocking.py +```{literalinclude} /examples/testing/testing/mocking.py :caption: testing/mocking.py :pyobject: main_1 ``` There is another utility as well called `patch` which offers the same functionality, but in the traditional Python patching style, where the first argument is the `MagicMock` object. 
-```{rli} https://raw.githubusercontent.com/flyteorg/flytesnacks/69dbe4840031a85d79d9ded25f80397c6834752d/examples/testing/testing/mocking.py +```{literalinclude} /examples/testing/testing/mocking.py :caption: testing/mocking.py :lines: 45-56 ``` diff --git a/flyteadmin/.golangci.yml b/flyteadmin/.golangci.yml index cd180b89d1..b3e4f05997 100644 --- a/flyteadmin/.golangci.yml +++ b/flyteadmin/.golangci.yml @@ -3,30 +3,25 @@ run: # because we're skipping TLS verification - for now - cmd/entrypoints/serve.go - pkg/async/messages/sqs.go - linters: disable-all: true enable: - - deadcode - errcheck - - gas + - gosec - gci - goconst - goimports - - golint - gosimple - govet - ineffassign - misspell - nakedret - staticcheck - - structcheck - typecheck - unconvert - unparam - unused - - varcheck - + - protogetter linters-settings: gci: custom-order: true @@ -35,6 +30,8 @@ linters-settings: - default - prefix(github.com/flyteorg) skip-generated: true + goconst: + ignore-tests: true issues: exclude-rules: - path: pkg/workflowengine/impl/prepare_execution.go diff --git a/flyteadmin/auth/authzserver/claims_verifier_test.go b/flyteadmin/auth/authzserver/claims_verifier_test.go index dca3cf6e22..568b248ccd 100644 --- a/flyteadmin/auth/authzserver/claims_verifier_test.go +++ b/flyteadmin/auth/authzserver/claims_verifier_test.go @@ -30,7 +30,7 @@ func Test_verifyClaims(t *testing.T) { assert.Equal(t, "my-client", identityCtx.AppID()) assert.Equal(t, "123", identityCtx.UserID()) assert.Equal(t, "https://myserver", identityCtx.Audience()) - assert.Equal(t, "byhsu@linkedin.com", identityCtx.UserInfo().Email) + assert.Equal(t, "byhsu@linkedin.com", identityCtx.UserInfo().GetEmail()) }) t.Run("Multiple audience", func(t *testing.T) { diff --git a/flyteadmin/auth/authzserver/metadata_provider_test.go b/flyteadmin/auth/authzserver/metadata_provider_test.go index c8f92fe8cc..c02825360f 100644 --- a/flyteadmin/auth/authzserver/metadata_provider_test.go +++ 
b/flyteadmin/auth/authzserver/metadata_provider_test.go @@ -35,10 +35,10 @@ func TestOAuth2MetadataProvider_FlyteClient(t *testing.T) { ctx := context.Background() resp, err := provider.GetPublicClientConfig(ctx, &service.PublicClientAuthConfigRequest{}) assert.NoError(t, err) - assert.Equal(t, "my-client", resp.ClientId) - assert.Equal(t, "client/", resp.RedirectUri) - assert.Equal(t, []string{"all"}, resp.Scopes) - assert.Equal(t, "http://dummyServer", resp.Audience) + assert.Equal(t, "my-client", resp.GetClientId()) + assert.Equal(t, "client/", resp.GetRedirectUri()) + assert.Equal(t, []string{"all"}, resp.GetScopes()) + assert.Equal(t, "http://dummyServer", resp.GetAudience()) } func TestOAuth2MetadataProvider_OAuth2Metadata(t *testing.T) { @@ -50,7 +50,7 @@ func TestOAuth2MetadataProvider_OAuth2Metadata(t *testing.T) { ctx := context.Background() resp, err := provider.GetOAuth2Metadata(ctx, &service.OAuth2MetadataRequest{}) assert.NoError(t, err) - assert.Equal(t, "https://issuer/", resp.Issuer) + assert.Equal(t, "https://issuer/", resp.GetIssuer()) }) var issuer string @@ -91,7 +91,7 @@ func TestOAuth2MetadataProvider_OAuth2Metadata(t *testing.T) { ctx := context.Background() resp, err := provider.GetOAuth2Metadata(ctx, &service.OAuth2MetadataRequest{}) assert.NoError(t, err) - assert.Equal(t, "https://dev-14186422.okta.com", resp.Issuer) + assert.Equal(t, "https://dev-14186422.okta.com", resp.GetIssuer()) }) t.Run("External AuthServer fallback url", func(t *testing.T) { @@ -110,7 +110,7 @@ func TestOAuth2MetadataProvider_OAuth2Metadata(t *testing.T) { ctx := context.Background() resp, err := provider.GetOAuth2Metadata(ctx, &service.OAuth2MetadataRequest{}) assert.NoError(t, err) - assert.Equal(t, "https://dev-14186422.okta.com", resp.Issuer) + assert.Equal(t, "https://dev-14186422.okta.com", resp.GetIssuer()) }) } diff --git a/flyteadmin/auth/authzserver/resource_server.go b/flyteadmin/auth/authzserver/resource_server.go index 59b984365d..917e0fcfaf 100644 
--- a/flyteadmin/auth/authzserver/resource_server.go +++ b/flyteadmin/auth/authzserver/resource_server.go @@ -116,7 +116,7 @@ func getJwksForIssuer(ctx context.Context, issuerBaseURL url.URL, cfg authConfig return nil, fmt.Errorf("failed to decode provider discovery object: %v", err) } - return oidc.NewRemoteKeySet(oidc.ClientContext(ctx, httpClient), p.JwksUri), nil + return oidc.NewRemoteKeySet(oidc.ClientContext(ctx, httpClient), p.GetJwksUri()), nil } // NewOAuth2ResourceServer initializes a new OAuth2ResourceServer. diff --git a/flyteadmin/auth/authzserver/resource_server_test.go b/flyteadmin/auth/authzserver/resource_server_test.go index 6ee94184ed..9541e73ccd 100644 --- a/flyteadmin/auth/authzserver/resource_server_test.go +++ b/flyteadmin/auth/authzserver/resource_server_test.go @@ -61,7 +61,7 @@ func newMockResourceServer(t testing.TB, publicKey rsa.PublicKey) (resourceServe } w.Header().Set("Content-Type", "application/json") - _, err = io.WriteString(w, string(raw)) + _, err = io.Writer.Write(w, raw) if !assert.NoError(t, err) { t.FailNow() diff --git a/flyteadmin/auth/cookie.go b/flyteadmin/auth/cookie.go index 2470220d24..456eeb8580 100644 --- a/flyteadmin/auth/cookie.go +++ b/flyteadmin/auth/cookie.go @@ -12,6 +12,7 @@ import ( "github.com/gorilla/securecookie" "github.com/flyteorg/flyte/flyteadmin/auth/interfaces" + "github.com/flyteorg/flyte/flyteadmin/pkg/config" "github.com/flyteorg/flyte/flytestdlib/errors" "github.com/flyteorg/flyte/flytestdlib/logger" ) @@ -68,6 +69,8 @@ func NewSecureCookie(cookieName, value string, hashKey, blockKey []byte, domain Value: encoded, Domain: domain, SameSite: sameSiteMode, + HttpOnly: true, + Secure: !config.GetConfig().Security.InsecureCookieHeader, }, nil } @@ -126,6 +129,7 @@ func NewCsrfCookie() http.Cookie { Value: csrfStateToken, SameSite: http.SameSiteLaxMode, HttpOnly: true, + Secure: !config.GetConfig().Security.InsecureCookieHeader, } } @@ -164,6 +168,7 @@ func NewRedirectCookie(ctx context.Context, 
redirectURL string) *http.Cookie { Value: urlObj.String(), SameSite: http.SameSiteLaxMode, HttpOnly: true, + Secure: !config.GetConfig().Security.InsecureCookieHeader, } } diff --git a/flyteadmin/auth/cookie_manager.go b/flyteadmin/auth/cookie_manager.go index ce360c9d3a..8a23272d01 100644 --- a/flyteadmin/auth/cookie_manager.go +++ b/flyteadmin/auth/cookie_manager.go @@ -11,6 +11,7 @@ import ( "golang.org/x/oauth2" "github.com/flyteorg/flyte/flyteadmin/auth/config" + serverConfig "github.com/flyteorg/flyte/flyteadmin/pkg/config" "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/service" "github.com/flyteorg/flyte/flytestdlib/errors" "github.com/flyteorg/flyte/flytestdlib/logger" @@ -218,6 +219,7 @@ func (c *CookieManager) getLogoutCookie(name string) *http.Cookie { Domain: c.domain, MaxAge: 0, HttpOnly: true, + Secure: !serverConfig.GetConfig().Security.InsecureCookieHeader, Expires: time.Now().Add(-1 * time.Hour), } } diff --git a/flyteadmin/auth/cookie_manager_test.go b/flyteadmin/auth/cookie_manager_test.go index 09d8468e83..444056ba8c 100644 --- a/flyteadmin/auth/cookie_manager_test.go +++ b/flyteadmin/auth/cookie_manager_test.go @@ -16,6 +16,7 @@ import ( "golang.org/x/oauth2" "github.com/flyteorg/flyte/flyteadmin/auth/config" + serverConfig "github.com/flyteorg/flyte/flyteadmin/pkg/config" "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/service" ) @@ -199,34 +200,53 @@ func TestCookieManager(t *testing.T) { assert.EqualError(t, err, "[EMPTY_OAUTH_TOKEN] Error reading existing secure cookie [flyte_idt]. 
Error: [SECURE_COOKIE_ERROR] Error reading secure cookie flyte_idt, caused by: securecookie: error - caused by: crypto/aes: invalid key size 75") }) - t.Run("delete_cookies", func(t *testing.T) { - w := httptest.NewRecorder() - - manager.DeleteCookies(ctx, w) - - cookies := w.Result().Cookies() - require.Equal(t, 5, len(cookies)) - - assert.True(t, time.Now().After(cookies[0].Expires)) - assert.Equal(t, cookieSetting.Domain, cookies[0].Domain) - assert.Equal(t, accessTokenCookieName, cookies[0].Name) - - assert.True(t, time.Now().After(cookies[1].Expires)) - assert.Equal(t, cookieSetting.Domain, cookies[1].Domain) - assert.Equal(t, accessTokenCookieNameSplitFirst, cookies[1].Name) - - assert.True(t, time.Now().After(cookies[2].Expires)) - assert.Equal(t, cookieSetting.Domain, cookies[2].Domain) - assert.Equal(t, accessTokenCookieNameSplitSecond, cookies[2].Name) - - assert.True(t, time.Now().After(cookies[3].Expires)) - assert.Equal(t, cookieSetting.Domain, cookies[3].Domain) - assert.Equal(t, refreshTokenCookieName, cookies[3].Name) + tests := []struct { + name string + insecureCookieHeader bool + expectedSecure bool + }{ + { + name: "secure_cookies", + insecureCookieHeader: false, + expectedSecure: true, + }, + { + name: "insecure_cookies", + insecureCookieHeader: true, + expectedSecure: false, + }, + } - assert.True(t, time.Now().After(cookies[4].Expires)) - assert.Equal(t, cookieSetting.Domain, cookies[4].Domain) - assert.Equal(t, idTokenCookieName, cookies[4].Name) - }) + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + w := httptest.NewRecorder() + + serverConfig.SetConfig(&serverConfig.ServerConfig{ + Security: serverConfig.ServerSecurityOptions{ + InsecureCookieHeader: tt.insecureCookieHeader, + }, + }) + + manager.DeleteCookies(ctx, w) + + cookies := w.Result().Cookies() + require.Equal(t, 5, len(cookies)) + + // Check secure flag for each cookie + for _, cookie := range cookies { + assert.Equal(t, tt.expectedSecure, cookie.Secure) + 
assert.True(t, time.Now().After(cookie.Expires)) + assert.Equal(t, cookieSetting.Domain, cookie.Domain) + } + + // Check cookie names + assert.Equal(t, accessTokenCookieName, cookies[0].Name) + assert.Equal(t, accessTokenCookieNameSplitFirst, cookies[1].Name) + assert.Equal(t, accessTokenCookieNameSplitSecond, cookies[2].Name) + assert.Equal(t, refreshTokenCookieName, cookies[3].Name) + assert.Equal(t, idTokenCookieName, cookies[4].Name) + }) + } t.Run("get_http_same_site_policy", func(t *testing.T) { manager.sameSitePolicy = config.SameSiteLaxMode diff --git a/flyteadmin/auth/cookie_test.go b/flyteadmin/auth/cookie_test.go index a5c58ad2ff..1134e957dc 100644 --- a/flyteadmin/auth/cookie_test.go +++ b/flyteadmin/auth/cookie_test.go @@ -4,7 +4,6 @@ import ( "bytes" "context" "encoding/base64" - "fmt" "net/http" "net/url" "testing" @@ -14,6 +13,7 @@ import ( "github.com/flyteorg/flyte/flyteadmin/auth/config" "github.com/flyteorg/flyte/flyteadmin/auth/interfaces/mocks" + serverConfig "github.com/flyteorg/flyte/flyteadmin/pkg/config" stdConfig "github.com/flyteorg/flyte/flytestdlib/config" ) @@ -26,22 +26,53 @@ func mustParseURL(t testing.TB, u string) url.URL { return *res } -// This function can also be called locally to generate new keys func TestSecureCookieLifecycle(t *testing.T) { - hashKey := securecookie.GenerateRandomKey(64) - assert.True(t, base64.RawStdEncoding.EncodeToString(hashKey) != "") - - blockKey := securecookie.GenerateRandomKey(32) - assert.True(t, base64.RawStdEncoding.EncodeToString(blockKey) != "") - fmt.Printf("Hash key: |%s| Block key: |%s|\n", - base64.RawStdEncoding.EncodeToString(hashKey), base64.RawStdEncoding.EncodeToString(blockKey)) - - cookie, err := NewSecureCookie("choc", "chip", hashKey, blockKey, "localhost", http.SameSiteLaxMode) - assert.NoError(t, err) + tests := []struct { + name string + insecureCookieHeader bool + expectedSecure bool + }{ + { + name: "secure_cookie", + insecureCookieHeader: false, + expectedSecure: true, + }, 
+ { + name: "insecure_cookie", + insecureCookieHeader: true, + expectedSecure: false, + }, + } - value, err := ReadSecureCookie(context.Background(), cookie, hashKey, blockKey) - assert.NoError(t, err) - assert.Equal(t, "chip", value) + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + // Generate hash and block keys for secure cookie + hashKey := securecookie.GenerateRandomKey(64) + assert.True(t, base64.RawStdEncoding.EncodeToString(hashKey) != "") + + blockKey := securecookie.GenerateRandomKey(32) + assert.True(t, base64.RawStdEncoding.EncodeToString(blockKey) != "") + + // Set up server configuration with insecureCookieHeader option + serverConfig.SetConfig(&serverConfig.ServerConfig{ + Security: serverConfig.ServerSecurityOptions{ + InsecureCookieHeader: tt.insecureCookieHeader, + }, + }) + + // Create a secure cookie + cookie, err := NewSecureCookie("choc", "chip", hashKey, blockKey, "localhost", http.SameSiteLaxMode) + assert.NoError(t, err) + + // Validate the Secure attribute of the cookie + assert.Equal(t, tt.expectedSecure, cookie.Secure) + + // Read and validate the secure cookie value + value, err := ReadSecureCookie(context.Background(), cookie, hashKey, blockKey) + assert.NoError(t, err) + assert.Equal(t, "chip", value) + }) + } } func TestNewCsrfToken(t *testing.T) { @@ -50,9 +81,41 @@ func TestNewCsrfToken(t *testing.T) { } func TestNewCsrfCookie(t *testing.T) { - cookie := NewCsrfCookie() - assert.Equal(t, "flyte_csrf_state", cookie.Name) - assert.True(t, cookie.HttpOnly) + tests := []struct { + name string + insecureCookieHeader bool + expectedSecure bool + }{ + { + name: "secure_csrf_cookie", + insecureCookieHeader: false, + expectedSecure: true, + }, + { + name: "insecure_csrf_cookie", + insecureCookieHeader: true, + expectedSecure: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + // Set up server configuration with insecureCookieHeader option + 
serverConfig.SetConfig(&serverConfig.ServerConfig{ + Security: serverConfig.ServerSecurityOptions{ + InsecureCookieHeader: tt.insecureCookieHeader, + }, + }) + + // Generate CSRF cookie + cookie := NewCsrfCookie() + + // Validate CSRF cookie properties + assert.Equal(t, "flyte_csrf_state", cookie.Name) + assert.True(t, cookie.HttpOnly) + assert.Equal(t, tt.expectedSecure, cookie.Secure) + }) + } } func TestHashCsrfState(t *testing.T) { @@ -121,6 +184,36 @@ func TestNewRedirectCookie(t *testing.T) { assert.NotNil(t, cookie) assert.Equal(t, http.SameSiteLaxMode, cookie.SameSite) }) + + tests := []struct { + name string + insecureCookieHeader bool + expectedSecure bool + }{ + { + name: "secure_cookies", + insecureCookieHeader: false, + expectedSecure: true, + }, + { + name: "insecure_cookies", + insecureCookieHeader: true, + expectedSecure: false, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + serverConfig.SetConfig(&serverConfig.ServerConfig{ + Security: serverConfig.ServerSecurityOptions{ + InsecureCookieHeader: tt.insecureCookieHeader, + }, + }) + ctx := context.Background() + cookie := NewRedirectCookie(ctx, "http://www.example.com/postLogin") + assert.NotNil(t, cookie) + assert.Equal(t, cookie.Secure, tt.expectedSecure) + }) + } } func TestGetAuthFlowEndRedirect(t *testing.T) { diff --git a/flyteadmin/auth/handler_utils.go b/flyteadmin/auth/handler_utils.go index a6b4031ca8..dd7a97d1af 100644 --- a/flyteadmin/auth/handler_utils.go +++ b/flyteadmin/auth/handler_utils.go @@ -168,6 +168,7 @@ func GetRedirectURLAllowed(ctx context.Context, urlRedirectParam string, cfg *co } logger.Debugf(ctx, "validating whether redirect url: %s is authorized", redirectURL) for _, authorizedURI := range cfg.AuthorizedURIs { + authorizedURI := authorizedURI if isAuthorizedRedirectURL(redirectURL, &authorizedURI.URL) { logger.Debugf(ctx, "authorizing redirect url: %s against authorized uri: %s", redirectURL.String(), authorizedURI.String()) return true 
diff --git a/flyteadmin/auth/handlers.go b/flyteadmin/auth/handlers.go index d8bc626652..002744fbd1 100644 --- a/flyteadmin/auth/handlers.go +++ b/flyteadmin/auth/handlers.go @@ -524,8 +524,8 @@ func GetUserInfoForwardResponseHandler() UserInfoForwardResponseHandler { return func(ctx context.Context, w http.ResponseWriter, m proto.Message) error { info, ok := m.(*service.UserInfoResponse) if ok { - if info.AdditionalClaims != nil { - for k, v := range info.AdditionalClaims.GetFields() { + if info.GetAdditionalClaims() != nil { + for k, v := range info.GetAdditionalClaims().GetFields() { jsonBytes, err := v.MarshalJSON() if err != nil { logger.Warningf(ctx, "failed to marshal claim [%s] to json: %v", k, err) @@ -535,7 +535,7 @@ func GetUserInfoForwardResponseHandler() UserInfoForwardResponseHandler { w.Header().Set(header, string(jsonBytes)) } } - w.Header().Set("X-User-Subject", info.Subject) + w.Header().Set("X-User-Subject", info.GetSubject()) } return nil } diff --git a/flyteadmin/auth/identity_context.go b/flyteadmin/auth/identity_context.go index 05889f7537..ab30088f01 100644 --- a/flyteadmin/auth/identity_context.go +++ b/flyteadmin/auth/identity_context.go @@ -103,7 +103,7 @@ func NewIdentityContext(audience, userID, appID string, authenticatedAt time.Tim userInfo = &service.UserInfoResponse{} } - if len(userInfo.Subject) == 0 { + if len(userInfo.GetSubject()) == 0 { userInfo.Subject = userID } diff --git a/flyteadmin/auth/identity_context_test.go b/flyteadmin/auth/identity_context_test.go index 1e72042be0..fb339027a9 100644 --- a/flyteadmin/auth/identity_context_test.go +++ b/flyteadmin/auth/identity_context_test.go @@ -22,7 +22,7 @@ func TestGetClaims(t *testing.T) { assert.NoError(t, err) assert.EqualValues(t, claims, withClaimsCtx.Claims()) - assert.NotEmpty(t, withClaimsCtx.UserInfo().AdditionalClaims) + assert.NotEmpty(t, withClaimsCtx.UserInfo().GetAdditionalClaims()) } func TestWithExecutionUserIdentifier(t *testing.T) { diff --git 
a/flyteadmin/auth/init_secrets.go b/flyteadmin/auth/init_secrets.go index 6e3d4a3078..fa964c57e9 100644 --- a/flyteadmin/auth/init_secrets.go +++ b/flyteadmin/auth/init_secrets.go @@ -8,7 +8,6 @@ import ( "encoding/base64" "encoding/pem" "fmt" - "io/ioutil" "os" "path/filepath" @@ -78,21 +77,21 @@ type SecretsSet struct { } func writeSecrets(ctx context.Context, secrets SecretsSet, path string) error { - err := ioutil.WriteFile(filepath.Join(path, config.SecretNameClaimSymmetricKey), []byte(base64.RawStdEncoding.EncodeToString(secrets.TokenHashKey)), os.ModePerm) + err := os.WriteFile(filepath.Join(path, config.SecretNameClaimSymmetricKey), []byte(base64.RawStdEncoding.EncodeToString(secrets.TokenHashKey)), os.ModePerm) // #nosec G306 if err != nil { return fmt.Errorf("failed to persist token hash key. Error: %w", err) } logger.Infof(ctx, "wrote %v", config.SecretNameClaimSymmetricKey) - err = ioutil.WriteFile(filepath.Join(path, config.SecretNameCookieHashKey), []byte(base64.RawStdEncoding.EncodeToString(secrets.CookieHashKey)), os.ModePerm) + err = os.WriteFile(filepath.Join(path, config.SecretNameCookieHashKey), []byte(base64.RawStdEncoding.EncodeToString(secrets.CookieHashKey)), os.ModePerm) // #nosec G306 if err != nil { return fmt.Errorf("failed to persist cookie hash key. Error: %w", err) } logger.Infof(ctx, "wrote %v", config.SecretNameCookieHashKey) - err = ioutil.WriteFile(filepath.Join(path, config.SecretNameCookieBlockKey), []byte(base64.RawStdEncoding.EncodeToString(secrets.CookieBlockKey)), os.ModePerm) + err = os.WriteFile(filepath.Join(path, config.SecretNameCookieBlockKey), []byte(base64.RawStdEncoding.EncodeToString(secrets.CookieBlockKey)), os.ModePerm) // #nosec G306 if err != nil { return fmt.Errorf("failed to persist cookie block key. 
Error: %w", err) } diff --git a/flyteadmin/dataproxy/service.go b/flyteadmin/dataproxy/service.go index c02fa3699f..27d03e3eda 100644 --- a/flyteadmin/dataproxy/service.go +++ b/flyteadmin/dataproxy/service.go @@ -49,22 +49,22 @@ func (s Service) CreateUploadLocation(ctx context.Context, req *service.CreateUp // If it exists, and a hash was provided, then check if it matches. If it matches, then proceed as normal otherwise fail. // If it doesn't exist, then proceed as normal. - if len(req.Project) == 0 || len(req.Domain) == 0 { - logger.Infof(ctx, "project and domain are required parameters. Project [%v]. Domain [%v]", req.Project, req.Domain) + if len(req.GetProject()) == 0 || len(req.GetDomain()) == 0 { + logger.Infof(ctx, "project and domain are required parameters. Project [%v]. Domain [%v]", req.GetProject(), req.GetDomain()) return nil, errors.NewFlyteAdminErrorf(codes.InvalidArgument, "project and domain are required parameters") } // At least one of the hash or manually given prefix must be provided. - if len(req.FilenameRoot) == 0 && len(req.ContentMd5) == 0 { - logger.Infof(ctx, "content_md5 or filename_root is a required parameter. FilenameRoot [%v], ContentMD5 [%v]", req.FilenameRoot, req.ContentMd5) + if len(req.GetFilenameRoot()) == 0 && len(req.GetContentMd5()) == 0 { + logger.Infof(ctx, "content_md5 or filename_root is a required parameter. FilenameRoot [%v], ContentMD5 [%v]", req.GetFilenameRoot(), req.GetContentMd5()) return nil, errors.NewFlyteAdminErrorf(codes.InvalidArgument, "content_md5 or filename_root is a required parameter") } // If we fall in here, that means that the full path is deterministic and we should check for existence. 
- if len(req.Filename) > 0 && len(req.FilenameRoot) > 0 { + if len(req.GetFilename()) > 0 && len(req.GetFilenameRoot()) > 0 { knownLocation, err := createStorageLocation(ctx, s.dataStore, s.cfg.Upload, - req.Org, req.Project, req.Domain, req.FilenameRoot, req.Filename) + req.GetOrg(), req.GetProject(), req.GetDomain(), req.GetFilenameRoot(), req.GetFilename()) if err != nil { logger.Errorf(ctx, "failed to create storage location. Error %v", err) return nil, errors.NewFlyteAdminErrorf(codes.Internal, "failed to create storage location, Error: %v", err) @@ -78,15 +78,15 @@ func (s Service) CreateUploadLocation(ctx context.Context, req *service.CreateUp // Basically if the file exists, then error unless the user also provided a hash and it matches. // Keep in mind this is just a best effort attempt. There can easily be race conditions where two users // request the same file at the same time and one of the writes is lost. - if len(req.ContentMd5) == 0 { + if len(req.GetContentMd5()) == 0 { return nil, errors.NewFlyteAdminErrorf(codes.AlreadyExists, "file already exists at location [%v], specify a matching hash if you wish to rewrite", knownLocation) } - base64Digest := base64.StdEncoding.EncodeToString(req.ContentMd5) + base64Digest := base64.StdEncoding.EncodeToString(req.GetContentMd5()) if len(metadata.ContentMD5()) == 0 { // For backward compatibility, dataproxy assumes that the Etag exists if ContentMD5 is not in the metadata. // Data proxy won't allow people to overwrite the file if both the Etag and the ContentMD5 do not exist. 
- hexDigest := hex.EncodeToString(req.ContentMd5) - base32Digest := base32.StdEncoding.EncodeToString(req.ContentMd5) + hexDigest := hex.EncodeToString(req.GetContentMd5()) + base32Digest := base32.StdEncoding.EncodeToString(req.GetContentMd5()) if hexDigest != metadata.Etag() && base32Digest != metadata.Etag() && base64Digest != metadata.Etag() { logger.Errorf(ctx, "File already exists at location [%v] but hashes do not match", knownLocation) return nil, errors.NewFlyteAdminErrorf(codes.AlreadyExists, "file already exists at location [%v], specify a matching hash if you wish to rewrite", knownLocation) @@ -99,7 +99,7 @@ func (s Service) CreateUploadLocation(ctx context.Context, req *service.CreateUp } } - if expiresIn := req.ExpiresIn; expiresIn != nil { + if expiresIn := req.GetExpiresIn(); expiresIn != nil { if !expiresIn.IsValid() { return nil, errors.NewFlyteAdminErrorf(codes.InvalidArgument, "expiresIn [%v] is invalid", expiresIn) } @@ -112,21 +112,21 @@ func (s Service) CreateUploadLocation(ctx context.Context, req *service.CreateUp req.ExpiresIn = durationpb.New(s.cfg.Upload.MaxExpiresIn.Duration) } - if len(req.Filename) == 0 { + if len(req.GetFilename()) == 0 { req.Filename = rand.String(s.cfg.Upload.DefaultFileNameLength) } - base64digestMD5 := base64.StdEncoding.EncodeToString(req.ContentMd5) + base64digestMD5 := base64.StdEncoding.EncodeToString(req.GetContentMd5()) var prefix string - if len(req.FilenameRoot) > 0 { - prefix = req.FilenameRoot + if len(req.GetFilenameRoot()) > 0 { + prefix = req.GetFilenameRoot() } else { // url safe base32 encoding - prefix = base32.StdEncoding.EncodeToString(req.ContentMd5) + prefix = base32.StdEncoding.EncodeToString(req.GetContentMd5()) } storagePath, err := createStorageLocation(ctx, s.dataStore, s.cfg.Upload, - req.Org, req.Project, req.Domain, prefix, req.Filename) + req.GetOrg(), req.GetProject(), req.GetDomain(), prefix, req.GetFilename()) if err != nil { logger.Errorf(ctx, "failed to create 
shardedStorageLocation. Error %v", err) return nil, errors.NewFlyteAdminErrorf(codes.Internal, "failed to create shardedStorageLocation, Error: %v", err) @@ -134,9 +134,9 @@ func (s Service) CreateUploadLocation(ctx context.Context, req *service.CreateUp resp, err := s.dataStore.CreateSignedURL(ctx, storagePath, storage.SignedURLProperties{ Scope: stow.ClientMethodPut, - ExpiresIn: req.ExpiresIn.AsDuration(), + ExpiresIn: req.GetExpiresIn().AsDuration(), ContentMD5: base64digestMD5, - AddContentMD5Metadata: req.AddContentMd5Metadata, + AddContentMD5Metadata: req.GetAddContentMd5Metadata(), }) if err != nil { @@ -147,7 +147,7 @@ func (s Service) CreateUploadLocation(ctx context.Context, req *service.CreateUp return &service.CreateUploadLocationResponse{ SignedUrl: resp.URL.String(), NativeUrl: storagePath.String(), - ExpiresAt: timestamppb.New(time.Now().Add(req.ExpiresIn.AsDuration())), + ExpiresAt: timestamppb.New(time.Now().Add(req.GetExpiresIn().AsDuration())), Headers: resp.RequiredRequestHeaders, }, nil } @@ -172,7 +172,7 @@ func (s Service) CreateDownloadLink(ctx context.Context, req *service.CreateDown switch req.GetArtifactType() { case service.ArtifactType_ARTIFACT_TYPE_DECK: - nativeURL = node.Closure.DeckUri + nativeURL = node.GetClosure().GetDeckUri() } } else { return nil, errors.NewFlyteAdminErrorf(codes.InvalidArgument, "unsupported source [%v]", reflect.TypeOf(req.GetSource())) @@ -194,7 +194,7 @@ func (s Service) CreateDownloadLink(ctx context.Context, req *service.CreateDown signedURLResp, err := s.dataStore.CreateSignedURL(ctx, ref, storage.SignedURLProperties{ Scope: stow.ClientMethodGet, - ExpiresIn: req.ExpiresIn.AsDuration(), + ExpiresIn: req.GetExpiresIn().AsDuration(), }) if err != nil { @@ -202,7 +202,7 @@ func (s Service) CreateDownloadLink(ctx context.Context, req *service.CreateDown } u := []string{signedURLResp.URL.String()} - ts := timestamppb.New(time.Now().Add(req.ExpiresIn.AsDuration())) + ts := 
timestamppb.New(time.Now().Add(req.GetExpiresIn().AsDuration())) // return &service.CreateDownloadLinkResponse{ @@ -223,9 +223,9 @@ func (s Service) CreateDownloadLocation(ctx context.Context, req *service.Create return nil, errors.NewFlyteAdminErrorf(codes.InvalidArgument, "error while validating request: %v", err) } - resp, err := s.dataStore.CreateSignedURL(ctx, storage.DataReference(req.NativeUrl), storage.SignedURLProperties{ + resp, err := s.dataStore.CreateSignedURL(ctx, storage.DataReference(req.GetNativeUrl()), storage.SignedURLProperties{ Scope: stow.ClientMethodGet, - ExpiresIn: req.ExpiresIn.AsDuration(), + ExpiresIn: req.GetExpiresIn().AsDuration(), }) if err != nil { @@ -234,21 +234,21 @@ func (s Service) CreateDownloadLocation(ctx context.Context, req *service.Create return &service.CreateDownloadLocationResponse{ SignedUrl: resp.URL.String(), - ExpiresAt: timestamppb.New(time.Now().Add(req.ExpiresIn.AsDuration())), + ExpiresAt: timestamppb.New(time.Now().Add(req.GetExpiresIn().AsDuration())), }, nil } func (s Service) validateCreateDownloadLocationRequest(req *service.CreateDownloadLocationRequest) error { - validatedExpiresIn, err := validateDuration(req.ExpiresIn, s.cfg.Download.MaxExpiresIn.Duration) + validatedExpiresIn, err := validateDuration(req.GetExpiresIn(), s.cfg.Download.MaxExpiresIn.Duration) if err != nil { return fmt.Errorf("expiresIn is invalid. 
Error: %w", err) } req.ExpiresIn = validatedExpiresIn - if _, err := url.Parse(req.NativeUrl); err != nil { + if _, err := url.Parse(req.GetNativeUrl()); err != nil { return fmt.Errorf("failed to parse native_url [%v]", - req.NativeUrl) + req.GetNativeUrl()) } return nil @@ -275,7 +275,7 @@ func validateDuration(input *durationpb.Duration, maxAllowed time.Duration) (*du } func (s Service) validateCreateDownloadLinkRequest(req *service.CreateDownloadLinkRequest) (*service.CreateDownloadLinkRequest, error) { - validatedExpiresIn, err := validateDuration(req.ExpiresIn, s.cfg.Download.MaxExpiresIn.Duration) + validatedExpiresIn, err := validateDuration(req.GetExpiresIn(), s.cfg.Download.MaxExpiresIn.Duration) if err != nil { return nil, fmt.Errorf("expiresIn is invalid. Error: %w", err) } @@ -328,16 +328,16 @@ func (s Service) GetCompleteTaskExecutionID(ctx context.Context, taskExecID *cor taskExecs, err := s.taskExecutionManager.ListTaskExecutions(ctx, &admin.TaskExecutionListRequest{ NodeExecutionId: taskExecID.GetNodeExecutionId(), Limit: 1, - Filters: fmt.Sprintf("eq(retry_attempt,%s)", strconv.Itoa(int(taskExecID.RetryAttempt))), + Filters: fmt.Sprintf("eq(retry_attempt,%s)", strconv.Itoa(int(taskExecID.GetRetryAttempt()))), }) if err != nil { return nil, errors.NewFlyteAdminErrorf(codes.InvalidArgument, "failed to list task executions [%v]. Error: %v", taskExecID, err) } - if len(taskExecs.TaskExecutions) == 0 { + if len(taskExecs.GetTaskExecutions()) == 0 { return nil, errors.NewFlyteAdminErrorf(codes.InvalidArgument, "no task executions were listed [%v]. 
Error: %v", taskExecID, err) } - taskExec := taskExecs.TaskExecutions[0] - return taskExec.Id, nil + taskExec := taskExecs.GetTaskExecutions()[0] + return taskExec.GetId(), nil } func (s Service) GetTaskExecutionID(ctx context.Context, attempt int, nodeExecID *core.NodeExecutionIdentifier) (*core.TaskExecutionIdentifier, error) { @@ -349,11 +349,11 @@ func (s Service) GetTaskExecutionID(ctx context.Context, attempt int, nodeExecID if err != nil { return nil, errors.NewFlyteAdminErrorf(codes.InvalidArgument, "failed to list task executions [%v]. Error: %v", nodeExecID, err) } - if len(taskExecs.TaskExecutions) == 0 { + if len(taskExecs.GetTaskExecutions()) == 0 { return nil, errors.NewFlyteAdminErrorf(codes.InvalidArgument, "no task executions were listed [%v]. Error: %v", nodeExecID, err) } - taskExec := taskExecs.TaskExecutions[0] - return taskExec.Id, nil + taskExec := taskExecs.GetTaskExecutions()[0] + return taskExec.GetId(), nil } func (s Service) GetDataFromNodeExecution(ctx context.Context, nodeExecID *core.NodeExecutionIdentifier, ioType common.ArtifactType, name string) ( @@ -368,9 +368,9 @@ func (s Service) GetDataFromNodeExecution(ctx context.Context, nodeExecID *core. var lm *core.LiteralMap if ioType == common.ArtifactTypeI { - lm = resp.FullInputs + lm = resp.GetFullInputs() } else if ioType == common.ArtifactTypeO { - lm = resp.FullOutputs + lm = resp.GetFullOutputs() } else { // Assume deck, and create a download link request dlRequest := service.CreateDownloadLinkRequest{ @@ -383,13 +383,13 @@ func (s Service) GetDataFromNodeExecution(ctx context.Context, nodeExecID *core. 
} return &service.GetDataResponse{ Data: &service.GetDataResponse_PreSignedUrls{ - PreSignedUrls: resp.PreSignedUrls, + PreSignedUrls: resp.GetPreSignedUrls(), }, }, nil } if name != "" { - if literal, ok := lm.Literals[name]; ok { + if literal, ok := lm.GetLiterals()[name]; ok { return &service.GetDataResponse{ Data: &service.GetDataResponse_Literal{ Literal: literal, @@ -418,15 +418,15 @@ func (s Service) GetDataFromTaskExecution(ctx context.Context, taskExecID *core. } if ioType == common.ArtifactTypeI { - lm = resp.FullInputs + lm = resp.GetFullInputs() } else if ioType == common.ArtifactTypeO { - lm = resp.FullOutputs + lm = resp.GetFullOutputs() } else { return nil, errors.NewFlyteAdminErrorf(codes.InvalidArgument, "deck type cannot be specified with a retry attempt, just use the node instead") } if name != "" { - if literal, ok := lm.Literals[name]; ok { + if literal, ok := lm.GetLiterals()[name]; ok { return &service.GetDataResponse{ Data: &service.GetDataResponse_Literal{ Literal: literal, diff --git a/flyteadmin/go.mod b/flyteadmin/go.mod index 5c008a46eb..82e2189f34 100644 --- a/flyteadmin/go.mod +++ b/flyteadmin/go.mod @@ -21,7 +21,7 @@ require ( github.com/ghodss/yaml v1.0.0 github.com/go-gormigrate/gormigrate/v2 v2.1.1 github.com/golang-jwt/jwt v3.2.2+incompatible - github.com/golang-jwt/jwt/v4 v4.5.0 + github.com/golang-jwt/jwt/v4 v4.5.1 github.com/golang/glog v1.2.0 github.com/golang/protobuf v1.5.3 github.com/google/uuid v1.6.0 @@ -48,7 +48,7 @@ require ( github.com/spf13/cobra v1.7.0 github.com/spf13/pflag v1.0.5 github.com/stretchr/testify v1.9.0 - github.com/wI2L/jsondiff v0.5.0 + github.com/wI2L/jsondiff v0.6.0 go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.47.0 go.opentelemetry.io/otel v1.24.0 golang.org/x/net v0.27.0 @@ -167,6 +167,7 @@ require ( github.com/prometheus/common v0.53.0 // indirect github.com/prometheus/procfs v0.15.1 // indirect github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475 // 
indirect + github.com/santhosh-tekuri/jsonschema v1.2.4 // indirect github.com/sendgrid/rest v2.6.9+incompatible // indirect github.com/shamaton/msgpack/v2 v2.2.2 // indirect github.com/sirupsen/logrus v1.9.3 // indirect @@ -176,10 +177,11 @@ require ( github.com/spf13/viper v1.11.0 // indirect github.com/stretchr/objx v0.5.2 // indirect github.com/subosito/gotenv v1.2.0 // indirect - github.com/tidwall/gjson v1.17.0 // indirect + github.com/tidwall/gjson v1.17.1 // indirect github.com/tidwall/match v1.1.1 // indirect - github.com/tidwall/pretty v1.2.0 // indirect + github.com/tidwall/pretty v1.2.1 // indirect github.com/tidwall/sjson v1.2.5 // indirect + gitlab.com/yvesf/json-schema-compare v0.0.0-20190604192943-a900c04201f7 // indirect go.opencensus.io v0.24.0 // indirect go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.46.1 // indirect go.opentelemetry.io/otel/exporters/jaeger v1.17.0 // indirect diff --git a/flyteadmin/go.sum b/flyteadmin/go.sum index ec5e0cdc1c..afd775c3ba 100644 --- a/flyteadmin/go.sum +++ b/flyteadmin/go.sum @@ -611,8 +611,8 @@ github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= github.com/golang-jwt/jwt v3.2.2+incompatible h1:IfV12K8xAKAnZqdXVzCZ+TOjboZ2keLg81eXfW3O+oY= github.com/golang-jwt/jwt v3.2.2+incompatible/go.mod h1:8pz2t5EyA70fFQQSrl6XZXzqecmYZeUEB8OUGHkxJ+I= -github.com/golang-jwt/jwt/v4 v4.5.0 h1:7cYmW1XlMY7h7ii7UhUyChSgS5wUJEnm9uZVTGqOWzg= -github.com/golang-jwt/jwt/v4 v4.5.0/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= +github.com/golang-jwt/jwt/v4 v4.5.1 h1:JdqV9zKUdtaa9gdPlywC3aeoEsR681PlKC+4F5gQgeo= +github.com/golang-jwt/jwt/v4 v4.5.1/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= github.com/golang-jwt/jwt/v5 v5.2.1 h1:OuVbFODueb089Lh128TAcimifWaLhJwVflnrgM17wHk= github.com/golang-jwt/jwt/v5 v5.2.1/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk= 
github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0/go.mod h1:E/TSTwGwJL78qG/PmXZO1EjYhfJinVAhrmmHX6Z8B9k= @@ -1159,6 +1159,7 @@ github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQD github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/samber/lo v1.47.0 h1:z7RynLwP5nbyRscyvcD043DWYoOcYRv3mV8lBeqOCLc= github.com/samber/lo v1.47.0/go.mod h1:RmDH9Ct32Qy3gduHQuKJ3gW1fMHAnE/fAzQuf6He5cU= +github.com/santhosh-tekuri/jsonschema v1.2.4 h1:hNhW8e7t+H1vgY+1QeEQpveR6D4+OwKPXCfD2aieJis= github.com/santhosh-tekuri/jsonschema v1.2.4/go.mod h1:TEAUOeZSmIxTTuHatJzrvARHiuO9LYd+cIxzgEHCQI4= github.com/santhosh-tekuri/jsonschema/v2 v2.1.0/go.mod h1:yzJzKUGV4RbWqWIBBP4wSOBqavX5saE02yirLS0OTyg= github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0= @@ -1271,8 +1272,8 @@ github.com/tidwall/gjson v1.3.2/go.mod h1:P256ACg0Mn+j1RXIDXoss50DeIABTYK1PULOJH github.com/tidwall/gjson v1.6.8/go.mod h1:zeFuBCIqD4sN/gmqBzZ4j7Jd6UcA2Fc56x7QFsv+8fI= github.com/tidwall/gjson v1.7.1/go.mod h1:5/xDoumyyDNerp2U36lyolv46b3uF/9Bu6OfyQ9GImk= github.com/tidwall/gjson v1.14.2/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= -github.com/tidwall/gjson v1.17.0 h1:/Jocvlh98kcTfpN2+JzGQWQcqrPQwDrVEMApx/M5ZwM= -github.com/tidwall/gjson v1.17.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= +github.com/tidwall/gjson v1.17.1 h1:wlYEnwqAHgzmhNUFfw7Xalt2JzQvsMx2Se4PcoFCT/U= +github.com/tidwall/gjson v1.17.1/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= github.com/tidwall/match v1.0.1/go.mod h1:LujAq0jyVjBy028G1WhWfIzbpQfMO8bBZ6Tyb0+pL9E= github.com/tidwall/match v1.0.3/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM= github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA= @@ -1280,8 +1281,9 @@ github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JT github.com/tidwall/pretty v1.0.0/go.mod 
h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk= github.com/tidwall/pretty v1.0.2/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk= github.com/tidwall/pretty v1.1.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk= -github.com/tidwall/pretty v1.2.0 h1:RWIZEg2iJ8/g6fDDYzMpobmaoGh5OLl4AXtGUGPcqCs= github.com/tidwall/pretty v1.2.0/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU= +github.com/tidwall/pretty v1.2.1 h1:qjsOFOWWQl+N3RsoF5/ssm1pHmJJwhjlSbZ51I6wMl4= +github.com/tidwall/pretty v1.2.1/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU= github.com/tidwall/sjson v1.0.4/go.mod h1:bURseu1nuBkFpIES5cz6zBtjmYeOQmEESshn7VpF15Y= github.com/tidwall/sjson v1.1.5/go.mod h1:VuJzsZnTowhSxWdOgsAnb886i4AjEyTkk7tNtsL7EYE= github.com/tidwall/sjson v1.2.5 h1:kLy8mja+1c9jlljvWTlSazM7cKDRfJuR/bOJhcY5NcY= @@ -1302,8 +1304,8 @@ github.com/unrolled/secure v0.0.0-20181005190816-ff9db2ff917f/go.mod h1:mnPT77IA github.com/urfave/negroni v1.0.0/go.mod h1:Meg73S6kFm/4PpbYdq35yYWoCZ9mS/YSx+lKnmiohz4= github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw= github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= -github.com/wI2L/jsondiff v0.5.0 h1:RRMTi/mH+R2aXcPe1VYyvGINJqQfC3R+KSEakuU1Ikw= -github.com/wI2L/jsondiff v0.5.0/go.mod h1:qqG6hnK0Lsrz2BpIVCxWiK9ItsBCpIZQiv0izJjOZ9s= +github.com/wI2L/jsondiff v0.6.0 h1:zrsH3FbfVa3JO9llxrcDy/XLkYPLgoMX6Mz3T2PP2AI= +github.com/wI2L/jsondiff v0.6.0/go.mod h1:D6aQ5gKgPF9g17j+E9N7aasmU1O+XvfmWm1y8UMmNpw= github.com/xdg/scram v0.0.0-20180814205039-7eeb5667e42c/go.mod h1:lB8K/P019DLNhemzwFU4jHLhdvlE6uDZjXFejJXr49I= github.com/xdg/stringprep v0.0.0-20180714160509-73f8eece6fdc/go.mod h1:Jhud4/sHMO4oL310DaZAKk9ZaJ08SJfe+sJh0HrGL1Y= github.com/xdg/stringprep v1.0.0/go.mod h1:Jhud4/sHMO4oL310DaZAKk9ZaJ08SJfe+sJh0HrGL1Y= @@ -1319,6 +1321,8 @@ github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1 github.com/yuin/goldmark v1.4.13/go.mod 
h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= github.com/zenazn/goji v0.9.0/go.mod h1:7S9M489iMyHBNxwZnk9/EHS098H4/F6TATF2mIxtB1Q= github.com/ziutek/mymysql v1.5.4/go.mod h1:LMSpPZ6DbqWFxNCHW77HeMg9I646SAhApZ/wKdgO/C0= +gitlab.com/yvesf/json-schema-compare v0.0.0-20190604192943-a900c04201f7 h1:BAkxmYRc1ZPl6Gap4HWqwPT8yLZMrgaAwx12Ft408sg= +gitlab.com/yvesf/json-schema-compare v0.0.0-20190604192943-a900c04201f7/go.mod h1:X40Z1OU8o1oiXWzBmkuYOaruzYGv60l0AxGiB0E9keI= go.elastic.co/apm v1.8.0/go.mod h1:tCw6CkOJgkWnzEthFN9HUP1uL3Gjc/Ur6m7gRPLaoH0= go.elastic.co/apm/module/apmhttp v1.8.0/go.mod h1:9LPFlEON51/lRbnWDfqAWErihIiAFDUMfMV27YjoWQ8= go.elastic.co/apm/module/apmot v1.8.0/go.mod h1:Q5Xzabte8G/fkvDjr1jlDuOSUt9hkVWNZEHh6ZNaTjI= diff --git a/flyteadmin/pkg/async/cloudevent/factory.go b/flyteadmin/pkg/async/cloudevent/factory.go index 51c38ffea4..4434b42aaa 100644 --- a/flyteadmin/pkg/async/cloudevent/factory.go +++ b/flyteadmin/pkg/async/cloudevent/factory.go @@ -101,7 +101,7 @@ func NewCloudEventsPublisher(ctx context.Context, db repositoryInterfaces.Reposi } if cloudEventsConfig.CloudEventVersion == runtimeInterfaces.CloudEventVersionv2 { - return cloudEventImplementations.NewCloudEventsWrappedPublisher(db, sender, scope, storageClient, urlData, remoteDataConfig) + return cloudEventImplementations.NewCloudEventsWrappedPublisher(db, sender, scope, storageClient, urlData, remoteDataConfig, cloudEventsConfig.EventsPublisherConfig) } return cloudEventImplementations.NewCloudEventsPublisher(sender, scope, cloudEventsConfig.EventsPublisherConfig.EventTypes) diff --git a/flyteadmin/pkg/async/cloudevent/implementations/cloudevent_publisher.go b/flyteadmin/pkg/async/cloudevent/implementations/cloudevent_publisher.go index 7aaab0bb60..a506910bba 100644 --- a/flyteadmin/pkg/async/cloudevent/implementations/cloudevent_publisher.go +++ b/flyteadmin/pkg/async/cloudevent/implementations/cloudevent_publisher.go @@ -57,20 +57,20 @@ func (p *Publisher) Publish(ctx context.Context, 
notificationType string, msg pr switch msgType := msg.(type) { case *admin.WorkflowExecutionEventRequest: - e := msgType.Event - executionID = e.ExecutionId.String() - phase = e.Phase.String() - eventTime = e.OccurredAt.AsTime() + e := msgType.GetEvent() + executionID = e.GetExecutionId().String() + phase = e.GetPhase().String() + eventTime = e.GetOccurredAt().AsTime() case *admin.TaskExecutionEventRequest: - e := msgType.Event - executionID = e.TaskId.String() - phase = e.Phase.String() - eventTime = e.OccurredAt.AsTime() + e := msgType.GetEvent() + executionID = e.GetTaskId().String() + phase = e.GetPhase().String() + eventTime = e.GetOccurredAt().AsTime() case *admin.NodeExecutionEventRequest: - e := msgType.Event - executionID = msgType.Event.Id.String() - phase = e.Phase.String() - eventTime = e.OccurredAt.AsTime() + e := msgType.GetEvent() + executionID = msgType.GetEvent().GetId().String() + phase = e.GetPhase().String() + eventTime = e.GetOccurredAt().AsTime() default: return fmt.Errorf("unsupported event types [%+v]", reflect.TypeOf(msg)) } @@ -114,12 +114,13 @@ func (p *Publisher) shouldPublishEvent(notificationType string) bool { } type CloudEventWrappedPublisher struct { - db repositoryInterfaces.Repository - sender interfaces.Sender - systemMetrics implementations.EventPublisherSystemMetrics - storageClient *storage.DataStore - urlData dataInterfaces.RemoteURLInterface - remoteDataConfig runtimeInterfaces.RemoteDataConfig + db repositoryInterfaces.Repository + sender interfaces.Sender + systemMetrics implementations.EventPublisherSystemMetrics + storageClient *storage.DataStore + urlData dataInterfaces.RemoteURLInterface + remoteDataConfig runtimeInterfaces.RemoteDataConfig + eventPublisherConfig runtimeInterfaces.EventsPublisherConfig } func (c *CloudEventWrappedPublisher) TransformWorkflowExecutionEvent(ctx context.Context, rawEvent *event.WorkflowExecutionEvent) (*event.CloudEventWorkflowExecution, error) { @@ -128,13 +129,13 @@ func (c 
*CloudEventWrappedPublisher) TransformWorkflowExecutionEvent(ctx context if rawEvent == nil { return nil, fmt.Errorf("nothing to publish, WorkflowExecution event is nil") } - if rawEvent.ExecutionId == nil { + if rawEvent.GetExecutionId() == nil { logger.Warningf(ctx, "nil execution id in event [%+v]", rawEvent) return nil, fmt.Errorf("nil execution id in event [%+v]", rawEvent) } - // For now, don't append any additional information unless succeeded - if rawEvent.Phase != core.WorkflowExecution_SUCCEEDED { + // For now, don't append any additional information unless succeeded or otherwise configured + if rawEvent.GetPhase() != core.WorkflowExecution_SUCCEEDED && !c.eventPublisherConfig.EnrichAllWorkflowEventTypes { return &event.CloudEventWorkflowExecution{ RawEvent: rawEvent, }, nil @@ -142,35 +143,35 @@ func (c *CloudEventWrappedPublisher) TransformWorkflowExecutionEvent(ctx context // TODO: Make this one call to the DB instead of two. executionModel, err := c.db.ExecutionRepo().Get(ctx, repositoryInterfaces.Identifier{ - Project: rawEvent.ExecutionId.Project, - Domain: rawEvent.ExecutionId.Domain, - Name: rawEvent.ExecutionId.Name, + Project: rawEvent.GetExecutionId().GetProject(), + Domain: rawEvent.GetExecutionId().GetDomain(), + Name: rawEvent.GetExecutionId().GetName(), }) if err != nil { - logger.Warningf(ctx, "couldn't find execution [%+v] for cloud event processing", rawEvent.ExecutionId) + logger.Warningf(ctx, "couldn't find execution [%+v] for cloud event processing", rawEvent.GetExecutionId()) return nil, err } ex, err := transformers.FromExecutionModel(ctx, executionModel, transformers.DefaultExecutionTransformerOptions) if err != nil { - logger.Warningf(ctx, "couldn't transform execution [%+v] for cloud event processing", rawEvent.ExecutionId) + logger.Warningf(ctx, "couldn't transform execution [%+v] for cloud event processing", rawEvent.GetExecutionId()) return nil, err } - if ex.Closure.WorkflowId == nil { + if ex.GetClosure().GetWorkflowId() == 
nil { logger.Warningf(ctx, "workflow id is nil for execution [%+v]", ex) return nil, fmt.Errorf("workflow id is nil for execution [%+v]", ex) } workflowModel, err := c.db.WorkflowRepo().Get(ctx, repositoryInterfaces.Identifier{ - Project: ex.Closure.WorkflowId.Project, - Domain: ex.Closure.WorkflowId.Domain, - Name: ex.Closure.WorkflowId.Name, - Version: ex.Closure.WorkflowId.Version, + Project: ex.GetClosure().GetWorkflowId().GetProject(), + Domain: ex.GetClosure().GetWorkflowId().GetDomain(), + Name: ex.GetClosure().GetWorkflowId().GetName(), + Version: ex.GetClosure().GetWorkflowId().GetVersion(), }) if err != nil { - logger.Warningf(ctx, "couldn't find workflow [%+v] for cloud event processing", ex.Closure.WorkflowId) + logger.Warningf(ctx, "couldn't find workflow [%+v] for cloud event processing", ex.GetClosure().GetWorkflowId()) return nil, err } var workflowInterface core.TypedInterface - if workflowModel.TypedInterface != nil && len(workflowModel.TypedInterface) > 0 { + if len(workflowModel.TypedInterface) > 0 { err = proto.Unmarshal(workflowModel.TypedInterface, &workflowInterface) if err != nil { return nil, fmt.Errorf( @@ -191,15 +192,16 @@ func (c *CloudEventWrappedPublisher) TransformWorkflowExecutionEvent(ctx context OutputInterface: &workflowInterface, ArtifactIds: spec.GetMetadata().GetArtifactIds(), ReferenceExecution: spec.GetMetadata().GetReferenceExecution(), - Principal: spec.GetMetadata().Principal, - LaunchPlanId: spec.LaunchPlan, + Principal: spec.GetMetadata().GetPrincipal(), + LaunchPlanId: spec.GetLaunchPlan(), + Labels: spec.GetLabels().GetValues(), }, nil } func getNodeExecutionContext(ctx context.Context, identifier *core.NodeExecutionIdentifier) context.Context { - ctx = contextutils.WithProjectDomain(ctx, identifier.ExecutionId.Project, identifier.ExecutionId.Domain) - ctx = contextutils.WithExecutionID(ctx, identifier.ExecutionId.Name) - return contextutils.WithNodeID(ctx, identifier.NodeId) + ctx = 
contextutils.WithProjectDomain(ctx, identifier.GetExecutionId().GetProject(), identifier.GetExecutionId().GetDomain()) + ctx = contextutils.WithExecutionID(ctx, identifier.GetExecutionId().GetName()) + return contextutils.WithNodeID(ctx, identifier.GetNodeId()) } // This is a rough copy of the ListTaskExecutions function in TaskExecutionManager. It can be deprecated once we move the processing out of Admin itself. @@ -230,7 +232,7 @@ func (c *CloudEventWrappedPublisher) getLatestTaskExecutions(ctx context.Context if err != nil { return nil, err } - if output.TaskExecutions == nil || len(output.TaskExecutions) == 0 { + if len(output.TaskExecutions) == 0 { logger.Debugf(ctx, "no task executions found for node exec id [%+v]", nodeExecutionID) return nil, nil } @@ -245,16 +247,16 @@ func (c *CloudEventWrappedPublisher) getLatestTaskExecutions(ctx context.Context } func (c *CloudEventWrappedPublisher) TransformNodeExecutionEvent(ctx context.Context, rawEvent *event.NodeExecutionEvent) (*event.CloudEventNodeExecution, error) { - if rawEvent == nil || rawEvent.Id == nil { + if rawEvent == nil || rawEvent.GetId() == nil { return nil, fmt.Errorf("nothing to publish, NodeExecution event or ID is nil") } // Skip nodes unless they're succeeded and not start nodes - if rawEvent.Phase != core.NodeExecution_SUCCEEDED { + if rawEvent.GetPhase() != core.NodeExecution_SUCCEEDED { return &event.CloudEventNodeExecution{ RawEvent: rawEvent, }, nil - } else if rawEvent.Id.NodeId == "start-node" { + } else if rawEvent.GetId().GetNodeId() == "start-node" { return &event.CloudEventNodeExecution{ RawEvent: rawEvent, }, nil @@ -263,12 +265,12 @@ func (c *CloudEventWrappedPublisher) TransformNodeExecutionEvent(ctx context.Con // This gets the parent workflow execution metadata executionModel, err := c.db.ExecutionRepo().Get(ctx, repositoryInterfaces.Identifier{ - Project: rawEvent.Id.ExecutionId.Project, - Domain: rawEvent.Id.ExecutionId.Domain, - Name: rawEvent.Id.ExecutionId.Name, + 
Project: rawEvent.GetId().GetExecutionId().GetProject(), + Domain: rawEvent.GetId().GetExecutionId().GetDomain(), + Name: rawEvent.GetId().GetExecutionId().GetName(), }) if err != nil { - logger.Infof(ctx, "couldn't find execution [%+v] for cloud event processing", rawEvent.Id.ExecutionId) + logger.Infof(ctx, "couldn't find execution [%+v] for cloud event processing", rawEvent.GetId().GetExecutionId()) return nil, err } @@ -283,22 +285,22 @@ func (c *CloudEventWrappedPublisher) TransformNodeExecutionEvent(ctx context.Con var taskExecID *core.TaskExecutionIdentifier var typedInterface *core.TypedInterface - lte, err := c.getLatestTaskExecutions(ctx, rawEvent.Id) + lte, err := c.getLatestTaskExecutions(ctx, rawEvent.GetId()) if err != nil { - logger.Errorf(ctx, "failed to get latest task execution for node exec id [%+v] with err: %v", rawEvent.Id, err) + logger.Errorf(ctx, "failed to get latest task execution for node exec id [%+v] with err: %v", rawEvent.GetId(), err) return nil, err } if lte != nil { taskModel, err := c.db.TaskRepo().Get(ctx, repositoryInterfaces.Identifier{ - Project: lte.Id.TaskId.Project, - Domain: lte.Id.TaskId.Domain, - Name: lte.Id.TaskId.Name, - Version: lte.Id.TaskId.Version, + Project: lte.GetId().GetTaskId().GetProject(), + Domain: lte.GetId().GetTaskId().GetDomain(), + Name: lte.GetId().GetTaskId().GetName(), + Version: lte.GetId().GetTaskId().GetVersion(), }) if err != nil { // TODO: metric this // metric - logger.Debugf(ctx, "Failed to get task with task id [%+v] with err %v", lte.Id.TaskId, err) + logger.Debugf(ctx, "Failed to get task with task id [%+v] with err %v", lte.GetId().GetTaskId(), err) return nil, err } task, err := transformers.FromTaskModel(taskModel) @@ -306,8 +308,8 @@ func (c *CloudEventWrappedPublisher) TransformNodeExecutionEvent(ctx context.Con logger.Debugf(ctx, "Failed to transform task model with err %v", err) return nil, err } - typedInterface = task.Closure.CompiledTask.Template.Interface - taskExecID = lte.Id 
+ typedInterface = task.GetClosure().GetCompiledTask().GetTemplate().GetInterface() + taskExecID = lte.GetId() } return &event.CloudEventNodeExecution{ @@ -315,8 +317,9 @@ func (c *CloudEventWrappedPublisher) TransformNodeExecutionEvent(ctx context.Con TaskExecId: taskExecID, OutputInterface: typedInterface, ArtifactIds: spec.GetMetadata().GetArtifactIds(), - Principal: spec.GetMetadata().Principal, - LaunchPlanId: spec.LaunchPlan, + Principal: spec.GetMetadata().GetPrincipal(), + LaunchPlanId: spec.GetLaunchPlan(), + Labels: spec.GetLabels().GetValues(), }, nil } @@ -326,8 +329,24 @@ func (c *CloudEventWrappedPublisher) TransformTaskExecutionEvent(ctx context.Con return nil, fmt.Errorf("nothing to publish, TaskExecution event is nil") } + executionModel, err := c.db.ExecutionRepo().Get(ctx, repositoryInterfaces.Identifier{ + Project: rawEvent.GetParentNodeExecutionId().GetExecutionId().GetProject(), + Domain: rawEvent.GetParentNodeExecutionId().GetExecutionId().GetDomain(), + Name: rawEvent.GetParentNodeExecutionId().GetExecutionId().GetName(), + }) + if err != nil { + logger.Warningf(ctx, "couldn't find execution [%+v] for cloud event processing", rawEvent.GetParentNodeExecutionId().GetExecutionId()) + return nil, err + } + ex, err := transformers.FromExecutionModel(ctx, executionModel, transformers.DefaultExecutionTransformerOptions) + if err != nil { + logger.Warningf(ctx, "couldn't transform execution [%+v] for cloud event processing", rawEvent.GetParentNodeExecutionId().GetExecutionId()) + return nil, err + } + return &event.CloudEventTaskExecution{ RawEvent: rawEvent, + Labels: ex.GetSpec().GetLabels().GetValues(), }, nil } @@ -348,14 +367,14 @@ func (c *CloudEventWrappedPublisher) Publish(ctx context.Context, notificationTy switch msgType := msg.(type) { case *admin.WorkflowExecutionEventRequest: topic = "cloudevents.WorkflowExecution" - e := msgType.Event - executionID = e.ExecutionId.String() - phase = e.Phase.String() - eventTime = e.OccurredAt.AsTime() 
+ e := msgType.GetEvent() + executionID = e.GetExecutionId().String() + phase = e.GetPhase().String() + eventTime = e.GetOccurredAt().AsTime() dummyNodeExecutionID := &core.NodeExecutionIdentifier{ NodeId: "end-node", - ExecutionId: e.ExecutionId, + ExecutionId: e.GetExecutionId(), } // This forms part of the key in the Artifact store, // but it should probably be entirely derived by that service instead. @@ -369,17 +388,17 @@ func (c *CloudEventWrappedPublisher) Publish(ctx context.Context, notificationTy case *admin.TaskExecutionEventRequest: topic = "cloudevents.TaskExecution" - e := msgType.Event - executionID = e.TaskId.String() - phase = e.Phase.String() - eventTime = e.OccurredAt.AsTime() + e := msgType.GetEvent() + executionID = e.GetTaskId().String() + phase = e.GetPhase().String() + eventTime = e.GetOccurredAt().AsTime() eventID = fmt.Sprintf("%v.%v", executionID, phase) - if e.ParentNodeExecutionId == nil { + if e.GetParentNodeExecutionId() == nil { return fmt.Errorf("parent node execution id is nil for task execution [%+v]", e) } - eventSource = common.FlyteURLKeyFromNodeExecutionIDRetry(e.ParentNodeExecutionId, - int(e.RetryAttempt)) + eventSource = common.FlyteURLKeyFromNodeExecutionIDRetry(e.GetParentNodeExecutionId(), + int(e.GetRetryAttempt())) finalMsg, err = c.TransformTaskExecutionEvent(ctx, e) if err != nil { logger.Errorf(ctx, "Failed to transform task execution event with error: %v", err) @@ -387,12 +406,12 @@ func (c *CloudEventWrappedPublisher) Publish(ctx context.Context, notificationTy } case *admin.NodeExecutionEventRequest: topic = "cloudevents.NodeExecution" - e := msgType.Event - executionID = msgType.Event.Id.String() - phase = e.Phase.String() - eventTime = e.OccurredAt.AsTime() + e := msgType.GetEvent() + executionID = msgType.GetEvent().GetId().String() + phase = e.GetPhase().String() + eventTime = e.GetOccurredAt().AsTime() eventID = fmt.Sprintf("%v.%v", executionID, phase) - eventSource = 
common.FlyteURLKeyFromNodeExecutionID(msgType.Event.Id) + eventSource = common.FlyteURLKeyFromNodeExecutionID(msgType.GetEvent().GetId()) finalMsg, err = c.TransformNodeExecutionEvent(ctx, e) if err != nil { logger.Errorf(ctx, "Failed to transform node execution event with error: %v", err) @@ -400,7 +419,7 @@ func (c *CloudEventWrappedPublisher) Publish(ctx context.Context, notificationTy } case *event.CloudEventExecutionStart: topic = "cloudevents.ExecutionStart" - executionID = msgType.ExecutionId.String() + executionID = msgType.GetExecutionId().String() eventID = fmt.Sprintf("%v", executionID) eventTime = time.Now() // CloudEventExecutionStart don't have a nested event @@ -472,14 +491,15 @@ func NewCloudEventsPublisher(sender interfaces.Sender, scope promutils.Scope, ev } func NewCloudEventsWrappedPublisher( - db repositoryInterfaces.Repository, sender interfaces.Sender, scope promutils.Scope, storageClient *storage.DataStore, urlData dataInterfaces.RemoteURLInterface, remoteDataConfig runtimeInterfaces.RemoteDataConfig) interfaces.Publisher { + db repositoryInterfaces.Repository, sender interfaces.Sender, scope promutils.Scope, storageClient *storage.DataStore, urlData dataInterfaces.RemoteURLInterface, remoteDataConfig runtimeInterfaces.RemoteDataConfig, eventPublisherConfig runtimeInterfaces.EventsPublisherConfig) interfaces.Publisher { return &CloudEventWrappedPublisher{ - db: db, - sender: sender, - systemMetrics: implementations.NewEventPublisherSystemMetrics(scope.NewSubScope("cloudevents_publisher")), - storageClient: storageClient, - urlData: urlData, - remoteDataConfig: remoteDataConfig, + db: db, + sender: sender, + systemMetrics: implementations.NewEventPublisherSystemMetrics(scope.NewSubScope("cloudevents_publisher")), + storageClient: storageClient, + urlData: urlData, + remoteDataConfig: remoteDataConfig, + eventPublisherConfig: eventPublisherConfig, } } diff --git a/flyteadmin/pkg/async/notifications/email.go 
b/flyteadmin/pkg/async/notifications/email.go index a89210cead..e23806cc12 100644 --- a/flyteadmin/pkg/async/notifications/email.go +++ b/flyteadmin/pkg/async/notifications/email.go @@ -30,58 +30,58 @@ const launchPlanVersion = "launch_plan.version" const replaceAllInstances = -1 func getProject(_ *admin.WorkflowExecutionEventRequest, exec *admin.Execution) string { - return exec.Id.Project + return exec.GetId().GetProject() } func getDomain(_ *admin.WorkflowExecutionEventRequest, exec *admin.Execution) string { - return exec.Id.Domain + return exec.GetId().GetDomain() } func getName(_ *admin.WorkflowExecutionEventRequest, exec *admin.Execution) string { - return exec.Id.Name + return exec.GetId().GetName() } func getPhase(request *admin.WorkflowExecutionEventRequest, _ *admin.Execution) string { - return strings.ToLower(request.Event.Phase.String()) + return strings.ToLower(request.GetEvent().GetPhase().String()) } func getError(request *admin.WorkflowExecutionEventRequest, _ *admin.Execution) string { - if request.Event.GetError() != nil { - return fmt.Sprintf(executionError, request.Event.GetError().Message) + if request.GetEvent().GetError() != nil { + return fmt.Sprintf(executionError, request.GetEvent().GetError().GetMessage()) } return "" } func getWorkflowProject(_ *admin.WorkflowExecutionEventRequest, exec *admin.Execution) string { - return exec.Closure.WorkflowId.Project + return exec.GetClosure().GetWorkflowId().GetProject() } func getWorkflowDomain(_ *admin.WorkflowExecutionEventRequest, exec *admin.Execution) string { - return exec.Closure.WorkflowId.Domain + return exec.GetClosure().GetWorkflowId().GetDomain() } func getWorkflowName(_ *admin.WorkflowExecutionEventRequest, exec *admin.Execution) string { - return exec.Closure.WorkflowId.Name + return exec.GetClosure().GetWorkflowId().GetName() } func getWorkflowVersion(_ *admin.WorkflowExecutionEventRequest, exec *admin.Execution) string { - return exec.Closure.WorkflowId.Version + return 
exec.GetClosure().GetWorkflowId().GetVersion() } func getLaunchPlanProject(_ *admin.WorkflowExecutionEventRequest, exec *admin.Execution) string { - return exec.Spec.LaunchPlan.Project + return exec.GetSpec().GetLaunchPlan().GetProject() } func getLaunchPlanDomain(_ *admin.WorkflowExecutionEventRequest, exec *admin.Execution) string { - return exec.Spec.LaunchPlan.Domain + return exec.GetSpec().GetLaunchPlan().GetDomain() } func getLaunchPlanName(_ *admin.WorkflowExecutionEventRequest, exec *admin.Execution) string { - return exec.Spec.LaunchPlan.Name + return exec.GetSpec().GetLaunchPlan().GetName() } func getLaunchPlanVersion(_ *admin.WorkflowExecutionEventRequest, exec *admin.Execution) string { - return exec.Spec.LaunchPlan.Version + return exec.GetSpec().GetLaunchPlan().GetVersion() } var getTemplateValueFuncs = map[string]GetTemplateValue{ diff --git a/flyteadmin/pkg/async/notifications/email_test.go b/flyteadmin/pkg/async/notifications/email_test.go index 35f351a45d..0d045603e8 100644 --- a/flyteadmin/pkg/async/notifications/email_test.go +++ b/flyteadmin/pkg/async/notifications/email_test.go @@ -155,8 +155,8 @@ func TestToEmailMessageFromWorkflowExecutionEvent(t *testing.T) { SubjectLine: `Notice: Execution "e124" has succeeded in "prod".`, Body: `Execution "e124" has succeeded in "prod". View details at
https://example.com/executions/proj/prod/e124.`, } - assert.True(t, emailMessage.Body == expected.Body) - assert.True(t, emailMessage.SubjectLine == expected.SubjectLine) - assert.True(t, emailMessage.SenderEmail == expected.SenderEmail) - assert.True(t, len(emailMessage.RecipientsEmail) == len(expected.RecipientsEmail)) + assert.True(t, emailMessage.GetBody() == expected.GetBody()) + assert.True(t, emailMessage.GetSubjectLine() == expected.GetSubjectLine()) + assert.True(t, emailMessage.GetSenderEmail() == expected.GetSenderEmail()) + assert.True(t, len(emailMessage.GetRecipientsEmail()) == len(expected.GetRecipientsEmail())) } diff --git a/flyteadmin/pkg/async/notifications/implementations/aws_emailer.go b/flyteadmin/pkg/async/notifications/implementations/aws_emailer.go index 712bd7080d..d9891d616b 100644 --- a/flyteadmin/pkg/async/notifications/implementations/aws_emailer.go +++ b/flyteadmin/pkg/async/notifications/implementations/aws_emailer.go @@ -23,7 +23,7 @@ type AwsEmailer struct { func FlyteEmailToSesEmailInput(email *admin.EmailMessage) ses.SendEmailInput { var toAddress []*string - for _, toEmail := range email.RecipientsEmail { + for _, toEmail := range email.GetRecipientsEmail() { // SES email input takes an array of pointers to strings so we have to create a new one for each email //nolint:unconvert e := string(toEmail) @@ -61,7 +61,7 @@ func (e *AwsEmailer) SendEmail(ctx context.Context, email *admin.EmailMessage) e e.systemMetrics.SendError.Inc() return errors.NewFlyteAdminErrorf(codes.Internal, "errors were seen while sending emails") } - logger.Debugf(ctx, "Sent email to %s sub: %s", email.RecipientsEmail, email.SubjectLine) + logger.Debugf(ctx, "Sent email to %s sub: %s", email.GetRecipientsEmail(), email.GetSubjectLine()) e.systemMetrics.SendSuccess.Inc() return nil } diff --git a/flyteadmin/pkg/async/notifications/implementations/aws_emailer_test.go b/flyteadmin/pkg/async/notifications/implementations/aws_emailer_test.go index 
01a2a06273..1caf1ce3a1 100644 --- a/flyteadmin/pkg/async/notifications/implementations/aws_emailer_test.go +++ b/flyteadmin/pkg/async/notifications/implementations/aws_emailer_test.go @@ -46,18 +46,18 @@ func TestAwsEmailer_SendEmail(t *testing.T) { sendEmailValidationFunc := func(input *ses.SendEmailInput) (*ses.SendEmailOutput, error) { assert.Equal(t, *input.Source, expectedSenderEmail) - assert.Equal(t, *input.Message.Body.Html.Data, emailNotification.Body) - assert.Equal(t, *input.Message.Subject.Data, emailNotification.SubjectLine) + assert.Equal(t, *input.Message.Body.Html.Data, emailNotification.GetBody()) + assert.Equal(t, *input.Message.Subject.Data, emailNotification.GetSubjectLine()) for _, toEmail := range input.Destination.ToAddresses { var foundEmail = false - for _, verifyToEmail := range emailNotification.RecipientsEmail { + for _, verifyToEmail := range emailNotification.GetRecipientsEmail() { if *toEmail == verifyToEmail { foundEmail = true } } assert.Truef(t, foundEmail, "To Email address [%s] wasn't apart of original inputs.", *toEmail) } - assert.Equal(t, len(input.Destination.ToAddresses), len(emailNotification.RecipientsEmail)) + assert.Equal(t, len(input.Destination.ToAddresses), len(emailNotification.GetRecipientsEmail())) return &ses.SendEmailOutput{}, nil } mockAwsEmail.SetSendEmailFunc(sendEmailValidationFunc) @@ -80,8 +80,8 @@ func TestFlyteEmailToSesEmailInput(t *testing.T) { } sesEmailInput := FlyteEmailToSesEmailInput(emailNotification) - assert.Equal(t, *sesEmailInput.Destination.ToAddresses[0], emailNotification.RecipientsEmail[0]) - assert.Equal(t, *sesEmailInput.Destination.ToAddresses[1], emailNotification.RecipientsEmail[1]) + assert.Equal(t, *sesEmailInput.Destination.ToAddresses[0], emailNotification.GetRecipientsEmail()[0]) + assert.Equal(t, *sesEmailInput.Destination.ToAddresses[1], emailNotification.GetRecipientsEmail()[1]) assert.Equal(t, *sesEmailInput.Message.Subject.Data, "Notice: Execution \"name\" has succeeded in 
\"domain\".") } diff --git a/flyteadmin/pkg/async/notifications/implementations/aws_processor_test.go b/flyteadmin/pkg/async/notifications/implementations/aws_processor_test.go index e566fdd740..611cebceb2 100644 --- a/flyteadmin/pkg/async/notifications/implementations/aws_processor_test.go +++ b/flyteadmin/pkg/async/notifications/implementations/aws_processor_test.go @@ -31,10 +31,10 @@ func TestProcessor_StartProcessing(t *testing.T) { testSubscriber.JSONMessages = append(testSubscriber.JSONMessages, testSubscriberMessage) sendEmailValidationFunc := func(ctx context.Context, email *admin.EmailMessage) error { - assert.Equal(t, email.Body, testEmail.Body) - assert.Equal(t, email.RecipientsEmail, testEmail.RecipientsEmail) - assert.Equal(t, email.SubjectLine, testEmail.SubjectLine) - assert.Equal(t, email.SenderEmail, testEmail.SenderEmail) + assert.Equal(t, email.GetBody(), testEmail.GetBody()) + assert.Equal(t, email.GetRecipientsEmail(), testEmail.GetRecipientsEmail()) + assert.Equal(t, email.GetSubjectLine(), testEmail.GetSubjectLine()) + assert.Equal(t, email.GetSenderEmail(), testEmail.GetSenderEmail()) return nil } mockEmailer.SetSendEmailFunc(sendEmailValidationFunc) diff --git a/flyteadmin/pkg/async/notifications/implementations/gcp_processor_test.go b/flyteadmin/pkg/async/notifications/implementations/gcp_processor_test.go index d48efeeee9..6d0f7d87fe 100644 --- a/flyteadmin/pkg/async/notifications/implementations/gcp_processor_test.go +++ b/flyteadmin/pkg/async/notifications/implementations/gcp_processor_test.go @@ -35,10 +35,10 @@ func TestGcpProcessor_StartProcessing(t *testing.T) { testGcpProcessor := NewGcpProcessor(&testGcpSubscriber, &mockGcpEmailer, promutils.NewTestScope()) sendEmailValidationFunc := func(ctx context.Context, email *admin.EmailMessage) error { - assert.Equal(t, email.Body, testEmail.Body) - assert.Equal(t, email.RecipientsEmail, testEmail.RecipientsEmail) - assert.Equal(t, email.SubjectLine, testEmail.SubjectLine) - 
assert.Equal(t, email.SenderEmail, testEmail.SenderEmail) + assert.Equal(t, email.GetBody(), testEmail.GetBody()) + assert.Equal(t, email.GetRecipientsEmail(), testEmail.GetRecipientsEmail()) + assert.Equal(t, email.GetSubjectLine(), testEmail.GetSubjectLine()) + assert.Equal(t, email.GetSenderEmail(), testEmail.GetSenderEmail()) return nil } mockGcpEmailer.SetSendEmailFunc(sendEmailValidationFunc) diff --git a/flyteadmin/pkg/async/notifications/implementations/noop_notifications.go b/flyteadmin/pkg/async/notifications/implementations/noop_notifications.go index 03dfa063ea..adae0d92fc 100644 --- a/flyteadmin/pkg/async/notifications/implementations/noop_notifications.go +++ b/flyteadmin/pkg/async/notifications/implementations/noop_notifications.go @@ -16,7 +16,7 @@ type NoopEmail struct{} func (n *NoopEmail) SendEmail(ctx context.Context, email *admin.EmailMessage) error { logger.Debugf(ctx, "received noop SendEmail request with subject [%s] and recipient [%s]", - email.SubjectLine, strings.Join(email.RecipientsEmail, ",")) + email.GetSubjectLine(), strings.Join(email.GetRecipientsEmail(), ",")) return nil } diff --git a/flyteadmin/pkg/async/notifications/implementations/sandbox_processor_test.go b/flyteadmin/pkg/async/notifications/implementations/sandbox_processor_test.go index 83594284a9..17251ca723 100644 --- a/flyteadmin/pkg/async/notifications/implementations/sandbox_processor_test.go +++ b/flyteadmin/pkg/async/notifications/implementations/sandbox_processor_test.go @@ -20,10 +20,10 @@ func TestSandboxProcessor_StartProcessingSuccess(t *testing.T) { testSandboxProcessor := NewSandboxProcessor(msgChan, &mockSandboxEmailer) sendEmailValidationFunc := func(ctx context.Context, email *admin.EmailMessage) error { - assert.Equal(t, testEmail.Body, email.Body) - assert.Equal(t, testEmail.RecipientsEmail, email.RecipientsEmail) - assert.Equal(t, testEmail.SubjectLine, email.SubjectLine) - assert.Equal(t, testEmail.SenderEmail, email.SenderEmail) + assert.Equal(t, 
testEmail.GetBody(), email.GetBody()) + assert.Equal(t, testEmail.GetRecipientsEmail(), email.GetRecipientsEmail()) + assert.Equal(t, testEmail.GetSubjectLine(), email.GetSubjectLine()) + assert.Equal(t, testEmail.GetSenderEmail(), email.GetSenderEmail()) return nil } diff --git a/flyteadmin/pkg/async/notifications/implementations/sendgrid_emailer.go b/flyteadmin/pkg/async/notifications/implementations/sendgrid_emailer.go index c8386bd41e..a325cbee75 100644 --- a/flyteadmin/pkg/async/notifications/implementations/sendgrid_emailer.go +++ b/flyteadmin/pkg/async/notifications/implementations/sendgrid_emailer.go @@ -34,15 +34,15 @@ func getSendgridEmail(adminEmail *admin.EmailMessage) *mail.SGMailV3 { m := mail.NewV3Mail() // This from email address is really here as a formality. For sendgrid specifically, the sender email is determined // from the api key that's used, not what you send along here. - from := mail.NewEmail("Flyte Notifications", adminEmail.SenderEmail) - content := mail.NewContent("text/html", adminEmail.Body) + from := mail.NewEmail("Flyte Notifications", adminEmail.GetSenderEmail()) + content := mail.NewContent("text/html", adminEmail.GetBody()) m.SetFrom(from) m.AddContent(content) personalization := mail.NewPersonalization() - emailAddresses := getEmailAddresses(adminEmail.RecipientsEmail) + emailAddresses := getEmailAddresses(adminEmail.GetRecipientsEmail()) personalization.AddTos(emailAddresses...) 
- personalization.Subject = adminEmail.SubjectLine + personalization.Subject = adminEmail.GetSubjectLine() m.AddPersonalizations(personalization) return m diff --git a/flyteadmin/pkg/async/notifications/implementations/smtp_emailer.go b/flyteadmin/pkg/async/notifications/implementations/smtp_emailer.go index 5a705bc0c1..29a79b1589 100644 --- a/flyteadmin/pkg/async/notifications/implementations/smtp_emailer.go +++ b/flyteadmin/pkg/async/notifications/implementations/smtp_emailer.go @@ -72,11 +72,11 @@ func (s *SMTPEmailer) SendEmail(ctx context.Context, email *admin.EmailMessage) s.smtpClient = smtpClient } - if err := s.smtpClient.Mail(email.SenderEmail); err != nil { + if err := s.smtpClient.Mail(email.GetSenderEmail()); err != nil { return s.emailError(ctx, fmt.Sprintf("Error creating email instance: %s", err)) } - for _, recipient := range email.RecipientsEmail { + for _, recipient := range email.GetRecipientsEmail() { if err := s.smtpClient.Rcpt(recipient); err != nil { return s.emailError(ctx, fmt.Sprintf("Error adding email recipient: %s", err)) } @@ -113,8 +113,8 @@ func (s *SMTPEmailer) emailError(ctx context.Context, error string) error { func createMailBody(emailSender string, email *admin.EmailMessage) string { headerMap := make(map[string]string) headerMap["From"] = emailSender - headerMap["To"] = strings.Join(email.RecipientsEmail, ",") - headerMap["Subject"] = email.SubjectLine + headerMap["To"] = strings.Join(email.GetRecipientsEmail(), ",") + headerMap["Subject"] = email.GetSubjectLine() headerMap["Content-Type"] = "text/html; charset=\"UTF-8\"" mailMessage := "" @@ -123,7 +123,7 @@ func createMailBody(emailSender string, email *admin.EmailMessage) string { mailMessage += fmt.Sprintf("%s: %s\r\n", k, v) } - mailMessage += "\r\n" + email.Body + mailMessage += "\r\n" + email.GetBody() return mailMessage } @@ -140,7 +140,7 @@ func NewSMTPEmailer(ctx context.Context, config runtimeInterfaces.NotificationsC auth := smtp.PlainAuth("", 
emailConf.SMTPUsername, smtpPassword, emailConf.SMTPServer) - // #nosec G402 + // #nosec G402: Allow skipping TLS verification in specific environments. tlsConfiguration = &tls.Config{ InsecureSkipVerify: emailConf.SMTPSkipTLSVerify, ServerName: emailConf.SMTPServer, diff --git a/flyteadmin/pkg/async/schedule/aws/cloud_watch_scheduler.go b/flyteadmin/pkg/async/schedule/aws/cloud_watch_scheduler.go index 9c3cb166b5..d4249e9122 100644 --- a/flyteadmin/pkg/async/schedule/aws/cloud_watch_scheduler.go +++ b/flyteadmin/pkg/async/schedule/aws/cloud_watch_scheduler.go @@ -78,7 +78,7 @@ func getScheduleName(scheduleNamePrefix string, identifier *core.Identifier) str func getScheduleDescription(identifier *core.Identifier) string { return fmt.Sprintf(scheduleDescriptionFormat, - identifier.Project, identifier.Domain, identifier.Name) + identifier.GetProject(), identifier.GetDomain(), identifier.GetName()) } func getScheduleExpression(schedule *admin.Schedule) (string, error) { @@ -88,11 +88,11 @@ func getScheduleExpression(schedule *admin.Schedule) (string, error) { if schedule.GetRate() != nil { // AWS uses pluralization for units of values not equal to 1. 
// See https://docs.aws.amazon.com/lambda/latest/dg/tutorial-scheduled-events-schedule-expressions.html - unit := strings.ToLower(schedule.GetRate().Unit.String()) - if schedule.GetRate().Value != 1 { + unit := strings.ToLower(schedule.GetRate().GetUnit().String()) + if schedule.GetRate().GetValue() != 1 { unit = fmt.Sprintf("%ss", unit) } - return fmt.Sprintf(rateExpression, schedule.GetRate().Value, unit), nil + return fmt.Sprintf(rateExpression, schedule.GetRate().GetValue(), unit), nil } logger.Debugf(context.Background(), "scheduler encountered invalid schedule expression: %s", schedule.String()) return "", errors.NewFlyteAdminErrorf(codes.InvalidArgument, "unrecognized schedule expression") @@ -176,9 +176,9 @@ func (s *cloudWatchScheduler) CreateScheduleInput(ctx context.Context, appConfig payload, err := SerializeScheduleWorkflowPayload( schedule.GetKickoffTimeInputArg(), &admin.NamedEntityIdentifier{ - Project: identifier.Project, - Domain: identifier.Domain, - Name: identifier.Name, + Project: identifier.GetProject(), + Domain: identifier.GetDomain(), + Name: identifier.GetName(), }) if err != nil { logger.Errorf(ctx, "failed to serialize schedule workflow payload for launch plan: %v with err: %v", diff --git a/flyteadmin/pkg/async/schedule/aws/shared.go b/flyteadmin/pkg/async/schedule/aws/shared.go index 3868e05799..e21b25ed5b 100644 --- a/flyteadmin/pkg/async/schedule/aws/shared.go +++ b/flyteadmin/pkg/async/schedule/aws/shared.go @@ -12,7 +12,7 @@ import ( func hashIdentifier(identifier *core.Identifier) uint64 { h := fnv.New64() _, err := h.Write([]byte(fmt.Sprintf(scheduleNameInputsFormat, - identifier.Project, identifier.Domain, identifier.Name))) + identifier.GetProject(), identifier.GetDomain(), identifier.GetName()))) if err != nil { // This shouldn't occur. 
logger.Errorf(context.Background(), diff --git a/flyteadmin/pkg/async/schedule/aws/workflow_executor.go b/flyteadmin/pkg/async/schedule/aws/workflow_executor.go index c4a5d75d14..d9a21c9026 100644 --- a/flyteadmin/pkg/async/schedule/aws/workflow_executor.go +++ b/flyteadmin/pkg/async/schedule/aws/workflow_executor.go @@ -63,18 +63,18 @@ var doNotconsumeBase64 = false func (e *workflowExecutor) resolveKickoffTimeArg( request ScheduledWorkflowExecutionRequest, launchPlan *admin.LaunchPlan, executionRequest *admin.ExecutionCreateRequest) error { - if request.KickoffTimeArg == "" || launchPlan.Closure.ExpectedInputs == nil { + if request.KickoffTimeArg == "" || launchPlan.GetClosure().GetExpectedInputs() == nil { logger.Debugf(context.Background(), "No kickoff time to resolve for scheduled workflow execution: [%s/%s/%s]", - executionRequest.Project, executionRequest.Domain, executionRequest.Name) + executionRequest.GetProject(), executionRequest.GetDomain(), executionRequest.GetName()) return nil } - for name := range launchPlan.Closure.ExpectedInputs.Parameters { + for name := range launchPlan.GetClosure().GetExpectedInputs().GetParameters() { if name == request.KickoffTimeArg { ts, err := ptypes.TimestampProto(request.KickoffTime) if err != nil { logger.Warningf(context.Background(), "failed to serialize kickoff time %+v to timestamp proto for scheduled workflow execution with "+ - "launchPlan [%+v]", request.KickoffTime, launchPlan.Id) + "launchPlan [%+v]", request.KickoffTime, launchPlan.GetId()) return errors.NewFlyteAdminErrorf( codes.Internal, "could not serialize kickoff time %+v to timestamp proto", request.KickoffTime) } @@ -96,7 +96,7 @@ func (e *workflowExecutor) resolveKickoffTimeArg( } logger.Warningf(context.Background(), "expected kickoff time arg with launch plan [%+v] but did not find any matching expected input to resolve", - launchPlan.Id) + launchPlan.GetId()) return nil } @@ -112,23 +112,24 @@ func (e *workflowExecutor) 
getActiveLaunchPlanVersion(launchPlanIdentifier *admi e.metrics.NoActiveLaunchPlanVersionsFound.Inc() return &admin.LaunchPlan{}, err } - if len(launchPlans.LaunchPlans) != 1 { + if len(launchPlans.GetLaunchPlans()) != 1 { e.metrics.GreaterThan1LaunchPlanVersionsFound.Inc() logger.Warningf(context.Background(), "failed to get exactly one active launch plan for identifier: %+v", launchPlanIdentifier) return &admin.LaunchPlan{}, errors.NewFlyteAdminErrorf(codes.Internal, "failed to get exactly one active launch plan for identifier: %+v", launchPlanIdentifier) } - return launchPlans.LaunchPlans[0], nil + return launchPlans.GetLaunchPlans()[0], nil } func generateExecutionName(launchPlan *admin.LaunchPlan, kickoffTime time.Time) string { hashedIdentifier := hashIdentifier(&core.Identifier{ - Project: launchPlan.Id.Project, - Domain: launchPlan.Id.Domain, - Name: launchPlan.Id.Name, + Project: launchPlan.GetId().GetProject(), + Domain: launchPlan.GetId().GetDomain(), + Name: launchPlan.GetId().GetName(), }) - randomSeed := kickoffTime.UnixNano() + int64(hashedIdentifier) + randomSeed := kickoffTime.UnixNano() + int64(hashedIdentifier) // #nosec G115 + return common.GetExecutionName(randomSeed) } @@ -137,7 +138,7 @@ func (e *workflowExecutor) formulateExecutionCreateRequest( // Deterministically assign a name based on the schedule kickoff time/launch plan definition. name := generateExecutionName(launchPlan, kickoffTime) logger.Debugf(context.Background(), "generated name [%s] for scheduled execution with launch plan [%+v]", - name, launchPlan.Id) + name, launchPlan.GetId()) kickoffTimeProto, err := ptypes.TimestampProto(kickoffTime) if err != nil { // We expected that kickoff times are valid (in order for a scheduled event to fire). 
@@ -148,11 +149,11 @@ func (e *workflowExecutor) formulateExecutionCreateRequest( kickoffTime, err) } executionRequest := &admin.ExecutionCreateRequest{ - Project: launchPlan.Id.Project, - Domain: launchPlan.Id.Domain, + Project: launchPlan.GetId().GetProject(), + Domain: launchPlan.GetId().GetDomain(), Name: name, Spec: &admin.ExecutionSpec{ - LaunchPlan: launchPlan.Id, + LaunchPlan: launchPlan.GetId(), Metadata: &admin.ExecutionMetadata{ Mode: admin.ExecutionMetadata_SCHEDULED, ScheduledAt: kickoffTimeProto, @@ -208,8 +209,8 @@ func (e *workflowExecutor) run() error { } executionRequest := e.formulateExecutionCreateRequest(launchPlan, scheduledWorkflowExecutionRequest.KickoffTime) - ctx = contextutils.WithWorkflowID(ctx, fmt.Sprintf(workflowIdentifierFmt, executionRequest.Project, - executionRequest.Domain, executionRequest.Name)) + ctx = contextutils.WithWorkflowID(ctx, fmt.Sprintf(workflowIdentifierFmt, executionRequest.GetProject(), + executionRequest.GetDomain(), executionRequest.GetName())) err = e.resolveKickoffTimeArg(scheduledWorkflowExecutionRequest, launchPlan, executionRequest) if err != nil { e.metrics.FailedResolveKickoffTimeArg.Inc() @@ -228,12 +229,12 @@ func (e *workflowExecutor) run() error { if ok && ec.Code() != codes.AlreadyExists { e.metrics.FailedKickoffExecution.Inc() logger.Errorf(context.Background(), "failed to execute scheduled workflow [%s:%s:%s] with err: %v", - executionRequest.Project, executionRequest.Domain, executionRequest.Name, err) + executionRequest.GetProject(), executionRequest.GetDomain(), executionRequest.GetName(), err) continue } } else { logger.Debugf(context.Background(), "created scheduled workflow execution %+v with kickoff time %+v", - response.Id, scheduledWorkflowExecutionRequest.KickoffTime) + response.GetId(), scheduledWorkflowExecutionRequest.KickoffTime) } executionLaunchTime := time.Now() diff --git a/flyteadmin/pkg/async/schedule/aws/workflow_executor_test.go 
b/flyteadmin/pkg/async/schedule/aws/workflow_executor_test.go index f6fc9b9693..38f8afddbd 100644 --- a/flyteadmin/pkg/async/schedule/aws/workflow_executor_test.go +++ b/flyteadmin/pkg/async/schedule/aws/workflow_executor_test.go @@ -82,9 +82,9 @@ func TestResolveKickoffTimeArg(t *testing.T) { }, } executionRequest := &admin.ExecutionCreateRequest{ - Project: testIdentifier.Project, - Domain: testIdentifier.Domain, - Name: testIdentifier.Name, + Project: testIdentifier.GetProject(), + Domain: testIdentifier.GetDomain(), + Name: testIdentifier.GetName(), Inputs: &core.LiteralMap{ Literals: map[string]*core.Literal{}, }, @@ -92,9 +92,9 @@ func TestResolveKickoffTimeArg(t *testing.T) { testExecutor := newWorkflowExecutorForTest(nil, nil, nil) err := testExecutor.resolveKickoffTimeArg(scheduleRequest, launchPlan, executionRequest) assert.Nil(t, err) - assert.Contains(t, executionRequest.Inputs.Literals, testKickoffTime) + assert.Contains(t, executionRequest.GetInputs().GetLiterals(), testKickoffTime) assert.Equal(t, testKickoffTimeProtoLiteral, - executionRequest.Inputs.Literals[testKickoffTime]) + executionRequest.GetInputs().GetLiterals()[testKickoffTime]) } func TestResolveKickoffTimeArg_NoKickoffTimeArg(t *testing.T) { @@ -112,9 +112,9 @@ func TestResolveKickoffTimeArg_NoKickoffTimeArg(t *testing.T) { }, } executionRequest := &admin.ExecutionCreateRequest{ - Project: testIdentifier.Project, - Domain: testIdentifier.Domain, - Name: testIdentifier.Name, + Project: testIdentifier.GetProject(), + Domain: testIdentifier.GetDomain(), + Name: testIdentifier.GetName(), Inputs: &core.LiteralMap{ Literals: map[string]*core.Literal{}, }, @@ -122,7 +122,7 @@ func TestResolveKickoffTimeArg_NoKickoffTimeArg(t *testing.T) { testExecutor := newWorkflowExecutorForTest(nil, nil, nil) err := testExecutor.resolveKickoffTimeArg(scheduleRequest, launchPlan, executionRequest) assert.Nil(t, err) - assert.NotContains(t, executionRequest.Inputs.Literals, testKickoffTime) + 
assert.NotContains(t, executionRequest.GetInputs().GetLiterals(), testKickoffTime) } func TestGetActiveLaunchPlanVersion(t *testing.T) { @@ -132,9 +132,9 @@ func TestGetActiveLaunchPlanVersion(t *testing.T) { Name: "name", } launchPlanIdentifier := core.Identifier{ - Project: launchPlanNamedIdentifier.Project, - Domain: launchPlanNamedIdentifier.Domain, - Name: launchPlanNamedIdentifier.Name, + Project: launchPlanNamedIdentifier.GetProject(), + Domain: launchPlanNamedIdentifier.GetDomain(), + Name: launchPlanNamedIdentifier.GetName(), Version: "foo", } @@ -142,9 +142,9 @@ func TestGetActiveLaunchPlanVersion(t *testing.T) { launchPlanManager.(*mocks.MockLaunchPlanManager).SetListLaunchPlansCallback( func(ctx context.Context, request *admin.ResourceListRequest) ( *admin.LaunchPlanList, error) { - assert.True(t, proto.Equal(launchPlanNamedIdentifier, request.Id)) - assert.Equal(t, "eq(state,1)", request.Filters) - assert.Equal(t, uint32(1), request.Limit) + assert.True(t, proto.Equal(launchPlanNamedIdentifier, request.GetId())) + assert.Equal(t, "eq(state,1)", request.GetFilters()) + assert.Equal(t, uint32(1), request.GetLimit()) return &admin.LaunchPlanList{ LaunchPlans: []*admin.LaunchPlan{ { @@ -156,7 +156,7 @@ func TestGetActiveLaunchPlanVersion(t *testing.T) { testExecutor := newWorkflowExecutorForTest(nil, nil, launchPlanManager) launchPlan, err := testExecutor.getActiveLaunchPlanVersion(launchPlanNamedIdentifier) assert.Nil(t, err) - assert.True(t, proto.Equal(&launchPlanIdentifier, launchPlan.Id)) + assert.True(t, proto.Equal(&launchPlanIdentifier, launchPlan.GetId())) } func TestGetActiveLaunchPlanVersion_ManagerError(t *testing.T) { @@ -198,13 +198,13 @@ func TestFormulateExecutionCreateRequest(t *testing.T) { } testExecutor := newWorkflowExecutorForTest(nil, nil, nil) executionRequest := testExecutor.formulateExecutionCreateRequest(launchPlan, time.Unix(1543607788, 0)) - assert.Equal(t, "foo", executionRequest.Project) - assert.Equal(t, "bar", 
executionRequest.Domain) - assert.Equal(t, "a2k4s9v5j246kwmdmh4t", executionRequest.Name) + assert.Equal(t, "foo", executionRequest.GetProject()) + assert.Equal(t, "bar", executionRequest.GetDomain()) + assert.Equal(t, "a2k4s9v5j246kwmdmh4t", executionRequest.GetName()) - assert.True(t, proto.Equal(&launchPlanIdentifier, executionRequest.Spec.LaunchPlan)) - assert.Equal(t, admin.ExecutionMetadata_SCHEDULED, executionRequest.Spec.Metadata.Mode) - assert.Equal(t, int64(1543607788), executionRequest.Spec.Metadata.ScheduledAt.Seconds) + assert.True(t, proto.Equal(&launchPlanIdentifier, executionRequest.GetSpec().GetLaunchPlan())) + assert.Equal(t, admin.ExecutionMetadata_SCHEDULED, executionRequest.GetSpec().GetMetadata().GetMode()) + assert.Equal(t, int64(1543607788), executionRequest.GetSpec().GetMetadata().GetScheduledAt().GetSeconds()) } func TestRun(t *testing.T) { @@ -234,12 +234,12 @@ func TestRun(t *testing.T) { testExecutionManager.SetCreateCallback(func( ctx context.Context, request *admin.ExecutionCreateRequest, requestedAt time.Time) ( *admin.ExecutionCreateResponse, error) { - assert.Equal(t, "project", request.Project) - assert.Equal(t, "domain", request.Domain) - assert.Equal(t, "ar8fphnlc5wh9dksjncj", request.Name) + assert.Equal(t, "project", request.GetProject()) + assert.Equal(t, "domain", request.GetDomain()) + assert.Equal(t, "ar8fphnlc5wh9dksjncj", request.GetName()) if messagesSeen == 0 { - assert.Contains(t, request.Inputs.Literals, testKickoffTime) - assert.Equal(t, testKickoffTimeProtoLiteral, request.Inputs.Literals[testKickoffTime]) + assert.Contains(t, request.GetInputs().GetLiterals(), testKickoffTime) + assert.Equal(t, testKickoffTimeProtoLiteral, request.GetInputs().GetLiterals()[testKickoffTime]) } messagesSeen++ return &admin.ExecutionCreateResponse{}, nil @@ -248,10 +248,10 @@ func TestRun(t *testing.T) { launchPlanManager.(*mocks.MockLaunchPlanManager).SetListLaunchPlansCallback( func(ctx context.Context, request 
*admin.ResourceListRequest) ( *admin.LaunchPlanList, error) { - assert.Equal(t, "project", request.Id.Project) - assert.Equal(t, "domain", request.Id.Domain) - assert.Equal(t, "eq(state,1)", request.Filters) - assert.Equal(t, uint32(1), request.Limit) + assert.Equal(t, "project", request.GetId().GetProject()) + assert.Equal(t, "domain", request.GetId().GetDomain()) + assert.Equal(t, "eq(state,1)", request.GetFilters()) + assert.Equal(t, uint32(1), request.GetLimit()) return &admin.LaunchPlanList{ LaunchPlans: []*admin.LaunchPlan{ { diff --git a/flyteadmin/pkg/async/schedule/mocks/mock_event_scheduler.go b/flyteadmin/pkg/async/schedule/mocks/mock_event_scheduler.go index fb9aebe34e..244cd0b40e 100644 --- a/flyteadmin/pkg/async/schedule/mocks/mock_event_scheduler.go +++ b/flyteadmin/pkg/async/schedule/mocks/mock_event_scheduler.go @@ -22,9 +22,9 @@ func (s *MockEventScheduler) CreateScheduleInput(ctx context.Context, appConfig payload, _ := aws.SerializeScheduleWorkflowPayload( schedule.GetKickoffTimeInputArg(), &admin.NamedEntityIdentifier{ - Project: identifier.Project, - Domain: identifier.Domain, - Name: identifier.Name, + Project: identifier.GetProject(), + Domain: identifier.GetDomain(), + Name: identifier.GetName(), }) return interfaces.AddScheduleInput{Identifier: identifier, ScheduleExpression: schedule, Payload: payload}, nil } diff --git a/flyteadmin/pkg/clusterresource/controller.go b/flyteadmin/pkg/clusterresource/controller.go index 6ea1731909..37cfa46054 100644 --- a/flyteadmin/pkg/clusterresource/controller.go +++ b/flyteadmin/pkg/clusterresource/controller.go @@ -209,8 +209,8 @@ func (c *controller) getCustomTemplateValues( collectedErrs = append(collectedErrs, err) } } - if attributes != nil && attributes.Attributes != nil { - for templateKey, templateValue := range attributes.Attributes { + if attributes != nil && attributes.GetAttributes() != nil { + for templateKey, templateValue := range attributes.GetAttributes() { 
customTemplateValues[fmt.Sprintf(templateVariableFormat, templateKey)] = templateValue } } @@ -481,8 +481,8 @@ func (c *controller) createResourceFromTemplate(ctx context.Context, templateDir // First, add the special case namespace template which is always substituted by the system // rather than fetched via a user-specified source. templateValues[fmt.Sprintf(templateVariableFormat, namespaceVariable)] = namespace - templateValues[fmt.Sprintf(templateVariableFormat, projectVariable)] = project.Id - templateValues[fmt.Sprintf(templateVariableFormat, domainVariable)] = domain.Id + templateValues[fmt.Sprintf(templateVariableFormat, projectVariable)] = project.GetId() + templateValues[fmt.Sprintf(templateVariableFormat, domainVariable)] = domain.GetId() var k8sManifest = string(template) for templateKey, templateValue := range customTemplateValues { @@ -587,11 +587,11 @@ func (c *controller) Sync(ctx context.Context) error { stats := ResourceSyncStats{} - for _, project := range projects.Projects { - for _, domain := range project.Domains { - namespace := common.GetNamespaceName(c.config.NamespaceMappingConfiguration().GetNamespaceTemplate(), project.Id, domain.Name) + for _, project := range projects.GetProjects() { + for _, domain := range project.GetDomains() { + namespace := common.GetNamespaceName(c.config.NamespaceMappingConfiguration().GetNamespaceTemplate(), project.GetId(), domain.GetName()) customTemplateValues, err := c.getCustomTemplateValues( - ctx, project.Id, domain.Id, domainTemplateValues[domain.Id]) + ctx, project.GetId(), domain.GetId(), domainTemplateValues[domain.GetId()]) if err != nil { logger.Errorf(ctx, "Failed to get custom template values for %s with err: %v", namespace, err) errs = append(errs, err) diff --git a/flyteadmin/pkg/clusterresource/impl/admin_service_data_provider.go b/flyteadmin/pkg/clusterresource/impl/admin_service_data_provider.go index 550637183d..16113edd23 100644 --- 
a/flyteadmin/pkg/clusterresource/impl/admin_service_data_provider.go +++ b/flyteadmin/pkg/clusterresource/impl/admin_service_data_provider.go @@ -25,9 +25,9 @@ func (p serviceAdminProvider) GetClusterResourceAttributes(ctx context.Context, if err != nil { return nil, err } - if resource != nil && resource.Attributes != nil && resource.Attributes.MatchingAttributes != nil && - resource.Attributes.MatchingAttributes.GetClusterResourceAttributes() != nil { - return resource.Attributes.MatchingAttributes.GetClusterResourceAttributes(), nil + if resource != nil && resource.GetAttributes() != nil && resource.GetAttributes().GetMatchingAttributes() != nil && + resource.GetAttributes().GetMatchingAttributes().GetClusterResourceAttributes() != nil { + return resource.GetAttributes().GetMatchingAttributes().GetClusterResourceAttributes(), nil } return nil, NewMissingEntityError("cluster resource attributes") } @@ -56,11 +56,11 @@ func (p serviceAdminProvider) GetProjects(ctx context.Context) (*admin.Projects, if err != nil { return nil, err } - projects = append(projects, projectResp.Projects...) - if len(projectResp.Token) == 0 { + projects = append(projects, projectResp.GetProjects()...) 
+ if len(projectResp.GetToken()) == 0 { break } - listReq.Token = projectResp.Token + listReq.Token = projectResp.GetToken() } return &admin.Projects{ Projects: projects, diff --git a/flyteadmin/pkg/clusterresource/impl/admin_service_data_provider_test.go b/flyteadmin/pkg/clusterresource/impl/admin_service_data_provider_test.go index 182c9e2573..0bd1390f50 100644 --- a/flyteadmin/pkg/clusterresource/impl/admin_service_data_provider_test.go +++ b/flyteadmin/pkg/clusterresource/impl/admin_service_data_provider_test.go @@ -24,7 +24,7 @@ func TestServiceGetClusterResourceAttributes(t *testing.T) { } mockAdmin := mocks.AdminServiceClient{} mockAdmin.OnGetProjectDomainAttributesMatch(ctx, mock.MatchedBy(func(req *admin.ProjectDomainAttributesGetRequest) bool { - return req.Project == project && req.Domain == domain && req.ResourceType == admin.MatchableResource_CLUSTER_RESOURCE + return req.GetProject() == project && req.GetDomain() == domain && req.GetResourceType() == admin.MatchableResource_CLUSTER_RESOURCE })).Return(&admin.ProjectDomainAttributesGetResponse{ Attributes: &admin.ProjectDomainAttributes{ MatchingAttributes: &admin.MatchingAttributes{ @@ -42,12 +42,12 @@ func TestServiceGetClusterResourceAttributes(t *testing.T) { } attrs, err := provider.GetClusterResourceAttributes(context.TODO(), project, domain) assert.NoError(t, err) - assert.EqualValues(t, attrs.Attributes, attributes) + assert.EqualValues(t, attrs.GetAttributes(), attributes) }) t.Run("admin service error", func(t *testing.T) { mockAdmin := mocks.AdminServiceClient{} mockAdmin.OnGetProjectDomainAttributesMatch(ctx, mock.MatchedBy(func(req *admin.ProjectDomainAttributesGetRequest) bool { - return req.Project == project && req.Domain == domain && req.ResourceType == admin.MatchableResource_CLUSTER_RESOURCE + return req.GetProject() == project && req.GetDomain() == domain && req.GetResourceType() == admin.MatchableResource_CLUSTER_RESOURCE })).Return(&admin.ProjectDomainAttributesGetResponse{}, 
errFoo) provider := serviceAdminProvider{ @@ -59,7 +59,7 @@ func TestServiceGetClusterResourceAttributes(t *testing.T) { t.Run("wonky admin service response", func(t *testing.T) { mockAdmin := mocks.AdminServiceClient{} mockAdmin.OnGetProjectDomainAttributesMatch(ctx, mock.MatchedBy(func(req *admin.ProjectDomainAttributesGetRequest) bool { - return req.Project == project && req.Domain == domain && req.ResourceType == admin.MatchableResource_CLUSTER_RESOURCE + return req.GetProject() == project && req.GetDomain() == domain && req.GetResourceType() == admin.MatchableResource_CLUSTER_RESOURCE })).Return(&admin.ProjectDomainAttributesGetResponse{ Attributes: &admin.ProjectDomainAttributes{ MatchingAttributes: &admin.MatchingAttributes{ @@ -88,7 +88,7 @@ func TestServiceGetProjects(t *testing.T) { t.Run("happy case", func(t *testing.T) { mockAdmin := mocks.AdminServiceClient{} mockAdmin.OnListProjectsMatch(ctx, mock.MatchedBy(func(req *admin.ProjectListRequest) bool { - return req.Limit == 100 && req.Filters == "ne(state,1)" && req.SortBy.Key == "created_at" + return req.GetLimit() == 100 && req.GetFilters() == "ne(state,1)" && req.GetSortBy().GetKey() == "created_at" })).Return(&admin.Projects{ Projects: []*admin.Project{ { @@ -104,12 +104,12 @@ func TestServiceGetProjects(t *testing.T) { } projects, err := provider.GetProjects(ctx) assert.NoError(t, err) - assert.Len(t, projects.Projects, 2) + assert.Len(t, projects.GetProjects(), 2) }) t.Run("admin error", func(t *testing.T) { mockAdmin := mocks.AdminServiceClient{} mockAdmin.OnListProjectsMatch(ctx, mock.MatchedBy(func(req *admin.ProjectListRequest) bool { - return req.Limit == 100 && req.Filters == "ne(state,1)" && req.SortBy.Key == "created_at" + return req.GetLimit() == 100 && req.GetFilters() == "ne(state,1)" && req.GetSortBy().GetKey() == "created_at" })).Return(nil, errFoo) provider := serviceAdminProvider{ adminClient: &mockAdmin, diff --git a/flyteadmin/pkg/clusterresource/impl/db_admin_data_provider_test.go 
b/flyteadmin/pkg/clusterresource/impl/db_admin_data_provider_test.go index 81ba4805ba..7fa0039799 100644 --- a/flyteadmin/pkg/clusterresource/impl/db_admin_data_provider_test.go +++ b/flyteadmin/pkg/clusterresource/impl/db_admin_data_provider_test.go @@ -49,7 +49,7 @@ func TestGetClusterResourceAttributes(t *testing.T) { } attrs, err := provider.GetClusterResourceAttributes(context.TODO(), project, domain) assert.NoError(t, err) - assert.EqualValues(t, attrs.Attributes, attributes) + assert.EqualValues(t, attrs.GetAttributes(), attributes) }) t.Run("error", func(t *testing.T) { resourceManager.GetResourceFunc = func(ctx context.Context, request interfaces.ResourceRequest) (*interfaces.ResourceResponse, error) { @@ -125,7 +125,7 @@ func TestGetProjects(t *testing.T) { } projects, err := provider.GetProjects(context.TODO()) assert.NoError(t, err) - assert.Len(t, projects.Projects, 2) + assert.Len(t, projects.GetProjects(), 2) }) t.Run("db error", func(t *testing.T) { mockRepo := repoMocks.NewMockRepository() diff --git a/flyteadmin/pkg/common/flyte_url.go b/flyteadmin/pkg/common/flyte_url.go index f5245ac238..e4c005d902 100644 --- a/flyteadmin/pkg/common/flyte_url.go +++ b/flyteadmin/pkg/common/flyte_url.go @@ -108,7 +108,7 @@ func ParseFlyteURLToExecution(flyteURL string) (ParsedExecution, error) { taskExecID := core.TaskExecutionIdentifier{ NodeExecutionId: &nodeExecID, // checking for overflow here is probably unreasonable - RetryAttempt: uint32(a), + RetryAttempt: uint32(a), // #nosec G115 } return ParsedExecution{ PartialTaskExecID: &taskExecID, @@ -126,8 +126,8 @@ func ParseFlyteURLToExecution(flyteURL string) (ParsedExecution, error) { } func FlyteURLsFromNodeExecutionID(nodeExecutionID *core.NodeExecutionIdentifier, deck bool) *admin.FlyteURLs { - base := fmt.Sprintf("flyte://v1/%s/%s/%s/%s", nodeExecutionID.ExecutionId.Project, - nodeExecutionID.ExecutionId.Domain, nodeExecutionID.ExecutionId.Name, nodeExecutionID.NodeId) + base := 
fmt.Sprintf("flyte://v1/%s/%s/%s/%s", nodeExecutionID.GetExecutionId().GetProject(), + nodeExecutionID.GetExecutionId().GetDomain(), nodeExecutionID.GetExecutionId().GetName(), nodeExecutionID.GetNodeId()) res := &admin.FlyteURLs{ Inputs: fmt.Sprintf("%s/%s", base, ArtifactTypeI), @@ -143,7 +143,7 @@ func FlyteURLsFromNodeExecutionID(nodeExecutionID *core.NodeExecutionIdentifier, // This constructs a fully unique prefix, and when post-pended with the output name, forms a fully unique name for // the artifact service (including the project/domain of course, which the artifact service will add). func FlyteURLKeyFromNodeExecutionID(nodeExecutionID *core.NodeExecutionIdentifier) string { - res := fmt.Sprintf("%s/%s", nodeExecutionID.ExecutionId.Name, nodeExecutionID.NodeId) + res := fmt.Sprintf("%s/%s", nodeExecutionID.GetExecutionId().GetName(), nodeExecutionID.GetNodeId()) return res } @@ -151,14 +151,14 @@ func FlyteURLKeyFromNodeExecutionID(nodeExecutionID *core.NodeExecutionIdentifie // FlyteURLKeyFromNodeExecutionIDRetry is a modified version of the function above. // See the uniqueness comment above. 
func FlyteURLKeyFromNodeExecutionIDRetry(nodeExecutionID *core.NodeExecutionIdentifier, retry int) string { - res := fmt.Sprintf("%s/%s/%s", nodeExecutionID.ExecutionId.Name, nodeExecutionID.NodeId, strconv.Itoa(retry)) + res := fmt.Sprintf("%s/%s/%s", nodeExecutionID.GetExecutionId().GetName(), nodeExecutionID.GetNodeId(), strconv.Itoa(retry)) return res } func FlyteURLsFromTaskExecutionID(taskExecutionID *core.TaskExecutionIdentifier, deck bool) *admin.FlyteURLs { - base := fmt.Sprintf("flyte://v1/%s/%s/%s/%s/%s", taskExecutionID.NodeExecutionId.ExecutionId.Project, - taskExecutionID.NodeExecutionId.ExecutionId.Domain, taskExecutionID.NodeExecutionId.ExecutionId.Name, taskExecutionID.NodeExecutionId.NodeId, strconv.Itoa(int(taskExecutionID.RetryAttempt))) + base := fmt.Sprintf("flyte://v1/%s/%s/%s/%s/%s", taskExecutionID.GetNodeExecutionId().GetExecutionId().GetProject(), + taskExecutionID.GetNodeExecutionId().GetExecutionId().GetDomain(), taskExecutionID.GetNodeExecutionId().GetExecutionId().GetName(), taskExecutionID.GetNodeExecutionId().GetNodeId(), strconv.Itoa(int(taskExecutionID.GetRetryAttempt()))) res := &admin.FlyteURLs{ Inputs: fmt.Sprintf("%s/%s", base, ArtifactTypeI), diff --git a/flyteadmin/pkg/common/flyte_url_test.go b/flyteadmin/pkg/common/flyte_url_test.go index a0cbfcda2b..bd954c5bb6 100644 --- a/flyteadmin/pkg/common/flyte_url_test.go +++ b/flyteadmin/pkg/common/flyte_url_test.go @@ -197,11 +197,11 @@ func TestParseFlyteURLToExecution(t *testing.T) { x, err := ParseFlyteURLToExecution("flyte://v1/fs/dev/abc/n0/3/o/o0") assert.NoError(t, err) assert.Nil(t, x.NodeExecID) - assert.Nil(t, x.PartialTaskExecID.TaskId) - assert.Equal(t, "fs", x.PartialTaskExecID.NodeExecutionId.ExecutionId.Project) - assert.Equal(t, "dev", x.PartialTaskExecID.NodeExecutionId.ExecutionId.Domain) - assert.Equal(t, "abc", x.PartialTaskExecID.NodeExecutionId.ExecutionId.Name) - assert.Equal(t, "n0", x.PartialTaskExecID.NodeExecutionId.NodeId) + assert.Nil(t, 
x.PartialTaskExecID.GetTaskId()) + assert.Equal(t, "fs", x.PartialTaskExecID.GetNodeExecutionId().GetExecutionId().GetProject()) + assert.Equal(t, "dev", x.PartialTaskExecID.GetNodeExecutionId().GetExecutionId().GetDomain()) + assert.Equal(t, "abc", x.PartialTaskExecID.GetNodeExecutionId().GetExecutionId().GetName()) + assert.Equal(t, "n0", x.PartialTaskExecID.GetNodeExecutionId().GetNodeId()) assert.Equal(t, uint32(3), x.PartialTaskExecID.GetRetryAttempt()) assert.Equal(t, "o0", x.LiteralName) }) @@ -210,11 +210,11 @@ func TestParseFlyteURLToExecution(t *testing.T) { x, err := ParseFlyteURLToExecution("flyte://v1/fs/dev/abc/n0/3/o") assert.NoError(t, err) assert.Nil(t, x.NodeExecID) - assert.Nil(t, x.PartialTaskExecID.TaskId) - assert.Equal(t, "fs", x.PartialTaskExecID.NodeExecutionId.ExecutionId.Project) - assert.Equal(t, "dev", x.PartialTaskExecID.NodeExecutionId.ExecutionId.Domain) - assert.Equal(t, "abc", x.PartialTaskExecID.NodeExecutionId.ExecutionId.Name) - assert.Equal(t, "n0", x.PartialTaskExecID.NodeExecutionId.NodeId) + assert.Nil(t, x.PartialTaskExecID.GetTaskId()) + assert.Equal(t, "fs", x.PartialTaskExecID.GetNodeExecutionId().GetExecutionId().GetProject()) + assert.Equal(t, "dev", x.PartialTaskExecID.GetNodeExecutionId().GetExecutionId().GetDomain()) + assert.Equal(t, "abc", x.PartialTaskExecID.GetNodeExecutionId().GetExecutionId().GetName()) + assert.Equal(t, "n0", x.PartialTaskExecID.GetNodeExecutionId().GetNodeId()) assert.Equal(t, uint32(3), x.PartialTaskExecID.GetRetryAttempt()) assert.Equal(t, "", x.LiteralName) }) @@ -224,10 +224,10 @@ func TestParseFlyteURLToExecution(t *testing.T) { assert.NoError(t, err) assert.NotNil(t, x.NodeExecID) assert.Nil(t, x.PartialTaskExecID) - assert.Equal(t, "fs", x.NodeExecID.ExecutionId.Project) - assert.Equal(t, "dev", x.NodeExecID.ExecutionId.Domain) - assert.Equal(t, "abc", x.NodeExecID.ExecutionId.Name) - assert.Equal(t, "n0", x.NodeExecID.NodeId) + assert.Equal(t, "fs", 
x.NodeExecID.GetExecutionId().GetProject()) + assert.Equal(t, "dev", x.NodeExecID.GetExecutionId().GetDomain()) + assert.Equal(t, "abc", x.NodeExecID.GetExecutionId().GetName()) + assert.Equal(t, "n0", x.NodeExecID.GetNodeId()) assert.Equal(t, "o0", x.LiteralName) }) @@ -236,10 +236,10 @@ func TestParseFlyteURLToExecution(t *testing.T) { assert.NoError(t, err) assert.NotNil(t, x.NodeExecID) assert.Nil(t, x.PartialTaskExecID) - assert.Equal(t, "fs", x.NodeExecID.ExecutionId.Project) - assert.Equal(t, "dev", x.NodeExecID.ExecutionId.Domain) - assert.Equal(t, "abc", x.NodeExecID.ExecutionId.Name) - assert.Equal(t, "n0", x.NodeExecID.NodeId) + assert.Equal(t, "fs", x.NodeExecID.GetExecutionId().GetProject()) + assert.Equal(t, "dev", x.NodeExecID.GetExecutionId().GetDomain()) + assert.Equal(t, "abc", x.NodeExecID.GetExecutionId().GetName()) + assert.Equal(t, "n0", x.NodeExecID.GetNodeId()) assert.Equal(t, "", x.LiteralName) }) @@ -248,10 +248,10 @@ func TestParseFlyteURLToExecution(t *testing.T) { assert.NoError(t, err) assert.NotNil(t, x.NodeExecID) assert.Nil(t, x.PartialTaskExecID) - assert.Equal(t, "fs", x.NodeExecID.ExecutionId.Project) - assert.Equal(t, "dev", x.NodeExecID.ExecutionId.Domain) - assert.Equal(t, "abc", x.NodeExecID.ExecutionId.Name) - assert.Equal(t, "n0", x.NodeExecID.NodeId) + assert.Equal(t, "fs", x.NodeExecID.GetExecutionId().GetProject()) + assert.Equal(t, "dev", x.NodeExecID.GetExecutionId().GetDomain()) + assert.Equal(t, "abc", x.NodeExecID.GetExecutionId().GetName()) + assert.Equal(t, "n0", x.NodeExecID.GetNodeId()) assert.Equal(t, "", x.LiteralName) assert.Equal(t, ArtifactTypeI, x.IOType) }) @@ -261,10 +261,10 @@ func TestParseFlyteURLToExecution(t *testing.T) { assert.NoError(t, err) assert.NotNil(t, x.NodeExecID) assert.Nil(t, x.PartialTaskExecID) - assert.Equal(t, "fs", x.NodeExecID.ExecutionId.Project) - assert.Equal(t, "dev", x.NodeExecID.ExecutionId.Domain) - assert.Equal(t, "abc", x.NodeExecID.ExecutionId.Name) - assert.Equal(t, 
"n0", x.NodeExecID.NodeId) + assert.Equal(t, "fs", x.NodeExecID.GetExecutionId().GetProject()) + assert.Equal(t, "dev", x.NodeExecID.GetExecutionId().GetDomain()) + assert.Equal(t, "abc", x.NodeExecID.GetExecutionId().GetName()) + assert.Equal(t, "n0", x.NodeExecID.GetNodeId()) assert.Equal(t, "", x.LiteralName) assert.Equal(t, ArtifactTypeD, x.IOType) }) diff --git a/flyteadmin/pkg/common/sorting.go b/flyteadmin/pkg/common/sorting.go index c89b86a914..246c73b52c 100644 --- a/flyteadmin/pkg/common/sorting.go +++ b/flyteadmin/pkg/common/sorting.go @@ -30,13 +30,13 @@ func NewSortParameter(sort *admin.Sort, allowed sets.String) (SortParameter, err return nil, nil } - key := sort.Key + key := sort.GetKey() if !allowed.Has(key) { return nil, errors.NewFlyteAdminErrorf(codes.InvalidArgument, "invalid sort key '%s'", key) } var gormOrderExpression string - switch sort.Direction { + switch sort.GetDirection() { case admin.Sort_DESCENDING: gormOrderExpression = fmt.Sprintf(gormDescending, key) case admin.Sort_ASCENDING: diff --git a/flyteadmin/pkg/config/config.go b/flyteadmin/pkg/config/config.go index f6bdd27141..0e63eccb45 100644 --- a/flyteadmin/pkg/config/config.go +++ b/flyteadmin/pkg/config/config.go @@ -66,10 +66,13 @@ type KubeClientConfig struct { } type ServerSecurityOptions struct { - Secure bool `json:"secure"` - Ssl SslOptions `json:"ssl"` - UseAuth bool `json:"useAuth"` - AuditAccess bool `json:"auditAccess"` + Secure bool `json:"secure"` + Ssl SslOptions `json:"ssl"` + UseAuth bool `json:"useAuth"` + // InsecureCookieHeader should only be set in the case where we want to serve cookies with the header "Secure" set to false. + // This is useful for local development and *never* in production. + InsecureCookieHeader bool `json:"insecureCookieHeader"` + AuditAccess bool `json:"auditAccess"` // These options are here to allow deployments where the Flyte UI (Console) is served from a different domain/port. // Note that CORS only applies to Admin's API endpoints. 
The health check endpoint for instance is unaffected. diff --git a/flyteadmin/pkg/config/serverconfig_flags.go b/flyteadmin/pkg/config/serverconfig_flags.go index 10229a458a..09a5d70a26 100755 --- a/flyteadmin/pkg/config/serverconfig_flags.go +++ b/flyteadmin/pkg/config/serverconfig_flags.go @@ -59,6 +59,7 @@ func (cfg ServerConfig) GetPFlagSet(prefix string) *pflag.FlagSet { cmdFlags.String(fmt.Sprintf("%v%v", prefix, "security.ssl.certificateFile"), defaultServerConfig.Security.Ssl.CertificateFile, "") cmdFlags.String(fmt.Sprintf("%v%v", prefix, "security.ssl.keyFile"), defaultServerConfig.Security.Ssl.KeyFile, "") cmdFlags.Bool(fmt.Sprintf("%v%v", prefix, "security.useAuth"), defaultServerConfig.Security.UseAuth, "") + cmdFlags.Bool(fmt.Sprintf("%v%v", prefix, "security.insecureCookieHeader"), defaultServerConfig.Security.InsecureCookieHeader, "") cmdFlags.Bool(fmt.Sprintf("%v%v", prefix, "security.auditAccess"), defaultServerConfig.Security.AuditAccess, "") cmdFlags.Bool(fmt.Sprintf("%v%v", prefix, "security.allowCors"), defaultServerConfig.Security.AllowCors, "") cmdFlags.StringSlice(fmt.Sprintf("%v%v", prefix, "security.allowedOrigins"), defaultServerConfig.Security.AllowedOrigins, "") diff --git a/flyteadmin/pkg/config/serverconfig_flags_test.go b/flyteadmin/pkg/config/serverconfig_flags_test.go index 6a95336f40..a18b56156e 100755 --- a/flyteadmin/pkg/config/serverconfig_flags_test.go +++ b/flyteadmin/pkg/config/serverconfig_flags_test.go @@ -225,6 +225,20 @@ func TestServerConfig_SetFlags(t *testing.T) { } }) }) + t.Run("Test_security.insecureCookieHeader", func(t *testing.T) { + + t.Run("Override", func(t *testing.T) { + testValue := "1" + + cmdFlags.Set("security.insecureCookieHeader", testValue) + if vBool, err := cmdFlags.GetBool("security.insecureCookieHeader"); err == nil { + testDecodeJson_ServerConfig(t, fmt.Sprintf("%v", vBool), &actual.Security.InsecureCookieHeader) + + } else { + assert.FailNow(t, err.Error()) + } + }) + }) 
t.Run("Test_security.auditAccess", func(t *testing.T) { t.Run("Override", func(t *testing.T) { diff --git a/flyteadmin/pkg/data/implementations/aws_remote_url_test.go b/flyteadmin/pkg/data/implementations/aws_remote_url_test.go index 878351fcf2..0677a498a3 100644 --- a/flyteadmin/pkg/data/implementations/aws_remote_url_test.go +++ b/flyteadmin/pkg/data/implementations/aws_remote_url_test.go @@ -80,6 +80,6 @@ func TestAWSGet(t *testing.T) { } urlBlob, err := remoteURL.Get(context.Background(), "s3://bucket/key") assert.Nil(t, err) - assert.Equal(t, "www://host/path", urlBlob.Url) - assert.Equal(t, contentLength, urlBlob.Bytes) + assert.Equal(t, "www://host/path", urlBlob.GetUrl()) + assert.Equal(t, contentLength, urlBlob.GetBytes()) } diff --git a/flyteadmin/pkg/data/implementations/gcp_remote_url.go b/flyteadmin/pkg/data/implementations/gcp_remote_url.go index 3a8dc98679..56fe7412e7 100644 --- a/flyteadmin/pkg/data/implementations/gcp_remote_url.go +++ b/flyteadmin/pkg/data/implementations/gcp_remote_url.go @@ -110,7 +110,7 @@ func (g *GCPRemoteURL) signURL(ctx context.Context, gcsURI GCPGCSObject) (string if err != nil { return nil, err } - return resp.SignedBlob, nil + return resp.GetSignedBlob(), nil }, Expires: time.Now().Add(g.signDuration), } @@ -159,8 +159,8 @@ func (ts impersonationTokenSource) Token() (*oauth2.Token, error) { } return &oauth2.Token{ - AccessToken: resp.AccessToken, - Expiry: asTime(resp.ExpireTime), + AccessToken: resp.GetAccessToken(), + Expiry: asTime(resp.GetExpireTime()), }, nil } diff --git a/flyteadmin/pkg/data/implementations/gcp_remote_url_test.go b/flyteadmin/pkg/data/implementations/gcp_remote_url_test.go index cfcce1ff5a..a1156518e6 100644 --- a/flyteadmin/pkg/data/implementations/gcp_remote_url_test.go +++ b/flyteadmin/pkg/data/implementations/gcp_remote_url_test.go @@ -88,7 +88,7 @@ func TestGCPGet(t *testing.T) { mockIAMCredentials := mockIAMCredentialsImpl{} mockIAMCredentials.signBlobFunc = func(ctx context.Context, req 
*credentialspb.SignBlobRequest, opts ...gax.CallOption) (*credentialspb.SignBlobResponse, error) { - assert.Equal(t, "projects/-/serviceAccounts/"+signingPrincipal, req.Name) + assert.Equal(t, "projects/-/serviceAccounts/"+signingPrincipal, req.GetName()) return &credentialspb.SignBlobResponse{SignedBlob: []byte(signedBlob)}, nil } @@ -102,12 +102,12 @@ func TestGCPGet(t *testing.T) { urlBlob, err := remoteURL.Get(context.Background(), "gs://bucket/key") assert.Nil(t, err) - u, _ := url.Parse(urlBlob.Url) + u, _ := url.Parse(urlBlob.GetUrl()) assert.Equal(t, "https", u.Scheme) assert.Equal(t, "storage.googleapis.com", u.Hostname()) assert.Equal(t, "/bucket/key", u.Path) assert.Equal(t, encodedSignedBlob, u.Query().Get("Signature")) - assert.Equal(t, int64(100), urlBlob.Bytes) + assert.Equal(t, int64(100), urlBlob.GetBytes()) } func TestToken(t *testing.T) { @@ -117,8 +117,8 @@ func TestToken(t *testing.T) { mockIAMCredentials := mockIAMCredentialsImpl{} mockIAMCredentials.generateAccessTokenFunc = func(ctx context.Context, req *credentialspb.GenerateAccessTokenRequest, opts ...gax.CallOption) (*credentialspb.GenerateAccessTokenResponse, error) { - assert.Equal(t, "projects/-/serviceAccounts/"+signingPrincipal, req.Name) - assert.Equal(t, []string{"https://www.googleapis.com/auth/devstorage.read_only"}, req.Scope) + assert.Equal(t, "projects/-/serviceAccounts/"+signingPrincipal, req.GetName()) + assert.Equal(t, []string{"https://www.googleapis.com/auth/devstorage.read_only"}, req.GetScope()) return &credentialspb.GenerateAccessTokenResponse{ AccessToken: token, ExpireTime: ×tamp, diff --git a/flyteadmin/pkg/data/implementations/noop_remote_url_test.go b/flyteadmin/pkg/data/implementations/noop_remote_url_test.go index 965dc9eeb2..c4e14a394a 100644 --- a/flyteadmin/pkg/data/implementations/noop_remote_url_test.go +++ b/flyteadmin/pkg/data/implementations/noop_remote_url_test.go @@ -44,6 +44,6 @@ func TestNoopRemoteURLGet(t *testing.T) { urlBlob, err := 
noopRemoteURL.Get(context.Background(), "uri") assert.Nil(t, err) assert.NotEmpty(t, urlBlob) - assert.Equal(t, "uri", urlBlob.Url) - assert.Equal(t, noopFileSize, urlBlob.Bytes) + assert.Equal(t, "uri", urlBlob.GetUrl()) + assert.Equal(t, noopFileSize, urlBlob.GetBytes()) } diff --git a/flyteadmin/pkg/errors/errors.go b/flyteadmin/pkg/errors/errors.go index 5fc48b0b67..8e280e11dd 100644 --- a/flyteadmin/pkg/errors/errors.go +++ b/flyteadmin/pkg/errors/errors.go @@ -91,7 +91,7 @@ func NewAlreadyInTerminalStateError(ctx context.Context, errorMsg string, curPha statusErr, transformationErr := NewFlyteAdminError(codes.FailedPrecondition, errorMsg).WithDetails(reason) if transformationErr != nil { logger.Panicf(ctx, "Failed to wrap grpc status in type 'Error': %v", transformationErr) - return NewFlyteAdminErrorf(codes.FailedPrecondition, errorMsg) + return NewFlyteAdminErrorf(codes.FailedPrecondition, errorMsg) //nolint } return statusErr } @@ -105,8 +105,8 @@ func NewIncompatibleClusterError(ctx context.Context, errorMsg, curCluster strin }, }) if transformationErr != nil { - logger.Panicf(ctx, "Failed to wrap grpc status in type 'Error': %v", transformationErr) - return NewFlyteAdminErrorf(codes.FailedPrecondition, errorMsg) + logger.Panicf(ctx, "Failed to wrap grpc status in type 'Error': %v", transformationErr) //nolint + return NewFlyteAdminErrorf(codes.FailedPrecondition, errorMsg) //nolint } return statusErr } @@ -128,23 +128,23 @@ func compareJsons(jsonArray1 jsondiff.Patch, jsonArray2 jsondiff.Patch) []string } func NewTaskExistsDifferentStructureError(ctx context.Context, request *admin.TaskCreateRequest, oldSpec *core.CompiledTask, newSpec *core.CompiledTask) FlyteAdminError { - errorMsg := fmt.Sprintf("%v task with different structure already exists. (Please register a new version of the task):\n", request.Id.Name) + errorMsg := fmt.Sprintf("%v task with different structure already exists. 
(Please register a new version of the task):\n", request.GetId().GetName()) diff, _ := jsondiff.Compare(oldSpec, newSpec) rdiff, _ := jsondiff.Compare(newSpec, oldSpec) rs := compareJsons(diff, rdiff) errorMsg += strings.Join(rs, "\n") - return NewFlyteAdminErrorf(codes.InvalidArgument, errorMsg) + return NewFlyteAdminErrorf(codes.InvalidArgument, errorMsg) //nolint } func NewTaskExistsIdenticalStructureError(ctx context.Context, request *admin.TaskCreateRequest) FlyteAdminError { errorMsg := "task with identical structure already exists" - return NewFlyteAdminErrorf(codes.AlreadyExists, errorMsg) + return NewFlyteAdminErrorf(codes.AlreadyExists, errorMsg) //nolint } func NewWorkflowExistsDifferentStructureError(ctx context.Context, request *admin.WorkflowCreateRequest, oldSpec *core.CompiledWorkflowClosure, newSpec *core.CompiledWorkflowClosure) FlyteAdminError { - errorMsg := fmt.Sprintf("%v workflow with different structure already exists. (Please register a new version of the workflow):\n", request.Id.Name) + errorMsg := fmt.Sprintf("%v workflow with different structure already exists. 
(Please register a new version of the workflow):\n", request.GetId().GetName()) diff, _ := jsondiff.Compare(oldSpec, newSpec) rdiff, _ := jsondiff.Compare(newSpec, oldSpec) rs := compareJsons(diff, rdiff) @@ -154,13 +154,13 @@ func NewWorkflowExistsDifferentStructureError(ctx context.Context, request *admi statusErr, transformationErr := NewFlyteAdminError(codes.InvalidArgument, errorMsg).WithDetails(&admin.CreateWorkflowFailureReason{ Reason: &admin.CreateWorkflowFailureReason_ExistsDifferentStructure{ ExistsDifferentStructure: &admin.WorkflowErrorExistsDifferentStructure{ - Id: request.Id, + Id: request.GetId(), }, }, }) if transformationErr != nil { logger.Errorf(ctx, "Failed to wrap grpc status in type 'Error': %v", transformationErr) - return NewFlyteAdminErrorf(codes.InvalidArgument, errorMsg) + return NewFlyteAdminErrorf(codes.InvalidArgument, errorMsg) //nolint } return statusErr } @@ -170,31 +170,31 @@ func NewWorkflowExistsIdenticalStructureError(ctx context.Context, request *admi statusErr, transformationErr := NewFlyteAdminError(codes.AlreadyExists, errorMsg).WithDetails(&admin.CreateWorkflowFailureReason{ Reason: &admin.CreateWorkflowFailureReason_ExistsIdenticalStructure{ ExistsIdenticalStructure: &admin.WorkflowErrorExistsIdenticalStructure{ - Id: request.Id, + Id: request.GetId(), }, }, }) if transformationErr != nil { logger.Errorf(ctx, "Failed to wrap grpc status in type 'Error': %v", transformationErr) - return NewFlyteAdminErrorf(codes.AlreadyExists, errorMsg) + return NewFlyteAdminErrorf(codes.AlreadyExists, errorMsg) //nolint } return statusErr } func NewLaunchPlanExistsDifferentStructureError(ctx context.Context, request *admin.LaunchPlanCreateRequest, oldSpec *admin.LaunchPlanSpec, newSpec *admin.LaunchPlanSpec) FlyteAdminError { - errorMsg := fmt.Sprintf("%v launch plan with different structure already exists. 
(Please register a new version of the launch plan):\n", request.Id.Name) + errorMsg := fmt.Sprintf("%v launch plan with different structure already exists. (Please register a new version of the launch plan):\n", request.GetId().GetName()) diff, _ := jsondiff.Compare(oldSpec, newSpec) rdiff, _ := jsondiff.Compare(newSpec, oldSpec) rs := compareJsons(diff, rdiff) errorMsg += strings.Join(rs, "\n") - return NewFlyteAdminErrorf(codes.InvalidArgument, errorMsg) + return NewFlyteAdminErrorf(codes.InvalidArgument, errorMsg) //nolint } func NewLaunchPlanExistsIdenticalStructureError(ctx context.Context, request *admin.LaunchPlanCreateRequest) FlyteAdminError { errorMsg := "launch plan with identical structure already exists" - return NewFlyteAdminErrorf(codes.AlreadyExists, errorMsg) + return NewFlyteAdminErrorf(codes.AlreadyExists, errorMsg) //nolint } func IsDoesNotExistError(err error) bool { @@ -209,12 +209,12 @@ func NewInactiveProjectError(ctx context.Context, id string) FlyteAdminError { }) if transformationErr != nil { logger.Errorf(ctx, "failed to wrap grpc status in type 'Error': %v", transformationErr) - return NewFlyteAdminErrorf(codes.InvalidArgument, errMsg) + return NewFlyteAdminErrorf(codes.InvalidArgument, errMsg) //nolint } return statusErr } func NewInvalidLiteralTypeError(name string, err error) FlyteAdminError { return NewFlyteAdminErrorf(codes.InvalidArgument, - fmt.Sprintf("Failed to validate literal type for [%s] with err: %s", name, err)) + fmt.Sprintf("Failed to validate literal type for [%s] with err: %s", name, err)) //nolint } diff --git a/flyteadmin/pkg/errors/errors_test.go b/flyteadmin/pkg/errors/errors_test.go index 18c76992b5..a72b4ce2eb 100644 --- a/flyteadmin/pkg/errors/errors_test.go +++ b/flyteadmin/pkg/errors/errors_test.go @@ -284,7 +284,7 @@ func TestNewLaunchPlanExistsDifferentStructureError(t *testing.T) { Id: &identifier, } - statusErr := NewLaunchPlanExistsDifferentStructureError(context.Background(), req, oldLaunchPlan.Spec, 
newLaunchPlan.Spec) + statusErr := NewLaunchPlanExistsDifferentStructureError(context.Background(), req, oldLaunchPlan.GetSpec(), newLaunchPlan.GetSpec()) assert.NotNil(t, statusErr) s, ok := status.FromError(statusErr) assert.True(t, ok) @@ -325,5 +325,5 @@ func TestNewInactiveProjectError(t *testing.T) { details, ok := statusErr.Details()[0].(*admin.InactiveProject) assert.True(t, ok) - assert.Equal(t, identifier.GetProject(), details.Id) + assert.Equal(t, identifier.GetProject(), details.GetId()) } diff --git a/flyteadmin/pkg/executioncluster/impl/in_cluster.go b/flyteadmin/pkg/executioncluster/impl/in_cluster.go index f06d1c4adf..2fdd8271e1 100644 --- a/flyteadmin/pkg/executioncluster/impl/in_cluster.go +++ b/flyteadmin/pkg/executioncluster/impl/in_cluster.go @@ -26,8 +26,8 @@ func (i InCluster) GetTarget(ctx context.Context, spec *executioncluster.Executi if spec != nil && !(spec.TargetID == "" || spec.TargetID == defaultInClusterTargetID) { return nil, errors.New(fmt.Sprintf("remote target %s is not supported", spec.TargetID)) } - if spec != nil && spec.ExecutionClusterLabel != nil && spec.ExecutionClusterLabel.Value != "" { - return nil, errors.New(fmt.Sprintf("execution cluster label %s is not supported", spec.ExecutionClusterLabel.Value)) + if spec != nil && spec.ExecutionClusterLabel != nil && spec.ExecutionClusterLabel.GetValue() != "" { + return nil, errors.New(fmt.Sprintf("execution cluster label %s is not supported", spec.ExecutionClusterLabel.GetValue())) } return &i.target, nil } diff --git a/flyteadmin/pkg/executioncluster/impl/random_cluster_selector.go b/flyteadmin/pkg/executioncluster/impl/random_cluster_selector.go index 35340d3822..e4c2149220 100644 --- a/flyteadmin/pkg/executioncluster/impl/random_cluster_selector.go +++ b/flyteadmin/pkg/executioncluster/impl/random_cluster_selector.go @@ -34,7 +34,7 @@ func getRandSource(seed string) (rand.Source, error) { if err != nil { return nil, err } - hashedSeed := int64(h.Sum64()) + hashedSeed := 
int64(h.Sum64()) // #nosec G115 return rand.NewSource(hashedSeed), nil } @@ -98,8 +98,8 @@ func (s RandomClusterSelector) GetTarget(ctx context.Context, spec *executionclu var label string - if spec.ExecutionClusterLabel != nil && spec.ExecutionClusterLabel.Value != "" { - label = spec.ExecutionClusterLabel.Value + if spec.ExecutionClusterLabel != nil && spec.ExecutionClusterLabel.GetValue() != "" { + label = spec.ExecutionClusterLabel.GetValue() logger.Debugf(ctx, "Using execution cluster label %s", label) } else { resource, err := s.resourceManager.GetResource(ctx, managerInterfaces.ResourceRequest{ @@ -113,7 +113,7 @@ func (s RandomClusterSelector) GetTarget(ctx context.Context, spec *executionclu return nil, err } if resource != nil && resource.Attributes.GetExecutionClusterLabel() != nil { - label = resource.Attributes.GetExecutionClusterLabel().Value + label = resource.Attributes.GetExecutionClusterLabel().GetValue() } } diff --git a/flyteadmin/pkg/manager/impl/description_entity_manager.go b/flyteadmin/pkg/manager/impl/description_entity_manager.go index a7affd5e88..7a2fdd239c 100644 --- a/flyteadmin/pkg/manager/impl/description_entity_manager.go +++ b/flyteadmin/pkg/manager/impl/description_entity_manager.go @@ -38,8 +38,8 @@ func (d *DescriptionEntityManager) GetDescriptionEntity(ctx context.Context, req logger.Errorf(ctx, "invalid request [%+v]: %v", request, err) return nil, err } - ctx = contextutils.WithProjectDomain(ctx, request.Id.Project, request.Id.Domain) - return util.GetDescriptionEntity(ctx, d.db, request.Id) + ctx = contextutils.WithProjectDomain(ctx, request.GetId().GetProject(), request.GetId().GetDomain()) + return util.GetDescriptionEntity(ctx, d.db, request.GetId()) } func (d *DescriptionEntityManager) ListDescriptionEntity(ctx context.Context, request *admin.DescriptionEntityListRequest) (*admin.DescriptionEntityList, error) { @@ -47,44 +47,44 @@ func (d *DescriptionEntityManager) ListDescriptionEntity(ctx context.Context, re if err := 
validation.ValidateDescriptionEntityListRequest(request); err != nil { return nil, err } - ctx = contextutils.WithProjectDomain(ctx, request.Id.Project, request.Id.Domain) + ctx = contextutils.WithProjectDomain(ctx, request.GetId().GetProject(), request.GetId().GetDomain()) - if request.ResourceType == core.ResourceType_WORKFLOW { - ctx = contextutils.WithWorkflowID(ctx, request.Id.Name) + if request.GetResourceType() == core.ResourceType_WORKFLOW { + ctx = contextutils.WithWorkflowID(ctx, request.GetId().GetName()) } else { - ctx = contextutils.WithTaskID(ctx, request.Id.Name) + ctx = contextutils.WithTaskID(ctx, request.GetId().GetName()) } filters, err := util.GetDbFilters(util.FilterSpec{ - Project: request.Id.Project, - Domain: request.Id.Domain, - Name: request.Id.Name, - RequestFilters: request.Filters, - }, common.ResourceTypeToEntity[request.ResourceType]) + Project: request.GetId().GetProject(), + Domain: request.GetId().GetDomain(), + Name: request.GetId().GetName(), + RequestFilters: request.GetFilters(), + }, common.ResourceTypeToEntity[request.GetResourceType()]) if err != nil { logger.Error(ctx, "failed to get database filter") return nil, err } - sortParameter, err := common.NewSortParameter(request.SortBy, models.DescriptionEntityColumns) + sortParameter, err := common.NewSortParameter(request.GetSortBy(), models.DescriptionEntityColumns) if err != nil { return nil, err } - offset, err := validation.ValidateToken(request.Token) + offset, err := validation.ValidateToken(request.GetToken()) if err != nil { return nil, errors.NewFlyteAdminErrorf(codes.InvalidArgument, - "invalid pagination token %s for ListWorkflows", request.Token) + "invalid pagination token %s for ListWorkflows", request.GetToken()) } listDescriptionEntitiesInput := repoInterfaces.ListResourceInput{ - Limit: int(request.Limit), + Limit: int(request.GetLimit()), Offset: offset, InlineFilters: filters, SortParameter: sortParameter, } output, err := 
d.db.DescriptionEntityRepo().List(ctx, listDescriptionEntitiesInput) if err != nil { - logger.Debugf(ctx, "Failed to list workflows with [%+v] with err %v", request.Id, err) + logger.Debugf(ctx, "Failed to list workflows with [%+v] with err %v", request.GetId(), err) return nil, err } descriptionEntityList, err := transformers.FromDescriptionEntityModels(output.Entities) @@ -94,7 +94,7 @@ func (d *DescriptionEntityManager) ListDescriptionEntity(ctx context.Context, re return nil, err } var token string - if len(output.Entities) == int(request.Limit) { + if len(output.Entities) == int(request.GetLimit()) { token = strconv.Itoa(offset + len(output.Entities)) } return &admin.DescriptionEntityList{ diff --git a/flyteadmin/pkg/manager/impl/execution_manager.go b/flyteadmin/pkg/manager/impl/execution_manager.go index e700a744d8..fd8f0870f1 100644 --- a/flyteadmin/pkg/manager/impl/execution_manager.go +++ b/flyteadmin/pkg/manager/impl/execution_manager.go @@ -95,8 +95,8 @@ type ExecutionManager struct { } func getExecutionContext(ctx context.Context, id *core.WorkflowExecutionIdentifier) context.Context { - ctx = contextutils.WithExecutionID(ctx, id.Name) - return contextutils.WithProjectDomain(ctx, id.Project, id.Domain) + ctx = contextutils.WithExecutionID(ctx, id.GetName()) + return contextutils.WithProjectDomain(ctx, id.GetProject(), id.GetDomain()) } // Returns the unique string which identifies the authenticated end user (if any). 
@@ -108,16 +108,16 @@ func getUser(ctx context.Context) string { func (m *ExecutionManager) populateExecutionQueue( ctx context.Context, identifier *core.Identifier, compiledWorkflow *core.CompiledWorkflowClosure) { queueConfig := m.queueAllocator.GetQueue(ctx, identifier) - for _, task := range compiledWorkflow.Tasks { - container := task.Template.GetContainer() + for _, task := range compiledWorkflow.GetTasks() { + container := task.GetTemplate().GetContainer() if container == nil { // Unrecognized target type, nothing to do continue } if queueConfig.DynamicQueue != "" { - logger.Debugf(ctx, "Assigning %s as child queue for task %+v", queueConfig.DynamicQueue, task.Template.Id) - container.Config = append(container.Config, &core.KeyValuePair{ + logger.Debugf(ctx, "Assigning %s as child queue for task %+v", queueConfig.DynamicQueue, task.GetTemplate().GetId()) + container.Config = append(container.GetConfig(), &core.KeyValuePair{ Key: childContainerQueueKey, Value: queueConfig.DynamicQueue, }) @@ -159,8 +159,8 @@ func resolveStringMap(preferredValues, defaultValues mapWithValues, valueName st func (m *ExecutionManager) addPluginOverrides(ctx context.Context, executionID *core.WorkflowExecutionIdentifier, workflowName, launchPlanName string) ([]*admin.PluginOverride, error) { override, err := m.resourceManager.GetResource(ctx, interfaces.ResourceRequest{ - Project: executionID.Project, - Domain: executionID.Domain, + Project: executionID.GetProject(), + Domain: executionID.GetDomain(), Workflow: workflowName, LaunchPlan: launchPlanName, ResourceType: admin.MatchableResource_PLUGIN_OVERRIDE, @@ -169,7 +169,7 @@ func (m *ExecutionManager) addPluginOverrides(ctx context.Context, executionID * return nil, err } if override != nil && override.Attributes != nil && override.Attributes.GetPluginOverrides() != nil { - return override.Attributes.GetPluginOverrides().Overrides, nil + return override.Attributes.GetPluginOverrides().GetOverrides(), nil } return nil, nil } @@ 
-188,13 +188,13 @@ func (m *ExecutionManager) setCompiledTaskDefaults(ctx context.Context, task *co return } - if task.Template == nil || task.Template.GetContainer() == nil { + if task.GetTemplate() == nil || task.GetTemplate().GetContainer() == nil { // Nothing to do logger.Debugf(ctx, "Not setting default resources for task [%+v], no container resources found to check", task) return } - if task.Template.GetContainer().Resources == nil { + if task.GetTemplate().GetContainer().GetResources() == nil { // In case of no resources on the container, create empty requests and limits // so the container will still have resources configure properly task.Template.GetContainer().Resources = &core.Resources{ @@ -209,7 +209,7 @@ func (m *ExecutionManager) setCompiledTaskDefaults(ctx context.Context, task *co // The IDL representation for container-type tasks represents resources as a list with string quantities. // In order to easily reason about them we convert them to a set where we can O(1) fetch specific resources (e.g. CPU) // and represent them as comparable quantities rather than strings. - taskResourceRequirements := util.GetCompleteTaskResourceRequirements(ctx, task.Template.Id, task) + taskResourceRequirements := util.GetCompleteTaskResourceRequirements(ctx, task.GetTemplate().GetId(), task) cpu := flytek8s.AdjustOrDefaultResource(taskResourceRequirements.Defaults.CPU, taskResourceRequirements.Limits.CPU, platformTaskResources.Defaults.CPU, platformTaskResources.Limits.CPU) @@ -276,22 +276,22 @@ func (m *ExecutionManager) setCompiledTaskDefaults(ctx context.Context, task *co // as well as sets request spec metadata with the inherited principal and adjusted nesting data. 
func (m *ExecutionManager) getInheritedExecMetadata(ctx context.Context, requestSpec *admin.ExecutionSpec, workflowExecutionID *core.WorkflowExecutionIdentifier) (parentNodeExecutionID uint, sourceExecutionID uint, err error) { - if requestSpec.Metadata == nil || requestSpec.Metadata.ParentNodeExecution == nil { + if requestSpec.GetMetadata() == nil || requestSpec.GetMetadata().GetParentNodeExecution() == nil { return parentNodeExecutionID, sourceExecutionID, nil } - parentNodeExecutionModel, err := util.GetNodeExecutionModel(ctx, m.db, requestSpec.Metadata.ParentNodeExecution) + parentNodeExecutionModel, err := util.GetNodeExecutionModel(ctx, m.db, requestSpec.GetMetadata().GetParentNodeExecution()) if err != nil { logger.Errorf(ctx, "Failed to get node execution [%+v] that launched this execution [%+v] with error %v", - requestSpec.Metadata.ParentNodeExecution, workflowExecutionID, err) + requestSpec.GetMetadata().GetParentNodeExecution(), workflowExecutionID, err) return parentNodeExecutionID, sourceExecutionID, err } parentNodeExecutionID = parentNodeExecutionModel.ID - sourceExecutionModel, err := util.GetExecutionModel(ctx, m.db, requestSpec.Metadata.ParentNodeExecution.ExecutionId) + sourceExecutionModel, err := util.GetExecutionModel(ctx, m.db, requestSpec.GetMetadata().GetParentNodeExecution().GetExecutionId()) if err != nil { logger.Errorf(ctx, "Failed to get workflow execution [%+v] that launched this execution [%+v] with error %v", - requestSpec.Metadata.ParentNodeExecution, workflowExecutionID, err) + requestSpec.GetMetadata().GetParentNodeExecution(), workflowExecutionID, err) return parentNodeExecutionID, sourceExecutionID, err } sourceExecutionID = sourceExecutionModel.ID @@ -301,16 +301,16 @@ func (m *ExecutionManager) getInheritedExecMetadata(ctx context.Context, request logger.Errorf(ctx, "Failed transform parent execution model for child execution [%+v] with err: %v", workflowExecutionID, err) return parentNodeExecutionID, sourceExecutionID, err 
} - if sourceExecution.Spec.Metadata != nil { - requestSpec.Metadata.Nesting = sourceExecution.Spec.Metadata.Nesting + 1 + if sourceExecution.GetSpec().GetMetadata() != nil { + requestSpec.Metadata.Nesting = sourceExecution.GetSpec().GetMetadata().GetNesting() + 1 } else { requestSpec.Metadata.Nesting = 1 } // If the source execution has a cluster label, inherit it. - if sourceExecution.Spec.ExecutionClusterLabel != nil { - logger.Infof(ctx, "Inherited execution label from source execution [%+v]", sourceExecution.Spec.ExecutionClusterLabel.Value) - requestSpec.ExecutionClusterLabel = sourceExecution.Spec.ExecutionClusterLabel + if sourceExecution.GetSpec().GetExecutionClusterLabel() != nil { + logger.Infof(ctx, "Inherited execution label from source execution [%+v]", sourceExecution.GetSpec().GetExecutionClusterLabel().GetValue()) + requestSpec.ExecutionClusterLabel = sourceExecution.GetSpec().GetExecutionClusterLabel() } return parentNodeExecutionID, sourceExecutionID, nil } @@ -324,20 +324,20 @@ func (m *ExecutionManager) getExecutionConfig(ctx context.Context, request *admi workflowExecConfig := &admin.WorkflowExecutionConfig{} // Merge the request spec into workflowExecConfig - workflowExecConfig = util.MergeIntoExecConfig(workflowExecConfig, request.Spec) + workflowExecConfig = util.MergeIntoExecConfig(workflowExecConfig, request.GetSpec()) var workflowName string - if launchPlan != nil && launchPlan.Spec != nil { + if launchPlan != nil && launchPlan.GetSpec() != nil { // Merge the launch plan spec into workflowExecConfig - workflowExecConfig = util.MergeIntoExecConfig(workflowExecConfig, launchPlan.Spec) - if launchPlan.Spec.WorkflowId != nil { - workflowName = launchPlan.Spec.WorkflowId.Name + workflowExecConfig = util.MergeIntoExecConfig(workflowExecConfig, launchPlan.GetSpec()) + if launchPlan.GetSpec().GetWorkflowId() != nil { + workflowName = launchPlan.GetSpec().GetWorkflowId().GetName() } } // This will get the most specific Workflow Execution Config. 
matchableResource, err := util.GetMatchableResource(ctx, m.resourceManager, - admin.MatchableResource_WORKFLOW_EXECUTION_CONFIG, request.Project, request.Domain, workflowName) + admin.MatchableResource_WORKFLOW_EXECUTION_CONFIG, request.GetProject(), request.GetDomain(), workflowName) if err != nil { return nil, err } @@ -356,7 +356,7 @@ func (m *ExecutionManager) getExecutionConfig(ctx context.Context, request *admi // system level defaults for the rest. // See FLYTE-2322 for more background information. projectMatchableResource, err := util.GetMatchableResource(ctx, m.resourceManager, - admin.MatchableResource_WORKFLOW_EXECUTION_CONFIG, request.Project, "", "") + admin.MatchableResource_WORKFLOW_EXECUTION_CONFIG, request.GetProject(), "", "") if err != nil { return nil, err } @@ -404,7 +404,7 @@ func (m *ExecutionManager) getExecutionConfig(ctx context.Context, request *admi } func (m *ExecutionManager) getClusterAssignment(ctx context.Context, req *admin.ExecutionCreateRequest) (*admin.ClusterAssignment, error) { - storedAssignment, err := m.fetchClusterAssignment(ctx, req.Project, req.Domain) + storedAssignment, err := m.fetchClusterAssignment(ctx, req.GetProject(), req.GetDomain()) if err != nil { return nil, err } @@ -421,7 +421,7 @@ func (m *ExecutionManager) getClusterAssignment(ctx context.Context, req *admin. 
} if reqPool != storedPool { - return nil, errors.NewFlyteAdminErrorf(codes.InvalidArgument, "execution with project %q and domain %q cannot run on cluster pool %q, because its configured to run on pool %q", req.Project, req.Domain, reqPool, storedPool) + return nil, errors.NewFlyteAdminErrorf(codes.InvalidArgument, "execution with project %q and domain %q cannot run on cluster pool %q, because its configured to run on pool %q", req.GetProject(), req.GetDomain(), reqPool, storedPool) } return storedAssignment, nil @@ -454,10 +454,10 @@ func (m *ExecutionManager) launchSingleTaskExecution( context.Context, *models.Execution, error) { taskModel, err := m.db.TaskRepo().Get(ctx, repositoryInterfaces.Identifier{ - Project: request.Spec.LaunchPlan.Project, - Domain: request.Spec.LaunchPlan.Domain, - Name: request.Spec.LaunchPlan.Name, - Version: request.Spec.LaunchPlan.Version, + Project: request.GetSpec().GetLaunchPlan().GetProject(), + Domain: request.GetSpec().GetLaunchPlan().GetDomain(), + Name: request.GetSpec().GetLaunchPlan().GetName(), + Version: request.GetSpec().GetLaunchPlan().GetVersion(), }) if err != nil { return nil, nil, err @@ -468,7 +468,7 @@ func (m *ExecutionManager) launchSingleTaskExecution( } // Prepare a skeleton workflow and launch plan - taskIdentifier := request.Spec.LaunchPlan + taskIdentifier := request.GetSpec().GetLaunchPlan() workflowModel, err := util.CreateOrGetWorkflowModel(ctx, request, m.db, m.workflowManager, m.namedEntityManager, taskIdentifier, &task) if err != nil { @@ -481,27 +481,27 @@ func (m *ExecutionManager) launchSingleTaskExecution( } launchPlan, err := util.CreateOrGetLaunchPlan(ctx, m.db, m.config, m.namedEntityManager, taskIdentifier, - workflow.Closure.CompiledWorkflow.Primary.Template.Interface, workflowModel.ID, request.Spec) + workflow.GetClosure().GetCompiledWorkflow().GetPrimary().GetTemplate().GetInterface(), workflowModel.ID, request.GetSpec()) if err != nil { return nil, nil, err } executionInputs, err := 
validation.CheckAndFetchInputsForExecution( - request.Inputs, - launchPlan.Spec.FixedInputs, - launchPlan.Closure.ExpectedInputs, + request.GetInputs(), + launchPlan.GetSpec().GetFixedInputs(), + launchPlan.GetClosure().GetExpectedInputs(), ) if err != nil { logger.Debugf(ctx, "Failed to CheckAndFetchInputsForExecution with request.Inputs: %+v"+ "fixed inputs: %+v and expected inputs: %+v with err %v", - request.Inputs, launchPlan.Spec.FixedInputs, launchPlan.Closure.ExpectedInputs, err) + request.GetInputs(), launchPlan.GetSpec().GetFixedInputs(), launchPlan.GetClosure().GetExpectedInputs(), err) return nil, nil, err } name := util.GetExecutionName(request) workflowExecutionID := &core.WorkflowExecutionIdentifier{ - Project: request.Project, - Domain: request.Domain, + Project: request.GetProject(), + Domain: request.GetDomain(), Name: name, } @@ -519,15 +519,15 @@ func (m *ExecutionManager) launchSingleTaskExecution( offloadInputsGroup.Go(func() error { var err error inputsURI, err = common.OffloadLiteralMap(offloadInputsGroupCtx, m.storageClient, executionInputs, // or request.Inputs? 
- workflowExecutionID.Project, workflowExecutionID.Domain, workflowExecutionID.Name, shared.Inputs) + workflowExecutionID.GetProject(), workflowExecutionID.GetDomain(), workflowExecutionID.GetName(), shared.Inputs) return err }) var userInputsURI storage.DataReference offloadInputsGroup.Go(func() error { var err error - userInputsURI, err = common.OffloadLiteralMap(offloadInputsGroupCtx, m.storageClient, request.Inputs, - workflowExecutionID.Project, workflowExecutionID.Domain, workflowExecutionID.Name, shared.UserInputs) + userInputsURI, err = common.OffloadLiteralMap(offloadInputsGroupCtx, m.storageClient, request.GetInputs(), + workflowExecutionID.GetProject(), workflowExecutionID.GetDomain(), workflowExecutionID.GetName(), shared.UserInputs) return err }) @@ -535,15 +535,15 @@ func (m *ExecutionManager) launchSingleTaskExecution( if err != nil { return nil, nil, err } - closure.CreatedAt = workflow.Closure.CreatedAt + closure.CreatedAt = workflow.GetClosure().GetCreatedAt() workflow.Closure = closure ctx = getExecutionContext(ctx, workflowExecutionID) namespace := common.GetNamespaceName( - m.config.NamespaceMappingConfiguration().GetNamespaceTemplate(), workflowExecutionID.Project, workflowExecutionID.Domain) + m.config.NamespaceMappingConfiguration().GetNamespaceTemplate(), workflowExecutionID.GetProject(), workflowExecutionID.GetDomain()) - requestSpec := request.Spec - if requestSpec.Metadata == nil { + requestSpec := request.GetSpec() + if requestSpec.GetMetadata() == nil { requestSpec.Metadata = &admin.ExecutionMetadata{} } requestSpec.Metadata.Principal = getUser(ctx) @@ -557,13 +557,13 @@ func (m *ExecutionManager) launchSingleTaskExecution( } // Dynamically assign task resource defaults. 
- platformTaskResources := util.GetTaskResources(ctx, workflow.Id, m.resourceManager, m.config.TaskResourceConfiguration()) - for _, t := range workflow.Closure.CompiledWorkflow.Tasks { + platformTaskResources := util.GetTaskResources(ctx, workflow.GetId(), m.resourceManager, m.config.TaskResourceConfiguration()) + for _, t := range workflow.GetClosure().GetCompiledWorkflow().GetTasks() { m.setCompiledTaskDefaults(ctx, t, platformTaskResources) } // Dynamically assign execution queues. - m.populateExecutionQueue(ctx, workflow.Id, workflow.Closure.CompiledWorkflow) + m.populateExecutionQueue(ctx, workflow.GetId(), workflow.GetClosure().GetCompiledWorkflow()) executionConfig, err := m.getExecutionConfig(ctx, request, nil) if err != nil { @@ -571,23 +571,23 @@ func (m *ExecutionManager) launchSingleTaskExecution( } var labels map[string]string - if executionConfig.Labels != nil { - labels = executionConfig.Labels.Values + if executionConfig.GetLabels() != nil { + labels = executionConfig.GetLabels().GetValues() } - labels, err = m.addProjectLabels(ctx, request.Project, labels) + labels, err = m.addProjectLabels(ctx, request.GetProject(), labels) if err != nil { return nil, nil, err } var annotations map[string]string - if executionConfig.Annotations != nil { - annotations = executionConfig.Annotations.Values + if executionConfig.GetAnnotations() != nil { + annotations = executionConfig.GetAnnotations().GetValues() } var rawOutputDataConfig *admin.RawOutputDataConfig - if executionConfig.RawOutputDataConfig != nil { - rawOutputDataConfig = executionConfig.RawOutputDataConfig + if executionConfig.GetRawOutputDataConfig() != nil { + rawOutputDataConfig = executionConfig.GetRawOutputDataConfig() } clusterAssignment, err := m.getClusterAssignment(ctx, request) @@ -596,8 +596,8 @@ func (m *ExecutionManager) launchSingleTaskExecution( } var executionClusterLabel *admin.ExecutionClusterLabel - if requestSpec.ExecutionClusterLabel != nil { - executionClusterLabel = 
requestSpec.ExecutionClusterLabel + if requestSpec.GetExecutionClusterLabel() != nil { + executionClusterLabel = requestSpec.GetExecutionClusterLabel() } executionParameters := workflowengineInterfaces.ExecutionParameters{ Inputs: executionInputs, @@ -613,16 +613,16 @@ func (m *ExecutionManager) launchSingleTaskExecution( ExecutionClusterLabel: executionClusterLabel, } - overrides, err := m.addPluginOverrides(ctx, workflowExecutionID, workflowExecutionID.Name, "") + overrides, err := m.addPluginOverrides(ctx, workflowExecutionID, workflowExecutionID.GetName(), "") if err != nil { return nil, nil, err } if overrides != nil { executionParameters.TaskPluginOverrides = overrides } - if request.Spec.Metadata != nil && request.Spec.Metadata.ReferenceExecution != nil && - request.Spec.Metadata.Mode == admin.ExecutionMetadata_RECOVERED { - executionParameters.RecoveryExecution = request.Spec.Metadata.ReferenceExecution + if request.GetSpec().GetMetadata() != nil && request.GetSpec().GetMetadata().GetReferenceExecution() != nil && + request.GetSpec().GetMetadata().GetMode() == admin.ExecutionMetadata_RECOVERED { + executionParameters.RecoveryExecution = request.GetSpec().GetMetadata().GetReferenceExecution() } err = offloadInputsGroup.Wait() @@ -634,9 +634,9 @@ func (m *ExecutionManager) launchSingleTaskExecution( execInfo, err := workflowExecutor.Execute(ctx, workflowengineInterfaces.ExecutionData{ Namespace: namespace, ExecutionID: workflowExecutionID, - ReferenceWorkflowName: workflow.Id.Name, - ReferenceLaunchPlanName: launchPlan.Id.Name, - WorkflowClosure: workflow.Closure.CompiledWorkflow, + ReferenceWorkflowName: workflow.GetId().GetName(), + ReferenceLaunchPlanName: launchPlan.GetId().GetName(), + WorkflowClosure: workflow.GetClosure().GetCompiledWorkflow(), WorkflowClosureReference: storage.DataReference(workflowModel.RemoteClosureIdentifier), ExecutionParameters: executionParameters, OffloadedInputsReference: inputsURI, @@ -645,7 +645,7 @@ func (m 
*ExecutionManager) launchSingleTaskExecution( if err != nil { m.systemMetrics.PropellerFailures.Inc() logger.Infof(ctx, "Failed to execute workflow %+v with execution id %+v and inputs %+v with err %v", - request, &workflowExecutionID, request.Inputs, err) + request, &workflowExecutionID, request.GetInputs(), err) return nil, nil, err } executionCreatedAt := time.Now() @@ -655,13 +655,13 @@ func (m *ExecutionManager) launchSingleTaskExecution( // Request notification settings takes precedence over the launch plan settings. // If there is no notification in the request and DisableAll is not true, use the settings from the launch plan. var notificationsSettings []*admin.Notification - if launchPlan.Spec.GetEntityMetadata() != nil { - notificationsSettings = launchPlan.Spec.EntityMetadata.GetNotifications() + if launchPlan.GetSpec().GetEntityMetadata() != nil { + notificationsSettings = launchPlan.GetSpec().GetEntityMetadata().GetNotifications() } - if request.Spec.GetNotifications() != nil && request.Spec.GetNotifications().Notifications != nil && - len(request.Spec.GetNotifications().Notifications) > 0 { - notificationsSettings = request.Spec.GetNotifications().Notifications - } else if request.Spec.GetDisableAll() { + if request.GetSpec().GetNotifications() != nil && request.GetSpec().GetNotifications().GetNotifications() != nil && + len(request.GetSpec().GetNotifications().GetNotifications()) > 0 { + notificationsSettings = request.GetSpec().GetNotifications().GetNotifications() + } else if request.GetSpec().GetDisableAll() { notificationsSettings = make([]*admin.Notification, 0) } @@ -673,14 +673,14 @@ func (m *ExecutionManager) launchSingleTaskExecution( // The execution is not considered running until the propeller sends a specific event saying so. 
CreatedAt: m._clock.Now(), Notifications: notificationsSettings, - WorkflowIdentifier: workflow.Id, + WorkflowIdentifier: workflow.GetId(), ParentNodeExecutionID: parentNodeExecutionID, SourceExecutionID: sourceExecutionID, Cluster: execInfo.Cluster, InputsURI: inputsURI, UserInputsURI: userInputsURI, - SecurityContext: executionConfig.SecurityContext, - LaunchEntity: taskIdentifier.ResourceType, + SecurityContext: executionConfig.GetSecurityContext(), + LaunchEntity: taskIdentifier.GetResourceType(), Namespace: namespace, }) if err != nil { @@ -688,27 +688,27 @@ func (m *ExecutionManager) launchSingleTaskExecution( workflowExecutionID, err) return nil, nil, err } - m.userMetrics.WorkflowExecutionInputBytes.Observe(float64(proto.Size(request.Inputs))) + m.userMetrics.WorkflowExecutionInputBytes.Observe(float64(proto.Size(request.GetInputs()))) return ctx, executionModel, nil } func resolveAuthRole(request *admin.ExecutionCreateRequest, launchPlan *admin.LaunchPlan) *admin.AuthRole { - if request.Spec.AuthRole != nil { - return request.Spec.AuthRole + if request.GetSpec().GetAuthRole() != nil { + return request.GetSpec().GetAuthRole() } - if launchPlan == nil || launchPlan.Spec == nil { + if launchPlan == nil || launchPlan.GetSpec() == nil { return &admin.AuthRole{} } // Set role permissions based on launch plan Auth values. 
// The branched-ness of this check is due to the presence numerous deprecated fields - if launchPlan.Spec.GetAuthRole() != nil { - return launchPlan.Spec.GetAuthRole() + if launchPlan.GetSpec().GetAuthRole() != nil { + return launchPlan.GetSpec().GetAuthRole() } else if launchPlan.GetSpec().GetAuth() != nil { return &admin.AuthRole{ - AssumableIamRole: launchPlan.GetSpec().GetAuth().AssumableIamRole, - KubernetesServiceAccount: launchPlan.GetSpec().GetAuth().KubernetesServiceAccount, + AssumableIamRole: launchPlan.GetSpec().GetAuth().GetAssumableIamRole(), + KubernetesServiceAccount: launchPlan.GetSpec().GetAuth().GetKubernetesServiceAccount(), } } else if len(launchPlan.GetSpec().GetRole()) > 0 { return &admin.AuthRole{ @@ -722,17 +722,17 @@ func resolveAuthRole(request *admin.ExecutionCreateRequest, launchPlan *admin.La func resolveSecurityCtx(ctx context.Context, executionConfigSecurityCtx *core.SecurityContext, resolvedAuthRole *admin.AuthRole) *core.SecurityContext { // Use security context from the executionConfigSecurityCtx if its set and non empty or else resolve from authRole - if executionConfigSecurityCtx != nil && executionConfigSecurityCtx.RunAs != nil && - (len(executionConfigSecurityCtx.RunAs.K8SServiceAccount) > 0 || - len(executionConfigSecurityCtx.RunAs.IamRole) > 0 || - len(executionConfigSecurityCtx.RunAs.ExecutionIdentity) > 0) { + if executionConfigSecurityCtx != nil && executionConfigSecurityCtx.GetRunAs() != nil && + (len(executionConfigSecurityCtx.GetRunAs().GetK8SServiceAccount()) > 0 || + len(executionConfigSecurityCtx.GetRunAs().GetIamRole()) > 0 || + len(executionConfigSecurityCtx.GetRunAs().GetExecutionIdentity()) > 0) { return executionConfigSecurityCtx } logger.Warn(ctx, "Setting security context from auth Role") return &core.SecurityContext{ RunAs: &core.Identity{ - IamRole: resolvedAuthRole.AssumableIamRole, - K8SServiceAccount: resolvedAuthRole.KubernetesServiceAccount, + IamRole: resolvedAuthRole.GetAssumableIamRole(), + 
K8SServiceAccount: resolvedAuthRole.GetKubernetesServiceAccount(), }, } } @@ -755,7 +755,7 @@ func (m *ExecutionManager) getStringFromInput(ctx context.Context, inputBinding case *core.Primitive_Integer: strVal = p.GetStringValue() case *core.Primitive_Datetime: - t := time.Unix(p.GetDatetime().Seconds, int64(p.GetDatetime().Nanos)) + t := time.Unix(p.GetDatetime().GetSeconds(), int64(p.GetDatetime().GetNanos())) t = t.In(time.UTC) strVal = t.Format("2006-01-02") case *core.Primitive_StringValue: @@ -812,7 +812,7 @@ func (m *ExecutionManager) fillInTemplateArgs(ctx context.Context, query *core.A var partitions map[string]*core.LabelValue if artifactID.GetPartitions().GetValue() != nil { - partitions = make(map[string]*core.LabelValue, len(artifactID.GetPartitions().Value)) + partitions = make(map[string]*core.LabelValue, len(artifactID.GetPartitions().GetValue())) for k, v := range artifactID.GetPartitions().GetValue() { newValue, err := m.getLabelValue(ctx, v, inputs) if err != nil { @@ -825,20 +825,20 @@ func (m *ExecutionManager) fillInTemplateArgs(ctx context.Context, query *core.A var timePartition *core.TimePartition if artifactID.GetTimePartition().GetValue() != nil { - if artifactID.GetTimePartition().Value.GetTimeValue() != nil { + if artifactID.GetTimePartition().GetValue().GetTimeValue() != nil { // If the time value is set, then just pass it through, nothing to fill in. 
timePartition = artifactID.GetTimePartition() - } else if artifactID.GetTimePartition().Value.GetInputBinding() != nil { + } else if artifactID.GetTimePartition().GetValue().GetInputBinding() != nil { // Evaluate the time partition input binding - lit, ok := inputs[artifactID.GetTimePartition().Value.GetInputBinding().GetVar()] + lit, ok := inputs[artifactID.GetTimePartition().GetValue().GetInputBinding().GetVar()] if !ok { - return query, errors.NewFlyteAdminErrorf(codes.InvalidArgument, "time partition input binding var [%s] not found in inputs %v", artifactID.GetTimePartition().Value.GetInputBinding().GetVar(), inputs) + return query, errors.NewFlyteAdminErrorf(codes.InvalidArgument, "time partition input binding var [%s] not found in inputs %v", artifactID.GetTimePartition().GetValue().GetInputBinding().GetVar(), inputs) } if lit.GetScalar().GetPrimitive().GetDatetime() == nil { return query, errors.NewFlyteAdminErrorf(codes.InvalidArgument, "time partition binding to input var [%s] failing because %v is not a datetime", - artifactID.GetTimePartition().Value.GetInputBinding().GetVar(), lit) + artifactID.GetTimePartition().GetValue().GetInputBinding().GetVar(), lit) } timePartition = &core.TimePartition{ Value: &core.LabelValue{ @@ -881,8 +881,8 @@ func (m *ExecutionManager) launchExecutionAndPrepareModel( return nil, nil, nil, err } - if request.Spec.LaunchPlan.ResourceType == core.ResourceType_TASK { - logger.Debugf(ctx, "Launching single task execution with [%+v]", request.Spec.LaunchPlan) + if request.GetSpec().GetLaunchPlan().GetResourceType() == core.ResourceType_TASK { + logger.Debugf(ctx, "Launching single task execution with [%+v]", request.GetSpec().GetLaunchPlan()) // When tasks can have defaults this will need to handle Artifacts as well. 
ctx, model, err := m.launchSingleTaskExecution(ctx, request, requestedAt) return ctx, model, nil, err @@ -892,7 +892,7 @@ func (m *ExecutionManager) launchExecutionAndPrepareModel( func (m *ExecutionManager) launchExecution( ctx context.Context, request *admin.ExecutionCreateRequest, requestedAt time.Time) (context.Context, *models.Execution, []*models.ExecutionTag, error) { - launchPlanModel, err := util.GetLaunchPlanModel(ctx, m.db, request.Spec.LaunchPlan) + launchPlanModel, err := util.GetLaunchPlanModel(ctx, m.db, request.GetSpec().GetLaunchPlan()) if err != nil { logger.Debugf(ctx, "Failed to get launch plan model for ExecutionCreateRequest %+v with err %v", request, err) return nil, nil, nil, err @@ -905,38 +905,38 @@ func (m *ExecutionManager) launchExecution( var lpExpectedInputs *core.ParameterMap var usedArtifactIDs []*core.ArtifactID - lpExpectedInputs = launchPlan.Closure.ExpectedInputs + lpExpectedInputs = launchPlan.GetClosure().GetExpectedInputs() // Artifacts retrieved will need to be stored somewhere to ensure that we can re-emit events if necessary // in the future, and also to make sure that relaunch and recover can use it if necessary. 
executionInputs, err := validation.CheckAndFetchInputsForExecution( - request.Inputs, - launchPlan.Spec.FixedInputs, + request.GetInputs(), + launchPlan.GetSpec().GetFixedInputs(), lpExpectedInputs, ) if err != nil { logger.Debugf(ctx, "Failed to CheckAndFetchInputsForExecution with request.Inputs: %+v"+ "fixed inputs: %+v and expected inputs: %+v with err %v", - request.Inputs, launchPlan.Spec.FixedInputs, lpExpectedInputs, err) + request.GetInputs(), launchPlan.GetSpec().GetFixedInputs(), lpExpectedInputs, err) return nil, nil, nil, err } - workflowModel, err := util.GetWorkflowModel(ctx, m.db, launchPlan.Spec.WorkflowId) + workflowModel, err := util.GetWorkflowModel(ctx, m.db, launchPlan.GetSpec().GetWorkflowId()) if err != nil { - logger.Debugf(ctx, "Failed to get workflow with id %+v with err %v", launchPlan.Spec.WorkflowId, err) + logger.Debugf(ctx, "Failed to get workflow with id %+v with err %v", launchPlan.GetSpec().GetWorkflowId(), err) return nil, nil, nil, err } workflow, err := transformers.FromWorkflowModel(workflowModel) if err != nil { - logger.Debugf(ctx, "Failed to get workflow with id %+v with err %v", launchPlan.Spec.WorkflowId, err) + logger.Debugf(ctx, "Failed to get workflow with id %+v with err %v", launchPlan.GetSpec().GetWorkflowId(), err) return nil, nil, nil, err } name := util.GetExecutionName(request) workflowExecutionID := &core.WorkflowExecutionIdentifier{ - Project: request.Project, - Domain: request.Domain, + Project: request.GetProject(), + Domain: request.GetDomain(), Name: name, } @@ -947,7 +947,7 @@ func (m *ExecutionManager) launchExecution( var err error closure, err = util.FetchAndGetWorkflowClosure(groupCtx, m.storageClient, workflowModel.RemoteClosureIdentifier) if err != nil { - logger.Debugf(ctx, "Failed to get workflow with id %+v with err %v", launchPlan.Spec.WorkflowId, err) + logger.Debugf(ctx, "Failed to get workflow with id %+v with err %v", launchPlan.GetSpec().GetWorkflowId(), err) } return err }) @@ -956,15 
+956,15 @@ func (m *ExecutionManager) launchExecution( group.Go(func() error { var err error inputsURI, err = common.OffloadLiteralMap(groupCtx, m.storageClient, executionInputs, - workflowExecutionID.Project, workflowExecutionID.Domain, workflowExecutionID.Name, shared.Inputs) + workflowExecutionID.GetProject(), workflowExecutionID.GetDomain(), workflowExecutionID.GetName(), shared.Inputs) return err }) var userInputsURI storage.DataReference group.Go(func() error { var err error - userInputsURI, err = common.OffloadLiteralMap(groupCtx, m.storageClient, request.Inputs, - workflowExecutionID.Project, workflowExecutionID.Domain, workflowExecutionID.Name, shared.UserInputs) + userInputsURI, err = common.OffloadLiteralMap(groupCtx, m.storageClient, request.GetInputs(), + workflowExecutionID.GetProject(), workflowExecutionID.GetDomain(), workflowExecutionID.GetName(), shared.UserInputs) return err }) @@ -972,12 +972,12 @@ func (m *ExecutionManager) launchExecution( if err != nil { return nil, nil, nil, err } - closure.CreatedAt = workflow.Closure.CreatedAt + closure.CreatedAt = workflow.GetClosure().GetCreatedAt() workflow.Closure = closure ctx = getExecutionContext(ctx, workflowExecutionID) - var requestSpec = request.Spec - if requestSpec.Metadata == nil { + var requestSpec = request.GetSpec() + if requestSpec.GetMetadata() == nil { requestSpec.Metadata = &admin.ExecutionMetadata{} } requestSpec.Metadata.Principal = getUser(ctx) @@ -992,13 +992,13 @@ func (m *ExecutionManager) launchExecution( } // Dynamically assign task resource defaults. 
- platformTaskResources := util.GetTaskResources(ctx, workflow.Id, m.resourceManager, m.config.TaskResourceConfiguration()) - for _, task := range workflow.Closure.CompiledWorkflow.Tasks { + platformTaskResources := util.GetTaskResources(ctx, workflow.GetId(), m.resourceManager, m.config.TaskResourceConfiguration()) + for _, task := range workflow.GetClosure().GetCompiledWorkflow().GetTasks() { m.setCompiledTaskDefaults(ctx, task, platformTaskResources) } // Dynamically assign execution queues. - m.populateExecutionQueue(ctx, workflow.Id, workflow.Closure.CompiledWorkflow) + m.populateExecutionQueue(ctx, workflow.GetId(), workflow.GetClosure().GetCompiledWorkflow()) executionConfig, err := m.getExecutionConfig(ctx, request, launchPlan) if err != nil { @@ -1006,23 +1006,23 @@ func (m *ExecutionManager) launchExecution( } namespace := common.GetNamespaceName( - m.config.NamespaceMappingConfiguration().GetNamespaceTemplate(), workflowExecutionID.Project, workflowExecutionID.Domain) + m.config.NamespaceMappingConfiguration().GetNamespaceTemplate(), workflowExecutionID.GetProject(), workflowExecutionID.GetDomain()) - labels, err := resolveStringMap(executionConfig.GetLabels(), launchPlan.Spec.Labels, "labels", m.config.RegistrationValidationConfiguration().GetMaxLabelEntries()) + labels, err := resolveStringMap(executionConfig.GetLabels(), launchPlan.GetSpec().GetLabels(), "labels", m.config.RegistrationValidationConfiguration().GetMaxLabelEntries()) if err != nil { return nil, nil, nil, err } - labels, err = m.addProjectLabels(ctx, request.Project, labels) + labels, err = m.addProjectLabels(ctx, request.GetProject(), labels) if err != nil { return nil, nil, nil, err } - annotations, err := resolveStringMap(executionConfig.GetAnnotations(), launchPlan.Spec.Annotations, "annotations", m.config.RegistrationValidationConfiguration().GetMaxAnnotationEntries()) + annotations, err := resolveStringMap(executionConfig.GetAnnotations(), launchPlan.GetSpec().GetAnnotations(), 
"annotations", m.config.RegistrationValidationConfiguration().GetMaxAnnotationEntries()) if err != nil { return nil, nil, nil, err } var rawOutputDataConfig *admin.RawOutputDataConfig - if executionConfig.RawOutputDataConfig != nil { - rawOutputDataConfig = executionConfig.RawOutputDataConfig + if executionConfig.GetRawOutputDataConfig() != nil { + rawOutputDataConfig = executionConfig.GetRawOutputDataConfig() } clusterAssignment, err := m.getClusterAssignment(ctx, request) @@ -1031,8 +1031,8 @@ func (m *ExecutionManager) launchExecution( } var executionClusterLabel *admin.ExecutionClusterLabel - if requestSpec.ExecutionClusterLabel != nil { - executionClusterLabel = requestSpec.ExecutionClusterLabel + if requestSpec.GetExecutionClusterLabel() != nil { + executionClusterLabel = requestSpec.GetExecutionClusterLabel() } executionParameters := workflowengineInterfaces.ExecutionParameters{ @@ -1049,7 +1049,7 @@ func (m *ExecutionManager) launchExecution( ExecutionClusterLabel: executionClusterLabel, } - overrides, err := m.addPluginOverrides(ctx, workflowExecutionID, launchPlan.GetSpec().WorkflowId.Name, launchPlan.Id.Name) + overrides, err := m.addPluginOverrides(ctx, workflowExecutionID, launchPlan.GetSpec().GetWorkflowId().GetName(), launchPlan.GetId().GetName()) if err != nil { return nil, nil, nil, err } @@ -1057,9 +1057,9 @@ func (m *ExecutionManager) launchExecution( executionParameters.TaskPluginOverrides = overrides } - if request.Spec.Metadata != nil && request.Spec.Metadata.ReferenceExecution != nil && - request.Spec.Metadata.Mode == admin.ExecutionMetadata_RECOVERED { - executionParameters.RecoveryExecution = request.Spec.Metadata.ReferenceExecution + if request.GetSpec().GetMetadata() != nil && request.GetSpec().GetMetadata().GetReferenceExecution() != nil && + request.GetSpec().GetMetadata().GetMode() == admin.ExecutionMetadata_RECOVERED { + executionParameters.RecoveryExecution = request.GetSpec().GetMetadata().GetReferenceExecution() } 
executionCreatedAt := time.Now() @@ -1068,12 +1068,12 @@ func (m *ExecutionManager) launchExecution( // Request notification settings takes precedence over the launch plan settings. // If there is no notification in the request and DisableAll is not true, use the settings from the launch plan. var notificationsSettings []*admin.Notification - if launchPlan.Spec.GetEntityMetadata() != nil { - notificationsSettings = launchPlan.Spec.EntityMetadata.GetNotifications() + if launchPlan.GetSpec().GetEntityMetadata() != nil { + notificationsSettings = launchPlan.GetSpec().GetEntityMetadata().GetNotifications() } - if requestSpec.GetNotifications() != nil && requestSpec.GetNotifications().Notifications != nil && - len(requestSpec.GetNotifications().Notifications) > 0 { - notificationsSettings = requestSpec.GetNotifications().Notifications + if requestSpec.GetNotifications() != nil && requestSpec.GetNotifications().GetNotifications() != nil && + len(requestSpec.GetNotifications().GetNotifications()) > 0 { + notificationsSettings = requestSpec.GetNotifications().GetNotifications() } else if requestSpec.GetDisableAll() { notificationsSettings = make([]*admin.Notification, 0) } @@ -1086,13 +1086,13 @@ func (m *ExecutionManager) launchExecution( // The execution is not considered running until the propeller sends a specific event saying so. 
CreatedAt: m._clock.Now(), Notifications: notificationsSettings, - WorkflowIdentifier: workflow.Id, + WorkflowIdentifier: workflow.GetId(), ParentNodeExecutionID: parentNodeExecutionID, SourceExecutionID: sourceExecutionID, InputsURI: inputsURI, UserInputsURI: userInputsURI, - SecurityContext: executionConfig.SecurityContext, - LaunchEntity: launchPlan.Id.ResourceType, + SecurityContext: executionConfig.GetSecurityContext(), + LaunchEntity: launchPlan.GetId().GetResourceType(), Namespace: namespace, } @@ -1100,9 +1100,9 @@ func (m *ExecutionManager) launchExecution( execInfo, execErr := workflowExecutor.Execute(ctx, workflowengineInterfaces.ExecutionData{ Namespace: namespace, ExecutionID: workflowExecutionID, - ReferenceWorkflowName: workflow.Id.Name, - ReferenceLaunchPlanName: launchPlan.Id.Name, - WorkflowClosure: workflow.Closure.CompiledWorkflow, + ReferenceWorkflowName: workflow.GetId().GetName(), + ReferenceLaunchPlanName: launchPlan.GetId().GetName(), + WorkflowClosure: workflow.GetClosure().GetCompiledWorkflow(), WorkflowClosureReference: storage.DataReference(workflowModel.RemoteClosureIdentifier), ExecutionParameters: executionParameters, OffloadedInputsReference: inputsURI, @@ -1160,7 +1160,7 @@ func (m *ExecutionManager) CreateExecution( *admin.ExecutionCreateResponse, error) { // Prior to flyteidl v0.15.0, Inputs was held in ExecutionSpec. Ensure older clients continue to work. 
- if request.Inputs == nil || len(request.Inputs.Literals) == 0 { + if request.GetInputs() == nil || len(request.GetInputs().GetLiterals()) == 0 { request.Inputs = request.GetSpec().GetInputs() } var executionModel *models.Execution @@ -1182,7 +1182,7 @@ func (m *ExecutionManager) CreateExecution( func (m *ExecutionManager) RelaunchExecution( ctx context.Context, request *admin.ExecutionRelaunchRequest, requestedAt time.Time) ( *admin.ExecutionCreateResponse, error) { - existingExecutionModel, err := util.GetExecutionModel(ctx, m.db, request.Id) + existingExecutionModel, err := util.GetExecutionModel(ctx, m.db, request.GetId()) if err != nil { logger.Debugf(ctx, "Failed to get execution model for request [%+v] with err %v", request, err) return nil, err @@ -1192,8 +1192,8 @@ func (m *ExecutionManager) RelaunchExecution( return nil, err } - executionSpec := existingExecution.Spec - if executionSpec.Metadata == nil { + executionSpec := existingExecution.GetSpec() + if executionSpec.GetMetadata() == nil { executionSpec.Metadata = &admin.ExecutionMetadata{} } var inputs *core.LiteralMap @@ -1209,17 +1209,17 @@ func (m *ExecutionManager) RelaunchExecution( if err != nil { return nil, errors.NewFlyteAdminErrorf(codes.Internal, "failed to unmarshal spec") } - inputs = spec.Inputs + inputs = spec.GetInputs() } executionSpec.Metadata.Mode = admin.ExecutionMetadata_RELAUNCH - executionSpec.Metadata.ReferenceExecution = existingExecution.Id + executionSpec.Metadata.ReferenceExecution = existingExecution.GetId() executionSpec.OverwriteCache = request.GetOverwriteCache() var executionModel *models.Execution var executionTagModel []*models.ExecutionTag ctx, executionModel, executionTagModel, err = m.launchExecutionAndPrepareModel(ctx, &admin.ExecutionCreateRequest{ - Project: request.Id.Project, - Domain: request.Id.Domain, - Name: request.Name, + Project: request.GetId().GetProject(), + Domain: request.GetId().GetDomain(), + Name: request.GetName(), Spec: executionSpec, Inputs: 
inputs, }, requestedAt) @@ -1231,7 +1231,7 @@ func (m *ExecutionManager) RelaunchExecution( if err != nil { return nil, err } - logger.Debugf(ctx, "Successfully relaunched [%+v] as [%+v]", request.Id, workflowExecutionIdentifier) + logger.Debugf(ctx, "Successfully relaunched [%+v] as [%+v]", request.GetId(), workflowExecutionIdentifier) return &admin.ExecutionCreateResponse{ Id: workflowExecutionIdentifier, }, nil @@ -1240,7 +1240,7 @@ func (m *ExecutionManager) RelaunchExecution( func (m *ExecutionManager) RecoverExecution( ctx context.Context, request *admin.ExecutionRecoverRequest, requestedAt time.Time) ( *admin.ExecutionCreateResponse, error) { - existingExecutionModel, err := util.GetExecutionModel(ctx, m.db, request.Id) + existingExecutionModel, err := util.GetExecutionModel(ctx, m.db, request.GetId()) if err != nil { logger.Debugf(ctx, "Failed to get execution model for request [%+v] with err %v", request, err) return nil, err @@ -1250,8 +1250,8 @@ func (m *ExecutionManager) RecoverExecution( return nil, err } - executionSpec := existingExecution.Spec - if executionSpec.Metadata == nil { + executionSpec := existingExecution.GetSpec() + if executionSpec.GetMetadata() == nil { executionSpec.Metadata = &admin.ExecutionMetadata{} } var inputs *core.LiteralMap @@ -1261,17 +1261,17 @@ func (m *ExecutionManager) RecoverExecution( return nil, err } } - if request.Metadata != nil { - executionSpec.Metadata.ParentNodeExecution = request.Metadata.ParentNodeExecution + if request.GetMetadata() != nil { + executionSpec.Metadata.ParentNodeExecution = request.GetMetadata().GetParentNodeExecution() } executionSpec.Metadata.Mode = admin.ExecutionMetadata_RECOVERED - executionSpec.Metadata.ReferenceExecution = existingExecution.Id + executionSpec.Metadata.ReferenceExecution = existingExecution.GetId() var executionModel *models.Execution var executionTagModel []*models.ExecutionTag ctx, executionModel, executionTagModel, err = m.launchExecutionAndPrepareModel(ctx, 
&admin.ExecutionCreateRequest{ - Project: request.Id.Project, - Domain: request.Id.Domain, - Name: request.Name, + Project: request.GetId().GetProject(), + Domain: request.GetId().GetDomain(), + Name: request.GetName(), Spec: executionSpec, Inputs: inputs, }, requestedAt) @@ -1283,7 +1283,7 @@ func (m *ExecutionManager) RecoverExecution( if err != nil { return nil, err } - logger.Infof(ctx, "Successfully recovered [%+v] as [%+v]", request.Id, workflowExecutionIdentifier) + logger.Infof(ctx, "Successfully recovered [%+v] as [%+v]", request.GetId(), workflowExecutionIdentifier) return &admin.ExecutionCreateResponse{ Id: workflowExecutionIdentifier, }, nil @@ -1304,20 +1304,20 @@ func (m *ExecutionManager) emitScheduledWorkflowMetrics( "[%s/%s/%s]", executionModel.Project, executionModel.Domain, executionModel.Name) return } - launchPlan, err := util.GetLaunchPlan(context.Background(), m.db, execution.Spec.LaunchPlan) + launchPlan, err := util.GetLaunchPlan(context.Background(), m.db, execution.GetSpec().GetLaunchPlan()) if err != nil { logger.Warningf(context.Background(), "failed to find launch plan when emitting scheduled workflow execution stats with for "+ - "execution: [%+v] and launch plan [%+v]", execution.Id, execution.Spec.LaunchPlan) + "execution: [%+v] and launch plan [%+v]", execution.GetId(), execution.GetSpec().GetLaunchPlan()) return } - if launchPlan.Spec.EntityMetadata == nil || - launchPlan.Spec.EntityMetadata.Schedule == nil || - launchPlan.Spec.EntityMetadata.Schedule.KickoffTimeInputArg == "" { + if launchPlan.GetSpec().GetEntityMetadata() == nil || + launchPlan.GetSpec().GetEntityMetadata().GetSchedule() == nil || + launchPlan.GetSpec().GetEntityMetadata().GetSchedule().GetKickoffTimeInputArg() == "" { // Kickoff time arguments aren't always required for scheduled workflows. 
logger.Debugf(context.Background(), "no kickoff time to report for scheduled workflow execution [%+v]", - execution.Id) + execution.GetId()) return } @@ -1327,13 +1327,13 @@ func (m *ExecutionManager) emitScheduledWorkflowMetrics( logger.Errorf(ctx, "Failed to find inputs for emitting schedule delay event from uri: [%v]", executionModel.InputsURI) return } - scheduledKickoffTimeProto := inputs.Literals[launchPlan.Spec.EntityMetadata.Schedule.KickoffTimeInputArg] + scheduledKickoffTimeProto := inputs.GetLiterals()[launchPlan.GetSpec().GetEntityMetadata().GetSchedule().GetKickoffTimeInputArg()] if scheduledKickoffTimeProto == nil || scheduledKickoffTimeProto.GetScalar() == nil || scheduledKickoffTimeProto.GetScalar().GetPrimitive() == nil || scheduledKickoffTimeProto.GetScalar().GetPrimitive().GetDatetime() == nil { logger.Warningf(context.Background(), "failed to find scheduled kickoff time datetime value for scheduled workflow execution [%+v] "+ - "although one was expected", execution.Id) + "although one was expected", execution.GetId()) return } scheduledKickoffTime, err := ptypes.Timestamp(scheduledKickoffTimeProto.GetScalar().GetPrimitive().GetDatetime()) @@ -1347,16 +1347,16 @@ func (m *ExecutionManager) emitScheduledWorkflowMetrics( return } - domainCounterMap, ok := m.userMetrics.ScheduledExecutionDelays[execution.Id.Project] + domainCounterMap, ok := m.userMetrics.ScheduledExecutionDelays[execution.GetId().GetProject()] if !ok { domainCounterMap = make(map[string]*promutils.StopWatch) - m.userMetrics.ScheduledExecutionDelays[execution.Id.Project] = domainCounterMap + m.userMetrics.ScheduledExecutionDelays[execution.GetId().GetProject()] = domainCounterMap } var watch *promutils.StopWatch - watch, ok = domainCounterMap[execution.Id.Domain] + watch, ok = domainCounterMap[execution.GetId().GetDomain()] if !ok { - newWatch, err := m.systemMetrics.Scope.NewSubScope(execution.Id.Project).NewSubScope(execution.Id.Domain).NewStopWatch( + newWatch, err := 
m.systemMetrics.Scope.NewSubScope(execution.GetId().GetProject()).NewSubScope(execution.GetId().GetDomain()).NewStopWatch( "scheduled_execution_delay", "delay between scheduled execution time and time execution was observed running", time.Nanosecond) @@ -1367,7 +1367,7 @@ func (m *ExecutionManager) emitScheduledWorkflowMetrics( return } watch = &newWatch - domainCounterMap[execution.Id.Domain] = watch + domainCounterMap[execution.GetId().GetDomain()] = watch } watch.Observe(scheduledKickoffTime, runningEventTime) } @@ -1421,30 +1421,30 @@ func (m *ExecutionManager) CreateWorkflowEvent(ctx context.Context, request *adm *admin.WorkflowExecutionEventResponse, error) { err := validation.ValidateCreateWorkflowEventRequest(request, m.config.ApplicationConfiguration().GetRemoteDataConfig().MaxSizeInBytes) if err != nil { - logger.Debugf(ctx, "received invalid CreateWorkflowEventRequest [%s]: %v", request.RequestId, err) + logger.Debugf(ctx, "received invalid CreateWorkflowEventRequest [%s]: %v", request.GetRequestId(), err) return nil, err } - ctx = getExecutionContext(ctx, request.Event.ExecutionId) + ctx = getExecutionContext(ctx, request.GetEvent().GetExecutionId()) logger.Debugf(ctx, "Received workflow execution event for [%+v] transitioning to phase [%v]", - request.Event.ExecutionId, request.Event.Phase) + request.GetEvent().GetExecutionId(), request.GetEvent().GetPhase()) - executionModel, err := util.GetExecutionModel(ctx, m.db, request.Event.ExecutionId) + executionModel, err := util.GetExecutionModel(ctx, m.db, request.GetEvent().GetExecutionId()) if err != nil { logger.Debugf(ctx, "failed to find execution [%+v] for recorded event [%s]: %v", - request.Event.ExecutionId, request.RequestId, err) + request.GetEvent().GetExecutionId(), request.GetRequestId(), err) return nil, err } wfExecPhase := core.WorkflowExecution_Phase(core.WorkflowExecution_Phase_value[executionModel.Phase]) // Subsequent queued events announcing a cluster reassignment are permitted. 
- if request.Event.Phase != core.WorkflowExecution_QUEUED { - if wfExecPhase == request.Event.Phase { + if request.GetEvent().GetPhase() != core.WorkflowExecution_QUEUED { + if wfExecPhase == request.GetEvent().GetPhase() { logger.Debugf(ctx, "This phase %s was already recorded for workflow execution %v", - wfExecPhase.String(), request.Event.ExecutionId) + wfExecPhase.String(), request.GetEvent().GetExecutionId()) return nil, errors.NewFlyteAdminErrorf(codes.AlreadyExists, "This phase %s was already recorded for workflow execution %v", - wfExecPhase.String(), request.Event.ExecutionId) - } else if err := validation.ValidateCluster(ctx, executionModel.Cluster, request.Event.ProducerId); err != nil { + wfExecPhase.String(), request.GetEvent().GetExecutionId()) + } else if err := validation.ValidateCluster(ctx, executionModel.Cluster, request.GetEvent().GetProducerId()); err != nil { // Only perform event cluster validation **after** an execution has moved on from QUEUED. return nil, err } @@ -1453,22 +1453,22 @@ func (m *ExecutionManager) CreateWorkflowEvent(ctx context.Context, request *adm if common.IsExecutionTerminal(wfExecPhase) { // Cannot go backwards in time from a terminal state to anything else curPhase := wfExecPhase.String() - errorMsg := fmt.Sprintf("Invalid phase change from %s to %s for workflow execution %v", curPhase, request.Event.Phase.String(), request.Event.ExecutionId) + errorMsg := fmt.Sprintf("Invalid phase change from %s to %s for workflow execution %v", curPhase, request.GetEvent().GetPhase().String(), request.GetEvent().GetExecutionId()) return nil, errors.NewAlreadyInTerminalStateError(ctx, errorMsg, curPhase) - } else if wfExecPhase == core.WorkflowExecution_RUNNING && request.Event.Phase == core.WorkflowExecution_QUEUED { + } else if wfExecPhase == core.WorkflowExecution_RUNNING && request.GetEvent().GetPhase() == core.WorkflowExecution_QUEUED { // Cannot go back in time from RUNNING -> QUEUED return nil, 
errors.NewFlyteAdminErrorf(codes.FailedPrecondition, "Cannot go from %s to %s for workflow execution %v", - wfExecPhase.String(), request.Event.Phase.String(), request.Event.ExecutionId) - } else if wfExecPhase == core.WorkflowExecution_ABORTING && !common.IsExecutionTerminal(request.Event.Phase) { + wfExecPhase.String(), request.GetEvent().GetPhase().String(), request.GetEvent().GetExecutionId()) + } else if wfExecPhase == core.WorkflowExecution_ABORTING && !common.IsExecutionTerminal(request.GetEvent().GetPhase()) { return nil, errors.NewFlyteAdminErrorf(codes.FailedPrecondition, - "Invalid phase change from aborting to %s for workflow execution %v", request.Event.Phase.String(), request.Event.ExecutionId) + "Invalid phase change from aborting to %s for workflow execution %v", request.GetEvent().GetPhase().String(), request.GetEvent().GetExecutionId()) } err = transformers.UpdateExecutionModelState(ctx, executionModel, request, m.config.ApplicationConfiguration().GetRemoteDataConfig().InlineEventDataPolicy, m.storageClient) if err != nil { logger.Debugf(ctx, "failed to transform updated workflow execution model [%+v] after receiving event with err: %v", - request.Event.ExecutionId, err) + request.GetEvent().GetExecutionId(), err) return nil, err } err = m.db.ExecutionRepo().Update(ctx, *executionModel) @@ -1479,28 +1479,28 @@ func (m *ExecutionManager) CreateWorkflowEvent(ctx context.Context, request *adm } m.dbEventWriter.Write(request) - if request.Event.Phase == core.WorkflowExecution_RUNNING { + if request.GetEvent().GetPhase() == core.WorkflowExecution_RUNNING { // Workflow executions are created in state "UNDEFINED". All the time up until a RUNNING event is received is // considered system-induced delay. 
if executionModel.Mode == int32(admin.ExecutionMetadata_SCHEDULED) { - go m.emitScheduledWorkflowMetrics(ctx, executionModel, request.Event.OccurredAt) + go m.emitScheduledWorkflowMetrics(ctx, executionModel, request.GetEvent().GetOccurredAt()) } - } else if common.IsExecutionTerminal(request.Event.Phase) { - if request.Event.Phase == core.WorkflowExecution_FAILED { + } else if common.IsExecutionTerminal(request.GetEvent().GetPhase()) { + if request.GetEvent().GetPhase() == core.WorkflowExecution_FAILED { // request.Event is expected to be of type WorkflowExecutionEvent_Error when workflow fails. // if not, log the error and continue - if err := request.Event.GetError(); err != nil { - ctx = context.WithValue(ctx, common.ErrorKindKey, err.Kind.String()) + if err := request.GetEvent().GetError(); err != nil { + ctx = context.WithValue(ctx, common.ErrorKindKey, err.GetKind().String()) } else { logger.Warning(ctx, "Failed to parse error for FAILED request [%+v]", request) } } m.systemMetrics.ActiveExecutions.Dec() - m.systemMetrics.ExecutionsTerminated.Inc(contextutils.WithPhase(ctx, request.Event.Phase.String())) - go m.emitOverallWorkflowExecutionTime(executionModel, request.Event.OccurredAt) - if request.Event.GetOutputData() != nil { - m.userMetrics.WorkflowExecutionOutputBytes.Observe(float64(proto.Size(request.Event.GetOutputData()))) + m.systemMetrics.ExecutionsTerminated.Inc(contextutils.WithPhase(ctx, request.GetEvent().GetPhase().String())) + go m.emitOverallWorkflowExecutionTime(executionModel, request.GetEvent().GetOccurredAt()) + if request.GetEvent().GetOutputData() != nil { + m.userMetrics.WorkflowExecutionOutputBytes.Observe(float64(proto.Size(request.GetEvent().GetOutputData()))) } err = m.publishNotifications(ctx, request, *executionModel) @@ -1515,14 +1515,14 @@ func (m *ExecutionManager) CreateWorkflowEvent(ctx context.Context, request *adm if err := m.eventPublisher.Publish(ctx, proto.MessageName(request), request); err != nil { 
m.systemMetrics.PublishEventError.Inc() - logger.Infof(ctx, "error publishing event [%+v] with err: [%v]", request.RequestId, err) + logger.Infof(ctx, "error publishing event [%+v] with err: [%v]", request.GetRequestId(), err) } go func() { ceCtx := context.TODO() if err := m.cloudEventPublisher.Publish(ceCtx, proto.MessageName(request), request); err != nil { m.systemMetrics.PublishEventError.Inc() - logger.Infof(ctx, "error publishing cloud event [%+v] with err: [%v]", request.RequestId, err) + logger.Infof(ctx, "error publishing cloud event [%+v] with err: [%v]", request.GetRequestId(), err) } }() @@ -1531,12 +1531,12 @@ func (m *ExecutionManager) CreateWorkflowEvent(ctx context.Context, request *adm func (m *ExecutionManager) GetExecution( ctx context.Context, request *admin.WorkflowExecutionGetRequest) (*admin.Execution, error) { - if err := validation.ValidateWorkflowExecutionIdentifier(request.Id); err != nil { + if err := validation.ValidateWorkflowExecutionIdentifier(request.GetId()); err != nil { logger.Debugf(ctx, "GetExecution request [%+v] failed validation with err: %v", request, err) return nil, err } - ctx = getExecutionContext(ctx, request.Id) - executionModel, err := util.GetExecutionModel(ctx, m.db, request.Id) + ctx = getExecutionContext(ctx, request.GetId()) + executionModel, err := util.GetExecutionModel(ctx, m.db, request.GetId()) if err != nil { logger.Debugf(ctx, "Failed to get execution model for request [%+v] with err: %v", request, err) return nil, err @@ -1547,7 +1547,7 @@ func (m *ExecutionManager) GetExecution( DefaultNamespace: namespace, }) if transformerErr != nil { - logger.Debugf(ctx, "Failed to transform execution model [%+v] to proto object with err: %v", request.Id, + logger.Debugf(ctx, "Failed to transform execution model [%+v] to proto object with err: %v", request.GetId(), transformerErr) return nil, transformerErr } @@ -1557,18 +1557,18 @@ func (m *ExecutionManager) GetExecution( func (m *ExecutionManager) 
UpdateExecution(ctx context.Context, request *admin.ExecutionUpdateRequest, requestedAt time.Time) (*admin.ExecutionUpdateResponse, error) { - if err := validation.ValidateWorkflowExecutionIdentifier(request.Id); err != nil { + if err := validation.ValidateWorkflowExecutionIdentifier(request.GetId()); err != nil { logger.Debugf(ctx, "UpdateExecution request [%+v] failed validation with err: %v", request, err) return nil, err } - ctx = getExecutionContext(ctx, request.Id) - executionModel, err := util.GetExecutionModel(ctx, m.db, request.Id) + ctx = getExecutionContext(ctx, request.GetId()) + executionModel, err := util.GetExecutionModel(ctx, m.db, request.GetId()) if err != nil { logger.Debugf(ctx, "Failed to get execution model for request [%+v] with err: %v", request, err) return nil, err } - if err = transformers.UpdateExecutionModelStateChangeDetails(executionModel, request.State, requestedAt, + if err = transformers.UpdateExecutionModelStateChangeDetails(executionModel, request.GetState(), requestedAt, getUser(ctx)); err != nil { return nil, err } @@ -1582,15 +1582,15 @@ func (m *ExecutionManager) UpdateExecution(ctx context.Context, request *admin.E func (m *ExecutionManager) GetExecutionData( ctx context.Context, request *admin.WorkflowExecutionGetDataRequest) (*admin.WorkflowExecutionGetDataResponse, error) { - ctx = getExecutionContext(ctx, request.Id) - executionModel, err := util.GetExecutionModel(ctx, m.db, request.Id) + ctx = getExecutionContext(ctx, request.GetId()) + executionModel, err := util.GetExecutionModel(ctx, m.db, request.GetId()) if err != nil { logger.Debugf(ctx, "Failed to get execution model for request [%+v] with err: %v", request, err) return nil, err } execution, err := transformers.FromExecutionModel(ctx, *executionModel, transformers.DefaultExecutionTransformerOptions) if err != nil { - logger.Debugf(ctx, "Failed to transform execution model [%+v] to proto object with err: %v", request.Id, err) + logger.Debugf(ctx, "Failed to 
transform execution model [%+v] to proto object with err: %v", request.GetId(), err) return nil, err } // Prior to flyteidl v0.15.0, Inputs were held in ExecutionClosure and were not offloaded. Ensure we can return the inputs as expected. @@ -1600,7 +1600,7 @@ func (m *ExecutionManager) GetExecutionData( if err := proto.Unmarshal(executionModel.Closure, closure); err != nil { return nil, err } - newInputsURI, err := common.OffloadLiteralMap(ctx, m.storageClient, closure.ComputedInputs, request.Id.Project, request.Id.Domain, request.Id.Name, shared.Inputs) + newInputsURI, err := common.OffloadLiteralMap(ctx, m.storageClient, closure.GetComputedInputs(), request.GetId().GetProject(), request.GetId().GetDomain(), request.GetId().GetName(), shared.Inputs) if err != nil { return nil, err } @@ -1626,7 +1626,7 @@ func (m *ExecutionManager) GetExecutionData( group.Go(func() error { var err error outputs, outputURLBlob, err = util.GetOutputs(groupCtx, m.urlData, m.config.ApplicationConfiguration().GetRemoteDataConfig(), - m.storageClient, util.ToExecutionClosureInterface(execution.Closure)) + m.storageClient, util.ToExecutionClosureInterface(execution.GetClosure())) return err }) @@ -1642,11 +1642,11 @@ func (m *ExecutionManager) GetExecutionData( FullOutputs: outputs, } - m.userMetrics.WorkflowExecutionInputBytes.Observe(float64(response.Inputs.Bytes)) - if response.Outputs.Bytes > 0 { - m.userMetrics.WorkflowExecutionOutputBytes.Observe(float64(response.Outputs.Bytes)) - } else if response.FullOutputs != nil { - m.userMetrics.WorkflowExecutionOutputBytes.Observe(float64(proto.Size(response.FullOutputs))) + m.userMetrics.WorkflowExecutionInputBytes.Observe(float64(response.GetInputs().GetBytes())) + if response.GetOutputs().GetBytes() > 0 { + m.userMetrics.WorkflowExecutionOutputBytes.Observe(float64(response.GetOutputs().GetBytes())) + } else if response.GetFullOutputs() != nil { + 
m.userMetrics.WorkflowExecutionOutputBytes.Observe(float64(proto.Size(response.GetFullOutputs()))) } return response, nil } @@ -1658,26 +1658,26 @@ func (m *ExecutionManager) ListExecutions( logger.Debugf(ctx, "ListExecutions request [%+v] failed validation with err: %v", request, err) return nil, err } - ctx = contextutils.WithProjectDomain(ctx, request.Id.Project, request.Id.Domain) + ctx = contextutils.WithProjectDomain(ctx, request.GetId().GetProject(), request.GetId().GetDomain()) filters, err := util.GetDbFilters(util.FilterSpec{ - Project: request.Id.Project, - Domain: request.Id.Domain, - Name: request.Id.Name, // Optional, may be empty. - RequestFilters: request.Filters, + Project: request.GetId().GetProject(), + Domain: request.GetId().GetDomain(), + Name: request.GetId().GetName(), // Optional, may be empty. + RequestFilters: request.GetFilters(), }, common.Execution) if err != nil { return nil, err } - sortParameter, err := common.NewSortParameter(request.SortBy, models.ExecutionColumns) + sortParameter, err := common.NewSortParameter(request.GetSortBy(), models.ExecutionColumns) if err != nil { return nil, err } - offset, err := validation.ValidateToken(request.Token) + offset, err := validation.ValidateToken(request.GetToken()) if err != nil { return nil, errors.NewFlyteAdminErrorf(codes.InvalidArgument, "invalid pagination token %s for ListExecutions", - request.Token) + request.GetToken()) } joinTableEntities := make(map[common.Entity]bool) for _, filter := range filters { @@ -1690,7 +1690,7 @@ func (m *ExecutionManager) ListExecutions( } listExecutionsInput := repositoryInterfaces.ListResourceInput{ - Limit: int(request.Limit), + Limit: int(request.GetLimit()), Offset: offset, InlineFilters: filters, SortParameter: sortParameter, @@ -1717,7 +1717,7 @@ func (m *ExecutionManager) ListExecutions( // END TO BE DELETED var token string - if len(executionList) == int(request.Limit) { + if len(executionList) == int(request.GetLimit()) { token = 
strconv.Itoa(offset + len(executionList)) } return &admin.ExecutionList{ @@ -1736,16 +1736,16 @@ func (m *ExecutionManager) publishNotifications(ctx context.Context, request *ad if err != nil { // This shouldn't happen because execution manager marshaled the data into models.Execution. m.systemMetrics.TransformerError.Inc() - return errors.NewFlyteAdminErrorf(codes.Internal, "Failed to transform execution [%+v] with err: %v", request.Event.ExecutionId, err) + return errors.NewFlyteAdminErrorf(codes.Internal, "Failed to transform execution [%+v] with err: %v", request.GetEvent().GetExecutionId(), err) } - var notificationsList = adminExecution.Closure.Notifications + var notificationsList = adminExecution.GetClosure().GetNotifications() logger.Debugf(ctx, "publishing notifications for execution [%+v] in state [%+v] for notifications [%+v]", - request.Event.ExecutionId, request.Event.Phase, notificationsList) + request.GetEvent().GetExecutionId(), request.GetEvent().GetPhase(), notificationsList) for _, notification := range notificationsList { // Check if the notification phase matches the current one. var matchPhase = false - for _, phase := range notification.Phases { - if phase == request.Event.Phase { + for _, phase := range notification.GetPhases() { + if phase == request.GetEvent().GetPhase() { matchPhase = true } } @@ -1765,11 +1765,11 @@ func (m *ExecutionManager) publishNotifications(ctx context.Context, request *ad } else if notification.GetSlack() != nil { emailNotification.RecipientsEmail = notification.GetSlack().GetRecipientsEmail() } else { - logger.Debugf(ctx, "failed to publish notification, encountered unrecognized type: %v", notification.Type) + logger.Debugf(ctx, "failed to publish notification, encountered unrecognized type: %v", notification.GetType()) m.systemMetrics.UnexpectedDataError.Inc() // Unsupported notification types should have been caught when the launch plan was being created. 
return errors.NewFlyteAdminErrorf(codes.Internal, "Unsupported notification type [%v] for execution [%+v]", - notification.Type, request.Event.ExecutionId) + notification.GetType(), request.GetEvent().GetExecutionId()) } // Convert the email Notification into an email message to be published. @@ -1789,19 +1789,19 @@ func (m *ExecutionManager) publishNotifications(ctx context.Context, request *ad func (m *ExecutionManager) TerminateExecution( ctx context.Context, request *admin.ExecutionTerminateRequest) (*admin.ExecutionTerminateResponse, error) { - if err := validation.ValidateWorkflowExecutionIdentifier(request.Id); err != nil { + if err := validation.ValidateWorkflowExecutionIdentifier(request.GetId()); err != nil { logger.Debugf(ctx, "received terminate execution request: %v with invalid identifier: %v", request, err) return nil, err } - ctx = getExecutionContext(ctx, request.Id) + ctx = getExecutionContext(ctx, request.GetId()) // Save the abort reason (best effort) executionModel, err := m.db.ExecutionRepo().Get(ctx, repositoryInterfaces.Identifier{ - Project: request.Id.Project, - Domain: request.Id.Domain, - Name: request.Id.Name, + Project: request.GetId().GetProject(), + Domain: request.GetId().GetDomain(), + Name: request.GetId().GetName(), }) if err != nil { - logger.Infof(ctx, "couldn't find execution [%+v] to save termination cause", request.Id) + logger.Infof(ctx, "couldn't find execution [%+v] to save termination cause", request.GetId()) return nil, err } @@ -1809,24 +1809,24 @@ func (m *ExecutionManager) TerminateExecution( return nil, errors.NewAlreadyInTerminalStateError(ctx, "Cannot abort an already terminated workflow execution", executionModel.Phase) } - err = transformers.SetExecutionAborting(&executionModel, request.Cause, getUser(ctx)) + err = transformers.SetExecutionAborting(&executionModel, request.GetCause(), getUser(ctx)) if err != nil { - logger.Debugf(ctx, "failed to add abort metadata for execution [%+v] with err: %v", request.Id, 
err) + logger.Debugf(ctx, "failed to add abort metadata for execution [%+v] with err: %v", request.GetId(), err) return nil, err } err = m.db.ExecutionRepo().Update(ctx, executionModel) if err != nil { - logger.Debugf(ctx, "failed to save abort cause for terminated execution: %+v with err: %v", request.Id, err) + logger.Debugf(ctx, "failed to save abort cause for terminated execution: %+v with err: %v", request.GetId(), err) return nil, err } workflowExecutor := plugins.Get[workflowengineInterfaces.WorkflowExecutor](m.pluginRegistry, plugins.PluginIDWorkflowExecutor) err = workflowExecutor.Abort(ctx, workflowengineInterfaces.AbortData{ Namespace: common.GetNamespaceName( - m.config.NamespaceMappingConfiguration().GetNamespaceTemplate(), request.Id.Project, request.Id.Domain), + m.config.NamespaceMappingConfiguration().GetNamespaceTemplate(), request.GetId().GetProject(), request.GetId().GetDomain()), - ExecutionID: request.Id, + ExecutionID: request.GetId(), Cluster: executionModel.Cluster, }) if err != nil { @@ -1916,7 +1916,7 @@ func (m *ExecutionManager) addProjectLabels(ctx context.Context, projectName str return nil, err } // passing nil domain as not needed to retrieve labels - projectLabels := transformers.FromProjectModel(project, nil).Labels.GetValues() + projectLabels := transformers.FromProjectModel(project, nil).GetLabels().GetValues() if initialLabels == nil { initialLabels = make(map[string]string) diff --git a/flyteadmin/pkg/manager/impl/execution_manager_test.go b/flyteadmin/pkg/manager/impl/execution_manager_test.go index 5e874a4589..79068d25ff 100644 --- a/flyteadmin/pkg/manager/impl/execution_manager_test.go +++ b/flyteadmin/pkg/manager/impl/execution_manager_test.go @@ -60,7 +60,7 @@ const ( executionClusterLabel = "execution_cluster_label" ) -var spec = testutils.GetExecutionRequest().Spec +var spec = testutils.GetExecutionRequest().GetSpec() var specBytes, _ = proto.Marshal(spec) var phase = core.WorkflowExecution_RUNNING.String() var closure 
= admin.ExecutionClosure{ @@ -94,8 +94,8 @@ var resourceLimits = runtimeInterfaces.TaskResourceSet{ func getLegacySpec() *admin.ExecutionSpec { executionRequest := testutils.GetExecutionRequest() - legacySpec := executionRequest.Spec - legacySpec.Inputs = executionRequest.Inputs + legacySpec := executionRequest.GetSpec() + legacySpec.Inputs = executionRequest.GetInputs() return legacySpec } @@ -121,7 +121,7 @@ func getExpectedLegacySpecBytes() []byte { } func getExpectedSpec() *admin.ExecutionSpec { - expectedSpec := testutils.GetExecutionRequest().Spec + expectedSpec := testutils.GetExecutionRequest().GetSpec() expectedSpec.Metadata = &admin.ExecutionMetadata{ SystemMetadata: &admin.SystemMetadata{ Namespace: "project-domain", @@ -138,7 +138,7 @@ func getExpectedSpecBytes() []byte { func getLegacyClosure() *admin.ExecutionClosure { return &admin.ExecutionClosure{ Phase: core.WorkflowExecution_RUNNING, - ComputedInputs: getLegacySpec().Inputs, + ComputedInputs: getLegacySpec().GetInputs(), StateChangeDetails: &admin.ExecutionStateChangeDetails{ State: admin.ExecutionState_EXECUTION_ACTIVE, OccurredAt: testutils.MockCreatedAtProto, @@ -153,7 +153,7 @@ func getLegacyClosureBytes() []byte { func getLegacyExecutionRequest() *admin.ExecutionCreateRequest { r := testutils.GetExecutionRequest() - r.Spec.Inputs = r.Inputs + r.Spec.Inputs = r.GetInputs() r.Inputs = nil return r } @@ -193,7 +193,7 @@ func setDefaultLpCallbackForExecTest(repository interfaces.Repository) { lpSpecBytes, _ := proto.Marshal(lpSpec) lpClosure := admin.LaunchPlanClosure{ - ExpectedInputs: lpSpec.DefaultInputs, + ExpectedInputs: lpSpec.GetDefaultInputs(), } lpClosureBytes, _ := proto.Marshal(&lpClosure) @@ -313,11 +313,11 @@ func TestCreateExecution(t *testing.T) { var spec admin.ExecutionSpec err := proto.Unmarshal(input.Spec, &spec) assert.NoError(t, err) - assert.Equal(t, principal, spec.Metadata.Principal) - assert.Equal(t, rawOutput, spec.RawOutputDataConfig.OutputLocationPrefix) - 
assert.True(t, proto.Equal(spec.ClusterAssignment, &clusterAssignment)) + assert.Equal(t, principal, spec.GetMetadata().GetPrincipal()) + assert.Equal(t, rawOutput, spec.GetRawOutputDataConfig().GetOutputLocationPrefix()) + assert.True(t, proto.Equal(spec.GetClusterAssignment(), &clusterAssignment)) assert.Equal(t, "launch_plan", input.LaunchEntity) - assert.Equal(t, spec.GetMetadata().GetSystemMetadata().Namespace, "project-domain") + assert.Equal(t, spec.GetMetadata().GetSystemMetadata().GetNamespace(), "project-domain") return nil }) setDefaultLpCallbackForExecTest(repository) @@ -347,10 +347,10 @@ func TestCreateExecution(t *testing.T) { mockExecutor.OnExecuteMatch(mock.Anything, mock.MatchedBy(func(data workflowengineInterfaces.ExecutionData) bool { tasks := data.WorkflowClosure.GetTasks() for _, task := range tasks { - assert.Equal(t, len(resources.Requests), len(task.Template.GetContainer().Resources.Requests)) - for i, request := range resources.Requests { - assert.True(t, proto.Equal(request, task.Template.GetContainer().Resources.Requests[i])) - assert.True(t, proto.Equal(request, task.Template.GetContainer().Resources.Limits[i])) + assert.Equal(t, len(resources.GetRequests()), len(task.GetTemplate().GetContainer().GetResources().GetRequests())) + for i, request := range resources.GetRequests() { + assert.True(t, proto.Equal(request, task.GetTemplate().GetContainer().GetResources().GetRequests()[i])) + assert.True(t, proto.Equal(request, task.GetTemplate().GetContainer().GetResources().GetLimits()[i])) } } @@ -401,7 +401,7 @@ func TestCreateExecution(t *testing.T) { Id: &executionIdentifier, } assert.NoError(t, err) - assert.True(t, proto.Equal(expectedResponse.Id, response.Id)) + assert.True(t, proto.Equal(expectedResponse.GetId(), response.GetId())) // TODO: Check for offloaded inputs } @@ -436,9 +436,9 @@ func TestCreateExecutionFromWorkflowNode(t *testing.T) { var clusterLabel = &admin.ExecutionClusterLabel{Value: executionClusterLabel} 
repository.ExecutionRepo().(*repositoryMocks.MockExecutionRepo).SetGetCallback( func(ctx context.Context, input interfaces.Identifier) (models.Execution, error) { - assert.EqualValues(t, input.Project, parentNodeExecutionID.ExecutionId.Project) - assert.EqualValues(t, input.Domain, parentNodeExecutionID.ExecutionId.Domain) - assert.EqualValues(t, input.Name, parentNodeExecutionID.ExecutionId.Name) + assert.EqualValues(t, input.Project, parentNodeExecutionID.GetExecutionId().GetProject()) + assert.EqualValues(t, input.Domain, parentNodeExecutionID.GetExecutionId().GetDomain()) + assert.EqualValues(t, input.Name, parentNodeExecutionID.GetExecutionId().GetName()) spec := &admin.ExecutionSpec{ Metadata: &admin.ExecutionMetadata{ Nesting: 1, @@ -463,13 +463,13 @@ func TestCreateExecutionFromWorkflowNode(t *testing.T) { var spec admin.ExecutionSpec err := proto.Unmarshal(input.Spec, &spec) assert.NoError(t, err) - assert.Equal(t, admin.ExecutionMetadata_CHILD_WORKFLOW, spec.Metadata.Mode) - assert.True(t, proto.Equal(parentNodeExecutionID, spec.Metadata.ParentNodeExecution)) + assert.Equal(t, admin.ExecutionMetadata_CHILD_WORKFLOW, spec.GetMetadata().GetMode()) + assert.True(t, proto.Equal(parentNodeExecutionID, spec.GetMetadata().GetParentNodeExecution())) assert.EqualValues(t, input.ParentNodeExecutionID, 1) assert.EqualValues(t, input.SourceExecutionID, 2) - assert.Equal(t, 2, int(spec.Metadata.Nesting)) - assert.Equal(t, principal, spec.Metadata.Principal) - assert.Equal(t, executionClusterLabel, spec.ExecutionClusterLabel.Value) + assert.Equal(t, 2, int(spec.GetMetadata().GetNesting())) + assert.Equal(t, principal, spec.GetMetadata().GetPrincipal()) + assert.Equal(t, executionClusterLabel, spec.GetExecutionClusterLabel().GetValue()) assert.Equal(t, principal, input.User) return nil }, @@ -505,14 +505,14 @@ func TestCreateExecution_NoAssignedName(t *testing.T) { setDefaultLpCallbackForExecTest(repository) 
repository.ExecutionRepo().(*repositoryMocks.MockExecutionRepo).SetCreateCallback( func(ctx context.Context, input models.Execution) error { - assert.Equal(t, executionIdentifier.Project, input.Project) - assert.Equal(t, executionIdentifier.Domain, input.Domain) + assert.Equal(t, executionIdentifier.GetProject(), input.Project) + assert.Equal(t, executionIdentifier.GetDomain(), input.Domain) assert.NotEmpty(t, input.Name) return nil }) mockExecutor := workflowengineMocks.WorkflowExecutor{} mockExecutor.OnExecuteMatch(mock.Anything, mock.MatchedBy(func(data workflowengineInterfaces.ExecutionData) bool { - return len(data.ExecutionID.Name) > 0 + return len(data.ExecutionID.GetName()) > 0 })).Return(workflowengineInterfaces.ExecutionResponse{ Cluster: testCluster, }, nil) @@ -530,9 +530,9 @@ func TestCreateExecution_NoAssignedName(t *testing.T) { Id: &executionIdentifier, } assert.Nil(t, err) - assert.Equal(t, expectedResponse.Id.Project, response.Id.Project) - assert.Equal(t, expectedResponse.Id.Domain, response.Id.Domain) - assert.NotEmpty(t, response.Id.Name) + assert.Equal(t, expectedResponse.GetId().GetProject(), response.GetId().GetProject()) + assert.Equal(t, expectedResponse.GetId().GetDomain(), response.GetId().GetDomain()) + assert.NotEmpty(t, response.GetId().GetName()) } func TestCreateExecution_TaggedQueue(t *testing.T) { @@ -558,11 +558,11 @@ func TestCreateExecution_TaggedQueue(t *testing.T) { mockExecutor := workflowengineMocks.WorkflowExecutor{} mockExecutor.OnExecuteMatch(mock.Anything, mock.MatchedBy(func(data workflowengineInterfaces.ExecutionData) bool { - assert.NotEmpty(t, data.WorkflowClosure.Tasks) - for _, task := range data.WorkflowClosure.Tasks { - assert.Len(t, task.Template.GetContainer().Config, 1) - assert.Contains(t, childContainerQueueKey, task.Template.GetContainer().Config[0].Key) - assert.Contains(t, "dynamic Q", task.Template.GetContainer().Config[0].Value) + assert.NotEmpty(t, data.WorkflowClosure.GetTasks()) + for _, task := 
range data.WorkflowClosure.GetTasks() { + assert.Len(t, task.GetTemplate().GetContainer().GetConfig(), 1) + assert.Contains(t, childContainerQueueKey, task.GetTemplate().GetContainer().GetConfig()[0].GetKey()) + assert.Contains(t, "dynamic Q", task.GetTemplate().GetContainer().GetConfig()[0].GetValue()) } return true })).Return(workflowengineInterfaces.ExecutionResponse{ @@ -720,14 +720,14 @@ func TestCreateExecutionVerifyDbModel(t *testing.T) { if err != nil { return err } - assert.Nil(t, specValue.Inputs) + assert.Nil(t, specValue.GetInputs()) var closureValue admin.ExecutionClosure err = proto.Unmarshal(input.Closure, &closureValue) if err != nil { return err } - assert.Nil(t, closureValue.ComputedInputs) + assert.Nil(t, closureValue.GetComputedInputs()) var userInputs, inputs core.LiteralMap if err := storageClient.ReadProtobuf(ctx, input.UserInputsURI, &userInputs); err != nil { @@ -737,19 +737,19 @@ func TestCreateExecutionVerifyDbModel(t *testing.T) { return err } fooValue := coreutils.MustMakeLiteral("foo-value-1") - assert.Equal(t, 1, len(userInputs.Literals)) - assert.EqualValues(t, userInputs.Literals["foo"], fooValue) + assert.Equal(t, 1, len(userInputs.GetLiterals())) + assert.EqualValues(t, userInputs.GetLiterals()["foo"], fooValue) barValue := coreutils.MustMakeLiteral("bar-value") - assert.Equal(t, len(inputs.Literals), 2) - assert.EqualValues(t, inputs.Literals["foo"], fooValue) - assert.EqualValues(t, inputs.Literals["bar"], barValue) - assert.Equal(t, core.WorkflowExecution_UNDEFINED, closureValue.Phase) + assert.Equal(t, len(inputs.GetLiterals()), 2) + assert.EqualValues(t, inputs.GetLiterals()["foo"], fooValue) + assert.EqualValues(t, inputs.GetLiterals()["bar"], barValue) + assert.Equal(t, core.WorkflowExecution_UNDEFINED, closureValue.GetPhase()) assert.Equal(t, createdAt, *input.ExecutionCreatedAt) - assert.Equal(t, 1, len(closureValue.Notifications)) - assert.Equal(t, 1, len(closureValue.Notifications[0].Phases)) - assert.Equal(t, 
request.Spec.GetNotifications().Notifications[0].Phases[0], closureValue.Notifications[0].Phases[0]) - assert.IsType(t, &admin.Notification_Slack{}, closureValue.Notifications[0].GetType()) - assert.Equal(t, request.Spec.GetNotifications().Notifications[0].GetSlack().RecipientsEmail, closureValue.Notifications[0].GetSlack().RecipientsEmail) + assert.Equal(t, 1, len(closureValue.GetNotifications())) + assert.Equal(t, 1, len(closureValue.GetNotifications()[0].GetPhases())) + assert.Equal(t, request.GetSpec().GetNotifications().GetNotifications()[0].GetPhases()[0], closureValue.GetNotifications()[0].GetPhases()[0]) + assert.IsType(t, &admin.Notification_Slack{}, closureValue.GetNotifications()[0].GetType()) + assert.Equal(t, request.GetSpec().GetNotifications().GetNotifications()[0].GetSlack().GetRecipientsEmail(), closureValue.GetNotifications()[0].GetSlack().GetRecipientsEmail()) return nil } @@ -766,7 +766,7 @@ func TestCreateExecutionVerifyDbModel(t *testing.T) { response, err := execManager.CreateExecution(context.Background(), request, requestedAt) assert.Nil(t, err) - assert.True(t, proto.Equal(&executionIdentifier, response.Id)) + assert.True(t, proto.Equal(&executionIdentifier, response.GetId())) } func TestCreateExecutionDefaultNotifications(t *testing.T) { @@ -790,10 +790,10 @@ func TestCreateExecutionDefaultNotifications(t *testing.T) { return err } - assert.Equal(t, 1, len(closureValue.Notifications)) - assert.Equal(t, 1, len(closureValue.Notifications[0].Phases)) - assert.Equal(t, core.WorkflowExecution_SUCCEEDED, closureValue.Notifications[0].Phases[0]) - assert.IsType(t, &admin.Notification_Email{}, closureValue.Notifications[0].GetType()) + assert.Equal(t, 1, len(closureValue.GetNotifications())) + assert.Equal(t, 1, len(closureValue.GetNotifications()[0].GetPhases())) + assert.Equal(t, core.WorkflowExecution_SUCCEEDED, closureValue.GetNotifications()[0].GetPhases()[0]) + assert.IsType(t, &admin.Notification_Email{}, 
closureValue.GetNotifications()[0].GetType()) return nil } @@ -811,7 +811,7 @@ func TestCreateExecutionDefaultNotifications(t *testing.T) { Project: "project", Domain: "domain", Name: "name", - }, response.Id)) + }, response.GetId())) } func TestCreateExecutionDisableNotifications(t *testing.T) { @@ -833,7 +833,7 @@ func TestCreateExecutionDisableNotifications(t *testing.T) { return err } - assert.Empty(t, closureValue.Notifications) + assert.Empty(t, closureValue.GetNotifications()) return nil } repository.ExecutionRepo().(*repositoryMocks.MockExecutionRepo).SetCreateCallback(exCreateFunc) @@ -850,7 +850,7 @@ func TestCreateExecutionDisableNotifications(t *testing.T) { Project: "project", Domain: "domain", Name: "name", - }, response.Id)) + }, response.GetId())) } func TestCreateExecutionNoNotifications(t *testing.T) { @@ -868,7 +868,7 @@ func TestCreateExecutionNoNotifications(t *testing.T) { lpSpec.EntityMetadata.Notifications = nil lpSpecBytes, _ := proto.Marshal(lpSpec) lpClosure := admin.LaunchPlanClosure{ - ExpectedInputs: lpSpec.DefaultInputs, + ExpectedInputs: lpSpec.GetDefaultInputs(), } lpClosureBytes, _ := proto.Marshal(&lpClosure) @@ -920,7 +920,7 @@ func TestCreateExecutionNoNotifications(t *testing.T) { Project: "project", Domain: "domain", Name: "name", - }, response.Id)) + }, response.GetId())) } func TestCreateExecutionDynamicLabelsAndAnnotations(t *testing.T) { @@ -1209,8 +1209,8 @@ func TestCreateExecutionWithEnvs(t *testing.T) { assert.Equal(t, uint(0), input.TaskID) } if len(tt.envs) != 0 { - assert.Equal(t, tt.envs[0].Key, spec.GetEnvs().Values[0].Key) - assert.Equal(t, tt.envs[0].Value, spec.GetEnvs().Values[0].Value) + assert.Equal(t, tt.envs[0].GetKey(), spec.GetEnvs().GetValues()[0].GetKey()) + assert.Equal(t, tt.envs[0].GetValue(), spec.GetEnvs().GetValues()[0].GetValue()) } else { assert.Nil(t, spec.GetEnvs().GetValues()) } @@ -1244,7 +1244,7 @@ func TestCreateExecution_CustomNamespaceMappingConfig(t *testing.T) { var spec 
admin.ExecutionSpec err := proto.Unmarshal(input.Spec, &spec) assert.NoError(t, err) - assert.Equal(t, spec.GetMetadata().GetSystemMetadata().Namespace, "project") + assert.Equal(t, spec.GetMetadata().GetSystemMetadata().GetNamespace(), "project") return nil } @@ -1272,7 +1272,7 @@ func TestCreateExecution_CustomNamespaceMappingConfig(t *testing.T) { response, err := execManager.CreateExecution(context.Background(), request, requestedAt) assert.Nil(t, err) - assert.True(t, proto.Equal(&executionIdentifier, response.Id)) + assert.True(t, proto.Equal(&executionIdentifier, response.GetId())) } func makeExecutionGetFunc( @@ -1341,7 +1341,7 @@ func makeExecutionInterruptibleGetFunc( request.Spec.Interruptible = &wrappers.BoolValue{Value: *interruptible} } - specBytes, err := proto.Marshal(request.Spec) + specBytes, err := proto.Marshal(request.GetSpec()) assert.Nil(t, err) return models.Execution{ @@ -1374,7 +1374,7 @@ func makeExecutionOverwriteCacheGetFunc( request := testutils.GetExecutionRequest() request.Spec.OverwriteCache = overwriteCache - specBytes, err := proto.Marshal(request.Spec) + specBytes, err := proto.Marshal(request.GetSpec()) assert.Nil(t, err) return models.Execution{ @@ -1407,7 +1407,7 @@ func makeExecutionWithEnvs( request := testutils.GetExecutionRequest() request.Spec.Envs.Values = envs - specBytes, err := proto.Marshal(request.Spec) + specBytes, err := proto.Marshal(request.GetSpec()) assert.Nil(t, err) return models.Execution{ @@ -1460,7 +1460,7 @@ func TestRelaunchExecution(t *testing.T) { var spec admin.ExecutionSpec err := proto.Unmarshal(input.Spec, &spec) assert.Nil(t, err) - assert.Equal(t, admin.ExecutionMetadata_RELAUNCH, spec.Metadata.Mode) + assert.Equal(t, admin.ExecutionMetadata_RELAUNCH, spec.GetMetadata().GetMode()) assert.Equal(t, int32(admin.ExecutionMetadata_RELAUNCH), input.Mode) return nil } @@ -1600,7 +1600,7 @@ func TestRelaunchExecutionInterruptibleOverride(t *testing.T) { var spec admin.ExecutionSpec err := 
proto.Unmarshal(input.Spec, &spec) assert.Nil(t, err) - assert.Equal(t, admin.ExecutionMetadata_RELAUNCH, spec.Metadata.Mode) + assert.Equal(t, admin.ExecutionMetadata_RELAUNCH, spec.GetMetadata().GetMode()) assert.Equal(t, int32(admin.ExecutionMetadata_RELAUNCH), input.Mode) assert.NotNil(t, spec.GetInterruptible()) assert.True(t, spec.GetInterruptible().GetValue()) @@ -1652,7 +1652,7 @@ func TestRelaunchExecutionOverwriteCacheOverride(t *testing.T) { var spec admin.ExecutionSpec err := proto.Unmarshal(input.Spec, &spec) assert.Nil(t, err) - assert.Equal(t, admin.ExecutionMetadata_RELAUNCH, spec.Metadata.Mode) + assert.Equal(t, admin.ExecutionMetadata_RELAUNCH, spec.GetMetadata().GetMode()) assert.Equal(t, int32(admin.ExecutionMetadata_RELAUNCH), input.Mode) assert.True(t, spec.GetOverwriteCache()) return nil @@ -1687,7 +1687,7 @@ func TestRelaunchExecutionOverwriteCacheOverride(t *testing.T) { var spec admin.ExecutionSpec err := proto.Unmarshal(input.Spec, &spec) assert.Nil(t, err) - assert.Equal(t, admin.ExecutionMetadata_RELAUNCH, spec.Metadata.Mode) + assert.Equal(t, admin.ExecutionMetadata_RELAUNCH, spec.GetMetadata().GetMode()) assert.Equal(t, int32(admin.ExecutionMetadata_RELAUNCH), input.Mode) assert.False(t, spec.GetOverwriteCache()) return nil @@ -1722,7 +1722,7 @@ func TestRelaunchExecutionOverwriteCacheOverride(t *testing.T) { var spec admin.ExecutionSpec err := proto.Unmarshal(input.Spec, &spec) assert.Nil(t, err) - assert.Equal(t, admin.ExecutionMetadata_RELAUNCH, spec.Metadata.Mode) + assert.Equal(t, admin.ExecutionMetadata_RELAUNCH, spec.GetMetadata().GetMode()) assert.Equal(t, int32(admin.ExecutionMetadata_RELAUNCH), input.Mode) assert.False(t, spec.GetOverwriteCache()) return nil @@ -1774,11 +1774,11 @@ func TestRelaunchExecutionEnvsOverride(t *testing.T) { var spec admin.ExecutionSpec err := proto.Unmarshal(input.Spec, &spec) assert.Nil(t, err) - assert.Equal(t, admin.ExecutionMetadata_RELAUNCH, spec.Metadata.Mode) + assert.Equal(t, 
admin.ExecutionMetadata_RELAUNCH, spec.GetMetadata().GetMode()) assert.Equal(t, int32(admin.ExecutionMetadata_RELAUNCH), input.Mode) assert.NotNil(t, spec.GetEnvs()) - assert.Equal(t, spec.GetEnvs().Values[0].Key, env[0].Key) - assert.Equal(t, spec.GetEnvs().Values[0].Value, env[0].Value) + assert.Equal(t, spec.GetEnvs().GetValues()[0].GetKey(), env[0].GetKey()) + assert.Equal(t, spec.GetEnvs().GetValues()[0].GetValue(), env[0].GetValue()) return nil } repository.ExecutionRepo().(*repositoryMocks.MockExecutionRepo).SetCreateCallback(exCreateFunc) @@ -1825,7 +1825,7 @@ func TestRecoverExecution(t *testing.T) { var spec admin.ExecutionSpec err := proto.Unmarshal(input.Spec, &spec) assert.Nil(t, err) - assert.Equal(t, admin.ExecutionMetadata_RECOVERED, spec.Metadata.Mode) + assert.Equal(t, admin.ExecutionMetadata_RECOVERED, spec.GetMetadata().GetMode()) assert.Equal(t, int32(admin.ExecutionMetadata_RECOVERED), input.Mode) return nil } @@ -1904,7 +1904,7 @@ func TestRecoverExecution_RecoveredChildNode(t *testing.T) { var spec admin.ExecutionSpec err := proto.Unmarshal(input.Spec, &spec) assert.Nil(t, err) - assert.Equal(t, admin.ExecutionMetadata_RECOVERED, spec.Metadata.Mode) + assert.Equal(t, admin.ExecutionMetadata_RECOVERED, spec.GetMetadata().GetMode()) assert.Equal(t, int32(admin.ExecutionMetadata_RECOVERED), input.Mode) assert.Equal(t, parentNodeDatabaseID, input.ParentNodeExecutionID) assert.Equal(t, referencedExecutionID, input.SourceExecutionID) @@ -2067,7 +2067,7 @@ func TestRecoverExecutionInterruptibleOverride(t *testing.T) { var spec admin.ExecutionSpec err := proto.Unmarshal(input.Spec, &spec) assert.Nil(t, err) - assert.Equal(t, admin.ExecutionMetadata_RECOVERED, spec.Metadata.Mode) + assert.Equal(t, admin.ExecutionMetadata_RECOVERED, spec.GetMetadata().GetMode()) assert.Equal(t, int32(admin.ExecutionMetadata_RECOVERED), input.Mode) assert.NotNil(t, spec.GetInterruptible()) assert.True(t, spec.GetInterruptible().GetValue()) @@ -2129,7 +2129,7 @@ func 
TestRecoverExecutionOverwriteCacheOverride(t *testing.T) { var spec admin.ExecutionSpec err := proto.Unmarshal(input.Spec, &spec) assert.Nil(t, err) - assert.Equal(t, admin.ExecutionMetadata_RECOVERED, spec.Metadata.Mode) + assert.Equal(t, admin.ExecutionMetadata_RECOVERED, spec.GetMetadata().GetMode()) assert.Equal(t, int32(admin.ExecutionMetadata_RECOVERED), input.Mode) assert.True(t, spec.GetOverwriteCache()) return nil @@ -2189,11 +2189,11 @@ func TestRecoverExecutionEnvsOverride(t *testing.T) { var spec admin.ExecutionSpec err := proto.Unmarshal(input.Spec, &spec) assert.Nil(t, err) - assert.Equal(t, admin.ExecutionMetadata_RECOVERED, spec.Metadata.Mode) + assert.Equal(t, admin.ExecutionMetadata_RECOVERED, spec.GetMetadata().GetMode()) assert.Equal(t, int32(admin.ExecutionMetadata_RECOVERED), input.Mode) assert.NotNil(t, spec.GetEnvs()) - assert.Equal(t, spec.GetEnvs().GetValues()[0].Key, env[0].Key) - assert.Equal(t, spec.GetEnvs().GetValues()[0].Value, env[0].Value) + assert.Equal(t, spec.GetEnvs().GetValues()[0].GetKey(), env[0].GetKey()) + assert.Equal(t, spec.GetEnvs().GetValues()[0].GetValue(), env[0].GetValue()) return nil } repository.ExecutionRepo().(*repositoryMocks.MockExecutionRepo).SetCreateCallback(exCreateFunc) @@ -2843,9 +2843,9 @@ func TestGetExecution(t *testing.T) { Id: &executionIdentifier, }) assert.NoError(t, err) - assert.True(t, proto.Equal(&executionIdentifier, execution.Id)) - assert.True(t, proto.Equal(getExpectedSpec(), execution.Spec)) - assert.True(t, proto.Equal(&closure, execution.Closure)) + assert.True(t, proto.Equal(&executionIdentifier, execution.GetId())) + assert.True(t, proto.Equal(getExpectedSpec(), execution.GetSpec())) + assert.True(t, proto.Equal(&closure, execution.GetClosure())) } func TestGetExecution_DatabaseError(t *testing.T) { @@ -3070,18 +3070,18 @@ func TestListExecutions(t *testing.T) { }) assert.NoError(t, err) assert.NotNil(t, executionList) - assert.Len(t, executionList.Executions, 2) + assert.Len(t, 
executionList.GetExecutions(), 2) - for idx, execution := range executionList.Executions { - assert.Equal(t, projectValue, execution.Id.Project) - assert.Equal(t, domainValue, execution.Id.Domain) + for idx, execution := range executionList.GetExecutions() { + assert.Equal(t, projectValue, execution.GetId().GetProject()) + assert.Equal(t, domainValue, execution.GetId().GetDomain()) if idx == 0 { - assert.Equal(t, "my awesome execution", execution.Id.Name) + assert.Equal(t, "my awesome execution", execution.GetId().GetName()) } - assert.True(t, proto.Equal(getExpectedSpec(), execution.Spec)) - assert.True(t, proto.Equal(&closure, execution.Closure)) + assert.True(t, proto.Equal(getExpectedSpec(), execution.GetSpec())) + assert.True(t, proto.Equal(&closure, execution.GetClosure())) } - assert.Empty(t, executionList.Token) + assert.Empty(t, executionList.GetToken()) } func TestListExecutions_MissingParameters(t *testing.T) { @@ -3212,7 +3212,7 @@ func TestExecutionManager_PublishNotifications(t *testing.T) { }, } var execClosure = &admin.ExecutionClosure{ - Notifications: testutils.GetExecutionRequest().Spec.GetNotifications().Notifications, + Notifications: testutils.GetExecutionRequest().GetSpec().GetNotifications().GetNotifications(), WorkflowId: &core.Identifier{ ResourceType: core.ResourceType_WORKFLOW, Project: "wf_project", @@ -3248,8 +3248,8 @@ func TestExecutionManager_PublishNotifications(t *testing.T) { }, }, } - execClosure.Notifications = append(execClosure.Notifications, extraNotifications[0]) - execClosure.Notifications = append(execClosure.Notifications, extraNotifications[1]) + execClosure.Notifications = append(execClosure.GetNotifications(), extraNotifications[0]) + execClosure.Notifications = append(execClosure.GetNotifications(), extraNotifications[1]) execClosureBytes, _ := proto.Marshal(execClosure) executionModel := models.Execution{ @@ -3351,7 +3351,7 @@ func TestExecutionManager_TestExecutionManager_PublishNotificationsTransformErro }, } var 
execClosure = &admin.ExecutionClosure{ - Notifications: testutils.GetExecutionRequest().Spec.GetNotifications().Notifications, + Notifications: testutils.GetExecutionRequest().GetSpec().GetNotifications().GetNotifications(), WorkflowId: &core.Identifier{ ResourceType: core.ResourceType_WORKFLOW, Project: "wf_project", @@ -3402,7 +3402,7 @@ func TestExecutionManager_PublishNotificationsNoPhaseMatch(t *testing.T) { }, } var execClosure = &admin.ExecutionClosure{ - Notifications: testutils.GetExecutionRequest().Spec.GetNotifications().Notifications, + Notifications: testutils.GetExecutionRequest().GetSpec().GetNotifications().GetNotifications(), } execClosureBytes, _ := proto.Marshal(execClosure) executionModel := models.Execution{ @@ -3723,12 +3723,12 @@ func TestAddPluginOverrides(t *testing.T) { assert.NoError(t, err) assert.Len(t, taskPluginOverrides, 2) for _, override := range taskPluginOverrides { - if override.TaskType == "python" { - assert.EqualValues(t, []string{"plugin a"}, override.PluginId) - } else if override.TaskType == "hive" { - assert.EqualValues(t, []string{"plugin b"}, override.PluginId) + if override.GetTaskType() == "python" { + assert.EqualValues(t, []string{"plugin a"}, override.GetPluginId()) + } else if override.GetTaskType() == "hive" { + assert.EqualValues(t, []string{"plugin b"}, override.GetPluginId()) } else { - t.Errorf("Unexpected task type [%s] plugin override committed to db", override.TaskType) + t.Errorf("Unexpected task type [%s] plugin override committed to db", override.GetTaskType()) } } } @@ -3788,9 +3788,9 @@ func TestGetExecution_Legacy(t *testing.T) { Id: &executionIdentifier, }) assert.NoError(t, err) - assert.True(t, proto.Equal(&executionIdentifier, execution.Id)) - assert.True(t, proto.Equal(getExpectedLegacySpec(), execution.Spec)) - assert.True(t, proto.Equal(getLegacyClosure(), execution.Closure)) + assert.True(t, proto.Equal(&executionIdentifier, execution.GetId())) + assert.True(t, 
proto.Equal(getExpectedLegacySpec(), execution.GetSpec())) + assert.True(t, proto.Equal(getLegacyClosure(), execution.GetClosure())) } func TestGetExecutionData_LegacyModel(t *testing.T) { @@ -3870,7 +3870,7 @@ func TestGetExecutionData_LegacyModel(t *testing.T) { var inputs core.LiteralMap err = storageClient.ReadProtobuf(context.Background(), storage.DataReference("s3://bucket/metadata/project/domain/name/inputs"), &inputs) assert.Nil(t, err) - assert.True(t, proto.Equal(&inputs, closure.ComputedInputs)) + assert.True(t, proto.Equal(&inputs, closure.GetComputedInputs())) } func TestCreateExecution_LegacyClient(t *testing.T) { @@ -3937,10 +3937,10 @@ func TestRelaunchExecution_LegacyModel(t *testing.T) { var spec admin.ExecutionSpec err := proto.Unmarshal(input.Spec, &spec) assert.Nil(t, err) - assert.Equal(t, "default_raw_output", spec.RawOutputDataConfig.OutputLocationPrefix) - assert.Equal(t, admin.ExecutionMetadata_RELAUNCH, spec.Metadata.Mode) + assert.Equal(t, "default_raw_output", spec.GetRawOutputDataConfig().GetOutputLocationPrefix()) + assert.Equal(t, admin.ExecutionMetadata_RELAUNCH, spec.GetMetadata().GetMode()) assert.Equal(t, int32(admin.ExecutionMetadata_RELAUNCH), input.Mode) - assert.True(t, proto.Equal(spec.Inputs, getLegacySpec().Inputs)) + assert.True(t, proto.Equal(spec.GetInputs(), getLegacySpec().GetInputs())) return nil } repository.ExecutionRepo().(*repositoryMocks.MockExecutionRepo).SetCreateCallback(exCreateFunc) @@ -3971,12 +3971,12 @@ func TestRelaunchExecution_LegacyModel(t *testing.T) { var userInputs core.LiteralMap err = storageClient.ReadProtobuf(context.Background(), "s3://bucket/metadata/project/domain/relaunchy/user_inputs", &userInputs) assert.Nil(t, err) - assert.True(t, proto.Equal(&userInputs, getLegacySpec().Inputs)) + assert.True(t, proto.Equal(&userInputs, getLegacySpec().GetInputs())) var inputs core.LiteralMap err = storageClient.ReadProtobuf(context.Background(), "s3://bucket/metadata/project/domain/relaunchy/inputs", 
&inputs) assert.Nil(t, err) - assert.True(t, proto.Equal(&inputs, existingClosure.ComputedInputs)) + assert.True(t, proto.Equal(&inputs, existingClosure.GetComputedInputs())) } func TestListExecutions_LegacyModel(t *testing.T) { @@ -4052,18 +4052,18 @@ func TestListExecutions_LegacyModel(t *testing.T) { }) assert.NoError(t, err) assert.NotNil(t, executionList) - assert.Len(t, executionList.Executions, 2) + assert.Len(t, executionList.GetExecutions(), 2) - for idx, execution := range executionList.Executions { - assert.Equal(t, projectValue, execution.Id.Project) - assert.Equal(t, domainValue, execution.Id.Domain) + for idx, execution := range executionList.GetExecutions() { + assert.Equal(t, projectValue, execution.GetId().GetProject()) + assert.Equal(t, domainValue, execution.GetId().GetDomain()) if idx == 0 { - assert.Equal(t, "my awesome execution", execution.Id.Name) + assert.Equal(t, "my awesome execution", execution.GetId().GetName()) } - assert.True(t, proto.Equal(spec, execution.Spec)) - assert.True(t, proto.Equal(&closure, execution.Closure)) + assert.True(t, proto.Equal(spec, execution.GetSpec())) + assert.True(t, proto.Equal(&closure, execution.GetClosure())) } - assert.Empty(t, executionList.Token) + assert.Empty(t, executionList.GetToken()) } func TestSetDefaults(t *testing.T) { @@ -4148,7 +4148,7 @@ func TestSetDefaults(t *testing.T) { }, }, }, - task.Template.GetContainer()), fmt.Sprintf("%+v", task.Template.GetContainer())) + task.GetTemplate().GetContainer()), fmt.Sprintf("%+v", task.GetTemplate().GetContainer())) } func TestSetDefaults_MissingRequests_ExistingRequestsPreserved(t *testing.T) { @@ -4224,7 +4224,7 @@ func TestSetDefaults_MissingRequests_ExistingRequestsPreserved(t *testing.T) { }, }, }, - task.Template.GetContainer()), fmt.Sprintf("%+v", task.Template.GetContainer())) + task.GetTemplate().GetContainer()), fmt.Sprintf("%+v", task.GetTemplate().GetContainer())) } func TestSetDefaults_OptionalRequiredResources(t *testing.T) { @@ -4288,7 
+4288,7 @@ func TestSetDefaults_OptionalRequiredResources(t *testing.T) { }, }, }, - task.Template.GetContainer()), fmt.Sprintf("%+v", task.Template.GetContainer())) + task.GetTemplate().GetContainer()), fmt.Sprintf("%+v", task.GetTemplate().GetContainer())) }) t.Run("respect non-required resources when defaults exist in config", func(t *testing.T) { @@ -4336,7 +4336,7 @@ func TestSetDefaults_OptionalRequiredResources(t *testing.T) { }, }, }, - task.Template.GetContainer()), fmt.Sprintf("%+v", task.Template.GetContainer())) + task.GetTemplate().GetContainer()), fmt.Sprintf("%+v", task.GetTemplate().GetContainer())) }) } @@ -4472,7 +4472,7 @@ func TestCreateSingleTaskExecution(t *testing.T) { }, input.ExecutionKey) assert.Equal(t, "task", input.LaunchEntity) assert.Equal(t, "UNDEFINED", input.Phase) - assert.True(t, proto.Equal(taskIdentifier, spec.LaunchPlan)) + assert.True(t, proto.Equal(taskIdentifier, spec.GetLaunchPlan())) return nil }) @@ -4583,10 +4583,10 @@ func TestGetExecutionConfigOverrides(t *testing.T) { request managerInterfaces.ResourceRequest) (*managerInterfaces.ResourceResponse, error) { // two requests will be made, one with empty domain and one with filled in domain assert.Contains(t, []managerInterfaces.ResourceRequest{{ - Project: workflowIdentifier.Project, - Domain: workflowIdentifier.Domain, + Project: workflowIdentifier.GetProject(), + Domain: workflowIdentifier.GetDomain(), ResourceType: admin.MatchableResource_WORKFLOW_EXECUTION_CONFIG, - }, {Project: workflowIdentifier.Project, + }, {Project: workflowIdentifier.GetProject(), Domain: "", ResourceType: admin.MatchableResource_WORKFLOW_EXECUTION_CONFIG}, }, request) @@ -4631,8 +4631,8 @@ func TestGetExecutionConfigOverrides(t *testing.T) { t.Run("request with full config", func(t *testing.T) { request := &admin.ExecutionCreateRequest{ - Project: workflowIdentifier.Project, - Domain: workflowIdentifier.Domain, + Project: workflowIdentifier.GetProject(), + Domain: 
workflowIdentifier.GetDomain(), Spec: &admin.ExecutionSpec{ Labels: &admin.Labels{Values: requestLabels}, Annotations: &admin.Annotations{Values: requestAnnotations}, @@ -4656,20 +4656,20 @@ func TestGetExecutionConfigOverrides(t *testing.T) { ctx := identityContext.WithContext(context.Background()) execConfig, err := executionManager.getExecutionConfig(ctx, request, nil) assert.NoError(t, err) - assert.Equal(t, requestMaxParallelism, execConfig.MaxParallelism) - assert.Equal(t, requestK8sServiceAccount, execConfig.SecurityContext.RunAs.K8SServiceAccount) - assert.Equal(t, requestInterruptible, execConfig.Interruptible.Value) - assert.Equal(t, requestOverwriteCache, execConfig.OverwriteCache) - assert.Equal(t, requestOutputLocationPrefix, execConfig.RawOutputDataConfig.OutputLocationPrefix) - assert.Equal(t, requestLabels, execConfig.GetLabels().Values) - assert.Equal(t, requestAnnotations, execConfig.GetAnnotations().Values) + assert.Equal(t, requestMaxParallelism, execConfig.GetMaxParallelism()) + assert.Equal(t, requestK8sServiceAccount, execConfig.GetSecurityContext().GetRunAs().GetK8SServiceAccount()) + assert.Equal(t, requestInterruptible, execConfig.GetInterruptible().GetValue()) + assert.Equal(t, requestOverwriteCache, execConfig.GetOverwriteCache()) + assert.Equal(t, requestOutputLocationPrefix, execConfig.GetRawOutputDataConfig().GetOutputLocationPrefix()) + assert.Equal(t, requestLabels, execConfig.GetLabels().GetValues()) + assert.Equal(t, requestAnnotations, execConfig.GetAnnotations().GetValues()) assert.Equal(t, "yeee", execConfig.GetSecurityContext().GetRunAs().GetExecutionIdentity()) - assert.Equal(t, requestEnvironmentVariables, execConfig.GetEnvs().Values) + assert.Equal(t, requestEnvironmentVariables, execConfig.GetEnvs().GetValues()) }) t.Run("request with partial config", func(t *testing.T) { request := &admin.ExecutionCreateRequest{ - Project: workflowIdentifier.Project, - Domain: workflowIdentifier.Domain, + Project: 
workflowIdentifier.GetProject(), + Domain: workflowIdentifier.GetDomain(), Spec: &admin.ExecutionSpec{ Labels: &admin.Labels{Values: requestLabels}, RawOutputDataConfig: &admin.RawOutputDataConfig{ @@ -4697,19 +4697,19 @@ func TestGetExecutionConfigOverrides(t *testing.T) { } execConfig, err := executionManager.getExecutionConfig(context.TODO(), request, launchPlan) assert.NoError(t, err) - assert.Equal(t, requestMaxParallelism, execConfig.MaxParallelism) - assert.Equal(t, launchPlanInterruptible, execConfig.Interruptible.Value) - assert.Equal(t, launchPlanOverwriteCache, execConfig.OverwriteCache) - assert.True(t, proto.Equal(launchPlan.Spec.SecurityContext, execConfig.SecurityContext)) - assert.True(t, proto.Equal(launchPlan.Spec.Annotations, execConfig.Annotations)) - assert.Equal(t, requestOutputLocationPrefix, execConfig.RawOutputDataConfig.OutputLocationPrefix) - assert.Equal(t, requestLabels, execConfig.GetLabels().Values) - assert.Equal(t, launchPlanEnvironmentVariables, execConfig.GetEnvs().Values) + assert.Equal(t, requestMaxParallelism, execConfig.GetMaxParallelism()) + assert.Equal(t, launchPlanInterruptible, execConfig.GetInterruptible().GetValue()) + assert.Equal(t, launchPlanOverwriteCache, execConfig.GetOverwriteCache()) + assert.True(t, proto.Equal(launchPlan.GetSpec().GetSecurityContext(), execConfig.GetSecurityContext())) + assert.True(t, proto.Equal(launchPlan.GetSpec().GetAnnotations(), execConfig.GetAnnotations())) + assert.Equal(t, requestOutputLocationPrefix, execConfig.GetRawOutputDataConfig().GetOutputLocationPrefix()) + assert.Equal(t, requestLabels, execConfig.GetLabels().GetValues()) + assert.Equal(t, launchPlanEnvironmentVariables, execConfig.GetEnvs().GetValues()) }) t.Run("request with empty security context", func(t *testing.T) { request := &admin.ExecutionCreateRequest{ - Project: workflowIdentifier.Project, - Domain: workflowIdentifier.Domain, + Project: workflowIdentifier.GetProject(), + Domain: workflowIdentifier.GetDomain(), 
Spec: &admin.ExecutionSpec{ SecurityContext: &core.SecurityContext{ RunAs: &core.Identity{ @@ -4737,18 +4737,18 @@ func TestGetExecutionConfigOverrides(t *testing.T) { } execConfig, err := executionManager.getExecutionConfig(context.TODO(), request, launchPlan) assert.NoError(t, err) - assert.Equal(t, launchPlanMaxParallelism, execConfig.MaxParallelism) - assert.Equal(t, launchPlanInterruptible, execConfig.Interruptible.Value) - assert.Equal(t, launchPlanOverwriteCache, execConfig.OverwriteCache) - assert.Equal(t, launchPlanK8sServiceAccount, execConfig.SecurityContext.RunAs.K8SServiceAccount) - assert.Equal(t, launchPlanOutputLocationPrefix, execConfig.RawOutputDataConfig.OutputLocationPrefix) - assert.Equal(t, launchPlanLabels, execConfig.GetLabels().Values) - assert.Equal(t, launchPlanEnvironmentVariables, execConfig.GetEnvs().Values) + assert.Equal(t, launchPlanMaxParallelism, execConfig.GetMaxParallelism()) + assert.Equal(t, launchPlanInterruptible, execConfig.GetInterruptible().GetValue()) + assert.Equal(t, launchPlanOverwriteCache, execConfig.GetOverwriteCache()) + assert.Equal(t, launchPlanK8sServiceAccount, execConfig.GetSecurityContext().GetRunAs().GetK8SServiceAccount()) + assert.Equal(t, launchPlanOutputLocationPrefix, execConfig.GetRawOutputDataConfig().GetOutputLocationPrefix()) + assert.Equal(t, launchPlanLabels, execConfig.GetLabels().GetValues()) + assert.Equal(t, launchPlanEnvironmentVariables, execConfig.GetEnvs().GetValues()) }) t.Run("request with no config", func(t *testing.T) { request := &admin.ExecutionCreateRequest{ - Project: workflowIdentifier.Project, - Domain: workflowIdentifier.Domain, + Project: workflowIdentifier.GetProject(), + Domain: workflowIdentifier.GetDomain(), Spec: &admin.ExecutionSpec{}, } launchPlan := &admin.LaunchPlan{ @@ -4771,19 +4771,19 @@ func TestGetExecutionConfigOverrides(t *testing.T) { } execConfig, err := executionManager.getExecutionConfig(context.TODO(), request, launchPlan) assert.NoError(t, err) - 
assert.Equal(t, launchPlanMaxParallelism, execConfig.MaxParallelism) - assert.Equal(t, launchPlanInterruptible, execConfig.Interruptible.Value) - assert.Equal(t, launchPlanOverwriteCache, execConfig.OverwriteCache) - assert.Equal(t, launchPlanK8sServiceAccount, execConfig.SecurityContext.RunAs.K8SServiceAccount) - assert.Equal(t, launchPlanOutputLocationPrefix, execConfig.RawOutputDataConfig.OutputLocationPrefix) - assert.Equal(t, launchPlanLabels, execConfig.GetLabels().Values) - assert.Equal(t, launchPlanAnnotations, execConfig.GetAnnotations().Values) - assert.Equal(t, launchPlanEnvironmentVariables, execConfig.GetEnvs().Values) + assert.Equal(t, launchPlanMaxParallelism, execConfig.GetMaxParallelism()) + assert.Equal(t, launchPlanInterruptible, execConfig.GetInterruptible().GetValue()) + assert.Equal(t, launchPlanOverwriteCache, execConfig.GetOverwriteCache()) + assert.Equal(t, launchPlanK8sServiceAccount, execConfig.GetSecurityContext().GetRunAs().GetK8SServiceAccount()) + assert.Equal(t, launchPlanOutputLocationPrefix, execConfig.GetRawOutputDataConfig().GetOutputLocationPrefix()) + assert.Equal(t, launchPlanLabels, execConfig.GetLabels().GetValues()) + assert.Equal(t, launchPlanAnnotations, execConfig.GetAnnotations().GetValues()) + assert.Equal(t, launchPlanEnvironmentVariables, execConfig.GetEnvs().GetValues()) }) t.Run("launchplan with partial config", func(t *testing.T) { request := &admin.ExecutionCreateRequest{ - Project: workflowIdentifier.Project, - Domain: workflowIdentifier.Domain, + Project: workflowIdentifier.GetProject(), + Domain: workflowIdentifier.GetDomain(), Spec: &admin.ExecutionSpec{}, } launchPlan := &admin.LaunchPlan{ @@ -4803,18 +4803,18 @@ func TestGetExecutionConfigOverrides(t *testing.T) { } execConfig, err := executionManager.getExecutionConfig(context.TODO(), request, launchPlan) assert.NoError(t, err) - assert.Equal(t, launchPlanMaxParallelism, execConfig.MaxParallelism) - assert.Equal(t, rmInterruptible, 
execConfig.Interruptible.Value) - assert.Equal(t, rmOverwriteCache, execConfig.OverwriteCache) - assert.Equal(t, launchPlanK8sServiceAccount, execConfig.SecurityContext.RunAs.K8SServiceAccount) - assert.Equal(t, launchPlanOutputLocationPrefix, execConfig.RawOutputDataConfig.OutputLocationPrefix) - assert.Equal(t, launchPlanLabels, execConfig.GetLabels().Values) - assert.Equal(t, launchPlanAnnotations, execConfig.GetAnnotations().Values) + assert.Equal(t, launchPlanMaxParallelism, execConfig.GetMaxParallelism()) + assert.Equal(t, rmInterruptible, execConfig.GetInterruptible().GetValue()) + assert.Equal(t, rmOverwriteCache, execConfig.GetOverwriteCache()) + assert.Equal(t, launchPlanK8sServiceAccount, execConfig.GetSecurityContext().GetRunAs().GetK8SServiceAccount()) + assert.Equal(t, launchPlanOutputLocationPrefix, execConfig.GetRawOutputDataConfig().GetOutputLocationPrefix()) + assert.Equal(t, launchPlanLabels, execConfig.GetLabels().GetValues()) + assert.Equal(t, launchPlanAnnotations, execConfig.GetAnnotations().GetValues()) }) t.Run("launchplan with no config", func(t *testing.T) { request := &admin.ExecutionCreateRequest{ - Project: workflowIdentifier.Project, - Domain: workflowIdentifier.Domain, + Project: workflowIdentifier.GetProject(), + Domain: workflowIdentifier.GetDomain(), Spec: &admin.ExecutionSpec{}, } launchPlan := &admin.LaunchPlan{ @@ -4822,23 +4822,23 @@ func TestGetExecutionConfigOverrides(t *testing.T) { } execConfig, err := executionManager.getExecutionConfig(context.TODO(), request, launchPlan) assert.NoError(t, err) - assert.Equal(t, rmMaxParallelism, execConfig.MaxParallelism) - assert.Equal(t, rmInterruptible, execConfig.Interruptible.Value) - assert.Equal(t, rmOverwriteCache, execConfig.OverwriteCache) - assert.Equal(t, rmK8sServiceAccount, execConfig.SecurityContext.RunAs.K8SServiceAccount) - assert.Equal(t, rmOutputLocationPrefix, execConfig.RawOutputDataConfig.OutputLocationPrefix) - assert.Equal(t, rmLabels, 
execConfig.GetLabels().Values) - assert.Equal(t, rmAnnotations, execConfig.GetAnnotations().Values) + assert.Equal(t, rmMaxParallelism, execConfig.GetMaxParallelism()) + assert.Equal(t, rmInterruptible, execConfig.GetInterruptible().GetValue()) + assert.Equal(t, rmOverwriteCache, execConfig.GetOverwriteCache()) + assert.Equal(t, rmK8sServiceAccount, execConfig.GetSecurityContext().GetRunAs().GetK8SServiceAccount()) + assert.Equal(t, rmOutputLocationPrefix, execConfig.GetRawOutputDataConfig().GetOutputLocationPrefix()) + assert.Equal(t, rmLabels, execConfig.GetLabels().GetValues()) + assert.Equal(t, rmAnnotations, execConfig.GetAnnotations().GetValues()) assert.Nil(t, execConfig.GetEnvs()) }) t.Run("matchable resource partial config", func(t *testing.T) { resourceManager.GetResourceFunc = func(ctx context.Context, request managerInterfaces.ResourceRequest) (*managerInterfaces.ResourceResponse, error) { assert.Contains(t, []managerInterfaces.ResourceRequest{{ - Project: workflowIdentifier.Project, - Domain: workflowIdentifier.Domain, + Project: workflowIdentifier.GetProject(), + Domain: workflowIdentifier.GetDomain(), ResourceType: admin.MatchableResource_WORKFLOW_EXECUTION_CONFIG, - }, {Project: workflowIdentifier.Project, + }, {Project: workflowIdentifier.GetProject(), Domain: "", ResourceType: admin.MatchableResource_WORKFLOW_EXECUTION_CONFIG}, }, request) @@ -4860,8 +4860,8 @@ func TestGetExecutionConfigOverrides(t *testing.T) { }, nil } request := &admin.ExecutionCreateRequest{ - Project: workflowIdentifier.Project, - Domain: workflowIdentifier.Domain, + Project: workflowIdentifier.GetProject(), + Domain: workflowIdentifier.GetDomain(), Spec: &admin.ExecutionSpec{}, } launchPlan := &admin.LaunchPlan{ @@ -4869,23 +4869,23 @@ func TestGetExecutionConfigOverrides(t *testing.T) { } execConfig, err := executionManager.getExecutionConfig(context.TODO(), request, launchPlan) assert.NoError(t, err) - assert.Equal(t, rmMaxParallelism, execConfig.MaxParallelism) + 
assert.Equal(t, rmMaxParallelism, execConfig.GetMaxParallelism()) assert.Nil(t, execConfig.GetInterruptible()) - assert.False(t, execConfig.OverwriteCache) - assert.Equal(t, rmK8sServiceAccount, execConfig.SecurityContext.RunAs.K8SServiceAccount) + assert.False(t, execConfig.GetOverwriteCache()) + assert.Equal(t, rmK8sServiceAccount, execConfig.GetSecurityContext().GetRunAs().GetK8SServiceAccount()) assert.Nil(t, execConfig.GetRawOutputDataConfig()) assert.Nil(t, execConfig.GetLabels()) - assert.Equal(t, rmAnnotations, execConfig.GetAnnotations().Values) + assert.Equal(t, rmAnnotations, execConfig.GetAnnotations().GetValues()) assert.Nil(t, execConfig.GetEnvs()) }) t.Run("matchable resource with no config", func(t *testing.T) { resourceManager.GetResourceFunc = func(ctx context.Context, request managerInterfaces.ResourceRequest) (*managerInterfaces.ResourceResponse, error) { assert.Contains(t, []managerInterfaces.ResourceRequest{{ - Project: workflowIdentifier.Project, - Domain: workflowIdentifier.Domain, + Project: workflowIdentifier.GetProject(), + Domain: workflowIdentifier.GetDomain(), ResourceType: admin.MatchableResource_WORKFLOW_EXECUTION_CONFIG, - }, {Project: workflowIdentifier.Project, + }, {Project: workflowIdentifier.GetProject(), Domain: "", ResourceType: admin.MatchableResource_WORKFLOW_EXECUTION_CONFIG}, }, request) @@ -4898,8 +4898,8 @@ func TestGetExecutionConfigOverrides(t *testing.T) { }, nil } request := &admin.ExecutionCreateRequest{ - Project: workflowIdentifier.Project, - Domain: workflowIdentifier.Domain, + Project: workflowIdentifier.GetProject(), + Domain: workflowIdentifier.GetDomain(), Spec: &admin.ExecutionSpec{}, } launchPlan := &admin.LaunchPlan{ @@ -4907,10 +4907,10 @@ func TestGetExecutionConfigOverrides(t *testing.T) { } execConfig, err := executionManager.getExecutionConfig(context.TODO(), request, launchPlan) assert.NoError(t, err) - assert.Equal(t, defaultMaxParallelism, execConfig.MaxParallelism) + assert.Equal(t, 
defaultMaxParallelism, execConfig.GetMaxParallelism()) assert.Nil(t, execConfig.GetInterruptible()) - assert.False(t, execConfig.OverwriteCache) - assert.Equal(t, defaultK8sServiceAccount, execConfig.SecurityContext.RunAs.K8SServiceAccount) + assert.False(t, execConfig.GetOverwriteCache()) + assert.Equal(t, defaultK8sServiceAccount, execConfig.GetSecurityContext().GetRunAs().GetK8SServiceAccount()) assert.Nil(t, execConfig.GetRawOutputDataConfig()) assert.Nil(t, execConfig.GetLabels()) assert.Nil(t, execConfig.GetAnnotations()) @@ -4920,10 +4920,10 @@ func TestGetExecutionConfigOverrides(t *testing.T) { resourceManager.GetResourceFunc = func(ctx context.Context, request managerInterfaces.ResourceRequest) (*managerInterfaces.ResourceResponse, error) { assert.Contains(t, []managerInterfaces.ResourceRequest{{ - Project: workflowIdentifier.Project, - Domain: workflowIdentifier.Domain, + Project: workflowIdentifier.GetProject(), + Domain: workflowIdentifier.GetDomain(), ResourceType: admin.MatchableResource_WORKFLOW_EXECUTION_CONFIG, - }, {Project: workflowIdentifier.Project, + }, {Project: workflowIdentifier.GetProject(), Domain: "", ResourceType: admin.MatchableResource_WORKFLOW_EXECUTION_CONFIG}, }, request) @@ -4937,8 +4937,8 @@ func TestGetExecutionConfigOverrides(t *testing.T) { }, nil } request := &admin.ExecutionCreateRequest{ - Project: workflowIdentifier.Project, - Domain: workflowIdentifier.Domain, + Project: workflowIdentifier.GetProject(), + Domain: workflowIdentifier.GetDomain(), Spec: &admin.ExecutionSpec{}, } launchPlan := &admin.LaunchPlan{ @@ -4950,10 +4950,10 @@ func TestGetExecutionConfigOverrides(t *testing.T) { } execConfig, err := executionManager.getExecutionConfig(context.TODO(), request, launchPlan) assert.NoError(t, err) - assert.Equal(t, defaultMaxParallelism, execConfig.MaxParallelism) + assert.Equal(t, defaultMaxParallelism, execConfig.GetMaxParallelism()) assert.Nil(t, execConfig.GetInterruptible()) - assert.False(t, 
execConfig.OverwriteCache) - assert.Equal(t, deprecatedLaunchPlanK8sServiceAccount, execConfig.SecurityContext.RunAs.K8SServiceAccount) + assert.False(t, execConfig.GetOverwriteCache()) + assert.Equal(t, deprecatedLaunchPlanK8sServiceAccount, execConfig.GetSecurityContext().GetRunAs().GetK8SServiceAccount()) assert.Nil(t, execConfig.GetRawOutputDataConfig()) assert.Nil(t, execConfig.GetLabels()) assert.Nil(t, execConfig.GetAnnotations()) @@ -4963,11 +4963,11 @@ func TestGetExecutionConfigOverrides(t *testing.T) { resourceManager.GetResourceFunc = func(ctx context.Context, request managerInterfaces.ResourceRequest) (*managerInterfaces.ResourceResponse, error) { assert.Contains(t, []managerInterfaces.ResourceRequest{{ - Project: workflowIdentifier.Project, - Domain: workflowIdentifier.Domain, + Project: workflowIdentifier.GetProject(), + Domain: workflowIdentifier.GetDomain(), ResourceType: admin.MatchableResource_WORKFLOW_EXECUTION_CONFIG, - Workflow: workflowIdentifier.Name, - }, {Project: workflowIdentifier.Project, + Workflow: workflowIdentifier.GetName(), + }, {Project: workflowIdentifier.GetProject(), Domain: "", Workflow: "", ResourceType: admin.MatchableResource_WORKFLOW_EXECUTION_CONFIG}, @@ -4991,23 +4991,23 @@ func TestGetExecutionConfigOverrides(t *testing.T) { }, nil } request := &admin.ExecutionCreateRequest{ - Project: workflowIdentifier.Project, - Domain: workflowIdentifier.Domain, + Project: workflowIdentifier.GetProject(), + Domain: workflowIdentifier.GetDomain(), Spec: &admin.ExecutionSpec{}, } launchPlan := &admin.LaunchPlan{ Spec: &admin.LaunchPlanSpec{ WorkflowId: &core.Identifier{ - Name: workflowIdentifier.Name, + Name: workflowIdentifier.GetName(), }, }, } execConfig, err := executionManager.getExecutionConfig(context.TODO(), request, launchPlan) assert.NoError(t, err) - assert.Equal(t, int32(300), execConfig.MaxParallelism) - assert.True(t, execConfig.Interruptible.Value) - assert.True(t, execConfig.OverwriteCache) - assert.Equal(t, 
"workflowDefault", execConfig.SecurityContext.RunAs.K8SServiceAccount) + assert.Equal(t, int32(300), execConfig.GetMaxParallelism()) + assert.True(t, execConfig.GetInterruptible().GetValue()) + assert.True(t, execConfig.GetOverwriteCache()) + assert.Equal(t, "workflowDefault", execConfig.GetSecurityContext().GetRunAs().GetK8SServiceAccount()) assert.Nil(t, execConfig.GetRawOutputDataConfig()) assert.Nil(t, execConfig.GetLabels()) assert.Nil(t, execConfig.GetAnnotations()) @@ -5017,18 +5017,18 @@ func TestGetExecutionConfigOverrides(t *testing.T) { resourceManager.GetResourceFunc = func(ctx context.Context, request managerInterfaces.ResourceRequest) (*managerInterfaces.ResourceResponse, error) { assert.Contains(t, []managerInterfaces.ResourceRequest{{ - Project: workflowIdentifier.Project, - Domain: workflowIdentifier.Domain, + Project: workflowIdentifier.GetProject(), + Domain: workflowIdentifier.GetDomain(), ResourceType: admin.MatchableResource_WORKFLOW_EXECUTION_CONFIG, - }, {Project: workflowIdentifier.Project, + }, {Project: workflowIdentifier.GetProject(), Domain: "", ResourceType: admin.MatchableResource_WORKFLOW_EXECUTION_CONFIG}, }, request) return nil, fmt.Errorf("failed to fetch the resources") } request := &admin.ExecutionCreateRequest{ - Project: workflowIdentifier.Project, - Domain: workflowIdentifier.Domain, + Project: workflowIdentifier.GetProject(), + Domain: workflowIdentifier.GetDomain(), Spec: &admin.ExecutionSpec{}, } launchPlan := &admin.LaunchPlan{ @@ -5049,10 +5049,10 @@ func TestGetExecutionConfigOverrides(t *testing.T) { resourceManager.GetResourceFunc = func(ctx context.Context, request managerInterfaces.ResourceRequest) (*managerInterfaces.ResourceResponse, error) { assert.Contains(t, []managerInterfaces.ResourceRequest{{ - Project: workflowIdentifier.Project, - Domain: workflowIdentifier.Domain, + Project: workflowIdentifier.GetProject(), + Domain: workflowIdentifier.GetDomain(), ResourceType: 
admin.MatchableResource_WORKFLOW_EXECUTION_CONFIG, - }, {Project: workflowIdentifier.Project, + }, {Project: workflowIdentifier.GetProject(), Domain: "", ResourceType: admin.MatchableResource_WORKFLOW_EXECUTION_CONFIG}, }, request) @@ -5070,8 +5070,8 @@ func TestGetExecutionConfigOverrides(t *testing.T) { t.Run("request with interruptible override disabled", func(t *testing.T) { request := &admin.ExecutionCreateRequest{ - Project: workflowIdentifier.Project, - Domain: workflowIdentifier.Domain, + Project: workflowIdentifier.GetProject(), + Domain: workflowIdentifier.GetDomain(), Spec: &admin.ExecutionSpec{ Interruptible: &wrappers.BoolValue{Value: false}, }, @@ -5079,17 +5079,17 @@ func TestGetExecutionConfigOverrides(t *testing.T) { execConfig, err := executionManager.getExecutionConfig(context.TODO(), request, nil) assert.NoError(t, err) - assert.Equal(t, defaultMaxParallelism, execConfig.MaxParallelism) - assert.False(t, execConfig.Interruptible.Value) - assert.Equal(t, defaultK8sServiceAccount, execConfig.SecurityContext.RunAs.K8SServiceAccount) + assert.Equal(t, defaultMaxParallelism, execConfig.GetMaxParallelism()) + assert.False(t, execConfig.GetInterruptible().GetValue()) + assert.Equal(t, defaultK8sServiceAccount, execConfig.GetSecurityContext().GetRunAs().GetK8SServiceAccount()) assert.Nil(t, execConfig.GetRawOutputDataConfig()) assert.Nil(t, execConfig.GetLabels()) assert.Nil(t, execConfig.GetAnnotations()) }) t.Run("request with interruptible override enabled", func(t *testing.T) { request := &admin.ExecutionCreateRequest{ - Project: workflowIdentifier.Project, - Domain: workflowIdentifier.Domain, + Project: workflowIdentifier.GetProject(), + Domain: workflowIdentifier.GetDomain(), Spec: &admin.ExecutionSpec{ Interruptible: &wrappers.BoolValue{Value: true}, }, @@ -5097,33 +5097,33 @@ func TestGetExecutionConfigOverrides(t *testing.T) { execConfig, err := executionManager.getExecutionConfig(context.TODO(), request, nil) assert.NoError(t, err) - 
assert.Equal(t, defaultMaxParallelism, execConfig.MaxParallelism) - assert.True(t, execConfig.Interruptible.Value) - assert.Equal(t, defaultK8sServiceAccount, execConfig.SecurityContext.RunAs.K8SServiceAccount) + assert.Equal(t, defaultMaxParallelism, execConfig.GetMaxParallelism()) + assert.True(t, execConfig.GetInterruptible().GetValue()) + assert.Equal(t, defaultK8sServiceAccount, execConfig.GetSecurityContext().GetRunAs().GetK8SServiceAccount()) assert.Nil(t, execConfig.GetRawOutputDataConfig()) assert.Nil(t, execConfig.GetLabels()) assert.Nil(t, execConfig.GetAnnotations()) }) t.Run("request with no interruptible override specified", func(t *testing.T) { request := &admin.ExecutionCreateRequest{ - Project: workflowIdentifier.Project, - Domain: workflowIdentifier.Domain, + Project: workflowIdentifier.GetProject(), + Domain: workflowIdentifier.GetDomain(), Spec: &admin.ExecutionSpec{}, } execConfig, err := executionManager.getExecutionConfig(context.TODO(), request, nil) assert.NoError(t, err) - assert.Equal(t, defaultMaxParallelism, execConfig.MaxParallelism) - assert.True(t, execConfig.Interruptible.Value) - assert.Equal(t, defaultK8sServiceAccount, execConfig.SecurityContext.RunAs.K8SServiceAccount) + assert.Equal(t, defaultMaxParallelism, execConfig.GetMaxParallelism()) + assert.True(t, execConfig.GetInterruptible().GetValue()) + assert.Equal(t, defaultK8sServiceAccount, execConfig.GetSecurityContext().GetRunAs().GetK8SServiceAccount()) assert.Nil(t, execConfig.GetRawOutputDataConfig()) assert.Nil(t, execConfig.GetLabels()) assert.Nil(t, execConfig.GetAnnotations()) }) t.Run("launch plan with interruptible override disabled", func(t *testing.T) { request := &admin.ExecutionCreateRequest{ - Project: workflowIdentifier.Project, - Domain: workflowIdentifier.Domain, + Project: workflowIdentifier.GetProject(), + Domain: workflowIdentifier.GetDomain(), Spec: &admin.ExecutionSpec{}, } @@ -5135,17 +5135,17 @@ func TestGetExecutionConfigOverrides(t *testing.T) { 
execConfig, err := executionManager.getExecutionConfig(context.TODO(), request, launchPlan) assert.NoError(t, err) - assert.Equal(t, defaultMaxParallelism, execConfig.MaxParallelism) - assert.False(t, execConfig.Interruptible.Value) - assert.Equal(t, defaultK8sServiceAccount, execConfig.SecurityContext.RunAs.K8SServiceAccount) + assert.Equal(t, defaultMaxParallelism, execConfig.GetMaxParallelism()) + assert.False(t, execConfig.GetInterruptible().GetValue()) + assert.Equal(t, defaultK8sServiceAccount, execConfig.GetSecurityContext().GetRunAs().GetK8SServiceAccount()) assert.Nil(t, execConfig.GetRawOutputDataConfig()) assert.Nil(t, execConfig.GetLabels()) assert.Nil(t, execConfig.GetAnnotations()) }) t.Run("launch plan with interruptible override enabled", func(t *testing.T) { request := &admin.ExecutionCreateRequest{ - Project: workflowIdentifier.Project, - Domain: workflowIdentifier.Domain, + Project: workflowIdentifier.GetProject(), + Domain: workflowIdentifier.GetDomain(), Spec: &admin.ExecutionSpec{}, } @@ -5158,20 +5158,20 @@ func TestGetExecutionConfigOverrides(t *testing.T) { execConfig, err := executionManager.getExecutionConfig(context.TODO(), request, launchPlan) assert.NoError(t, err) - assert.Equal(t, defaultMaxParallelism, execConfig.MaxParallelism) - assert.True(t, execConfig.Interruptible.Value) - assert.Equal(t, defaultK8sServiceAccount, execConfig.SecurityContext.RunAs.K8SServiceAccount) + assert.Equal(t, defaultMaxParallelism, execConfig.GetMaxParallelism()) + assert.True(t, execConfig.GetInterruptible().GetValue()) + assert.Equal(t, defaultK8sServiceAccount, execConfig.GetSecurityContext().GetRunAs().GetK8SServiceAccount()) assert.Nil(t, execConfig.GetRawOutputDataConfig()) assert.Nil(t, execConfig.GetLabels()) assert.Nil(t, execConfig.GetAnnotations()) - assert.Equal(t, 1, len(execConfig.Envs.Values)) - assert.Equal(t, "foo", execConfig.Envs.Values[0].Key) - assert.Equal(t, "bar", execConfig.Envs.Values[0].Value) + assert.Equal(t, 1, 
len(execConfig.GetEnvs().GetValues())) + assert.Equal(t, "foo", execConfig.GetEnvs().GetValues()[0].GetKey()) + assert.Equal(t, "bar", execConfig.GetEnvs().GetValues()[0].GetValue()) }) t.Run("launch plan with no interruptible override specified", func(t *testing.T) { request := &admin.ExecutionCreateRequest{ - Project: workflowIdentifier.Project, - Domain: workflowIdentifier.Domain, + Project: workflowIdentifier.GetProject(), + Domain: workflowIdentifier.GetDomain(), Spec: &admin.ExecutionSpec{}, } @@ -5181,17 +5181,17 @@ func TestGetExecutionConfigOverrides(t *testing.T) { execConfig, err := executionManager.getExecutionConfig(context.TODO(), request, launchPlan) assert.NoError(t, err) - assert.Equal(t, defaultMaxParallelism, execConfig.MaxParallelism) - assert.True(t, execConfig.Interruptible.Value) - assert.Equal(t, defaultK8sServiceAccount, execConfig.SecurityContext.RunAs.K8SServiceAccount) + assert.Equal(t, defaultMaxParallelism, execConfig.GetMaxParallelism()) + assert.True(t, execConfig.GetInterruptible().GetValue()) + assert.Equal(t, defaultK8sServiceAccount, execConfig.GetSecurityContext().GetRunAs().GetK8SServiceAccount()) assert.Nil(t, execConfig.GetRawOutputDataConfig()) assert.Nil(t, execConfig.GetLabels()) assert.Nil(t, execConfig.GetAnnotations()) }) t.Run("request and launch plan with different interruptible overrides", func(t *testing.T) { request := &admin.ExecutionCreateRequest{ - Project: workflowIdentifier.Project, - Domain: workflowIdentifier.Domain, + Project: workflowIdentifier.GetProject(), + Domain: workflowIdentifier.GetDomain(), Spec: &admin.ExecutionSpec{ Interruptible: &wrappers.BoolValue{Value: true}, }, @@ -5205,17 +5205,17 @@ func TestGetExecutionConfigOverrides(t *testing.T) { execConfig, err := executionManager.getExecutionConfig(context.TODO(), request, launchPlan) assert.NoError(t, err) - assert.Equal(t, defaultMaxParallelism, execConfig.MaxParallelism) - assert.True(t, execConfig.Interruptible.Value) - assert.Equal(t, 
defaultK8sServiceAccount, execConfig.SecurityContext.RunAs.K8SServiceAccount) + assert.Equal(t, defaultMaxParallelism, execConfig.GetMaxParallelism()) + assert.True(t, execConfig.GetInterruptible().GetValue()) + assert.Equal(t, defaultK8sServiceAccount, execConfig.GetSecurityContext().GetRunAs().GetK8SServiceAccount()) assert.Nil(t, execConfig.GetRawOutputDataConfig()) assert.Nil(t, execConfig.GetLabels()) assert.Nil(t, execConfig.GetAnnotations()) }) t.Run("request with skip cache override enabled", func(t *testing.T) { request := &admin.ExecutionCreateRequest{ - Project: workflowIdentifier.Project, - Domain: workflowIdentifier.Domain, + Project: workflowIdentifier.GetProject(), + Domain: workflowIdentifier.GetDomain(), Spec: &admin.ExecutionSpec{ OverwriteCache: true, }, @@ -5223,33 +5223,33 @@ func TestGetExecutionConfigOverrides(t *testing.T) { execConfig, err := executionManager.getExecutionConfig(context.TODO(), request, nil) assert.NoError(t, err) - assert.Equal(t, defaultMaxParallelism, execConfig.MaxParallelism) - assert.True(t, execConfig.OverwriteCache) - assert.Equal(t, defaultK8sServiceAccount, execConfig.SecurityContext.RunAs.K8SServiceAccount) + assert.Equal(t, defaultMaxParallelism, execConfig.GetMaxParallelism()) + assert.True(t, execConfig.GetOverwriteCache()) + assert.Equal(t, defaultK8sServiceAccount, execConfig.GetSecurityContext().GetRunAs().GetK8SServiceAccount()) assert.Nil(t, execConfig.GetRawOutputDataConfig()) assert.Nil(t, execConfig.GetLabels()) assert.Nil(t, execConfig.GetAnnotations()) }) t.Run("request with no skip cache override specified", func(t *testing.T) { request := &admin.ExecutionCreateRequest{ - Project: workflowIdentifier.Project, - Domain: workflowIdentifier.Domain, + Project: workflowIdentifier.GetProject(), + Domain: workflowIdentifier.GetDomain(), Spec: &admin.ExecutionSpec{}, } execConfig, err := executionManager.getExecutionConfig(context.TODO(), request, nil) assert.NoError(t, err) - assert.Equal(t, 
defaultMaxParallelism, execConfig.MaxParallelism) - assert.True(t, execConfig.OverwriteCache) - assert.Equal(t, defaultK8sServiceAccount, execConfig.SecurityContext.RunAs.K8SServiceAccount) + assert.Equal(t, defaultMaxParallelism, execConfig.GetMaxParallelism()) + assert.True(t, execConfig.GetOverwriteCache()) + assert.Equal(t, defaultK8sServiceAccount, execConfig.GetSecurityContext().GetRunAs().GetK8SServiceAccount()) assert.Nil(t, execConfig.GetRawOutputDataConfig()) assert.Nil(t, execConfig.GetLabels()) assert.Nil(t, execConfig.GetAnnotations()) }) t.Run("launch plan with skip cache override enabled", func(t *testing.T) { request := &admin.ExecutionCreateRequest{ - Project: workflowIdentifier.Project, - Domain: workflowIdentifier.Domain, + Project: workflowIdentifier.GetProject(), + Domain: workflowIdentifier.GetDomain(), Spec: &admin.ExecutionSpec{}, } @@ -5261,17 +5261,17 @@ func TestGetExecutionConfigOverrides(t *testing.T) { execConfig, err := executionManager.getExecutionConfig(context.TODO(), request, launchPlan) assert.NoError(t, err) - assert.Equal(t, defaultMaxParallelism, execConfig.MaxParallelism) - assert.True(t, execConfig.OverwriteCache) - assert.Equal(t, defaultK8sServiceAccount, execConfig.SecurityContext.RunAs.K8SServiceAccount) + assert.Equal(t, defaultMaxParallelism, execConfig.GetMaxParallelism()) + assert.True(t, execConfig.GetOverwriteCache()) + assert.Equal(t, defaultK8sServiceAccount, execConfig.GetSecurityContext().GetRunAs().GetK8SServiceAccount()) assert.Nil(t, execConfig.GetRawOutputDataConfig()) assert.Nil(t, execConfig.GetLabels()) assert.Nil(t, execConfig.GetAnnotations()) }) t.Run("launch plan with no skip cache override specified", func(t *testing.T) { request := &admin.ExecutionCreateRequest{ - Project: workflowIdentifier.Project, - Domain: workflowIdentifier.Domain, + Project: workflowIdentifier.GetProject(), + Domain: workflowIdentifier.GetDomain(), Spec: &admin.ExecutionSpec{}, } @@ -5281,17 +5281,17 @@ func 
TestGetExecutionConfigOverrides(t *testing.T) { execConfig, err := executionManager.getExecutionConfig(context.TODO(), request, launchPlan) assert.NoError(t, err) - assert.Equal(t, defaultMaxParallelism, execConfig.MaxParallelism) - assert.True(t, execConfig.OverwriteCache) - assert.Equal(t, defaultK8sServiceAccount, execConfig.SecurityContext.RunAs.K8SServiceAccount) + assert.Equal(t, defaultMaxParallelism, execConfig.GetMaxParallelism()) + assert.True(t, execConfig.GetOverwriteCache()) + assert.Equal(t, defaultK8sServiceAccount, execConfig.GetSecurityContext().GetRunAs().GetK8SServiceAccount()) assert.Nil(t, execConfig.GetRawOutputDataConfig()) assert.Nil(t, execConfig.GetLabels()) assert.Nil(t, execConfig.GetAnnotations()) }) t.Run("request and launch plan with different skip cache overrides", func(t *testing.T) { request := &admin.ExecutionCreateRequest{ - Project: workflowIdentifier.Project, - Domain: workflowIdentifier.Domain, + Project: workflowIdentifier.GetProject(), + Domain: workflowIdentifier.GetDomain(), Spec: &admin.ExecutionSpec{ OverwriteCache: true, }, @@ -5305,9 +5305,9 @@ func TestGetExecutionConfigOverrides(t *testing.T) { execConfig, err := executionManager.getExecutionConfig(context.TODO(), request, launchPlan) assert.NoError(t, err) - assert.Equal(t, defaultMaxParallelism, execConfig.MaxParallelism) - assert.True(t, execConfig.OverwriteCache) - assert.Equal(t, defaultK8sServiceAccount, execConfig.SecurityContext.RunAs.K8SServiceAccount) + assert.Equal(t, defaultMaxParallelism, execConfig.GetMaxParallelism()) + assert.True(t, execConfig.GetOverwriteCache()) + assert.Equal(t, defaultK8sServiceAccount, execConfig.GetSecurityContext().GetRunAs().GetK8SServiceAccount()) assert.Nil(t, execConfig.GetRawOutputDataConfig()) assert.Nil(t, execConfig.GetLabels()) assert.Nil(t, execConfig.GetAnnotations()) @@ -5316,13 +5316,13 @@ func TestGetExecutionConfigOverrides(t *testing.T) { t.Run("test pick up security context from admin system config", func(t 
*testing.T) { executionManager.config.ApplicationConfiguration().GetTopLevelConfig().K8SServiceAccount = "flyte-test" request := &admin.ExecutionCreateRequest{ - Project: workflowIdentifier.Project, - Domain: workflowIdentifier.Domain, + Project: workflowIdentifier.GetProject(), + Domain: workflowIdentifier.GetDomain(), Spec: &admin.ExecutionSpec{}, } execConfig, err := executionManager.getExecutionConfig(context.TODO(), request, nil) assert.NoError(t, err) - assert.Equal(t, "flyte-test", execConfig.SecurityContext.RunAs.K8SServiceAccount) + assert.Equal(t, "flyte-test", execConfig.GetSecurityContext().GetRunAs().GetK8SServiceAccount()) executionManager.config.ApplicationConfiguration().GetTopLevelConfig().K8SServiceAccount = defaultK8sServiceAccount }) }) @@ -5333,10 +5333,10 @@ func TestGetExecutionConfig(t *testing.T) { resourceManager.GetResourceFunc = func(ctx context.Context, request managerInterfaces.ResourceRequest) (*managerInterfaces.ResourceResponse, error) { assert.Contains(t, []managerInterfaces.ResourceRequest{{ - Project: workflowIdentifier.Project, - Domain: workflowIdentifier.Domain, + Project: workflowIdentifier.GetProject(), + Domain: workflowIdentifier.GetDomain(), ResourceType: admin.MatchableResource_WORKFLOW_EXECUTION_CONFIG, - }, {Project: workflowIdentifier.Project, + }, {Project: workflowIdentifier.GetProject(), Domain: "", ResourceType: admin.MatchableResource_WORKFLOW_EXECUTION_CONFIG}, }, request) @@ -5358,13 +5358,13 @@ func TestGetExecutionConfig(t *testing.T) { config: applicationConfig, } execConfig, err := executionManager.getExecutionConfig(context.TODO(), &admin.ExecutionCreateRequest{ - Project: workflowIdentifier.Project, - Domain: workflowIdentifier.Domain, + Project: workflowIdentifier.GetProject(), + Domain: workflowIdentifier.GetDomain(), Spec: &admin.ExecutionSpec{}, }, nil) assert.NoError(t, err) - assert.Equal(t, execConfig.MaxParallelism, int32(100)) - assert.True(t, execConfig.OverwriteCache) + assert.Equal(t, 
execConfig.GetMaxParallelism(), int32(100)) + assert.True(t, execConfig.GetOverwriteCache()) } func TestGetExecutionConfig_Spec(t *testing.T) { @@ -5379,8 +5379,8 @@ func TestGetExecutionConfig_Spec(t *testing.T) { config: applicationConfig, } execConfig, err := executionManager.getExecutionConfig(context.TODO(), &admin.ExecutionCreateRequest{ - Project: workflowIdentifier.Project, - Domain: workflowIdentifier.Domain, + Project: workflowIdentifier.GetProject(), + Domain: workflowIdentifier.GetDomain(), Spec: &admin.ExecutionSpec{ MaxParallelism: 100, OverwriteCache: true, @@ -5392,12 +5392,12 @@ func TestGetExecutionConfig_Spec(t *testing.T) { }, }) assert.NoError(t, err) - assert.Equal(t, int32(100), execConfig.MaxParallelism) - assert.True(t, execConfig.OverwriteCache) + assert.Equal(t, int32(100), execConfig.GetMaxParallelism()) + assert.True(t, execConfig.GetOverwriteCache()) execConfig, err = executionManager.getExecutionConfig(context.TODO(), &admin.ExecutionCreateRequest{ - Project: workflowIdentifier.Project, - Domain: workflowIdentifier.Domain, + Project: workflowIdentifier.GetProject(), + Domain: workflowIdentifier.GetDomain(), Spec: &admin.ExecutionSpec{}, }, &admin.LaunchPlan{ Spec: &admin.LaunchPlanSpec{ @@ -5406,8 +5406,8 @@ func TestGetExecutionConfig_Spec(t *testing.T) { }, }) assert.NoError(t, err) - assert.Equal(t, int32(50), execConfig.MaxParallelism) - assert.True(t, execConfig.OverwriteCache) + assert.Equal(t, int32(50), execConfig.GetMaxParallelism()) + assert.True(t, execConfig.GetOverwriteCache()) resourceManager = managerMocks.MockResourceManager{} resourceManager.GetResourceFunc = func(ctx context.Context, @@ -5422,15 +5422,15 @@ func TestGetExecutionConfig_Spec(t *testing.T) { executionManager.config.ApplicationConfiguration().GetTopLevelConfig().OverwriteCache = true execConfig, err = executionManager.getExecutionConfig(context.TODO(), &admin.ExecutionCreateRequest{ - Project: workflowIdentifier.Project, - Domain: 
workflowIdentifier.Domain, + Project: workflowIdentifier.GetProject(), + Domain: workflowIdentifier.GetDomain(), Spec: &admin.ExecutionSpec{}, }, &admin.LaunchPlan{ Spec: &admin.LaunchPlanSpec{}, }) assert.NoError(t, err) - assert.Equal(t, execConfig.MaxParallelism, int32(25)) - assert.True(t, execConfig.OverwriteCache) + assert.Equal(t, execConfig.GetMaxParallelism(), int32(25)) + assert.True(t, execConfig.GetOverwriteCache()) } func TestGetClusterAssignment(t *testing.T) { @@ -5439,8 +5439,8 @@ func TestGetClusterAssignment(t *testing.T) { resourceManager.GetResourceFunc = func(ctx context.Context, request managerInterfaces.ResourceRequest) (*managerInterfaces.ResourceResponse, error) { assert.EqualValues(t, request, managerInterfaces.ResourceRequest{ - Project: workflowIdentifier.Project, - Domain: workflowIdentifier.Domain, + Project: workflowIdentifier.GetProject(), + Domain: workflowIdentifier.GetDomain(), ResourceType: admin.MatchableResource_CLUSTER_ASSIGNMENT, }) return &managerInterfaces.ResourceResponse{ @@ -5457,8 +5457,8 @@ func TestGetClusterAssignment(t *testing.T) { } t.Run("value from db", func(t *testing.T) { ca, err := executionManager.getClusterAssignment(context.TODO(), &admin.ExecutionCreateRequest{ - Project: workflowIdentifier.Project, - Domain: workflowIdentifier.Domain, + Project: workflowIdentifier.GetProject(), + Domain: workflowIdentifier.GetDomain(), Spec: &admin.ExecutionSpec{}, }) assert.NoError(t, err) @@ -5481,8 +5481,8 @@ func TestGetClusterAssignment(t *testing.T) { } ca, err := executionManager.getClusterAssignment(context.TODO(), &admin.ExecutionCreateRequest{ - Project: workflowIdentifier.Project, - Domain: workflowIdentifier.Domain, + Project: workflowIdentifier.GetProject(), + Domain: workflowIdentifier.GetDomain(), Spec: &admin.ExecutionSpec{}, }) assert.NoError(t, err) @@ -5491,8 +5491,8 @@ func TestGetClusterAssignment(t *testing.T) { t.Run("value from request matches value from config", func(t *testing.T) { 
reqClusterAssignment := admin.ClusterAssignment{ClusterPoolName: "gpu"} ca, err := executionManager.getClusterAssignment(context.TODO(), &admin.ExecutionCreateRequest{ - Project: workflowIdentifier.Project, - Domain: workflowIdentifier.Domain, + Project: workflowIdentifier.GetProject(), + Domain: workflowIdentifier.GetDomain(), Spec: &admin.ExecutionSpec{ ClusterAssignment: &reqClusterAssignment, }, @@ -5510,8 +5510,8 @@ func TestGetClusterAssignment(t *testing.T) { reqClusterAssignment := admin.ClusterAssignment{ClusterPoolName: "gpu"} ca, err := executionManager.getClusterAssignment(context.TODO(), &admin.ExecutionCreateRequest{ - Project: workflowIdentifier.Project, - Domain: workflowIdentifier.Domain, + Project: workflowIdentifier.GetProject(), + Domain: workflowIdentifier.GetDomain(), Spec: &admin.ExecutionSpec{ ClusterAssignment: &reqClusterAssignment, }, @@ -5536,8 +5536,8 @@ func TestGetClusterAssignment(t *testing.T) { reqClusterAssignment := admin.ClusterAssignment{ClusterPoolName: "gpu"} ca, err := executionManager.getClusterAssignment(context.TODO(), &admin.ExecutionCreateRequest{ - Project: workflowIdentifier.Project, - Domain: workflowIdentifier.Domain, + Project: workflowIdentifier.GetProject(), + Domain: workflowIdentifier.GetDomain(), Spec: &admin.ExecutionSpec{ ClusterAssignment: &reqClusterAssignment, }, @@ -5548,8 +5548,8 @@ func TestGetClusterAssignment(t *testing.T) { t.Run("value from request doesn't match value from config", func(t *testing.T) { reqClusterAssignment := admin.ClusterAssignment{ClusterPoolName: "swimming-pool"} _, err := executionManager.getClusterAssignment(context.TODO(), &admin.ExecutionCreateRequest{ - Project: workflowIdentifier.Project, - Domain: workflowIdentifier.Domain, + Project: workflowIdentifier.GetProject(), + Domain: workflowIdentifier.GetDomain(), Spec: &admin.ExecutionSpec{ ClusterAssignment: &reqClusterAssignment, }, @@ -5564,8 +5564,8 @@ func TestGetClusterAssignment(t *testing.T) { 
resourceManager.GetResourceFunc = func(ctx context.Context, request managerInterfaces.ResourceRequest) (*managerInterfaces.ResourceResponse, error) { assert.EqualValues(t, request, managerInterfaces.ResourceRequest{ - Project: workflowIdentifier.Project, - Domain: workflowIdentifier.Domain, + Project: workflowIdentifier.GetProject(), + Domain: workflowIdentifier.GetDomain(), ResourceType: admin.MatchableResource_CLUSTER_ASSIGNMENT, }) return &managerInterfaces.ResourceResponse{ @@ -5578,8 +5578,8 @@ func TestGetClusterAssignment(t *testing.T) { } _, err := executionManager.getClusterAssignment(context.TODO(), &admin.ExecutionCreateRequest{ - Project: workflowIdentifier.Project, - Domain: workflowIdentifier.Domain, + Project: workflowIdentifier.GetProject(), + Domain: workflowIdentifier.GetDomain(), Spec: &admin.ExecutionSpec{}, }) @@ -5622,8 +5622,8 @@ func TestResolvePermissions(t *testing.T) { } authRole := resolveAuthRole(execRequest, lp) sc := resolveSecurityCtx(context.TODO(), execConfigSecCtx, authRole) - assert.Equal(t, assumableIamRole, authRole.AssumableIamRole) - assert.Equal(t, k8sServiceAccount, authRole.KubernetesServiceAccount) + assert.Equal(t, assumableIamRole, authRole.GetAssumableIamRole()) + assert.Equal(t, k8sServiceAccount, authRole.GetKubernetesServiceAccount()) assert.Equal(t, &core.SecurityContext{ RunAs: &core.Identity{ IamRole: assumableIamRole, @@ -5659,10 +5659,10 @@ func TestResolvePermissions(t *testing.T) { }, } sc := resolveSecurityCtx(context.TODO(), execConfigSecCtx, authRole) - assert.Equal(t, "", authRole.AssumableIamRole) - assert.Equal(t, "", authRole.KubernetesServiceAccount) - assert.Equal(t, assumableIamRoleSc, sc.RunAs.IamRole) - assert.Equal(t, k8sServiceAccountSc, sc.RunAs.K8SServiceAccount) + assert.Equal(t, "", authRole.GetAssumableIamRole()) + assert.Equal(t, "", authRole.GetKubernetesServiceAccount()) + assert.Equal(t, assumableIamRoleSc, sc.GetRunAs().GetIamRole()) + assert.Equal(t, k8sServiceAccountSc, 
sc.GetRunAs().GetK8SServiceAccount()) }) t.Run("prefer lp auth role over auth", func(t *testing.T) { execRequest := &admin.ExecutionCreateRequest{ @@ -5685,8 +5685,8 @@ func TestResolvePermissions(t *testing.T) { RunAs: &core.Identity{}, } sc := resolveSecurityCtx(context.TODO(), execConfigSecCtx, authRole) - assert.Equal(t, assumableIamRole, authRole.AssumableIamRole) - assert.Equal(t, k8sServiceAccount, authRole.KubernetesServiceAccount) + assert.Equal(t, assumableIamRole, authRole.GetAssumableIamRole()) + assert.Equal(t, k8sServiceAccount, authRole.GetKubernetesServiceAccount()) assert.Equal(t, &core.SecurityContext{ RunAs: &core.Identity{ IamRole: assumableIamRole, @@ -5731,10 +5731,10 @@ func TestResolvePermissions(t *testing.T) { }, } sc := resolveSecurityCtx(context.TODO(), execConfigSecCtx, authRole) - assert.Equal(t, assumableIamRole, authRole.AssumableIamRole) - assert.Equal(t, k8sServiceAccount, authRole.KubernetesServiceAccount) - assert.Equal(t, assumableIamRoleSc, sc.RunAs.IamRole) - assert.Equal(t, k8sServiceAccountSc, sc.RunAs.K8SServiceAccount) + assert.Equal(t, assumableIamRole, authRole.GetAssumableIamRole()) + assert.Equal(t, k8sServiceAccount, authRole.GetKubernetesServiceAccount()) + assert.Equal(t, assumableIamRoleSc, sc.GetRunAs().GetIamRole()) + assert.Equal(t, k8sServiceAccountSc, sc.GetRunAs().GetK8SServiceAccount()) }) t.Run("prefer lp auth over role", func(t *testing.T) { execRequest := &admin.ExecutionCreateRequest{ @@ -5757,8 +5757,8 @@ func TestResolvePermissions(t *testing.T) { }, } sc := resolveSecurityCtx(context.TODO(), execConfigSecCtx, authRole) - assert.Equal(t, assumableIamRole, authRole.AssumableIamRole) - assert.Equal(t, k8sServiceAccount, authRole.KubernetesServiceAccount) + assert.Equal(t, assumableIamRole, authRole.GetAssumableIamRole()) + assert.Equal(t, k8sServiceAccount, authRole.GetKubernetesServiceAccount()) assert.Equal(t, &core.SecurityContext{ RunAs: &core.Identity{ IamRole: assumableIamRole, @@ -5778,8 +5778,8 
@@ func TestResolvePermissions(t *testing.T) { Role: "old role", }, }) - assert.Equal(t, assumableIamRoleLp, authRole.AssumableIamRole) - assert.Equal(t, k8sServiceAccountLp, authRole.KubernetesServiceAccount) + assert.Equal(t, assumableIamRoleLp, authRole.GetAssumableIamRole()) + assert.Equal(t, k8sServiceAccountLp, authRole.GetKubernetesServiceAccount()) }) } @@ -5859,7 +5859,7 @@ func TestQueryTemplate(t *testing.T) { }, } - filledQuery, err := m.fillInTemplateArgs(ctx, q, otherInputs.Literals) + filledQuery, err := m.fillInTemplateArgs(ctx, q, otherInputs.GetLiterals()) assert.NoError(t, err) assert.True(t, proto.Equal(q, filledQuery)) }) @@ -5881,11 +5881,11 @@ func TestQueryTemplate(t *testing.T) { }, } - filledQuery, err := m.fillInTemplateArgs(ctx, q, otherInputs.Literals) + filledQuery, err := m.fillInTemplateArgs(ctx, q, otherInputs.GetLiterals()) assert.NoError(t, err) - staticTime := filledQuery.GetArtifactId().Partitions.Value["partition1"].GetStaticValue() + staticTime := filledQuery.GetArtifactId().GetPartitions().GetValue()["partition1"].GetStaticValue() assert.Equal(t, "2063-04-05", staticTime) - assert.Equal(t, int64(2942956800), filledQuery.GetArtifactId().TimePartition.Value.GetTimeValue().Seconds) + assert.Equal(t, int64(2942956800), filledQuery.GetArtifactId().GetTimePartition().GetValue().GetTimeValue().GetSeconds()) }) t.Run("something missing", func(t *testing.T) { @@ -5905,7 +5905,7 @@ func TestQueryTemplate(t *testing.T) { }, } - _, err := m.fillInTemplateArgs(ctx, q, otherInputs.Literals) + _, err := m.fillInTemplateArgs(ctx, q, otherInputs.GetLiterals()) assert.Error(t, err) }) } diff --git a/flyteadmin/pkg/manager/impl/executions/quality_of_service.go b/flyteadmin/pkg/manager/impl/executions/quality_of_service.go index a96d99d3d6..c2b6f8d3da 100644 --- a/flyteadmin/pkg/manager/impl/executions/quality_of_service.go +++ b/flyteadmin/pkg/manager/impl/executions/quality_of_service.go @@ -37,9 +37,9 @@ type qualityOfServiceAllocator struct 
{ func (q qualityOfServiceAllocator) getQualityOfServiceFromDb(ctx context.Context, workflowIdentifier *core.Identifier) ( *core.QualityOfService, error) { resource, err := q.resourceManager.GetResource(ctx, interfaces.ResourceRequest{ - Project: workflowIdentifier.Project, - Domain: workflowIdentifier.Domain, - Workflow: workflowIdentifier.Name, + Project: workflowIdentifier.GetProject(), + Domain: workflowIdentifier.GetDomain(), + Workflow: workflowIdentifier.GetName(), ResourceType: admin.MatchableResource_QUALITY_OF_SERVICE_SPECIFICATION, }) if err != nil { @@ -79,63 +79,62 @@ QualityOfService spec to apply. This method handles resolving the QualityOfService for an execution given the above rules. */ func (q qualityOfServiceAllocator) GetQualityOfService(ctx context.Context, input GetQualityOfServiceInput) (QualityOfServiceSpec, error) { - workflowIdentifier := input.Workflow.Id + workflowIdentifier := input.Workflow.GetId() var qualityOfServiceTier core.QualityOfService_Tier - if input.ExecutionCreateRequest.Spec.QualityOfService != nil { - if input.ExecutionCreateRequest.Spec.QualityOfService.GetSpec() != nil { + if input.ExecutionCreateRequest.GetSpec().GetQualityOfService() != nil { + if input.ExecutionCreateRequest.GetSpec().GetQualityOfService().GetSpec() != nil { logger.Debugf(ctx, "Determining quality of service from execution spec for [%s/%s/%s]", - input.ExecutionCreateRequest.Project, input.ExecutionCreateRequest.Domain, - input.ExecutionCreateRequest.Name) - duration, err := ptypes.Duration(input.ExecutionCreateRequest.Spec.QualityOfService.GetSpec().QueueingBudget) + input.ExecutionCreateRequest.GetProject(), input.ExecutionCreateRequest.GetDomain(), + input.ExecutionCreateRequest.GetName()) + duration, err := ptypes.Duration(input.ExecutionCreateRequest.GetSpec().GetQualityOfService().GetSpec().GetQueueingBudget()) if err != nil { return QualityOfServiceSpec{}, errors.NewFlyteAdminErrorf(codes.InvalidArgument, "Invalid custom quality of service 
set in create execution request [%s/%s/%s], failed to parse duration [%v] with: %v", - input.ExecutionCreateRequest.Project, input.ExecutionCreateRequest.Domain, - input.ExecutionCreateRequest.Name, - input.ExecutionCreateRequest.Spec.QualityOfService.GetSpec().QueueingBudget, err) + input.ExecutionCreateRequest.GetProject(), input.ExecutionCreateRequest.GetDomain(), + input.ExecutionCreateRequest.GetName(), + input.ExecutionCreateRequest.GetSpec().GetQualityOfService().GetSpec().GetQueueingBudget(), err) } return QualityOfServiceSpec{ QueuingBudget: duration, }, nil } - qualityOfServiceTier = input.ExecutionCreateRequest.Spec.QualityOfService.GetTier() - } else if input.LaunchPlan.Spec.QualityOfService != nil { - if input.LaunchPlan.Spec.QualityOfService.GetSpec() != nil { + qualityOfServiceTier = input.ExecutionCreateRequest.GetSpec().GetQualityOfService().GetTier() + } else if input.LaunchPlan.GetSpec().GetQualityOfService() != nil { + if input.LaunchPlan.GetSpec().GetQualityOfService().GetSpec() != nil { logger.Debugf(ctx, "Determining quality of service from launch plan spec for [%s/%s/%s]", - input.ExecutionCreateRequest.Project, input.ExecutionCreateRequest.Domain, - input.ExecutionCreateRequest.Name) - duration, err := ptypes.Duration(input.LaunchPlan.Spec.QualityOfService.GetSpec().QueueingBudget) + input.ExecutionCreateRequest.GetProject(), input.ExecutionCreateRequest.GetDomain(), + input.ExecutionCreateRequest.GetName()) + duration, err := ptypes.Duration(input.LaunchPlan.GetSpec().GetQualityOfService().GetSpec().GetQueueingBudget()) if err != nil { return QualityOfServiceSpec{}, errors.NewFlyteAdminErrorf(codes.InvalidArgument, "Invalid custom quality of service set in launch plan [%v], failed to parse duration [%v] with: %v", - input.LaunchPlan.Id, - input.ExecutionCreateRequest.Spec.QualityOfService.GetSpec().QueueingBudget, err) + input.LaunchPlan.GetId(), + input.ExecutionCreateRequest.GetSpec().GetQualityOfService().GetSpec().GetQueueingBudget(), 
err) } return QualityOfServiceSpec{ QueuingBudget: duration, }, nil } - qualityOfServiceTier = input.LaunchPlan.Spec.QualityOfService.GetTier() - } else if input.Workflow.Closure.CompiledWorkflow.Primary.Template.Metadata != nil && - input.Workflow.Closure.CompiledWorkflow.Primary.Template.Metadata.QualityOfService != nil { + qualityOfServiceTier = input.LaunchPlan.GetSpec().GetQualityOfService().GetTier() + } else if input.Workflow.GetClosure().GetCompiledWorkflow().GetPrimary().GetTemplate().GetMetadata() != nil && + input.Workflow.GetClosure().GetCompiledWorkflow().GetPrimary().GetTemplate().GetMetadata().GetQualityOfService() != nil { logger.Debugf(ctx, "Determining quality of service from workflow spec for [%s/%s/%s]", - input.ExecutionCreateRequest.Project, input.ExecutionCreateRequest.Domain, - input.ExecutionCreateRequest.Name) - if input.Workflow.Closure.CompiledWorkflow.Primary.Template.Metadata.QualityOfService.GetSpec() != nil { - duration, err := ptypes.Duration(input.Workflow.Closure.CompiledWorkflow.Primary.Template.Metadata.QualityOfService. 
- GetSpec().QueueingBudget) + input.ExecutionCreateRequest.GetProject(), input.ExecutionCreateRequest.GetDomain(), + input.ExecutionCreateRequest.GetName()) + if input.Workflow.GetClosure().GetCompiledWorkflow().GetPrimary().GetTemplate().GetMetadata().GetQualityOfService().GetSpec() != nil { + duration, err := ptypes.Duration(input.Workflow.GetClosure().GetCompiledWorkflow().GetPrimary().GetTemplate().GetMetadata().GetQualityOfService().GetSpec().GetQueueingBudget()) if err != nil { return QualityOfServiceSpec{}, errors.NewFlyteAdminErrorf(codes.InvalidArgument, "Invalid custom quality of service set in workflow [%v], failed to parse duration [%v] with: %v", workflowIdentifier, - input.ExecutionCreateRequest.Spec.QualityOfService.GetSpec().QueueingBudget, err) + input.ExecutionCreateRequest.GetSpec().GetQualityOfService().GetSpec().GetQueueingBudget(), err) } return QualityOfServiceSpec{ QueuingBudget: duration, }, nil } - qualityOfServiceTier = input.Workflow.Closure.CompiledWorkflow.Primary.Template.Metadata.QualityOfService.GetTier() + qualityOfServiceTier = input.Workflow.GetClosure().GetCompiledWorkflow().GetPrimary().GetTemplate().GetMetadata().GetQualityOfService().GetTier() } // If nothing in the hierarchy of registrable entities has set the quality of service, @@ -147,23 +146,23 @@ func (q qualityOfServiceAllocator) GetQualityOfService(ctx context.Context, inpu } if qualityOfService != nil && qualityOfService.GetSpec() != nil { logger.Debugf(ctx, "Determining quality of service from spec database override for [%s/%s/%s]", - input.ExecutionCreateRequest.Project, input.ExecutionCreateRequest.Domain, - input.ExecutionCreateRequest.Name) - duration, err := ptypes.Duration(qualityOfService.GetSpec().QueueingBudget) + input.ExecutionCreateRequest.GetProject(), input.ExecutionCreateRequest.GetDomain(), + input.ExecutionCreateRequest.GetName()) + duration, err := ptypes.Duration(qualityOfService.GetSpec().GetQueueingBudget()) if err != nil { return 
QualityOfServiceSpec{}, errors.NewFlyteAdminErrorf(codes.InvalidArgument, "Invalid custom quality of service set in overridable matching attributes for [%v],"+ "failed to parse duration [%v] with: %v", workflowIdentifier, - input.ExecutionCreateRequest.Spec.QualityOfService.GetSpec().QueueingBudget, err) + input.ExecutionCreateRequest.GetSpec().GetQualityOfService().GetSpec().GetQueueingBudget(), err) } return QualityOfServiceSpec{ QueuingBudget: duration, }, nil } else if qualityOfService != nil && qualityOfService.GetTier() != core.QualityOfService_UNDEFINED { logger.Debugf(ctx, "Determining quality of service tier from database override for [%s/%s/%s]", - input.ExecutionCreateRequest.Project, input.ExecutionCreateRequest.Domain, - input.ExecutionCreateRequest.Name) - qualityOfServiceTier = input.Workflow.Closure.CompiledWorkflow.Primary.Template.Metadata.QualityOfService.GetTier() + input.ExecutionCreateRequest.GetProject(), input.ExecutionCreateRequest.GetDomain(), + input.ExecutionCreateRequest.GetName()) + qualityOfServiceTier = input.Workflow.GetClosure().GetCompiledWorkflow().GetPrimary().GetTemplate().GetMetadata().GetQualityOfService().GetTier() } } @@ -171,10 +170,10 @@ func (q qualityOfServiceAllocator) GetQualityOfService(ctx context.Context, inpu // set, use the default values from the admin application config. 
if qualityOfServiceTier == core.QualityOfService_UNDEFINED { logger.Debugf(ctx, "Determining quality of service tier from application config override for [%s/%s/%s]", - input.ExecutionCreateRequest.Project, input.ExecutionCreateRequest.Domain, - input.ExecutionCreateRequest.Name) + input.ExecutionCreateRequest.GetProject(), input.ExecutionCreateRequest.GetDomain(), + input.ExecutionCreateRequest.GetName()) var ok bool - qualityOfServiceTier, ok = q.config.QualityOfServiceConfiguration().GetDefaultTiers()[input.ExecutionCreateRequest.Domain] + qualityOfServiceTier, ok = q.config.QualityOfServiceConfiguration().GetDefaultTiers()[input.ExecutionCreateRequest.GetDomain()] if !ok { // No queueing budget to set when no default is specified return QualityOfServiceSpec{}, nil @@ -186,10 +185,10 @@ func (q qualityOfServiceAllocator) GetQualityOfService(ctx context.Context, inpu return QualityOfServiceSpec{}, nil } logger.Debugf(ctx, "Determining quality of service spec from application config override for [%s/%s/%s] with tier [%v]", - input.ExecutionCreateRequest.Project, input.ExecutionCreateRequest.Domain, - input.ExecutionCreateRequest.Name, qualityOfServiceTier) + input.ExecutionCreateRequest.GetProject(), input.ExecutionCreateRequest.GetDomain(), + input.ExecutionCreateRequest.GetName(), qualityOfServiceTier) // Config values should always be vetted so there's no need to check the error from conversion. 
- duration, _ := ptypes.Duration(executionValues.QueueingBudget) + duration, _ := ptypes.Duration(executionValues.GetQueueingBudget()) return QualityOfServiceSpec{ QueuingBudget: duration, diff --git a/flyteadmin/pkg/manager/impl/executions/quality_of_service_test.go b/flyteadmin/pkg/manager/impl/executions/quality_of_service_test.go index 41a04ec2bc..0ad76cd3c7 100644 --- a/flyteadmin/pkg/manager/impl/executions/quality_of_service_test.go +++ b/flyteadmin/pkg/manager/impl/executions/quality_of_service_test.go @@ -63,9 +63,9 @@ func addGetResourceFunc(t *testing.T, resourceManager interfaces.ResourceInterfa resourceManager.(*managerMocks.MockResourceManager).GetResourceFunc = func(ctx context.Context, request interfaces.ResourceRequest) (*interfaces.ResourceResponse, error) { assert.EqualValues(t, request, interfaces.ResourceRequest{ - Project: workflowIdentifier.Project, - Domain: workflowIdentifier.Domain, - Workflow: workflowIdentifier.Name, + Project: workflowIdentifier.GetProject(), + Domain: workflowIdentifier.GetDomain(), + Workflow: workflowIdentifier.GetName(), ResourceType: admin.MatchableResource_QUALITY_OF_SERVICE_SPECIFICATION, }) return &interfaces.ResourceResponse{ diff --git a/flyteadmin/pkg/manager/impl/executions/queues.go b/flyteadmin/pkg/manager/impl/executions/queues.go index 90a5951a33..2064626717 100644 --- a/flyteadmin/pkg/manager/impl/executions/queues.go +++ b/flyteadmin/pkg/manager/impl/executions/queues.go @@ -59,9 +59,9 @@ func (q *queueAllocatorImpl) GetQueue(ctx context.Context, identifier *core.Iden q.refreshExecutionQueues(executionQueues) resource, err := q.resourceManager.GetResource(ctx, interfaces.ResourceRequest{ - Project: identifier.Project, - Domain: identifier.Domain, - Workflow: identifier.Name, + Project: identifier.GetProject(), + Domain: identifier.GetDomain(), + Workflow: identifier.GetName(), ResourceType: admin.MatchableResource_EXECUTION_QUEUE, }) @@ -71,7 +71,7 @@ func (q *queueAllocatorImpl) GetQueue(ctx 
context.Context, identifier *core.Iden } if resource != nil && resource.Attributes != nil && resource.Attributes.GetExecutionQueueAttributes() != nil { - for _, tag := range resource.Attributes.GetExecutionQueueAttributes().Tags { + for _, tag := range resource.Attributes.GetExecutionQueueAttributes().GetTags() { matches, ok := q.queueConfigMap[tag] if !ok { continue @@ -84,7 +84,7 @@ func (q *queueAllocatorImpl) GetQueue(ctx context.Context, identifier *core.Iden var defaultTags []string // If we've made it this far, check to see if a domain-specific default workflow config exists for this particular domain. for _, workflowConfig := range q.config.QueueConfiguration().GetWorkflowConfigs() { - if workflowConfig.Domain == identifier.Domain { + if workflowConfig.Domain == identifier.GetDomain() { tags = workflowConfig.Tags } else if len(workflowConfig.Domain) == 0 { defaultTags = workflowConfig.Tags diff --git a/flyteadmin/pkg/manager/impl/launch_plan_manager.go b/flyteadmin/pkg/manager/impl/launch_plan_manager.go index 74f0571f86..b1d0d8d56d 100644 --- a/flyteadmin/pkg/manager/impl/launch_plan_manager.go +++ b/flyteadmin/pkg/manager/impl/launch_plan_manager.go @@ -41,13 +41,13 @@ type LaunchPlanManager struct { } func getLaunchPlanContext(ctx context.Context, identifier *core.Identifier) context.Context { - ctx = contextutils.WithProjectDomain(ctx, identifier.Project, identifier.Domain) - return contextutils.WithLaunchPlanID(ctx, identifier.Name) + ctx = contextutils.WithProjectDomain(ctx, identifier.GetProject(), identifier.GetDomain()) + return contextutils.WithLaunchPlanID(ctx, identifier.GetName()) } func (m *LaunchPlanManager) getNamedEntityContext(ctx context.Context, identifier *admin.NamedEntityIdentifier) context.Context { - ctx = contextutils.WithProjectDomain(ctx, identifier.Project, identifier.Domain) - return contextutils.WithLaunchPlanID(ctx, identifier.Name) + ctx = contextutils.WithProjectDomain(ctx, identifier.GetProject(), identifier.GetDomain()) + 
return contextutils.WithLaunchPlanID(ctx, identifier.GetName()) } func (m *LaunchPlanManager) CreateLaunchPlan( @@ -57,35 +57,35 @@ func (m *LaunchPlanManager) CreateLaunchPlan( logger.Debugf(ctx, "Failed to validate provided workflow ID for CreateLaunchPlan with err: %v", err) return nil, err } - workflowModel, err := util.GetWorkflowModel(ctx, m.db, request.Spec.WorkflowId) + workflowModel, err := util.GetWorkflowModel(ctx, m.db, request.GetSpec().GetWorkflowId()) if err != nil { logger.Debugf(ctx, "Failed to get workflow with id [%+v] for CreateLaunchPlan with id [%+v] with err %v", - request.Spec.WorkflowId, request.Id) + request.GetSpec().GetWorkflowId(), request.GetId()) return nil, err } var workflowInterface core.TypedInterface - if workflowModel.TypedInterface != nil && len(workflowModel.TypedInterface) > 0 { + if len(workflowModel.TypedInterface) > 0 { err = proto.Unmarshal(workflowModel.TypedInterface, &workflowInterface) if err != nil { logger.Errorf(ctx, "Failed to unmarshal TypedInterface for workflow [%+v] with err: %v", - request.Spec.WorkflowId, err) + request.GetSpec().GetWorkflowId(), err) return nil, errors.NewFlyteAdminErrorf(codes.Internal, "failed to unmarshal workflow inputs") } } if err := validation.ValidateLaunchPlan(ctx, request, m.db, m.config.ApplicationConfiguration(), &workflowInterface); err != nil { - logger.Debugf(ctx, "could not create launch plan: %+v, request failed validation with err: %v", request.Id, err) + logger.Debugf(ctx, "could not create launch plan: %+v, request failed validation with err: %v", request.GetId(), err) return nil, err } - ctx = getLaunchPlanContext(ctx, request.Id) - launchPlan := transformers.CreateLaunchPlan(request, workflowInterface.Outputs) + ctx = getLaunchPlanContext(ctx, request.GetId()) + launchPlan := transformers.CreateLaunchPlan(request, workflowInterface.GetOutputs()) launchPlanDigest, err := util.GetLaunchPlanDigest(ctx, launchPlan) if err != nil { - logger.Errorf(ctx, "failed to compute 
launch plan digest for [%+v] with err: %v", launchPlan.Id, err) + logger.Errorf(ctx, "failed to compute launch plan digest for [%+v] with err: %v", launchPlan.GetId(), err) return nil, err } - existingLaunchPlanModel, err := util.GetLaunchPlanModel(ctx, m.db, request.Id) + existingLaunchPlanModel, err := util.GetLaunchPlanModel(ctx, m.db, request.GetId()) if err == nil { if bytes.Equal(existingLaunchPlanModel.Digest, launchPlanDigest) { return nil, errors.NewLaunchPlanExistsIdenticalStructureError(ctx, request) @@ -96,7 +96,7 @@ func (m *LaunchPlanManager) CreateLaunchPlan( return nil, transformerErr } // A launch plan exists with different structure - return nil, errors.NewLaunchPlanExistsDifferentStructureError(ctx, request, existingLaunchPlan.Spec, launchPlan.Spec) + return nil, errors.NewLaunchPlanExistsDifferentStructureError(ctx, request, existingLaunchPlan.GetSpec(), launchPlan.GetSpec()) } launchPlanModel, err := @@ -104,12 +104,12 @@ func (m *LaunchPlanManager) CreateLaunchPlan( if err != nil { logger.Errorf(ctx, "Failed to transform launch plan model [%+v], and workflow outputs [%+v] with err: %v", - request, workflowInterface.Outputs, err) + request, workflowInterface.GetOutputs(), err) return nil, err } err = m.db.LaunchPlanRepo().Create(ctx, launchPlanModel) if err != nil { - logger.Errorf(ctx, "Failed to save launch plan model %+v with err: %v", request.Id, err) + logger.Errorf(ctx, "Failed to save launch plan model %+v with err: %v", request.GetId(), err) return nil, err } m.metrics.SpecSizeBytes.Observe(float64(len(launchPlanModel.Spec))) @@ -143,7 +143,7 @@ func isScheduleEmpty(launchPlanSpec *admin.LaunchPlanSpec) bool { if schedule == nil { return true } - if schedule.GetCronSchedule() != nil && len(schedule.GetCronSchedule().Schedule) != 0 { + if schedule.GetCronSchedule() != nil && len(schedule.GetCronSchedule().GetSchedule()) != 0 { return false } if len(schedule.GetCronExpression()) != 0 { @@ -160,7 +160,7 @@ func (m *LaunchPlanManager) 
enableSchedule(ctx context.Context, launchPlanIdenti addScheduleInput, err := m.scheduler.CreateScheduleInput(ctx, m.config.ApplicationConfiguration().GetSchedulerConfig(), launchPlanIdentifier, - launchPlanSpec.EntityMetadata.Schedule) + launchPlanSpec.GetEntityMetadata().GetSchedule()) if err != nil { return err } @@ -223,30 +223,30 @@ func (m *LaunchPlanManager) updateSchedules( func (m *LaunchPlanManager) disableLaunchPlan(ctx context.Context, request *admin.LaunchPlanUpdateRequest) ( *admin.LaunchPlanUpdateResponse, error) { - if err := validation.ValidateIdentifier(request.Id, common.LaunchPlan); err != nil { - logger.Debugf(ctx, "can't disable launch plan [%+v] with invalid identifier: %v", request.Id, err) + if err := validation.ValidateIdentifier(request.GetId(), common.LaunchPlan); err != nil { + logger.Debugf(ctx, "can't disable launch plan [%+v] with invalid identifier: %v", request.GetId(), err) return nil, err } - launchPlanModel, err := util.GetLaunchPlanModel(ctx, m.db, request.Id) + launchPlanModel, err := util.GetLaunchPlanModel(ctx, m.db, request.GetId()) if err != nil { - logger.Debugf(ctx, "couldn't find launch plan [%+v] to disable with err: %v", request.Id, err) + logger.Debugf(ctx, "couldn't find launch plan [%+v] to disable with err: %v", request.GetId(), err) return nil, err } err = m.updateLaunchPlanModelState(&launchPlanModel, admin.LaunchPlanState_INACTIVE) if err != nil { - logger.Debugf(ctx, "failed to disable launch plan [%+v] with err: %v", request.Id, err) + logger.Debugf(ctx, "failed to disable launch plan [%+v] with err: %v", request.GetId(), err) return nil, err } var launchPlanSpec admin.LaunchPlanSpec err = proto.Unmarshal(launchPlanModel.Spec, &launchPlanSpec) if err != nil { - logger.Errorf(ctx, "failed to unmarshal launch plan spec when disabling schedule for %+v", request.Id) + logger.Errorf(ctx, "failed to unmarshal launch plan spec when disabling schedule for %+v", request.GetId()) return nil, 
errors.NewFlyteAdminErrorf(codes.Internal, - "failed to unmarshal launch plan spec when disabling schedule for %+v", request.Id) + "failed to unmarshal launch plan spec when disabling schedule for %+v", request.GetId()) } - if launchPlanSpec.EntityMetadata != nil && launchPlanSpec.EntityMetadata.Schedule != nil { + if launchPlanSpec.GetEntityMetadata() != nil && launchPlanSpec.GetEntityMetadata().GetSchedule() != nil { err = m.disableSchedule(ctx, &core.Identifier{ Project: launchPlanModel.Project, Domain: launchPlanModel.Domain, @@ -259,23 +259,23 @@ func (m *LaunchPlanManager) disableLaunchPlan(ctx context.Context, request *admi } err = m.db.LaunchPlanRepo().Update(ctx, launchPlanModel) if err != nil { - logger.Debugf(ctx, "Failed to update launchPlanModel with ID [%+v] with err %v", request.Id, err) + logger.Debugf(ctx, "Failed to update launchPlanModel with ID [%+v] with err %v", request.GetId(), err) return nil, err } - logger.Debugf(ctx, "disabled launch plan: [%+v]", request.Id) + logger.Debugf(ctx, "disabled launch plan: [%+v]", request.GetId()) return &admin.LaunchPlanUpdateResponse{}, nil } func (m *LaunchPlanManager) enableLaunchPlan(ctx context.Context, request *admin.LaunchPlanUpdateRequest) ( *admin.LaunchPlanUpdateResponse, error) { newlyActiveLaunchPlanModel, err := m.db.LaunchPlanRepo().Get(ctx, repoInterfaces.Identifier{ - Project: request.Id.Project, - Domain: request.Id.Domain, - Name: request.Id.Name, - Version: request.Id.Version, + Project: request.GetId().GetProject(), + Domain: request.GetId().GetDomain(), + Name: request.GetId().GetName(), + Version: request.GetId().GetVersion(), }) if err != nil { - logger.Debugf(ctx, "Failed to find launch plan to enable with id [%+v] and err %v", request.Id, err) + logger.Debugf(ctx, "Failed to find launch plan to enable with id [%+v] and err %v", request.GetId(), err) return nil, err } // Set desired launch plan version to active: @@ -298,13 +298,12 @@ func (m *LaunchPlanManager) enableLaunchPlan(ctx 
context.Context, request *admin // Not found is fine, there isn't always a guaranteed active launch plan model. if err.(errors.FlyteAdminError).Code() != codes.NotFound { logger.Infof(ctx, "Failed to search for an active launch plan model with project: %s, domain: %s, name: %s and err %v", - request.Id.Project, request.Id.Domain, request.Id.Name, err) + request.GetId().GetProject(), request.GetId().GetDomain(), request.GetId().GetName(), err) return nil, err } logger.Debugf(ctx, "No active launch plan model found to disable with project: %s, domain: %s, name: %s", - request.Id.Project, request.Id.Domain, request.Id.Name) - } else if formerlyActiveLaunchPlanModelOutput.LaunchPlans != nil && - len(formerlyActiveLaunchPlanModelOutput.LaunchPlans) > 0 { + request.GetId().GetProject(), request.GetId().GetDomain(), request.GetId().GetName()) + } else if len(formerlyActiveLaunchPlanModelOutput.LaunchPlans) > 0 { formerlyActiveLaunchPlanModel = &formerlyActiveLaunchPlanModelOutput.LaunchPlans[0] err = m.updateLaunchPlanModelState(formerlyActiveLaunchPlanModel, admin.LaunchPlanState_INACTIVE) if err != nil { @@ -322,7 +321,7 @@ func (m *LaunchPlanManager) enableLaunchPlan(ctx context.Context, request *admin err = m.db.LaunchPlanRepo().SetActive(ctx, newlyActiveLaunchPlanModel, formerlyActiveLaunchPlanModel) if err != nil { logger.Debugf(ctx, - "Failed to set launchPlanModel with ID [%+v] to active with err %v", request.Id, err) + "Failed to set launchPlanModel with ID [%+v] to active with err %v", request.GetId(), err) return nil, err } return &admin.LaunchPlanUpdateResponse{}, nil @@ -331,11 +330,11 @@ func (m *LaunchPlanManager) enableLaunchPlan(ctx context.Context, request *admin func (m *LaunchPlanManager) UpdateLaunchPlan(ctx context.Context, request *admin.LaunchPlanUpdateRequest) ( *admin.LaunchPlanUpdateResponse, error) { - if err := validation.ValidateIdentifier(request.Id, common.LaunchPlan); err != nil { - logger.Debugf(ctx, "can't update launch plan [%+v] state, 
invalid identifier: %v", request.Id, err) + if err := validation.ValidateIdentifier(request.GetId(), common.LaunchPlan); err != nil { + logger.Debugf(ctx, "can't update launch plan [%+v] state, invalid identifier: %v", request.GetId(), err) } - ctx = getLaunchPlanContext(ctx, request.Id) - switch request.State { + ctx = getLaunchPlanContext(ctx, request.GetId()) + switch request.GetState() { case admin.LaunchPlanState_INACTIVE: return m.disableLaunchPlan(ctx, request) case admin.LaunchPlanState_ACTIVE: @@ -343,29 +342,29 @@ func (m *LaunchPlanManager) UpdateLaunchPlan(ctx context.Context, request *admin default: return nil, errors.NewFlyteAdminErrorf( codes.InvalidArgument, "Unrecognized launch plan state %v for update for launch plan [%+v]", - request.State, request.Id) + request.GetState(), request.GetId()) } } func (m *LaunchPlanManager) GetLaunchPlan(ctx context.Context, request *admin.ObjectGetRequest) ( *admin.LaunchPlan, error) { - if err := validation.ValidateIdentifier(request.Id, common.LaunchPlan); err != nil { - logger.Debugf(ctx, "can't get launch plan [%+v] with invalid identifier: %v", request.Id, err) + if err := validation.ValidateIdentifier(request.GetId(), common.LaunchPlan); err != nil { + logger.Debugf(ctx, "can't get launch plan [%+v] with invalid identifier: %v", request.GetId(), err) return nil, err } - ctx = getLaunchPlanContext(ctx, request.Id) - return util.GetLaunchPlan(ctx, m.db, request.Id) + ctx = getLaunchPlanContext(ctx, request.GetId()) + return util.GetLaunchPlan(ctx, m.db, request.GetId()) } func (m *LaunchPlanManager) GetActiveLaunchPlan(ctx context.Context, request *admin.ActiveLaunchPlanRequest) ( *admin.LaunchPlan, error) { if err := validation.ValidateActiveLaunchPlanRequest(request); err != nil { - logger.Debugf(ctx, "can't get active launch plan [%+v] with invalid request: %v", request.Id, err) + logger.Debugf(ctx, "can't get active launch plan [%+v] with invalid request: %v", request.GetId(), err) return nil, err } - ctx 
= m.getNamedEntityContext(ctx, request.Id) + ctx = m.getNamedEntityContext(ctx, request.GetId()) - filters, err := util.GetActiveLaunchPlanVersionFilters(request.Id.Project, request.Id.Domain, request.Id.Name) + filters, err := util.GetActiveLaunchPlanVersionFilters(request.GetId().GetProject(), request.GetId().GetDomain(), request.GetId().GetName()) if err != nil { return nil, err } @@ -383,7 +382,7 @@ func (m *LaunchPlanManager) GetActiveLaunchPlan(ctx context.Context, request *ad } if len(output.LaunchPlans) != 1 { - return nil, errors.NewFlyteAdminErrorf(codes.NotFound, "No active launch plan could be found: %s:%s:%s", request.Id.Project, request.Id.Domain, request.Id.Name) + return nil, errors.NewFlyteAdminErrorf(codes.NotFound, "No active launch plan could be found: %s:%s:%s", request.GetId().GetProject(), request.GetId().GetDomain(), request.GetId().GetName()) } return transformers.FromLaunchPlanModel(output.LaunchPlans[0]) @@ -397,30 +396,30 @@ func (m *LaunchPlanManager) ListLaunchPlans(ctx context.Context, request *admin. 
logger.Debugf(ctx, "") return nil, err } - ctx = m.getNamedEntityContext(ctx, request.Id) + ctx = m.getNamedEntityContext(ctx, request.GetId()) filters, err := util.GetDbFilters(util.FilterSpec{ - Project: request.Id.Project, - Domain: request.Id.Domain, - Name: request.Id.Name, - RequestFilters: request.Filters, + Project: request.GetId().GetProject(), + Domain: request.GetId().GetDomain(), + Name: request.GetId().GetName(), + RequestFilters: request.GetFilters(), }, common.LaunchPlan) if err != nil { return nil, err } - sortParameter, err := common.NewSortParameter(request.SortBy, models.LaunchPlanColumns) + sortParameter, err := common.NewSortParameter(request.GetSortBy(), models.LaunchPlanColumns) if err != nil { return nil, err } - offset, err := validation.ValidateToken(request.Token) + offset, err := validation.ValidateToken(request.GetToken()) if err != nil { return nil, errors.NewFlyteAdminErrorf(codes.InvalidArgument, - "invalid pagination token %s for ListLaunchPlans", request.Token) + "invalid pagination token %s for ListLaunchPlans", request.GetToken()) } listLaunchPlansInput := repoInterfaces.ListResourceInput{ - Limit: int(request.Limit), + Limit: int(request.GetLimit()), Offset: offset, InlineFilters: filters, SortParameter: sortParameter, @@ -438,7 +437,7 @@ func (m *LaunchPlanManager) ListLaunchPlans(ctx context.Context, request *admin. 
return nil, err } var token string - if len(output.LaunchPlans) == int(request.Limit) { + if len(output.LaunchPlans) == int(request.GetLimit()) { token = strconv.Itoa(offset + len(output.LaunchPlans)) } return &admin.LaunchPlanList{ @@ -455,25 +454,25 @@ func (m *LaunchPlanManager) ListActiveLaunchPlans(ctx context.Context, request * logger.Debugf(ctx, "") return nil, err } - ctx = contextutils.WithProjectDomain(ctx, request.Project, request.Domain) + ctx = contextutils.WithProjectDomain(ctx, request.GetProject(), request.GetDomain()) - filters, err := util.ListActiveLaunchPlanVersionsFilters(request.Project, request.Domain) + filters, err := util.ListActiveLaunchPlanVersionsFilters(request.GetProject(), request.GetDomain()) if err != nil { return nil, err } - sortParameter, err := common.NewSortParameter(request.SortBy, models.LaunchPlanColumns) + sortParameter, err := common.NewSortParameter(request.GetSortBy(), models.LaunchPlanColumns) if err != nil { return nil, err } - offset, err := validation.ValidateToken(request.Token) + offset, err := validation.ValidateToken(request.GetToken()) if err != nil { return nil, errors.NewFlyteAdminErrorf(codes.InvalidArgument, - "invalid pagination token %s for ListActiveLaunchPlans", request.Token) + "invalid pagination token %s for ListActiveLaunchPlans", request.GetToken()) } listLaunchPlansInput := repoInterfaces.ListResourceInput{ - Limit: int(request.Limit), + Limit: int(request.GetLimit()), Offset: offset, InlineFilters: filters, SortParameter: sortParameter, @@ -491,7 +490,7 @@ func (m *LaunchPlanManager) ListActiveLaunchPlans(ctx context.Context, request * return nil, err } var token string - if len(output.LaunchPlans) == int(request.Limit) { + if len(output.LaunchPlans) == int(request.GetLimit()) { token = strconv.Itoa(offset + len(output.LaunchPlans)) } return &admin.LaunchPlanList{ @@ -503,26 +502,26 @@ func (m *LaunchPlanManager) ListActiveLaunchPlans(ctx context.Context, request * // At least project name and 
domain must be specified along with limit. func (m *LaunchPlanManager) ListLaunchPlanIds(ctx context.Context, request *admin.NamedEntityIdentifierListRequest) ( *admin.NamedEntityIdentifierList, error) { - ctx = contextutils.WithProjectDomain(ctx, request.Project, request.Domain) + ctx = contextutils.WithProjectDomain(ctx, request.GetProject(), request.GetDomain()) filters, err := util.GetDbFilters(util.FilterSpec{ - Project: request.Project, - Domain: request.Domain, + Project: request.GetProject(), + Domain: request.GetDomain(), }, common.LaunchPlan) if err != nil { return nil, err } - sortParameter, err := common.NewSortParameter(request.SortBy, models.LaunchPlanColumns) + sortParameter, err := common.NewSortParameter(request.GetSortBy(), models.LaunchPlanColumns) if err != nil { return nil, err } - offset, err := validation.ValidateToken(request.Token) + offset, err := validation.ValidateToken(request.GetToken()) if err != nil { - return nil, errors.NewFlyteAdminErrorf(codes.InvalidArgument, "invalid pagination token %s", request.Token) + return nil, errors.NewFlyteAdminErrorf(codes.InvalidArgument, "invalid pagination token %s", request.GetToken()) } listLaunchPlansInput := repoInterfaces.ListResourceInput{ - Limit: int(request.Limit), + Limit: int(request.GetLimit()), Offset: offset, InlineFilters: filters, SortParameter: sortParameter, @@ -535,7 +534,7 @@ func (m *LaunchPlanManager) ListLaunchPlanIds(ctx context.Context, request *admi return nil, err } var token string - if len(output.LaunchPlans) == int(request.Limit) { + if len(output.LaunchPlans) == int(request.GetLimit()) { token = strconv.Itoa(offset + len(output.LaunchPlans)) } return &admin.NamedEntityIdentifierList{ diff --git a/flyteadmin/pkg/manager/impl/launch_plan_manager_test.go b/flyteadmin/pkg/manager/impl/launch_plan_manager_test.go index d40d7c5e1f..3d551c4bc6 100644 --- a/flyteadmin/pkg/manager/impl/launch_plan_manager_test.go +++ b/flyteadmin/pkg/manager/impl/launch_plan_manager_test.go @@ 
-59,7 +59,7 @@ func getMockConfigForLpTest() runtimeInterfaces.Configuration { func setDefaultWorkflowCallbackForLpTest(repository interfaces.Repository) { workflowSpec := testutils.GetSampleWorkflowSpecForTest() - typedInterface, _ := proto.Marshal(workflowSpec.Template.Interface) + typedInterface, _ := proto.Marshal(workflowSpec.GetTemplate().GetInterface()) workflowGetFunc := func(input interfaces.Identifier) (models.Workflow, error) { return models.Workflow{ WorkflowKey: models.WorkflowKey{ @@ -107,10 +107,10 @@ func TestLaunchPlanManager_GetLaunchPlan(t *testing.T) { workflowRequest := testutils.GetWorkflowRequest() closure := admin.LaunchPlanClosure{ - ExpectedInputs: lpRequest.Spec.DefaultInputs, - ExpectedOutputs: workflowRequest.Spec.Template.Interface.Outputs, + ExpectedInputs: lpRequest.GetSpec().GetDefaultInputs(), + ExpectedOutputs: workflowRequest.GetSpec().GetTemplate().GetInterface().GetOutputs(), } - specBytes, _ := proto.Marshal(lpRequest.Spec) + specBytes, _ := proto.Marshal(lpRequest.GetSpec()) closureBytes, _ := proto.Marshal(&closure) launchPlanGetFunc := func(input interfaces.Identifier) (models.LaunchPlan, error) { @@ -143,10 +143,10 @@ func TestLaunchPlanManager_GetActiveLaunchPlan(t *testing.T) { workflowRequest := testutils.GetWorkflowRequest() closure := admin.LaunchPlanClosure{ - ExpectedInputs: lpRequest.Spec.DefaultInputs, - ExpectedOutputs: workflowRequest.Spec.Template.Interface.Outputs, + ExpectedInputs: lpRequest.GetSpec().GetDefaultInputs(), + ExpectedOutputs: workflowRequest.GetSpec().GetTemplate().GetInterface().GetOutputs(), } - specBytes, _ := proto.Marshal(lpRequest.Spec) + specBytes, _ := proto.Marshal(lpRequest.GetSpec()) closureBytes, _ := proto.Marshal(&closure) launchPlanListFunc := func(input interfaces.ListResourceInput) (interfaces.LaunchPlanCollectionOutput, error) { @@ -169,10 +169,10 @@ func TestLaunchPlanManager_GetActiveLaunchPlan(t *testing.T) { LaunchPlans: []models.LaunchPlan{ { LaunchPlanKey: 
models.LaunchPlanKey{ - Project: lpRequest.Id.Project, - Domain: lpRequest.Id.Domain, - Name: lpRequest.Id.Name, - Version: lpRequest.Id.Version, + Project: lpRequest.GetId().GetProject(), + Domain: lpRequest.GetId().GetDomain(), + Name: lpRequest.GetId().GetName(), + Version: lpRequest.GetId().GetVersion(), }, Spec: specBytes, Closure: closureBytes, @@ -185,9 +185,9 @@ func TestLaunchPlanManager_GetActiveLaunchPlan(t *testing.T) { repository.LaunchPlanRepo().(*repositoryMocks.MockLaunchPlanRepo).SetListCallback(launchPlanListFunc) response, err := lpManager.GetActiveLaunchPlan(context.Background(), &admin.ActiveLaunchPlanRequest{ Id: &admin.NamedEntityIdentifier{ - Project: lpRequest.Id.Project, - Domain: lpRequest.Id.Domain, - Name: lpRequest.Id.Name, + Project: lpRequest.GetId().GetProject(), + Domain: lpRequest.GetId().GetDomain(), + Name: lpRequest.GetId().GetName(), }, }) assert.NoError(t, err) @@ -205,9 +205,9 @@ func TestLaunchPlanManager_GetActiveLaunchPlan_NoneActive(t *testing.T) { repository.LaunchPlanRepo().(*repositoryMocks.MockLaunchPlanRepo).SetListCallback(launchPlanListFunc) response, err := lpManager.GetActiveLaunchPlan(context.Background(), &admin.ActiveLaunchPlanRequest{ Id: &admin.NamedEntityIdentifier{ - Project: lpRequest.Id.Project, - Domain: lpRequest.Id.Domain, - Name: lpRequest.Id.Name, + Project: lpRequest.GetId().GetProject(), + Domain: lpRequest.GetId().GetDomain(), + Name: lpRequest.GetId().GetName(), }, }) assert.EqualError(t, err, "No active launch plan could be found: project:domain:name") @@ -298,11 +298,11 @@ func TestCreateLaunchPlanValidateCreate(t *testing.T) { setDefaultWorkflowCallbackForLpTest(repository) lpCreateFunc := func(input models.LaunchPlan) error { launchPlan, _ := transformers.FromLaunchPlanModel(input) - assert.Equal(t, project, launchPlan.Id.Project) - assert.Equal(t, domain, launchPlan.Id.Domain) - assert.Equal(t, name, launchPlan.Id.Name) - assert.Equal(t, version, launchPlan.Id.Version) - assert.True(t, 
proto.Equal(testutils.GetLaunchPlanRequest().Spec, launchPlan.Spec)) + assert.Equal(t, project, launchPlan.GetId().GetProject()) + assert.Equal(t, domain, launchPlan.GetId().GetDomain()) + assert.Equal(t, name, launchPlan.GetId().GetName()) + assert.Equal(t, version, launchPlan.GetId().GetVersion()) + assert.True(t, proto.Equal(testutils.GetLaunchPlanRequest().GetSpec(), launchPlan.GetSpec())) expectedInputs := &core.ParameterMap{ Parameters: map[string]*core.Parameter{ "foo": { @@ -315,9 +315,9 @@ func TestCreateLaunchPlanValidateCreate(t *testing.T) { }, }, } - assert.True(t, proto.Equal(expectedInputs, launchPlan.Closure.ExpectedInputs)) - assert.True(t, proto.Equal(testutils.GetSampleWorkflowSpecForTest().Template.Interface.Outputs, - launchPlan.Closure.ExpectedOutputs)) + assert.True(t, proto.Equal(expectedInputs, launchPlan.GetClosure().GetExpectedInputs())) + assert.True(t, proto.Equal(testutils.GetSampleWorkflowSpecForTest().GetTemplate().GetInterface().GetOutputs(), + launchPlan.GetClosure().GetExpectedOutputs())) return nil } repository.LaunchPlanRepo().(*repositoryMocks.MockLaunchPlanRepo).SetCreateCallback(lpCreateFunc) @@ -350,15 +350,15 @@ func TestCreateLaunchPlanNoWorkflowInterface(t *testing.T) { repository.WorkflowRepo().(*repositoryMocks.MockWorkflowRepo).SetGetCallback(workflowGetFunc) lpCreateFunc := func(input models.LaunchPlan) error { launchPlan, _ := transformers.FromLaunchPlanModel(input) - assert.Equal(t, project, launchPlan.Id.Project) - assert.Equal(t, domain, launchPlan.Id.Domain) - assert.Equal(t, name, launchPlan.Id.Name) - assert.Equal(t, version, launchPlan.Id.Version) - expectedLaunchPlanSpec := testutils.GetLaunchPlanRequest().Spec + assert.Equal(t, project, launchPlan.GetId().GetProject()) + assert.Equal(t, domain, launchPlan.GetId().GetDomain()) + assert.Equal(t, name, launchPlan.GetId().GetName()) + assert.Equal(t, version, launchPlan.GetId().GetVersion()) + expectedLaunchPlanSpec := testutils.GetLaunchPlanRequest().GetSpec() 
expectedLaunchPlanSpec.FixedInputs = nil expectedLaunchPlanSpec.DefaultInputs.Parameters = map[string]*core.Parameter{} - assert.EqualValues(t, expectedLaunchPlanSpec.String(), launchPlan.Spec.String()) - assert.Empty(t, launchPlan.Closure.ExpectedInputs) + assert.EqualValues(t, expectedLaunchPlanSpec.String(), launchPlan.GetSpec().String()) + assert.Empty(t, launchPlan.GetClosure().GetExpectedInputs()) return nil } repository.LaunchPlanRepo().(*repositoryMocks.MockLaunchPlanRepo).SetCreateCallback(lpCreateFunc) @@ -1058,10 +1058,10 @@ func TestLaunchPlanManager_ListLaunchPlans(t *testing.T) { workflowRequest := testutils.GetWorkflowRequest() closure := admin.LaunchPlanClosure{ - ExpectedInputs: lpRequest.Spec.DefaultInputs, - ExpectedOutputs: workflowRequest.Spec.Template.Interface.Outputs, + ExpectedInputs: lpRequest.GetSpec().GetDefaultInputs(), + ExpectedOutputs: workflowRequest.GetSpec().GetTemplate().GetInterface().GetOutputs(), } - specBytes, _ := proto.Marshal(lpRequest.Spec) + specBytes, _ := proto.Marshal(lpRequest.GetSpec()) closureBytes, _ := proto.Marshal(&closure) createdAt := time.Now() @@ -1146,14 +1146,14 @@ func TestLaunchPlanManager_ListLaunchPlans(t *testing.T) { }, }) assert.NoError(t, err) - assert.Equal(t, 2, len(lpList.LaunchPlans)) - for idx, lp := range lpList.LaunchPlans { - assert.Equal(t, project, lp.Id.Project) - assert.Equal(t, domain, lp.Id.Domain) - assert.Equal(t, name, lp.Id.Name) - assert.Equal(t, fmt.Sprintf("%v", idx+1), lp.Id.Version) - assert.True(t, proto.Equal(createdAtProto, lp.Closure.CreatedAt)) - assert.True(t, proto.Equal(updatedAtProto, lp.Closure.UpdatedAt)) + assert.Equal(t, 2, len(lpList.GetLaunchPlans())) + for idx, lp := range lpList.GetLaunchPlans() { + assert.Equal(t, project, lp.GetId().GetProject()) + assert.Equal(t, domain, lp.GetId().GetDomain()) + assert.Equal(t, name, lp.GetId().GetName()) + assert.Equal(t, fmt.Sprintf("%v", idx+1), lp.GetId().GetVersion()) + assert.True(t, proto.Equal(createdAtProto, 
lp.GetClosure().GetCreatedAt())) + assert.True(t, proto.Equal(updatedAtProto, lp.GetClosure().GetUpdatedAt())) } } @@ -1165,10 +1165,10 @@ func TestLaunchPlanManager_ListLaunchPlanIds(t *testing.T) { workflowRequest := testutils.GetWorkflowRequest() closure := admin.LaunchPlanClosure{ - ExpectedInputs: lpRequest.Spec.DefaultInputs, - ExpectedOutputs: workflowRequest.Spec.Template.Interface.Outputs, + ExpectedInputs: lpRequest.GetSpec().GetDefaultInputs(), + ExpectedOutputs: workflowRequest.GetSpec().GetTemplate().GetInterface().GetOutputs(), } - specBytes, _ := proto.Marshal(lpRequest.Spec) + specBytes, _ := proto.Marshal(lpRequest.GetSpec()) closureBytes, _ := proto.Marshal(&closure) launchPlanListFunc := func(input interfaces.ListResourceInput) ( @@ -1232,11 +1232,11 @@ func TestLaunchPlanManager_ListLaunchPlanIds(t *testing.T) { }, }) assert.NoError(t, err) - assert.Equal(t, 2, len(lpList.Entities)) - for _, id := range lpList.Entities { - assert.Equal(t, project, id.Project) - assert.Equal(t, domain, id.Domain) - assert.Equal(t, name, id.Name) + assert.Equal(t, 2, len(lpList.GetEntities())) + for _, id := range lpList.GetEntities() { + assert.Equal(t, project, id.GetProject()) + assert.Equal(t, domain, id.GetDomain()) + assert.Equal(t, name, id.GetName()) } } @@ -1248,10 +1248,10 @@ func TestLaunchPlanManager_ListActiveLaunchPlans(t *testing.T) { workflowRequest := testutils.GetWorkflowRequest() closure := admin.LaunchPlanClosure{ - ExpectedInputs: lpRequest.Spec.DefaultInputs, - ExpectedOutputs: workflowRequest.Spec.Template.Interface.Outputs, + ExpectedInputs: lpRequest.GetSpec().GetDefaultInputs(), + ExpectedOutputs: workflowRequest.GetSpec().GetTemplate().GetInterface().GetOutputs(), } - specBytes, _ := proto.Marshal(lpRequest.Spec) + specBytes, _ := proto.Marshal(lpRequest.GetSpec()) closureBytes, _ := proto.Marshal(&closure) launchPlanListFunc := func(input interfaces.ListResourceInput) ( @@ -1319,11 +1319,11 @@ func 
TestLaunchPlanManager_ListActiveLaunchPlans(t *testing.T) { }, }) assert.NoError(t, err) - assert.Equal(t, 2, len(lpList.LaunchPlans)) - for _, id := range lpList.LaunchPlans { - assert.Equal(t, project, id.Id.Project) - assert.Equal(t, domain, id.Id.Domain) - assert.Equal(t, name, id.Id.Name) + assert.Equal(t, 2, len(lpList.GetLaunchPlans())) + for _, id := range lpList.GetLaunchPlans() { + assert.Equal(t, project, id.GetId().GetProject()) + assert.Equal(t, domain, id.GetId().GetDomain()) + assert.Equal(t, name, id.GetId().GetName()) } } diff --git a/flyteadmin/pkg/manager/impl/metrics_manager.go b/flyteadmin/pkg/manager/impl/metrics_manager.go index 231909f4e8..a689c60a70 100644 --- a/flyteadmin/pkg/manager/impl/metrics_manager.go +++ b/flyteadmin/pkg/manager/impl/metrics_manager.go @@ -70,18 +70,18 @@ func createOperationSpan(startTime, endTime *timestamp.Timestamp, operation stri // getBranchNode searches the provided BranchNode definition for the Node identified by nodeID. func getBranchNode(nodeID string, branchNode *core.BranchNode) *core.Node { - if branchNode.IfElse.Case.ThenNode.Id == nodeID { - return branchNode.IfElse.Case.ThenNode + if branchNode.GetIfElse().GetCase().GetThenNode().GetId() == nodeID { + return branchNode.GetIfElse().GetCase().GetThenNode() } - for _, other := range branchNode.IfElse.Other { - if other.ThenNode.Id == nodeID { - return other.ThenNode + for _, other := range branchNode.GetIfElse().GetOther() { + if other.GetThenNode().GetId() == nodeID { + return other.GetThenNode() } } - if elseNode, ok := branchNode.IfElse.Default.(*core.IfElseBlock_ElseNode); ok { - if elseNode.ElseNode.Id == nodeID { + if elseNode, ok := branchNode.GetIfElse().GetDefault().(*core.IfElseBlock_ElseNode); ok { + if elseNode.ElseNode.GetId() == nodeID { return elseNode.ElseNode } } @@ -98,13 +98,13 @@ func (m *MetricsManager) getLatestUpstreamNodeExecution(nodeID string, upstreamN var nodeExecution *admin.NodeExecution var latestUpstreamUpdatedAt = 
time.Unix(0, 0) if connectionSet, exists := upstreamNodeIds[nodeID]; exists { - for _, upstreamNodeID := range connectionSet.Ids { + for _, upstreamNodeID := range connectionSet.GetIds() { upstreamNodeExecution, exists := nodeExecutions[upstreamNodeID] if !exists { continue } - t := upstreamNodeExecution.Closure.UpdatedAt.AsTime() + t := upstreamNodeExecution.GetClosure().GetUpdatedAt().AsTime() if t.After(latestUpstreamUpdatedAt) { nodeExecution = upstreamNodeExecution latestUpstreamUpdatedAt = t @@ -124,15 +124,15 @@ func (m *MetricsManager) getNodeExecutions(ctx context.Context, request *admin.N return nil, err } - for _, nodeExecution := range response.NodeExecutions { - nodeExecutions[nodeExecution.Metadata.SpecNodeId] = nodeExecution + for _, nodeExecution := range response.GetNodeExecutions() { + nodeExecutions[nodeExecution.GetMetadata().GetSpecNodeId()] = nodeExecution } - if len(response.NodeExecutions) < int(request.Limit) { + if len(response.GetNodeExecutions()) < int(request.GetLimit()) { break } - request.Token = response.Token + request.Token = response.GetToken() } return nodeExecutions, nil @@ -147,13 +147,13 @@ func (m *MetricsManager) getTaskExecutions(ctx context.Context, request *admin.T return nil, err } - taskExecutions = append(taskExecutions, response.TaskExecutions...) + taskExecutions = append(taskExecutions, response.GetTaskExecutions()...) 
- if len(response.TaskExecutions) < int(request.Limit) { + if len(response.GetTaskExecutions()) < int(request.GetLimit()) { break } - request.Token = response.Token + request.Token = response.GetToken() } return taskExecutions, nil @@ -166,9 +166,9 @@ func (m *MetricsManager) parseBranchNodeExecution(ctx context.Context, // retrieve node execution(s) nodeExecutions, err := m.getNodeExecutions(ctx, &admin.NodeExecutionListRequest{ - WorkflowExecutionId: nodeExecution.Id.ExecutionId, + WorkflowExecutionId: nodeExecution.GetId().GetExecutionId(), Limit: RequestLimit, - UniqueParentId: nodeExecution.Id.NodeId, + UniqueParentId: nodeExecution.GetId().GetNodeId(), }) if err != nil { return err @@ -176,7 +176,7 @@ func (m *MetricsManager) parseBranchNodeExecution(ctx context.Context, // check if the node started if len(nodeExecutions) == 0 { - *spans = append(*spans, createOperationSpan(nodeExecution.Closure.CreatedAt, nodeExecution.Closure.UpdatedAt, nodeSetup)) + *spans = append(*spans, createOperationSpan(nodeExecution.GetClosure().GetCreatedAt(), nodeExecution.GetClosure().GetUpdatedAt(), nodeSetup)) } else { // parse branchNode if len(nodeExecutions) != 1 { @@ -188,14 +188,14 @@ func (m *MetricsManager) parseBranchNodeExecution(ctx context.Context, branchNodeExecution = e } - node := getBranchNode(branchNodeExecution.Metadata.SpecNodeId, branchNode) + node := getBranchNode(branchNodeExecution.GetMetadata().GetSpecNodeId(), branchNode) if node == nil { return fmt.Errorf("failed to identify branch node final node definition for nodeID '%s' and branchNode '%+v'", - branchNodeExecution.Metadata.SpecNodeId, branchNode) + branchNodeExecution.GetMetadata().GetSpecNodeId(), branchNode) } // frontend overhead - *spans = append(*spans, createOperationSpan(nodeExecution.Closure.CreatedAt, branchNodeExecution.Closure.CreatedAt, nodeSetup)) + *spans = append(*spans, createOperationSpan(nodeExecution.GetClosure().GetCreatedAt(), branchNodeExecution.GetClosure().GetCreatedAt(), 
nodeSetup)) // node execution nodeExecutionSpan, err := m.parseNodeExecution(ctx, branchNodeExecution, node, depth) @@ -206,9 +206,9 @@ func (m *MetricsManager) parseBranchNodeExecution(ctx context.Context, *spans = append(*spans, nodeExecutionSpan) // backend overhead - if !nodeExecution.Closure.UpdatedAt.AsTime().Before(branchNodeExecution.Closure.UpdatedAt.AsTime()) { - *spans = append(*spans, createOperationSpan(branchNodeExecution.Closure.UpdatedAt, - nodeExecution.Closure.UpdatedAt, nodeTeardown)) + if !nodeExecution.GetClosure().GetUpdatedAt().AsTime().Before(branchNodeExecution.GetClosure().GetUpdatedAt().AsTime()) { + *spans = append(*spans, createOperationSpan(branchNodeExecution.GetClosure().GetUpdatedAt(), + nodeExecution.GetClosure().GetUpdatedAt(), nodeTeardown)) } } @@ -219,7 +219,7 @@ func (m *MetricsManager) parseBranchNodeExecution(ctx context.Context, // which are appended to the provided spans argument. func (m *MetricsManager) parseDynamicNodeExecution(ctx context.Context, nodeExecution *admin.NodeExecution, spans *[]*core.Span, depth int) error { taskExecutions, err := m.getTaskExecutions(ctx, &admin.TaskExecutionListRequest{ - NodeExecutionId: nodeExecution.Id, + NodeExecutionId: nodeExecution.GetId(), Limit: RequestLimit, }) if err != nil { @@ -228,18 +228,18 @@ func (m *MetricsManager) parseDynamicNodeExecution(ctx context.Context, nodeExec // if no task executions then everything is execution overhead if len(taskExecutions) == 0 { - *spans = append(*spans, createOperationSpan(nodeExecution.Closure.CreatedAt, nodeExecution.Closure.UpdatedAt, nodeSetup)) + *spans = append(*spans, createOperationSpan(nodeExecution.GetClosure().GetCreatedAt(), nodeExecution.GetClosure().GetUpdatedAt(), nodeSetup)) } else { // frontend overhead - *spans = append(*spans, createOperationSpan(nodeExecution.Closure.CreatedAt, taskExecutions[0].Closure.CreatedAt, nodeSetup)) + *spans = append(*spans, createOperationSpan(nodeExecution.GetClosure().GetCreatedAt(), 
taskExecutions[0].GetClosure().GetCreatedAt(), nodeSetup)) // task execution(s) parseTaskExecutions(taskExecutions, spans, depth) nodeExecutions, err := m.getNodeExecutions(ctx, &admin.NodeExecutionListRequest{ - WorkflowExecutionId: nodeExecution.Id.ExecutionId, + WorkflowExecutionId: nodeExecution.GetId().GetExecutionId(), Limit: RequestLimit, - UniqueParentId: nodeExecution.Id.NodeId, + UniqueParentId: nodeExecution.GetId().GetNodeId(), }) if err != nil { return err @@ -247,31 +247,31 @@ func (m *MetricsManager) parseDynamicNodeExecution(ctx context.Context, nodeExec lastTask := taskExecutions[len(taskExecutions)-1] if len(nodeExecutions) == 0 { - if !nodeExecution.Closure.UpdatedAt.AsTime().Before(lastTask.Closure.UpdatedAt.AsTime()) { - *spans = append(*spans, createOperationSpan(lastTask.Closure.UpdatedAt, nodeExecution.Closure.UpdatedAt, nodeReset)) + if !nodeExecution.GetClosure().GetUpdatedAt().AsTime().Before(lastTask.GetClosure().GetUpdatedAt().AsTime()) { + *spans = append(*spans, createOperationSpan(lastTask.GetClosure().GetUpdatedAt(), nodeExecution.GetClosure().GetUpdatedAt(), nodeReset)) } } else { // between task execution(s) and node execution(s) overhead startNode := nodeExecutions[v1alpha1.StartNodeID] - *spans = append(*spans, createOperationSpan(taskExecutions[len(taskExecutions)-1].Closure.UpdatedAt, - startNode.Closure.UpdatedAt, nodeReset)) + *spans = append(*spans, createOperationSpan(taskExecutions[len(taskExecutions)-1].GetClosure().GetUpdatedAt(), + startNode.GetClosure().GetUpdatedAt(), nodeReset)) // node execution(s) - getDataRequest := &admin.NodeExecutionGetDataRequest{Id: nodeExecution.Id} + getDataRequest := &admin.NodeExecutionGetDataRequest{Id: nodeExecution.GetId()} nodeExecutionData, err := m.nodeExecutionManager.GetNodeExecutionData(ctx, getDataRequest) if err != nil { return err } - if err := m.parseNodeExecutions(ctx, nodeExecutions, nodeExecutionData.DynamicWorkflow.CompiledWorkflow, spans, depth); err != nil { + if err 
:= m.parseNodeExecutions(ctx, nodeExecutions, nodeExecutionData.GetDynamicWorkflow().GetCompiledWorkflow(), spans, depth); err != nil { return err } // backend overhead latestUpstreamNode := m.getLatestUpstreamNodeExecution(v1alpha1.EndNodeID, - nodeExecutionData.DynamicWorkflow.CompiledWorkflow.Primary.Connections.Upstream, nodeExecutions) - if latestUpstreamNode != nil && !nodeExecution.Closure.UpdatedAt.AsTime().Before(latestUpstreamNode.Closure.UpdatedAt.AsTime()) { - *spans = append(*spans, createOperationSpan(latestUpstreamNode.Closure.UpdatedAt, nodeExecution.Closure.UpdatedAt, nodeTeardown)) + nodeExecutionData.GetDynamicWorkflow().GetCompiledWorkflow().GetPrimary().GetConnections().GetUpstream(), nodeExecutions) + if latestUpstreamNode != nil && !nodeExecution.GetClosure().GetUpdatedAt().AsTime().Before(latestUpstreamNode.GetClosure().GetUpdatedAt().AsTime()) { + *spans = append(*spans, createOperationSpan(latestUpstreamNode.GetClosure().GetUpdatedAt(), nodeExecution.GetClosure().GetUpdatedAt(), nodeTeardown)) } } } @@ -285,14 +285,14 @@ func (m *MetricsManager) parseExecution(ctx context.Context, execution *admin.Ex spans := make([]*core.Span, 0) if depth != 0 { // retrieve workflow and node executions - workflowRequest := &admin.ObjectGetRequest{Id: execution.Closure.WorkflowId} + workflowRequest := &admin.ObjectGetRequest{Id: execution.GetClosure().GetWorkflowId()} workflow, err := m.workflowManager.GetWorkflow(ctx, workflowRequest) if err != nil { return nil, err } nodeExecutions, err := m.getNodeExecutions(ctx, &admin.NodeExecutionListRequest{ - WorkflowExecutionId: execution.Id, + WorkflowExecutionId: execution.GetId(), Limit: RequestLimit, }) if err != nil { @@ -301,32 +301,32 @@ func (m *MetricsManager) parseExecution(ctx context.Context, execution *admin.Ex // check if workflow has started startNode := nodeExecutions[v1alpha1.StartNodeID] - if startNode.Closure.UpdatedAt == nil || reflect.DeepEqual(startNode.Closure.UpdatedAt, emptyTimestamp) { - 
spans = append(spans, createOperationSpan(execution.Closure.CreatedAt, execution.Closure.UpdatedAt, workflowSetup)) + if startNode.GetClosure().GetUpdatedAt() == nil || reflect.DeepEqual(startNode.GetClosure().GetUpdatedAt(), emptyTimestamp) { + spans = append(spans, createOperationSpan(execution.GetClosure().GetCreatedAt(), execution.GetClosure().GetUpdatedAt(), workflowSetup)) } else { // compute frontend overhead - spans = append(spans, createOperationSpan(execution.Closure.CreatedAt, startNode.Closure.UpdatedAt, workflowSetup)) + spans = append(spans, createOperationSpan(execution.GetClosure().GetCreatedAt(), startNode.GetClosure().GetUpdatedAt(), workflowSetup)) // iterate over nodes and compute overhead - if err := m.parseNodeExecutions(ctx, nodeExecutions, workflow.Closure.CompiledWorkflow, &spans, depth-1); err != nil { + if err := m.parseNodeExecutions(ctx, nodeExecutions, workflow.GetClosure().GetCompiledWorkflow(), &spans, depth-1); err != nil { return nil, err } // compute backend overhead latestUpstreamNode := m.getLatestUpstreamNodeExecution(v1alpha1.EndNodeID, - workflow.Closure.CompiledWorkflow.Primary.Connections.Upstream, nodeExecutions) - if latestUpstreamNode != nil && !execution.Closure.UpdatedAt.AsTime().Before(latestUpstreamNode.Closure.UpdatedAt.AsTime()) { - spans = append(spans, createOperationSpan(latestUpstreamNode.Closure.UpdatedAt, - execution.Closure.UpdatedAt, workflowTeardown)) + workflow.GetClosure().GetCompiledWorkflow().GetPrimary().GetConnections().GetUpstream(), nodeExecutions) + if latestUpstreamNode != nil && !execution.GetClosure().GetUpdatedAt().AsTime().Before(latestUpstreamNode.GetClosure().GetUpdatedAt().AsTime()) { + spans = append(spans, createOperationSpan(latestUpstreamNode.GetClosure().GetUpdatedAt(), + execution.GetClosure().GetUpdatedAt(), workflowTeardown)) } } } return &core.Span{ - StartTime: execution.Closure.CreatedAt, - EndTime: execution.Closure.UpdatedAt, + StartTime: execution.GetClosure().GetCreatedAt(), 
+ EndTime: execution.GetClosure().GetUpdatedAt(), Id: &core.Span_WorkflowId{ - WorkflowId: execution.Id, + WorkflowId: execution.GetId(), }, Spans: spans, }, nil @@ -336,23 +336,23 @@ func (m *MetricsManager) parseExecution(ctx context.Context, execution *admin.Ex // which are appended to the provided spans argument. func (m *MetricsManager) parseGateNodeExecution(_ context.Context, nodeExecution *admin.NodeExecution, spans *[]*core.Span) { // check if node has started yet - if nodeExecution.Closure.StartedAt == nil || reflect.DeepEqual(nodeExecution.Closure.StartedAt, emptyTimestamp) { - *spans = append(*spans, createOperationSpan(nodeExecution.Closure.CreatedAt, nodeExecution.Closure.UpdatedAt, nodeSetup)) + if nodeExecution.GetClosure().GetStartedAt() == nil || reflect.DeepEqual(nodeExecution.GetClosure().GetStartedAt(), emptyTimestamp) { + *spans = append(*spans, createOperationSpan(nodeExecution.GetClosure().GetCreatedAt(), nodeExecution.GetClosure().GetUpdatedAt(), nodeSetup)) } else { // frontend overhead - *spans = append(*spans, createOperationSpan(nodeExecution.Closure.CreatedAt, nodeExecution.Closure.StartedAt, nodeSetup)) + *spans = append(*spans, createOperationSpan(nodeExecution.GetClosure().GetCreatedAt(), nodeExecution.GetClosure().GetStartedAt(), nodeSetup)) // check if plugin has completed yet - if nodeExecution.Closure.Duration == nil || reflect.DeepEqual(nodeExecution.Closure.Duration, emptyDuration) { - *spans = append(*spans, createOperationSpan(nodeExecution.Closure.StartedAt, - nodeExecution.Closure.UpdatedAt, nodeIdle)) + if nodeExecution.GetClosure().GetDuration() == nil || reflect.DeepEqual(nodeExecution.GetClosure().GetDuration(), emptyDuration) { + *spans = append(*spans, createOperationSpan(nodeExecution.GetClosure().GetStartedAt(), + nodeExecution.GetClosure().GetUpdatedAt(), nodeIdle)) } else { // idle time - nodeEndTime := timestamppb.New(nodeExecution.Closure.StartedAt.AsTime().Add(nodeExecution.Closure.Duration.AsDuration())) - 
*spans = append(*spans, createOperationSpan(nodeExecution.Closure.StartedAt, nodeEndTime, nodeIdle)) + nodeEndTime := timestamppb.New(nodeExecution.GetClosure().GetStartedAt().AsTime().Add(nodeExecution.GetClosure().GetDuration().AsDuration())) + *spans = append(*spans, createOperationSpan(nodeExecution.GetClosure().GetStartedAt(), nodeEndTime, nodeIdle)) // backend overhead - *spans = append(*spans, createOperationSpan(nodeEndTime, nodeExecution.Closure.UpdatedAt, nodeTeardown)) + *spans = append(*spans, createOperationSpan(nodeEndTime, nodeExecution.GetClosure().GetUpdatedAt(), nodeTeardown)) } } } @@ -361,19 +361,19 @@ func (m *MetricsManager) parseGateNodeExecution(_ context.Context, nodeExecution // Spans which are appended to the provided spans argument. func (m *MetricsManager) parseLaunchPlanNodeExecution(ctx context.Context, nodeExecution *admin.NodeExecution, spans *[]*core.Span, depth int) error { // check if workflow started yet - workflowNode := nodeExecution.Closure.GetWorkflowNodeMetadata() + workflowNode := nodeExecution.GetClosure().GetWorkflowNodeMetadata() if workflowNode == nil { - *spans = append(*spans, createOperationSpan(nodeExecution.Closure.CreatedAt, nodeExecution.Closure.UpdatedAt, nodeSetup)) + *spans = append(*spans, createOperationSpan(nodeExecution.GetClosure().GetCreatedAt(), nodeExecution.GetClosure().GetUpdatedAt(), nodeSetup)) } else { // retrieve execution - executionRequest := &admin.WorkflowExecutionGetRequest{Id: workflowNode.ExecutionId} + executionRequest := &admin.WorkflowExecutionGetRequest{Id: workflowNode.GetExecutionId()} execution, err := m.executionManager.GetExecution(ctx, executionRequest) if err != nil { return err } // frontend overhead - *spans = append(*spans, createOperationSpan(nodeExecution.Closure.CreatedAt, execution.Closure.CreatedAt, nodeSetup)) + *spans = append(*spans, createOperationSpan(nodeExecution.GetClosure().GetCreatedAt(), execution.GetClosure().GetCreatedAt(), nodeSetup)) // execution span, 
err := m.parseExecution(ctx, execution, depth) @@ -384,8 +384,8 @@ func (m *MetricsManager) parseLaunchPlanNodeExecution(ctx context.Context, nodeE *spans = append(*spans, span) // backend overhead - if !nodeExecution.Closure.UpdatedAt.AsTime().Before(execution.Closure.UpdatedAt.AsTime()) { - *spans = append(*spans, createOperationSpan(execution.Closure.UpdatedAt, nodeExecution.Closure.UpdatedAt, nodeTeardown)) + if !nodeExecution.GetClosure().GetUpdatedAt().AsTime().Before(execution.GetClosure().GetUpdatedAt().AsTime()) { + *spans = append(*spans, createOperationSpan(execution.GetClosure().GetUpdatedAt(), nodeExecution.GetClosure().GetUpdatedAt(), nodeTeardown)) } } @@ -400,7 +400,7 @@ func (m *MetricsManager) parseNodeExecution(ctx context.Context, nodeExecution * // parse node var err error - switch target := node.Target.(type) { + switch target := node.GetTarget().(type) { case *core.Node_BranchNode: // handle branch node err = m.parseBranchNodeExecution(ctx, nodeExecution, target.BranchNode, &spans, depth-1) @@ -408,7 +408,7 @@ func (m *MetricsManager) parseNodeExecution(ctx context.Context, nodeExecution * // handle gate node m.parseGateNodeExecution(ctx, nodeExecution, &spans) case *core.Node_TaskNode: - if nodeExecution.Metadata.IsParentNode { + if nodeExecution.GetMetadata().GetIsParentNode() { // handle dynamic node err = m.parseDynamicNodeExecution(ctx, nodeExecution, &spans, depth-1) } else { @@ -416,7 +416,7 @@ func (m *MetricsManager) parseNodeExecution(ctx context.Context, nodeExecution * err = m.parseTaskNodeExecution(ctx, nodeExecution, &spans, depth-1) } case *core.Node_WorkflowNode: - switch workflow := target.WorkflowNode.Reference.(type) { + switch workflow := target.WorkflowNode.GetReference().(type) { case *core.WorkflowNode_LaunchplanRef: // handle launch plan err = m.parseLaunchPlanNodeExecution(ctx, nodeExecution, &spans, depth-1) @@ -436,10 +436,10 @@ func (m *MetricsManager) parseNodeExecution(ctx context.Context, nodeExecution * } 
return &core.Span{ - StartTime: nodeExecution.Closure.CreatedAt, - EndTime: nodeExecution.Closure.UpdatedAt, + StartTime: nodeExecution.GetClosure().GetCreatedAt(), + EndTime: nodeExecution.GetClosure().GetUpdatedAt(), Id: &core.Span_NodeId{ - NodeId: nodeExecution.Id, + NodeId: nodeExecution.GetId(), }, Spans: spans, }, nil @@ -456,29 +456,29 @@ func (m *MetricsManager) parseNodeExecutions(ctx context.Context, nodeExecutions sortedNodeExecutions = append(sortedNodeExecutions, nodeExecution) } sort.Slice(sortedNodeExecutions, func(i, j int) bool { - x := sortedNodeExecutions[i].Closure.CreatedAt.AsTime() - y := sortedNodeExecutions[j].Closure.CreatedAt.AsTime() + x := sortedNodeExecutions[i].GetClosure().GetCreatedAt().AsTime() + y := sortedNodeExecutions[j].GetClosure().GetCreatedAt().AsTime() return x.Before(y) }) // iterate over sorted node executions for _, nodeExecution := range sortedNodeExecutions { - specNodeID := nodeExecution.Metadata.SpecNodeId + specNodeID := nodeExecution.GetMetadata().GetSpecNodeId() if specNodeID == v1alpha1.StartNodeID || specNodeID == v1alpha1.EndNodeID { continue } // get node definition from workflow var node *core.Node - for _, n := range compiledWorkflowClosure.Primary.Template.Nodes { - if n.Id == specNodeID { + for _, n := range compiledWorkflowClosure.GetPrimary().GetTemplate().GetNodes() { + if n.GetId() == specNodeID { node = n } } if node == nil { return fmt.Errorf("failed to discover workflow node '%s' in workflow '%+v'", - specNodeID, compiledWorkflowClosure.Primary.Template.Id) + specNodeID, compiledWorkflowClosure.GetPrimary().GetTemplate().GetId()) } // parse node execution @@ -489,10 +489,10 @@ func (m *MetricsManager) parseNodeExecutions(ctx context.Context, nodeExecutions // prepend nodeExecution spans with node transition time latestUpstreamNode := m.getLatestUpstreamNodeExecution(specNodeID, - compiledWorkflowClosure.Primary.Connections.Upstream, nodeExecutions) + 
compiledWorkflowClosure.GetPrimary().GetConnections().GetUpstream(), nodeExecutions) if latestUpstreamNode != nil { - nodeExecutionSpan.Spans = append([]*core.Span{createOperationSpan(latestUpstreamNode.Closure.UpdatedAt, - nodeExecution.Closure.CreatedAt, nodeTransition)}, nodeExecutionSpan.Spans...) + nodeExecutionSpan.Spans = append([]*core.Span{createOperationSpan(latestUpstreamNode.GetClosure().GetUpdatedAt(), + nodeExecution.GetClosure().GetCreatedAt(), nodeTransition)}, nodeExecutionSpan.GetSpans()...) } *spans = append(*spans, nodeExecutionSpan) @@ -508,9 +508,9 @@ func (m *MetricsManager) parseSubworkflowNodeExecution(ctx context.Context, // retrieve node execution(s) nodeExecutions, err := m.getNodeExecutions(ctx, &admin.NodeExecutionListRequest{ - WorkflowExecutionId: nodeExecution.Id.ExecutionId, + WorkflowExecutionId: nodeExecution.GetId().GetExecutionId(), Limit: RequestLimit, - UniqueParentId: nodeExecution.Id.NodeId, + UniqueParentId: nodeExecution.GetId().GetNodeId(), }) if err != nil { return err @@ -518,11 +518,11 @@ func (m *MetricsManager) parseSubworkflowNodeExecution(ctx context.Context, // check if the subworkflow started if len(nodeExecutions) == 0 { - *spans = append(*spans, createOperationSpan(nodeExecution.Closure.CreatedAt, nodeExecution.Closure.UpdatedAt, nodeSetup)) + *spans = append(*spans, createOperationSpan(nodeExecution.GetClosure().GetCreatedAt(), nodeExecution.GetClosure().GetUpdatedAt(), nodeSetup)) } else { // frontend overhead startNode := nodeExecutions[v1alpha1.StartNodeID] - *spans = append(*spans, createOperationSpan(nodeExecution.Closure.CreatedAt, startNode.Closure.UpdatedAt, nodeSetup)) + *spans = append(*spans, createOperationSpan(nodeExecution.GetClosure().GetCreatedAt(), startNode.GetClosure().GetUpdatedAt(), nodeSetup)) // retrieve workflow workflowRequest := &admin.ObjectGetRequest{Id: identifier} @@ -532,15 +532,15 @@ func (m *MetricsManager) parseSubworkflowNodeExecution(ctx context.Context, } // node 
execution(s) - if err := m.parseNodeExecutions(ctx, nodeExecutions, workflow.Closure.CompiledWorkflow, spans, depth); err != nil { + if err := m.parseNodeExecutions(ctx, nodeExecutions, workflow.GetClosure().GetCompiledWorkflow(), spans, depth); err != nil { return err } // backend overhead latestUpstreamNode := m.getLatestUpstreamNodeExecution(v1alpha1.EndNodeID, - workflow.Closure.CompiledWorkflow.Primary.Connections.Upstream, nodeExecutions) - if latestUpstreamNode != nil && !nodeExecution.Closure.UpdatedAt.AsTime().Before(latestUpstreamNode.Closure.UpdatedAt.AsTime()) { - *spans = append(*spans, createOperationSpan(latestUpstreamNode.Closure.UpdatedAt, nodeExecution.Closure.UpdatedAt, nodeTeardown)) + workflow.GetClosure().GetCompiledWorkflow().GetPrimary().GetConnections().GetUpstream(), nodeExecutions) + if latestUpstreamNode != nil && !nodeExecution.GetClosure().GetUpdatedAt().AsTime().Before(latestUpstreamNode.GetClosure().GetUpdatedAt().AsTime()) { + *spans = append(*spans, createOperationSpan(latestUpstreamNode.GetClosure().GetUpdatedAt(), nodeExecution.GetClosure().GetUpdatedAt(), nodeTeardown)) } } @@ -553,32 +553,32 @@ func parseTaskExecution(taskExecution *admin.TaskExecution) *core.Span { spans := make([]*core.Span, 0) // check if plugin has started yet - if taskExecution.Closure.StartedAt == nil || reflect.DeepEqual(taskExecution.Closure.StartedAt, emptyTimestamp) { - spans = append(spans, createOperationSpan(taskExecution.Closure.CreatedAt, taskExecution.Closure.UpdatedAt, taskSetup)) + if taskExecution.GetClosure().GetStartedAt() == nil || reflect.DeepEqual(taskExecution.GetClosure().GetStartedAt(), emptyTimestamp) { + spans = append(spans, createOperationSpan(taskExecution.GetClosure().GetCreatedAt(), taskExecution.GetClosure().GetUpdatedAt(), taskSetup)) } else { // frontend overhead - spans = append(spans, createOperationSpan(taskExecution.Closure.CreatedAt, taskExecution.Closure.StartedAt, taskSetup)) + spans = append(spans, 
createOperationSpan(taskExecution.GetClosure().GetCreatedAt(), taskExecution.GetClosure().GetStartedAt(), taskSetup)) // check if plugin has completed yet - if taskExecution.Closure.Duration == nil || reflect.DeepEqual(taskExecution.Closure.Duration, emptyDuration) { - spans = append(spans, createOperationSpan(taskExecution.Closure.StartedAt, taskExecution.Closure.UpdatedAt, taskRuntime)) + if taskExecution.GetClosure().GetDuration() == nil || reflect.DeepEqual(taskExecution.GetClosure().GetDuration(), emptyDuration) { + spans = append(spans, createOperationSpan(taskExecution.GetClosure().GetStartedAt(), taskExecution.GetClosure().GetUpdatedAt(), taskRuntime)) } else { // plugin execution - taskEndTime := timestamppb.New(taskExecution.Closure.StartedAt.AsTime().Add(taskExecution.Closure.Duration.AsDuration())) - spans = append(spans, createOperationSpan(taskExecution.Closure.StartedAt, taskEndTime, taskRuntime)) + taskEndTime := timestamppb.New(taskExecution.GetClosure().GetStartedAt().AsTime().Add(taskExecution.GetClosure().GetDuration().AsDuration())) + spans = append(spans, createOperationSpan(taskExecution.GetClosure().GetStartedAt(), taskEndTime, taskRuntime)) // backend overhead - if !taskExecution.Closure.UpdatedAt.AsTime().Before(taskEndTime.AsTime()) { - spans = append(spans, createOperationSpan(taskEndTime, taskExecution.Closure.UpdatedAt, taskTeardown)) + if !taskExecution.GetClosure().GetUpdatedAt().AsTime().Before(taskEndTime.AsTime()) { + spans = append(spans, createOperationSpan(taskEndTime, taskExecution.GetClosure().GetUpdatedAt(), taskTeardown)) } } } return &core.Span{ - StartTime: taskExecution.Closure.CreatedAt, - EndTime: taskExecution.Closure.UpdatedAt, + StartTime: taskExecution.GetClosure().GetCreatedAt(), + EndTime: taskExecution.GetClosure().GetUpdatedAt(), Id: &core.Span_TaskId{ - TaskId: taskExecution.Id, + TaskId: taskExecution.GetId(), }, Spans: spans, } @@ -589,15 +589,15 @@ func parseTaskExecution(taskExecution *admin.TaskExecution) 
*core.Span { func parseTaskExecutions(taskExecutions []*admin.TaskExecution, spans *[]*core.Span, depth int) { // sort task executions sort.Slice(taskExecutions, func(i, j int) bool { - x := taskExecutions[i].Closure.CreatedAt.AsTime() - y := taskExecutions[j].Closure.CreatedAt.AsTime() + x := taskExecutions[i].GetClosure().GetCreatedAt().AsTime() + y := taskExecutions[j].GetClosure().GetCreatedAt().AsTime() return x.Before(y) }) // iterate over task executions for index, taskExecution := range taskExecutions { if index > 0 { - *spans = append(*spans, createOperationSpan(taskExecutions[index-1].Closure.UpdatedAt, taskExecution.Closure.CreatedAt, nodeReset)) + *spans = append(*spans, createOperationSpan(taskExecutions[index-1].GetClosure().GetUpdatedAt(), taskExecution.GetClosure().GetCreatedAt(), nodeReset)) } if depth != 0 { @@ -611,7 +611,7 @@ func parseTaskExecutions(taskExecutions []*admin.TaskExecution, spans *[]*core.S func (m *MetricsManager) parseTaskNodeExecution(ctx context.Context, nodeExecution *admin.NodeExecution, spans *[]*core.Span, depth int) error { // retrieve task executions taskExecutions, err := m.getTaskExecutions(ctx, &admin.TaskExecutionListRequest{ - NodeExecutionId: nodeExecution.Id, + NodeExecutionId: nodeExecution.GetId(), Limit: RequestLimit, }) if err != nil { @@ -620,19 +620,19 @@ func (m *MetricsManager) parseTaskNodeExecution(ctx context.Context, nodeExecuti // if no task executions then everything is execution overhead if len(taskExecutions) == 0 { - *spans = append(*spans, createOperationSpan(nodeExecution.Closure.CreatedAt, nodeExecution.Closure.UpdatedAt, nodeSetup)) + *spans = append(*spans, createOperationSpan(nodeExecution.GetClosure().GetCreatedAt(), nodeExecution.GetClosure().GetUpdatedAt(), nodeSetup)) } else { // frontend overhead - *spans = append(*spans, createOperationSpan(nodeExecution.Closure.CreatedAt, taskExecutions[0].Closure.CreatedAt, nodeSetup)) + *spans = append(*spans, 
createOperationSpan(nodeExecution.GetClosure().GetCreatedAt(), taskExecutions[0].GetClosure().GetCreatedAt(), nodeSetup)) // parse task executions parseTaskExecutions(taskExecutions, spans, depth) // backend overhead lastTask := taskExecutions[len(taskExecutions)-1] - if !nodeExecution.Closure.UpdatedAt.AsTime().Before(lastTask.Closure.UpdatedAt.AsTime()) { - *spans = append(*spans, createOperationSpan(taskExecutions[len(taskExecutions)-1].Closure.UpdatedAt, - nodeExecution.Closure.UpdatedAt, nodeTeardown)) + if !nodeExecution.GetClosure().GetUpdatedAt().AsTime().Before(lastTask.GetClosure().GetUpdatedAt().AsTime()) { + *spans = append(*spans, createOperationSpan(taskExecutions[len(taskExecutions)-1].GetClosure().GetUpdatedAt(), + nodeExecution.GetClosure().GetUpdatedAt(), nodeTeardown)) } } @@ -645,13 +645,13 @@ func (m *MetricsManager) GetExecutionMetrics(ctx context.Context, request *admin.WorkflowExecutionGetMetricsRequest) (*admin.WorkflowExecutionGetMetricsResponse, error) { // retrieve workflow execution - executionRequest := &admin.WorkflowExecutionGetRequest{Id: request.Id} + executionRequest := &admin.WorkflowExecutionGetRequest{Id: request.GetId()} execution, err := m.executionManager.GetExecution(ctx, executionRequest) if err != nil { return nil, err } - span, err := m.parseExecution(ctx, execution, int(request.Depth)) + span, err := m.parseExecution(ctx, execution, int(request.GetDepth())) if err != nil { return nil, err } diff --git a/flyteadmin/pkg/manager/impl/metrics_manager_test.go b/flyteadmin/pkg/manager/impl/metrics_manager_test.go index e9392be8d9..b99e0d3243 100644 --- a/flyteadmin/pkg/manager/impl/metrics_manager_test.go +++ b/flyteadmin/pkg/manager/impl/metrics_manager_test.go @@ -28,8 +28,8 @@ var ( func addTimestamp(ts *timestamp.Timestamp, seconds int64) *timestamp.Timestamp { return ×tamp.Timestamp{ - Seconds: ts.Seconds + seconds, - Nanos: ts.Nanos, + Seconds: ts.GetSeconds() + seconds, + Nanos: ts.GetNanos(), } } @@ -89,10 +89,10 @@ 
func parseSpans(spans []*core.Span) (map[string][]int64, int) { operationDurations := make(map[string][]int64) referenceCount := 0 for _, span := range spans { - switch id := span.Id.(type) { + switch id := span.GetId().(type) { case *core.Span_OperationId: operationID := id.OperationId - duration := span.EndTime.Seconds - span.StartTime.Seconds + duration := span.GetEndTime().GetSeconds() - span.GetStartTime().GetSeconds() if array, exists := operationDurations[operationID]; exists { operationDurations[operationID] = append(array, duration) } else { @@ -907,11 +907,11 @@ func TestParseTaskExecution(t *testing.T) { t.Run(test.name, func(t *testing.T) { // parse task execution span := parseTaskExecution(test.taskExecution) - _, ok := span.Id.(*core.Span_TaskId) + _, ok := span.GetId().(*core.Span_TaskId) assert.True(t, ok) // validate spans - operationDurations, referenceCount := parseSpans(span.Spans) + operationDurations, referenceCount := parseSpans(span.GetSpans()) assert.True(t, reflect.DeepEqual(test.operationDurations, operationDurations)) assert.Equal(t, 0, referenceCount) }) diff --git a/flyteadmin/pkg/manager/impl/named_entity_manager.go b/flyteadmin/pkg/manager/impl/named_entity_manager.go index 883948318a..a8ab24261e 100644 --- a/flyteadmin/pkg/manager/impl/named_entity_manager.go +++ b/flyteadmin/pkg/manager/impl/named_entity_manager.go @@ -41,10 +41,10 @@ func (m *NamedEntityManager) UpdateNamedEntity(ctx context.Context, request *adm logger.Debugf(ctx, "invalid request [%+v]: %v", request, err) return nil, err } - ctx = contextutils.WithProjectDomain(ctx, request.Id.Project, request.Id.Domain) + ctx = contextutils.WithProjectDomain(ctx, request.GetId().GetProject(), request.GetId().GetDomain()) // Ensure entity exists before trying to update it - _, err := util.GetNamedEntity(ctx, m.db, request.ResourceType, request.Id) + _, err := util.GetNamedEntity(ctx, m.db, request.GetResourceType(), request.GetId()) if err != nil { return nil, err } @@ -52,7 
+52,7 @@ func (m *NamedEntityManager) UpdateNamedEntity(ctx context.Context, request *adm metadataModel := transformers.CreateNamedEntityModel(request) err = m.db.NamedEntityRepo().Update(ctx, metadataModel) if err != nil { - logger.Debugf(ctx, "Failed to update named_entity for [%+v] with err %v", request.Id, err) + logger.Debugf(ctx, "Failed to update named_entity for [%+v] with err %v", request.GetId(), err) return nil, err } return &admin.NamedEntityUpdateResponse{}, nil @@ -64,8 +64,8 @@ func (m *NamedEntityManager) GetNamedEntity(ctx context.Context, request *admin. logger.Debugf(ctx, "invalid request [%+v]: %v", request, err) return nil, err } - ctx = contextutils.WithProjectDomain(ctx, request.Id.Project, request.Id.Domain) - return util.GetNamedEntity(ctx, m.db, request.ResourceType, request.Id) + ctx = contextutils.WithProjectDomain(ctx, request.GetId().GetProject(), request.GetId().GetDomain()) + return util.GetNamedEntity(ctx, m.db, request.GetResourceType(), request.GetId()) } func (m *NamedEntityManager) getQueryFilters(requestFilters string) ([]common.InlineFilter, error) { @@ -98,51 +98,51 @@ func (m *NamedEntityManager) ListNamedEntities(ctx context.Context, request *adm logger.Debugf(ctx, "invalid request [%+v]: %v", request, err) return nil, err } - ctx = contextutils.WithProjectDomain(ctx, request.Project, request.Domain) + ctx = contextutils.WithProjectDomain(ctx, request.GetProject(), request.GetDomain()) - if len(request.Filters) == 0 { + if len(request.GetFilters()) == 0 { // Add implicit filter to exclude system generated workflows request.Filters = fmt.Sprintf("not_like(name,%s)", ".flytegen%") } // HACK: In order to filter by state (if requested) - we need to amend the filter to use COALESCE // e.g. eq(state, 1) becomes 'WHERE (COALESCE(state, 0) = '1')' since not every NamedEntity necessarily // has an entry, and therefore the default state value '0' (active), should be assumed. 
- filters, err := m.getQueryFilters(request.Filters) + filters, err := m.getQueryFilters(request.GetFilters()) if err != nil { return nil, err } - sortParameter, err := common.NewSortParameter(request.SortBy, models.NamedEntityColumns) + sortParameter, err := common.NewSortParameter(request.GetSortBy(), models.NamedEntityColumns) if err != nil { return nil, err } - offset, err := validation.ValidateToken(request.Token) + offset, err := validation.ValidateToken(request.GetToken()) if err != nil { return nil, errors.NewFlyteAdminErrorf(codes.InvalidArgument, - "invalid pagination token %s for ListNamedEntities", request.Token) + "invalid pagination token %s for ListNamedEntities", request.GetToken()) } listInput := repoInterfaces.ListNamedEntityInput{ ListResourceInput: repoInterfaces.ListResourceInput{ - Limit: int(request.Limit), + Limit: int(request.GetLimit()), Offset: offset, InlineFilters: filters, SortParameter: sortParameter, }, - Project: request.Project, - Domain: request.Domain, - ResourceType: request.ResourceType, + Project: request.GetProject(), + Domain: request.GetDomain(), + ResourceType: request.GetResourceType(), } output, err := m.db.NamedEntityRepo().List(ctx, listInput) if err != nil { logger.Debugf(ctx, "Failed to list named entities of type: %s with project: %s, domain: %s. 
Returned error was: %v", - request.ResourceType, request.Project, request.Domain, err) + request.GetResourceType(), request.GetProject(), request.GetDomain(), err) return nil, err } var token string - if len(output.Entities) == int(request.Limit) { + if len(output.Entities) == int(request.GetLimit()) { token = strconv.Itoa(offset + len(output.Entities)) } entities := transformers.FromNamedEntityModels(output.Entities) diff --git a/flyteadmin/pkg/manager/impl/node_execution_manager.go b/flyteadmin/pkg/manager/impl/node_execution_manager.go index 2f0f60977c..82e51bec9b 100644 --- a/flyteadmin/pkg/manager/impl/node_execution_manager.go +++ b/flyteadmin/pkg/manager/impl/node_execution_manager.go @@ -72,30 +72,30 @@ var isParent = common.NewMapFilter(map[string]interface{}{ }) func getNodeExecutionContext(ctx context.Context, identifier *core.NodeExecutionIdentifier) context.Context { - ctx = contextutils.WithProjectDomain(ctx, identifier.ExecutionId.Project, identifier.ExecutionId.Domain) - ctx = contextutils.WithExecutionID(ctx, identifier.ExecutionId.Name) - return contextutils.WithNodeID(ctx, identifier.NodeId) + ctx = contextutils.WithProjectDomain(ctx, identifier.GetExecutionId().GetProject(), identifier.GetExecutionId().GetDomain()) + ctx = contextutils.WithExecutionID(ctx, identifier.GetExecutionId().GetName()) + return contextutils.WithNodeID(ctx, identifier.GetNodeId()) } func (m *NodeExecutionManager) createNodeExecutionWithEvent( ctx context.Context, request *admin.NodeExecutionEventRequest, dynamicWorkflowRemoteClosureReference string) error { var parentTaskExecutionID *uint - if request.Event.ParentTaskMetadata != nil { - taskExecutionModel, err := util.GetTaskExecutionModel(ctx, m.db, request.Event.ParentTaskMetadata.Id) + if request.GetEvent().GetParentTaskMetadata() != nil { + taskExecutionModel, err := util.GetTaskExecutionModel(ctx, m.db, request.GetEvent().GetParentTaskMetadata().GetId()) if err != nil { return err } parentTaskExecutionID = 
&taskExecutionModel.ID } var parentID *uint - if request.Event.ParentNodeMetadata != nil { + if request.GetEvent().GetParentNodeMetadata() != nil { parentNodeExecutionModel, err := util.GetNodeExecutionModel(ctx, m.db, &core.NodeExecutionIdentifier{ - ExecutionId: request.Event.Id.ExecutionId, - NodeId: request.Event.ParentNodeMetadata.NodeId, + ExecutionId: request.GetEvent().GetId().GetExecutionId(), + NodeId: request.GetEvent().GetParentNodeMetadata().GetNodeId(), }) if err != nil { logger.Errorf(ctx, "failed to fetch node execution for the parent node: %v %s with err", - request.Event.Id.ExecutionId, request.Event.ParentNodeMetadata.NodeId, err) + request.GetEvent().GetId().GetExecutionId(), request.GetEvent().GetParentNodeMetadata().GetNodeId(), err) return err } parentID = &parentNodeExecutionModel.ID @@ -110,12 +110,12 @@ func (m *NodeExecutionManager) createNodeExecutionWithEvent( }) if err != nil { logger.Debugf(ctx, "failed to create node execution model for event request: %s with err: %v", - request.RequestId, err) + request.GetRequestId(), err) return err } if err := m.db.NodeExecutionRepo().Create(ctx, nodeExecutionModel); err != nil { logger.Debugf(ctx, "Failed to create node execution with id [%+v] and model [%+v] "+ - "with err %v", request.Event.Id, nodeExecutionModel, err) + "with err %v", request.GetEvent().GetId(), nodeExecutionModel, err) return err } m.metrics.ClosureSizeBytes.Observe(float64(len(nodeExecutionModel.Closure))) @@ -127,21 +127,21 @@ func (m *NodeExecutionManager) updateNodeExecutionWithEvent( dynamicWorkflowRemoteClosureReference string) (updateNodeExecutionStatus, error) { // If we have an existing execution, check if the phase change is valid nodeExecPhase := core.NodeExecution_Phase(core.NodeExecution_Phase_value[nodeExecutionModel.Phase]) - if nodeExecPhase == request.Event.Phase { - logger.Debugf(ctx, "This phase was already recorded %v for %+v", nodeExecPhase.String(), request.Event.Id) + if nodeExecPhase == 
request.GetEvent().GetPhase() { + logger.Debugf(ctx, "This phase was already recorded %v for %+v", nodeExecPhase.String(), request.GetEvent().GetId()) return updateFailed, errors.NewFlyteAdminErrorf(codes.AlreadyExists, - "This phase was already recorded %v for %+v", nodeExecPhase.String(), request.Event.Id) + "This phase was already recorded %v for %+v", nodeExecPhase.String(), request.GetEvent().GetId()) } else if common.IsNodeExecutionTerminal(nodeExecPhase) { // Cannot go from a terminal state to anything else logger.Warnf(ctx, "Invalid phase change from %v to %v for node execution %v", - nodeExecPhase.String(), request.Event.Phase.String(), request.Event.Id) + nodeExecPhase.String(), request.GetEvent().GetPhase().String(), request.GetEvent().GetId()) return alreadyInTerminalStatus, nil } // if this node execution kicked off a workflow, validate that the execution exists var childExecutionID *core.WorkflowExecutionIdentifier - if request.Event.GetWorkflowNodeMetadata() != nil { - childExecutionID = request.Event.GetWorkflowNodeMetadata().ExecutionId + if request.GetEvent().GetWorkflowNodeMetadata() != nil { + childExecutionID = request.GetEvent().GetWorkflowNodeMetadata().GetExecutionId() err := validation.ValidateWorkflowExecutionIdentifier(childExecutionID) if err != nil { logger.Errorf(ctx, "Invalid execution ID: %s with err: %v", @@ -158,13 +158,13 @@ func (m *NodeExecutionManager) updateNodeExecutionWithEvent( dynamicWorkflowRemoteClosureReference, m.config.ApplicationConfiguration().GetRemoteDataConfig().InlineEventDataPolicy, m.storageClient) if err != nil { - logger.Debugf(ctx, "failed to update node execution model: %+v with err: %v", request.Event.Id, err) + logger.Debugf(ctx, "failed to update node execution model: %+v with err: %v", request.GetEvent().GetId(), err) return updateFailed, err } err = m.db.NodeExecutionRepo().Update(ctx, nodeExecutionModel) if err != nil { logger.Debugf(ctx, "Failed to update node execution with id [%+v] with err %v", - 
request.Event.Id, err) + request.GetEvent().GetId(), err) return updateFailed, err } @@ -172,17 +172,17 @@ func (m *NodeExecutionManager) updateNodeExecutionWithEvent( } func formatDynamicWorkflowID(identifier *core.Identifier) string { - return fmt.Sprintf("%s_%s_%s_%s", identifier.Project, identifier.Domain, identifier.Name, identifier.Version) + return fmt.Sprintf("%s_%s_%s_%s", identifier.GetProject(), identifier.GetDomain(), identifier.GetName(), identifier.GetVersion()) } func (m *NodeExecutionManager) uploadDynamicWorkflowClosure( ctx context.Context, nodeID *core.NodeExecutionIdentifier, workflowID *core.Identifier, compiledWorkflowClosure *core.CompiledWorkflowClosure) (storage.DataReference, error) { nestedSubKeys := []string{ - nodeID.ExecutionId.Project, - nodeID.ExecutionId.Domain, - nodeID.ExecutionId.Name, - nodeID.NodeId, + nodeID.GetExecutionId().GetProject(), + nodeID.GetExecutionId().GetDomain(), + nodeID.GetExecutionId().GetName(), + nodeID.GetNodeId(), formatDynamicWorkflowID(workflowID), } nestedKeys := append(m.storagePrefix, nestedSubKeys...) 
@@ -204,17 +204,17 @@ func (m *NodeExecutionManager) uploadDynamicWorkflowClosure( func (m *NodeExecutionManager) CreateNodeEvent(ctx context.Context, request *admin.NodeExecutionEventRequest) ( *admin.NodeExecutionEventResponse, error) { if err := validation.ValidateNodeExecutionEventRequest(request, m.config.ApplicationConfiguration().GetRemoteDataConfig().MaxSizeInBytes); err != nil { - logger.Debugf(ctx, "CreateNodeEvent called with invalid identifier [%+v]: %v", request.Event.Id, err) + logger.Debugf(ctx, "CreateNodeEvent called with invalid identifier [%+v]: %v", request.GetEvent().GetId(), err) } - ctx = getNodeExecutionContext(ctx, request.Event.Id) + ctx = getNodeExecutionContext(ctx, request.GetEvent().GetId()) logger.Debugf(ctx, "Received node execution event for Node Exec Id [%+v] transitioning to phase [%v], w/ Metadata [%v]", - request.Event.Id, request.Event.Phase, request.Event.ParentTaskMetadata) + request.GetEvent().GetId(), request.GetEvent().GetPhase(), request.GetEvent().GetParentTaskMetadata()) - executionID := request.Event.Id.ExecutionId + executionID := request.GetEvent().GetId().GetExecutionId() workflowExecution, err := m.db.ExecutionRepo().Get(ctx, repoInterfaces.Identifier{ - Project: executionID.Project, - Domain: executionID.Domain, - Name: executionID.Name, + Project: executionID.GetProject(), + Domain: executionID.GetDomain(), + Name: executionID.GetName(), }) if err != nil { m.metrics.MissingWorkflowExecution.Inc() @@ -228,15 +228,15 @@ func (m *NodeExecutionManager) CreateNodeEvent(ctx context.Context, request *adm return nil, fmt.Errorf("failed to get existing execution id: [%+v]", executionID) } - if err := validation.ValidateCluster(ctx, workflowExecution.Cluster, request.Event.ProducerId); err != nil { + if err := validation.ValidateCluster(ctx, workflowExecution.Cluster, request.GetEvent().GetProducerId()); err != nil { return nil, err } var dynamicWorkflowRemoteClosureReference string - if request.Event.GetTaskNodeMetadata() 
!= nil && request.Event.GetTaskNodeMetadata().DynamicWorkflow != nil { + if request.GetEvent().GetTaskNodeMetadata() != nil && request.GetEvent().GetTaskNodeMetadata().GetDynamicWorkflow() != nil { dynamicWorkflowRemoteClosureDataReference, err := m.uploadDynamicWorkflowClosure( - ctx, request.Event.Id, request.Event.GetTaskNodeMetadata().DynamicWorkflow.Id, - request.Event.GetTaskNodeMetadata().DynamicWorkflow.CompiledWorkflow) + ctx, request.GetEvent().GetId(), request.GetEvent().GetTaskNodeMetadata().GetDynamicWorkflow().GetId(), + request.GetEvent().GetTaskNodeMetadata().GetDynamicWorkflow().GetCompiledWorkflow()) if err != nil { return nil, err } @@ -244,12 +244,12 @@ func (m *NodeExecutionManager) CreateNodeEvent(ctx context.Context, request *adm } nodeExecutionModel, err := m.db.NodeExecutionRepo().Get(ctx, repoInterfaces.NodeExecutionResource{ - NodeExecutionIdentifier: request.Event.Id, + NodeExecutionIdentifier: request.GetEvent().GetId(), }) if err != nil { if err.(errors.FlyteAdminError).Code() != codes.NotFound { logger.Debugf(ctx, "Failed to retrieve existing node execution with id [%+v] with err: %v", - request.Event.Id, err) + request.GetEvent().GetId(), err) return nil, err } err = m.createNodeExecutionWithEvent(ctx, request, dynamicWorkflowRemoteClosureReference) @@ -265,33 +265,33 @@ func (m *NodeExecutionManager) CreateNodeEvent(ctx context.Context, request *adm } if updateStatus == alreadyInTerminalStatus { - curPhase := request.Event.Phase.String() + curPhase := request.GetEvent().GetPhase().String() errorMsg := fmt.Sprintf("Invalid phase change from %s to %s for node execution %v", phase.String(), curPhase, nodeExecutionModel.ID) return nil, errors.NewAlreadyInTerminalStateError(ctx, errorMsg, curPhase) } } m.dbEventWriter.Write(request) - if request.Event.Phase == core.NodeExecution_RUNNING { + if request.GetEvent().GetPhase() == core.NodeExecution_RUNNING { m.metrics.ActiveNodeExecutions.Inc() - } else if 
common.IsNodeExecutionTerminal(request.Event.Phase) { + } else if common.IsNodeExecutionTerminal(request.GetEvent().GetPhase()) { m.metrics.ActiveNodeExecutions.Dec() - m.metrics.NodeExecutionsTerminated.Inc(contextutils.WithPhase(ctx, request.Event.Phase.String())) - if request.Event.GetOutputData() != nil { - m.metrics.NodeExecutionOutputBytes.Observe(float64(proto.Size(request.Event.GetOutputData()))) + m.metrics.NodeExecutionsTerminated.Inc(contextutils.WithPhase(ctx, request.GetEvent().GetPhase().String())) + if request.GetEvent().GetOutputData() != nil { + m.metrics.NodeExecutionOutputBytes.Observe(float64(proto.Size(request.GetEvent().GetOutputData()))) } } m.metrics.NodeExecutionEventsCreated.Inc() if err := m.eventPublisher.Publish(ctx, proto.MessageName(request), request); err != nil { m.metrics.PublishEventError.Inc() - logger.Infof(ctx, "error publishing event [%+v] with err: [%v]", request.RequestId, err) + logger.Infof(ctx, "error publishing event [%+v] with err: [%v]", request.GetRequestId(), err) } go func() { ceCtx := context.TODO() if err := m.cloudEventPublisher.Publish(ceCtx, proto.MessageName(request), request); err != nil { - logger.Infof(ctx, "error publishing cloud event [%+v] with err: [%v]", request.RequestId, err) + logger.Infof(ctx, "error publishing cloud event [%+v] with err: [%v]", request.GetRequestId(), err) } }() @@ -299,15 +299,15 @@ func (m *NodeExecutionManager) CreateNodeEvent(ctx context.Context, request *adm } func (m *NodeExecutionManager) GetDynamicNodeWorkflow(ctx context.Context, request *admin.GetDynamicNodeWorkflowRequest) (*admin.DynamicNodeWorkflowResponse, error) { - if err := validation.ValidateNodeExecutionIdentifier(request.Id); err != nil { - logger.Debugf(ctx, "can't get node execution data with invalid identifier [%+v]: %v", request.Id, err) + if err := validation.ValidateNodeExecutionIdentifier(request.GetId()); err != nil { + logger.Debugf(ctx, "can't get node execution data with invalid identifier [%+v]: 
%v", request.GetId(), err) } - ctx = getNodeExecutionContext(ctx, request.Id) - nodeExecutionModel, err := util.GetNodeExecutionModel(ctx, m.db, request.Id) + ctx = getNodeExecutionContext(ctx, request.GetId()) + nodeExecutionModel, err := util.GetNodeExecutionModel(ctx, m.db, request.GetId()) if err != nil { logger.Errorf(ctx, "failed to get node execution with id [%+v] with err %v", - request.Id, err) + request.GetId(), err) return nil, err } @@ -331,7 +331,7 @@ func (m *NodeExecutionManager) transformNodeExecutionModel(ctx context.Context, if err != nil { return nil, err } - if internalData.EventVersion == 0 { + if internalData.GetEventVersion() == 0 { // Issue more expensive query to determine whether this node is a parent and/or dynamic node. nodeExecutionModel, err = m.db.NodeExecutionRepo().GetWithChildren(ctx, repoInterfaces.NodeExecutionResource{ NodeExecutionIdentifier: nodeExecutionID, @@ -370,17 +370,17 @@ func (m *NodeExecutionManager) transformNodeExecutionModelList(ctx context.Conte func (m *NodeExecutionManager) GetNodeExecution( ctx context.Context, request *admin.NodeExecutionGetRequest) (*admin.NodeExecution, error) { - if err := validation.ValidateNodeExecutionIdentifier(request.Id); err != nil { - logger.Debugf(ctx, "get node execution called with invalid identifier [%+v]: %v", request.Id, err) + if err := validation.ValidateNodeExecutionIdentifier(request.GetId()); err != nil { + logger.Debugf(ctx, "get node execution called with invalid identifier [%+v]: %v", request.GetId(), err) } - ctx = getNodeExecutionContext(ctx, request.Id) - nodeExecutionModel, err := util.GetNodeExecutionModel(ctx, m.db, request.Id) + ctx = getNodeExecutionContext(ctx, request.GetId()) + nodeExecutionModel, err := util.GetNodeExecutionModel(ctx, m.db, request.GetId()) if err != nil { logger.Debugf(ctx, "Failed to get node execution with id [%+v] with err %v", - request.Id, err) + request.GetId(), err) return nil, err } - nodeExecution, err := 
m.transformNodeExecutionModel(ctx, *nodeExecutionModel, request.Id, nil) + nodeExecution, err := m.transformNodeExecutionModel(ctx, *nodeExecutionModel, request.GetId(), nil) if err != nil { return nil, err } @@ -448,17 +448,17 @@ func (m *NodeExecutionManager) ListNodeExecutions( if err := validation.ValidateNodeExecutionListRequest(request); err != nil { return nil, err } - ctx = getExecutionContext(ctx, request.WorkflowExecutionId) + ctx = getExecutionContext(ctx, request.GetWorkflowExecutionId()) - identifierFilters, err := util.GetWorkflowExecutionIdentifierFilters(ctx, request.WorkflowExecutionId, common.NodeExecution) + identifierFilters, err := util.GetWorkflowExecutionIdentifierFilters(ctx, request.GetWorkflowExecutionId(), common.NodeExecution) if err != nil { return nil, err } var mapFilters []common.MapFilter - if request.UniqueParentId != "" { + if request.GetUniqueParentId() != "" { parentNodeExecution, err := util.GetNodeExecutionModel(ctx, m.db, &core.NodeExecutionIdentifier{ - ExecutionId: request.WorkflowExecutionId, - NodeId: request.UniqueParentId, + ExecutionId: request.GetWorkflowExecutionId(), + NodeId: request.GetUniqueParentId(), }) if err != nil { return nil, err @@ -475,7 +475,7 @@ func (m *NodeExecutionManager) ListNodeExecutions( } } return m.listNodeExecutions( - ctx, identifierFilters, request.Filters, request.Limit, request.Token, request.SortBy, mapFilters) + ctx, identifierFilters, request.GetFilters(), request.GetLimit(), request.GetToken(), request.GetSortBy(), mapFilters) } // Filters on node executions matching the execution parameters (execution project, domain, and name) as well as the @@ -486,13 +486,13 @@ func (m *NodeExecutionManager) ListNodeExecutionsForTask( if err := validation.ValidateNodeExecutionForTaskListRequest(request); err != nil { return nil, err } - ctx = getTaskExecutionContext(ctx, request.TaskExecutionId) + ctx = getTaskExecutionContext(ctx, request.GetTaskExecutionId()) identifierFilters, err := 
util.GetWorkflowExecutionIdentifierFilters( - ctx, request.TaskExecutionId.NodeExecutionId.ExecutionId, common.NodeExecution) + ctx, request.GetTaskExecutionId().GetNodeExecutionId().GetExecutionId(), common.NodeExecution) if err != nil { return nil, err } - parentTaskExecutionModel, err := util.GetTaskExecutionModel(ctx, m.db, request.TaskExecutionId) + parentTaskExecutionModel, err := util.GetTaskExecutionModel(ctx, m.db, request.GetTaskExecutionId()) if err != nil { return nil, err } @@ -503,26 +503,26 @@ func (m *NodeExecutionManager) ListNodeExecutionsForTask( } identifierFilters = append(identifierFilters, nodeIDFilter) return m.listNodeExecutions( - ctx, identifierFilters, request.Filters, request.Limit, request.Token, request.SortBy, nil) + ctx, identifierFilters, request.GetFilters(), request.GetLimit(), request.GetToken(), request.GetSortBy(), nil) } func (m *NodeExecutionManager) GetNodeExecutionData( ctx context.Context, request *admin.NodeExecutionGetDataRequest) (*admin.NodeExecutionGetDataResponse, error) { - if err := validation.ValidateNodeExecutionIdentifier(request.Id); err != nil { - logger.Debugf(ctx, "can't get node execution data with invalid identifier [%+v]: %v", request.Id, err) + if err := validation.ValidateNodeExecutionIdentifier(request.GetId()); err != nil { + logger.Debugf(ctx, "can't get node execution data with invalid identifier [%+v]: %v", request.GetId(), err) } - ctx = getNodeExecutionContext(ctx, request.Id) - nodeExecutionModel, err := util.GetNodeExecutionModel(ctx, m.db, request.Id) + ctx = getNodeExecutionContext(ctx, request.GetId()) + nodeExecutionModel, err := util.GetNodeExecutionModel(ctx, m.db, request.GetId()) if err != nil { logger.Debugf(ctx, "Failed to get node execution with id [%+v] with err %v", - request.Id, err) + request.GetId(), err) return nil, err } nodeExecution, err := transformers.FromNodeExecutionModel(*nodeExecutionModel, transformers.DefaultExecutionTransformerOptions) if err != nil { - 
logger.Debugf(ctx, "failed to transform node execution model [%+v] when fetching data: %v", request.Id, err) + logger.Debugf(ctx, "failed to transform node execution model [%+v] when fetching data: %v", request.GetId(), err) return nil, err } @@ -532,7 +532,7 @@ func (m *NodeExecutionManager) GetNodeExecutionData( group.Go(func() error { var err error inputs, inputURLBlob, err = util.GetInputs(groupCtx, m.urlData, m.config.ApplicationConfiguration().GetRemoteDataConfig(), - m.storageClient, nodeExecution.InputUri) + m.storageClient, nodeExecution.GetInputUri()) return err }) @@ -541,7 +541,7 @@ func (m *NodeExecutionManager) GetNodeExecutionData( group.Go(func() error { var err error outputs, outputURLBlob, err = util.GetOutputs(groupCtx, m.urlData, m.config.ApplicationConfiguration().GetRemoteDataConfig(), - m.storageClient, nodeExecution.Closure) + m.storageClient, nodeExecution.GetClosure()) return err }) @@ -555,7 +555,7 @@ func (m *NodeExecutionManager) GetNodeExecutionData( Outputs: outputURLBlob, FullInputs: inputs, FullOutputs: outputs, - FlyteUrls: common.FlyteURLsFromNodeExecutionID(request.Id, nodeExecution.GetClosure() != nil && nodeExecution.GetClosure().GetDeckUri() != ""), + FlyteUrls: common.FlyteURLsFromNodeExecutionID(request.GetId(), nodeExecution.GetClosure() != nil && nodeExecution.GetClosure().GetDeckUri() != ""), } if len(nodeExecutionModel.DynamicWorkflowRemoteClosureReference) > 0 { @@ -565,17 +565,17 @@ func (m *NodeExecutionManager) GetNodeExecutionData( } response.DynamicWorkflow = &admin.DynamicWorkflowNodeMetadata{ - Id: closure.Primary.Template.Id, + Id: closure.GetPrimary().GetTemplate().GetId(), CompiledWorkflow: closure, - DynamicJobSpecUri: nodeExecution.Closure.DynamicJobSpecUri, + DynamicJobSpecUri: nodeExecution.GetClosure().GetDynamicJobSpecUri(), } } - m.metrics.NodeExecutionInputBytes.Observe(float64(response.Inputs.Bytes)) - if response.Outputs.Bytes > 0 { - 
m.metrics.NodeExecutionOutputBytes.Observe(float64(response.Outputs.Bytes)) - } else if response.FullOutputs != nil { - m.metrics.NodeExecutionOutputBytes.Observe(float64(proto.Size(response.FullOutputs))) + m.metrics.NodeExecutionInputBytes.Observe(float64(response.GetInputs().GetBytes())) + if response.GetOutputs().GetBytes() > 0 { + m.metrics.NodeExecutionOutputBytes.Observe(float64(response.GetOutputs().GetBytes())) + } else if response.GetFullOutputs() != nil { + m.metrics.NodeExecutionOutputBytes.Observe(float64(proto.Size(response.GetFullOutputs()))) } return response, nil @@ -588,9 +588,9 @@ func (m *NodeExecutionManager) fetchDynamicWorkflowClosure(ctx context.Context, return nil, errors.NewFlyteAdminErrorf(codes.Internal, "Unable to read WorkflowClosure from location %s : %v", ref, err) } - if wf := closure.Primary; wf == nil { + if wf := closure.GetPrimary(); wf == nil { return nil, errors.NewFlyteAdminErrorf(codes.Internal, "Empty primary workflow definition in loaded dynamic workflow model.") - } else if template := wf.Template; template == nil { + } else if template := wf.GetTemplate(); template == nil { return nil, errors.NewFlyteAdminErrorf(codes.Internal, "Empty primary workflow template in loaded dynamic workflow model.") } return closure, nil diff --git a/flyteadmin/pkg/manager/impl/node_execution_manager_test.go b/flyteadmin/pkg/manager/impl/node_execution_manager_test.go index b43c785b33..69a0203452 100644 --- a/flyteadmin/pkg/manager/impl/node_execution_manager_test.go +++ b/flyteadmin/pkg/manager/impl/node_execution_manager_test.go @@ -71,7 +71,7 @@ var request = &admin.NodeExecutionEventRequest{ TargetMetadata: &event.NodeExecutionEvent_TaskNodeMetadata{ TaskNodeMetadata: &event.TaskNodeMetadata{ DynamicWorkflow: &event.DynamicWorkflowNodeMetadata{ - Id: dynamicWorkflowClosure.Primary.Template.Id, + Id: dynamicWorkflowClosure.GetPrimary().GetTemplate().GetId(), CompiledWorkflow: dynamicWorkflowClosure, }, }, @@ -131,7 +131,7 @@ func 
TestCreateNodeEvent(t *testing.T) { return models.NodeExecution{}, flyteAdminErrors.NewFlyteAdminError(codes.NotFound, "foo") }) expectedClosure := admin.NodeExecutionClosure{ - Phase: request.Event.Phase, + Phase: request.GetEvent().GetPhase(), StartedAt: occurredAtProto, CreatedAt: occurredAtProto, UpdatedAt: occurredAtProto, @@ -450,8 +450,8 @@ func TestTransformNodeExecutionModel(t *testing.T) { } nodeExecution, err := manager.transformNodeExecutionModel(ctx, models.NodeExecution{}, nodeExecID, transformers.DefaultExecutionTransformerOptions) assert.NoError(t, err) - assert.True(t, proto.Equal(nodeExecID, nodeExecution.Id)) - assert.True(t, nodeExecution.Metadata.IsParentNode) + assert.True(t, proto.Equal(nodeExecID, nodeExecution.GetId())) + assert.True(t, nodeExecution.GetMetadata().GetIsParentNode()) }) t.Run("event version > 0", func(t *testing.T) { manager := NodeExecutionManager{ @@ -480,8 +480,8 @@ func TestTransformNodeExecutionModel(t *testing.T) { InternalData: internalDataBytes, }, nodeExecID, transformers.DefaultExecutionTransformerOptions) assert.NoError(t, err) - assert.True(t, nodeExecution.Metadata.IsParentNode) - assert.True(t, nodeExecution.Metadata.IsDynamic) + assert.True(t, nodeExecution.GetMetadata().GetIsParentNode()) + assert.True(t, nodeExecution.GetMetadata().GetIsDynamic()) }) t.Run("transform internal data err", func(t *testing.T) { manager := NodeExecutionManager{ @@ -865,7 +865,7 @@ func TestListNodeExecutionsLevelZero(t *testing.T) { }, }) assert.NoError(t, err) - assert.Len(t, nodeExecutions.NodeExecutions, 1) + assert.Len(t, nodeExecutions.GetNodeExecutions(), 1) assert.True(t, proto.Equal(&admin.NodeExecution{ Id: &core.NodeExecutionIdentifier{ NodeId: "node id", @@ -878,8 +878,8 @@ func TestListNodeExecutionsLevelZero(t *testing.T) { InputUri: "input uri", Closure: &expectedClosure, Metadata: &expectedMetadata, - }, nodeExecutions.NodeExecutions[0])) - assert.Equal(t, "3", nodeExecutions.Token) + }, 
nodeExecutions.GetNodeExecutions()[0])) + assert.Equal(t, "3", nodeExecutions.GetToken()) } func TestListNodeExecutionsWithParent(t *testing.T) { @@ -895,7 +895,7 @@ func TestListNodeExecutionsWithParent(t *testing.T) { closureBytes, _ := proto.Marshal(&expectedClosure) parentID := uint(12) repository.NodeExecutionRepo().(*repositoryMocks.MockNodeExecutionRepo).SetGetCallback(func(ctx context.Context, input interfaces.NodeExecutionResource) (execution models.NodeExecution, e error) { - assert.Equal(t, "parent_1", input.NodeExecutionIdentifier.NodeId) + assert.Equal(t, "parent_1", input.NodeExecutionIdentifier.GetNodeId()) return models.NodeExecution{ BaseModel: models.BaseModel{ ID: parentID, @@ -966,7 +966,7 @@ func TestListNodeExecutionsWithParent(t *testing.T) { UniqueParentId: "parent_1", }) assert.Nil(t, err) - assert.Len(t, nodeExecutions.NodeExecutions, 1) + assert.Len(t, nodeExecutions.GetNodeExecutions(), 1) assert.True(t, proto.Equal(&admin.NodeExecution{ Id: &core.NodeExecutionIdentifier{ NodeId: "node id", @@ -979,8 +979,8 @@ func TestListNodeExecutionsWithParent(t *testing.T) { InputUri: "input uri", Closure: &expectedClosure, Metadata: &expectedMetadata, - }, nodeExecutions.NodeExecutions[0])) - assert.Equal(t, "3", nodeExecutions.Token) + }, nodeExecutions.GetNodeExecutions()[0])) + assert.Equal(t, "3", nodeExecutions.GetToken()) } func TestListNodeExecutions_WithJoinTableFilter(t *testing.T) { @@ -1089,7 +1089,7 @@ func TestListNodeExecutions_WithJoinTableFilter(t *testing.T) { Filters: "eq(execution.phase, SUCCEEDED)", }) assert.NoError(t, err) - assert.Len(t, nodeExecutions.NodeExecutions, 1) + assert.Len(t, nodeExecutions.GetNodeExecutions(), 1) assert.True(t, proto.Equal(&admin.NodeExecution{ Id: &core.NodeExecutionIdentifier{ NodeId: "node id", @@ -1102,8 +1102,8 @@ func TestListNodeExecutions_WithJoinTableFilter(t *testing.T) { InputUri: "input uri", Closure: &expectedClosure, Metadata: &expectedMetadata, - }, 
nodeExecutions.NodeExecutions[0])) - assert.Equal(t, "3", nodeExecutions.Token) + }, nodeExecutions.GetNodeExecutions()[0])) + assert.Equal(t, "3", nodeExecutions.GetToken()) } func TestListNodeExecutions_InvalidParams(t *testing.T) { @@ -1316,7 +1316,7 @@ func TestListNodeExecutionsForTask(t *testing.T) { }, }) assert.Nil(t, err) - assert.Len(t, nodeExecutions.NodeExecutions, 1) + assert.Len(t, nodeExecutions.GetNodeExecutions(), 1) expectedMetadata := admin.NodeExecutionMetaData{ SpecNodeId: "spec-n1", IsParentNode: true, @@ -1333,8 +1333,8 @@ func TestListNodeExecutionsForTask(t *testing.T) { InputUri: "input uri", Closure: &expectedClosure, Metadata: &expectedMetadata, - }, nodeExecutions.NodeExecutions[0])) - assert.Equal(t, "3", nodeExecutions.Token) + }, nodeExecutions.GetNodeExecutions()[0])) + assert.Equal(t, "3", nodeExecutions.GetToken()) } func TestGetNodeExecutionData(t *testing.T) { @@ -1439,7 +1439,7 @@ func TestGetNodeExecutionData(t *testing.T) { FullInputs: fullInputs, FullOutputs: fullOutputs, DynamicWorkflow: &admin.DynamicWorkflowNodeMetadata{ - Id: dynamicWorkflowClosure.Primary.Template.Id, + Id: dynamicWorkflowClosure.GetPrimary().GetTemplate().GetId(), CompiledWorkflow: dynamicWorkflowClosure, }, FlyteUrls: &admin.FlyteURLs{ @@ -1465,7 +1465,7 @@ func Test_GetDynamicNodeWorkflow_Success(t *testing.T) { return models.NodeExecution{DynamicWorkflowRemoteClosureReference: remoteClosureIdentifier}, nil }) mockStorageClient := commonMocks.GetMockStorageClient() - expectedClosure := testutils.GetWorkflowClosure().CompiledWorkflow + expectedClosure := testutils.GetWorkflowClosure().GetCompiledWorkflow() mockStorageClient.ComposedProtobufStore.(*commonMocks.TestDataStore).ReadProtobufCb = func(ctx context.Context, reference storage.DataReference, msg proto.Message) error { assert.Equal(t, remoteClosureIdentifier, reference.String()) bytes, err := proto.Marshal(expectedClosure) diff --git a/flyteadmin/pkg/manager/impl/project_manager.go 
b/flyteadmin/pkg/manager/impl/project_manager.go index a1ac99b412..a19b61ca01 100644 --- a/flyteadmin/pkg/manager/impl/project_manager.go +++ b/flyteadmin/pkg/manager/impl/project_manager.go @@ -33,7 +33,7 @@ func (m *ProjectManager) CreateProject(ctx context.Context, request *admin.Proje if err := validation.ValidateProjectRegisterRequest(request); err != nil { return nil, err } - projectModel := transformers.CreateProjectModel(request.Project) + projectModel := transformers.CreateProjectModel(request.GetProject()) err := m.db.ProjectRepo().Create(ctx, projectModel) if err != nil { return nil, err @@ -44,14 +44,14 @@ func (m *ProjectManager) CreateProject(ctx context.Context, request *admin.Proje func (m *ProjectManager) ListProjects(ctx context.Context, request *admin.ProjectListRequest) (*admin.Projects, error) { spec := util.FilterSpec{ - RequestFilters: request.Filters, + RequestFilters: request.GetFilters(), } filters, err := util.GetDbFilters(spec, common.Project) if err != nil { return nil, err } - sortParameter, err := common.NewSortParameter(request.SortBy, models.ProjectColumns) + sortParameter, err := common.NewSortParameter(request.GetSortBy(), models.ProjectColumns) if err != nil { return nil, err } @@ -59,14 +59,14 @@ func (m *ProjectManager) ListProjects(ctx context.Context, request *admin.Projec sortParameter = alphabeticalSortParam } - offset, err := validation.ValidateToken(request.Token) + offset, err := validation.ValidateToken(request.GetToken()) if err != nil { return nil, errors.NewFlyteAdminErrorf(codes.InvalidArgument, - "invalid pagination token %s for ListProjects", request.Token) + "invalid pagination token %s for ListProjects", request.GetToken()) } // And finally, query the database listProjectsInput := repoInterfaces.ListResourceInput{ - Limit: int(request.Limit), + Limit: int(request.GetLimit()), Offset: offset, InlineFilters: filters, SortParameter: sortParameter, @@ -75,10 +75,10 @@ func (m *ProjectManager) ListProjects(ctx 
context.Context, request *admin.Projec if err != nil { return nil, err } - projects := transformers.FromProjectModels(projectModels, m.GetDomains(ctx, &admin.GetDomainRequest{}).Domains) + projects := transformers.FromProjectModels(projectModels, m.GetDomains(ctx, &admin.GetDomainRequest{}).GetDomains()) var token string - if len(projects) == int(request.Limit) { + if len(projects) == int(request.GetLimit()) { token = strconv.Itoa(offset + len(projects)) } @@ -93,7 +93,7 @@ func (m *ProjectManager) UpdateProject(ctx context.Context, projectUpdate *admin projectRepo := m.db.ProjectRepo() // Fetch the existing project if exists. If not, return err and do not update. - _, err := projectRepo.Get(ctx, projectUpdate.Id) + _, err := projectRepo.Get(ctx, projectUpdate.GetId()) if err != nil { return nil, err } @@ -118,11 +118,11 @@ func (m *ProjectManager) GetProject(ctx context.Context, request *admin.ProjectG if err := validation.ValidateProjectGetRequest(request); err != nil { return nil, err } - projectModel, err := m.db.ProjectRepo().Get(ctx, request.Id) + projectModel, err := m.db.ProjectRepo().Get(ctx, request.GetId()) if err != nil { return nil, err } - projectResponse := transformers.FromProjectModel(projectModel, m.GetDomains(ctx, &admin.GetDomainRequest{}).Domains) + projectResponse := transformers.FromProjectModel(projectModel, m.GetDomains(ctx, &admin.GetDomainRequest{}).GetDomains()) return projectResponse, nil } diff --git a/flyteadmin/pkg/manager/impl/project_manager_test.go b/flyteadmin/pkg/manager/impl/project_manager_test.go index 38117a7ec9..42bf93cafb 100644 --- a/flyteadmin/pkg/manager/impl/project_manager_test.go +++ b/flyteadmin/pkg/manager/impl/project_manager_test.go @@ -70,11 +70,11 @@ func testListProjects(request *admin.ProjectListRequest, token string, orderExpr resp, err := projectManager.ListProjects(context.Background(), request) assert.NoError(t, err) - assert.Len(t, resp.Projects, 1) + assert.Len(t, resp.GetProjects(), 1) assert.Equal(t, 
token, resp.GetToken()) - assert.Len(t, resp.Projects[0].Domains, 4) - for _, domain := range resp.Projects[0].Domains { - assert.Contains(t, testDomainsForProjManager, domain.Id) + assert.Len(t, resp.GetProjects()[0].GetDomains(), 4) + for _, domain := range resp.GetProjects()[0].GetDomains() { + assert.Contains(t, testDomainsForProjManager, domain.GetId()) } } @@ -300,10 +300,10 @@ func TestProjectManager_TestGetProject(t *testing.T) { resp, _ := projectManager.GetProject(context.Background(), mockedProject) - assert.Equal(t, mockedProject.Id, resp.Id) - assert.Equal(t, "a-mocked-project", resp.Name) - assert.Equal(t, "A mocked project", resp.Description) - assert.Equal(t, admin.Project_ProjectState(0), resp.State) + assert.Equal(t, mockedProject.GetId(), resp.GetId()) + assert.Equal(t, "a-mocked-project", resp.GetName()) + assert.Equal(t, "A mocked project", resp.GetDescription()) + assert.Equal(t, admin.Project_ProjectState(0), resp.GetState()) } func TestProjectManager_TestGetProject_ErrorDueToProjectNotFound(t *testing.T) { diff --git a/flyteadmin/pkg/manager/impl/resources/resource_manager.go b/flyteadmin/pkg/manager/impl/resources/resource_manager.go index b1304930cf..4dad45d987 100644 --- a/flyteadmin/pkg/manager/impl/resources/resource_manager.go +++ b/flyteadmin/pkg/manager/impl/resources/resource_manager.go @@ -76,7 +76,7 @@ func (m *ResourceManager) createOrMergeUpdateWorkflowAttributes( return nil, err } updatedModel, err := transformers.MergeUpdateWorkflowAttributes( - ctx, existing, resourceType, &resourceID, request.Attributes) + ctx, existing, resourceType, &resourceID, request.GetAttributes()) if err != nil { return nil, err } @@ -96,11 +96,11 @@ func (m *ResourceManager) UpdateWorkflowAttributes( return nil, err } - model, err := transformers.WorkflowAttributesToResourceModel(request.Attributes, resource) + model, err := transformers.WorkflowAttributesToResourceModel(request.GetAttributes(), resource) if err != nil { return nil, err } - if 
request.Attributes.GetMatchingAttributes().GetPluginOverrides() != nil { + if request.GetAttributes().GetMatchingAttributes().GetPluginOverrides() != nil { return m.createOrMergeUpdateWorkflowAttributes(ctx, request, model, admin.MatchableResource_PLUGIN_OVERRIDE) } err = m.db.ResourceRepo().CreateOrUpdate(ctx, model) @@ -118,7 +118,7 @@ func (m *ResourceManager) GetWorkflowAttributes( return nil, err } workflowAttributesModel, err := m.db.ResourceRepo().Get( - ctx, repo_interface.ResourceID{Project: request.Project, Domain: request.Domain, Workflow: request.Workflow, ResourceType: request.ResourceType.String()}) + ctx, repo_interface.ResourceID{Project: request.GetProject(), Domain: request.GetDomain(), Workflow: request.GetWorkflow(), ResourceType: request.GetResourceType().String()}) if err != nil { return nil, err } @@ -137,11 +137,11 @@ func (m *ResourceManager) DeleteWorkflowAttributes(ctx context.Context, return nil, err } if err := m.db.ResourceRepo().Delete( - ctx, repo_interface.ResourceID{Project: request.Project, Domain: request.Domain, Workflow: request.Workflow, ResourceType: request.ResourceType.String()}); err != nil { + ctx, repo_interface.ResourceID{Project: request.GetProject(), Domain: request.GetDomain(), Workflow: request.GetWorkflow(), ResourceType: request.GetResourceType().String()}); err != nil { return nil, err } - logger.Infof(ctx, "Deleted workflow attributes for: %s-%s-%s (%s)", request.Project, - request.Domain, request.Workflow, request.ResourceType.String()) + logger.Infof(ctx, "Deleted workflow attributes for: %s-%s-%s (%s)", request.GetProject(), + request.GetDomain(), request.GetWorkflow(), request.GetResourceType().String()) return &admin.WorkflowAttributesDeleteResponse{}, nil } @@ -154,12 +154,12 @@ func (m *ResourceManager) UpdateProjectAttributes(ctx context.Context, request * if resource, err = validation.ValidateProjectAttributesUpdateRequest(ctx, m.db, request); err != nil { return nil, err } - model, err := 
transformers.ProjectAttributesToResourceModel(request.Attributes, resource) + model, err := transformers.ProjectAttributesToResourceModel(request.GetAttributes(), resource) if err != nil { return nil, err } - if request.Attributes.GetMatchingAttributes().GetPluginOverrides() != nil { + if request.GetAttributes().GetMatchingAttributes().GetPluginOverrides() != nil { return m.createOrMergeUpdateProjectAttributes(ctx, request, model, admin.MatchableResource_PLUGIN_OVERRIDE) } @@ -174,12 +174,12 @@ func (m *ResourceManager) UpdateProjectAttributes(ctx context.Context, request * func (m *ResourceManager) GetProjectAttributesBase(ctx context.Context, request *admin.ProjectAttributesGetRequest) ( *admin.ProjectAttributesGetResponse, error) { - if err := validation.ValidateProjectExists(ctx, m.db, request.Project); err != nil { + if err := validation.ValidateProjectExists(ctx, m.db, request.GetProject()); err != nil { return nil, err } projectAttributesModel, err := m.db.ResourceRepo().GetProjectLevel( - ctx, repo_interface.ResourceID{Project: request.Project, Domain: "", ResourceType: request.ResourceType.String()}) + ctx, repo_interface.ResourceID{Project: request.GetProject(), Domain: "", ResourceType: request.GetResourceType().String()}) if err != nil { return nil, err } @@ -191,8 +191,8 @@ func (m *ResourceManager) GetProjectAttributesBase(ctx context.Context, request return &admin.ProjectAttributesGetResponse{ Attributes: &admin.ProjectAttributes{ - Project: request.Project, - MatchingAttributes: ma.Attributes, + Project: request.GetProject(), + MatchingAttributes: ma.GetAttributes(), }, }, nil } @@ -208,11 +208,11 @@ func (m *ResourceManager) GetProjectAttributes(ctx context.Context, request *adm configLevelDefaults := m.config.GetTopLevelConfig().GetAsWorkflowExecutionConfig() if err != nil { ec, ok := err.(errors.FlyteAdminError) - if ok && ec.Code() == codes.NotFound && request.ResourceType == admin.MatchableResource_WORKFLOW_EXECUTION_CONFIG { + if ok && 
ec.Code() == codes.NotFound && request.GetResourceType() == admin.MatchableResource_WORKFLOW_EXECUTION_CONFIG { // TODO: Will likely be removed after overarching settings project is done return &admin.ProjectAttributesGetResponse{ Attributes: &admin.ProjectAttributes{ - Project: request.Project, + Project: request.GetProject(), MatchingAttributes: &admin.MatchingAttributes{ Target: &admin.MatchingAttributes_WorkflowExecutionConfig{ WorkflowExecutionConfig: configLevelDefaults, @@ -227,14 +227,14 @@ func (m *ResourceManager) GetProjectAttributes(ctx context.Context, request *adm // If found, then merge result with the default values for the platform // TODO: Remove this logic once the overarching settings project is done. Those endpoints should take // default configuration into account. - responseAttributes := getResponse.Attributes.GetMatchingAttributes().GetWorkflowExecutionConfig() + responseAttributes := getResponse.GetAttributes().GetMatchingAttributes().GetWorkflowExecutionConfig() if responseAttributes != nil { logger.Warningf(ctx, "Merging response %s with defaults %s", responseAttributes, configLevelDefaults) tmp := util.MergeIntoExecConfig(responseAttributes, configLevelDefaults) responseAttributes = tmp return &admin.ProjectAttributesGetResponse{ Attributes: &admin.ProjectAttributes{ - Project: request.Project, + Project: request.GetProject(), MatchingAttributes: &admin.MatchingAttributes{ Target: &admin.MatchingAttributes_WorkflowExecutionConfig{ WorkflowExecutionConfig: responseAttributes, @@ -250,14 +250,14 @@ func (m *ResourceManager) GetProjectAttributes(ctx context.Context, request *adm func (m *ResourceManager) DeleteProjectAttributes(ctx context.Context, request *admin.ProjectAttributesDeleteRequest) ( *admin.ProjectAttributesDeleteResponse, error) { - if err := validation.ValidateProjectForUpdate(ctx, m.db, request.Project); err != nil { + if err := validation.ValidateProjectForUpdate(ctx, m.db, request.GetProject()); err != nil { return nil, 
err } if err := m.db.ResourceRepo().Delete( - ctx, repo_interface.ResourceID{Project: request.Project, ResourceType: request.ResourceType.String()}); err != nil { + ctx, repo_interface.ResourceID{Project: request.GetProject(), ResourceType: request.GetResourceType().String()}); err != nil { return nil, err } - logger.Infof(ctx, "Deleted project attributes for: %s-%s (%s)", request.Project, request.ResourceType.String()) + logger.Infof(ctx, "Deleted project attributes for: %s-%s (%s)", request.GetProject(), request.GetResourceType().String()) return &admin.ProjectAttributesDeleteResponse{}, nil } @@ -285,7 +285,7 @@ func (m *ResourceManager) createOrMergeUpdateProjectDomainAttributes( return nil, err } updatedModel, err := transformers.MergeUpdatePluginAttributes( - ctx, existing, resourceType, &resourceID, request.Attributes.MatchingAttributes) + ctx, existing, resourceType, &resourceID, request.GetAttributes().GetMatchingAttributes()) if err != nil { return nil, err } @@ -321,7 +321,7 @@ func (m *ResourceManager) createOrMergeUpdateProjectAttributes( return nil, err } updatedModel, err := transformers.MergeUpdatePluginAttributes( - ctx, existing, resourceType, &resourceID, request.Attributes.MatchingAttributes) + ctx, existing, resourceType, &resourceID, request.GetAttributes().GetMatchingAttributes()) if err != nil { return nil, err } @@ -340,13 +340,13 @@ func (m *ResourceManager) UpdateProjectDomainAttributes( if resource, err = validation.ValidateProjectDomainAttributesUpdateRequest(ctx, m.db, m.config, request); err != nil { return nil, err } - ctx = contextutils.WithProjectDomain(ctx, request.Attributes.Project, request.Attributes.Domain) + ctx = contextutils.WithProjectDomain(ctx, request.GetAttributes().GetProject(), request.GetAttributes().GetDomain()) - model, err := transformers.ProjectDomainAttributesToResourceModel(request.Attributes, resource) + model, err := transformers.ProjectDomainAttributesToResourceModel(request.GetAttributes(), resource) if 
err != nil { return nil, err } - if request.Attributes.GetMatchingAttributes().GetPluginOverrides() != nil { + if request.GetAttributes().GetMatchingAttributes().GetPluginOverrides() != nil { return m.createOrMergeUpdateProjectDomainAttributes(ctx, request, model, admin.MatchableResource_PLUGIN_OVERRIDE) } err = m.db.ResourceRepo().CreateOrUpdate(ctx, model) @@ -363,7 +363,7 @@ func (m *ResourceManager) GetProjectDomainAttributes( return nil, err } projectAttributesModel, err := m.db.ResourceRepo().Get( - ctx, repo_interface.ResourceID{Project: request.Project, Domain: request.Domain, ResourceType: request.ResourceType.String()}) + ctx, repo_interface.ResourceID{Project: request.GetProject(), Domain: request.GetDomain(), ResourceType: request.GetResourceType().String()}) if err != nil { return nil, err } @@ -382,11 +382,11 @@ func (m *ResourceManager) DeleteProjectDomainAttributes(ctx context.Context, return nil, err } if err := m.db.ResourceRepo().Delete( - ctx, repo_interface.ResourceID{Project: request.Project, Domain: request.Domain, ResourceType: request.ResourceType.String()}); err != nil { + ctx, repo_interface.ResourceID{Project: request.GetProject(), Domain: request.GetDomain(), ResourceType: request.GetResourceType().String()}); err != nil { return nil, err } - logger.Infof(ctx, "Deleted project-domain attributes for: %s-%s (%s)", request.Project, - request.Domain, request.ResourceType.String()) + logger.Infof(ctx, "Deleted project-domain attributes for: %s-%s (%s)", request.GetProject(), + request.GetDomain(), request.GetResourceType().String()) return &admin.ProjectDomainAttributesDeleteResponse{}, nil } @@ -395,7 +395,7 @@ func (m *ResourceManager) ListAll(ctx context.Context, request *admin.ListMatcha if err := validation.ValidateListAllMatchableAttributesRequest(request); err != nil { return nil, err } - resources, err := m.db.ResourceRepo().ListAll(ctx, request.ResourceType.String()) + resources, err := m.db.ResourceRepo().ListAll(ctx, 
request.GetResourceType().String()) if err != nil { return nil, err } diff --git a/flyteadmin/pkg/manager/impl/resources/resource_manager_test.go b/flyteadmin/pkg/manager/impl/resources/resource_manager_test.go index 8352de1d7b..be03d642ab 100644 --- a/flyteadmin/pkg/manager/impl/resources/resource_manager_test.go +++ b/flyteadmin/pkg/manager/impl/resources/resource_manager_test.go @@ -83,8 +83,8 @@ func TestUpdateWorkflowAttributes_CreateOrMerge(t *testing.T) { if err != nil { t.Fatal(err) } - assert.Len(t, attributesToBeSaved.GetPluginOverrides().Overrides, 1) - assert.True(t, proto.Equal(attributesToBeSaved.GetPluginOverrides().Overrides[0], &admin.PluginOverride{ + assert.Len(t, attributesToBeSaved.GetPluginOverrides().GetOverrides(), 1) + assert.True(t, proto.Equal(attributesToBeSaved.GetPluginOverrides().GetOverrides()[0], &admin.PluginOverride{ TaskType: "python", PluginId: []string{"plugin a"}})) @@ -127,14 +127,14 @@ func TestUpdateWorkflowAttributes_CreateOrMerge(t *testing.T) { t.Fatal(err) } - assert.Len(t, attributesToBeSaved.GetPluginOverrides().Overrides, 2) - for _, override := range attributesToBeSaved.GetPluginOverrides().Overrides { - if override.TaskType == python { - assert.EqualValues(t, []string{"plugin a"}, override.PluginId) - } else if override.TaskType == hive { - assert.EqualValues(t, []string{"plugin b"}, override.PluginId) + assert.Len(t, attributesToBeSaved.GetPluginOverrides().GetOverrides(), 2) + for _, override := range attributesToBeSaved.GetPluginOverrides().GetOverrides() { + if override.GetTaskType() == python { + assert.EqualValues(t, []string{"plugin a"}, override.GetPluginId()) + } else if override.GetTaskType() == hive { + assert.EqualValues(t, []string{"plugin b"}, override.GetPluginId()) } else { - t.Errorf("Unexpected task type [%s] plugin override committed to db", override.TaskType) + t.Errorf("Unexpected task type [%s] plugin override committed to db", override.GetTaskType()) } } createOrUpdateCalled = true @@ -256,8 
+256,8 @@ func TestUpdateProjectDomainAttributes_CreateOrMerge(t *testing.T) { if err != nil { t.Fatal(err) } - assert.Len(t, attributesToBeSaved.GetPluginOverrides().Overrides, 1) - assert.True(t, proto.Equal(attributesToBeSaved.GetPluginOverrides().Overrides[0], &admin.PluginOverride{ + assert.Len(t, attributesToBeSaved.GetPluginOverrides().GetOverrides(), 1) + assert.True(t, proto.Equal(attributesToBeSaved.GetPluginOverrides().GetOverrides()[0], &admin.PluginOverride{ TaskType: python, PluginId: []string{"plugin a"}})) @@ -298,14 +298,14 @@ func TestUpdateProjectDomainAttributes_CreateOrMerge(t *testing.T) { t.Fatal(err) } - assert.Len(t, attributesToBeSaved.GetPluginOverrides().Overrides, 2) - for _, override := range attributesToBeSaved.GetPluginOverrides().Overrides { - if override.TaskType == python { - assert.EqualValues(t, []string{"plugin a"}, override.PluginId) - } else if override.TaskType == hive { - assert.EqualValues(t, []string{"plugin b"}, override.PluginId) + assert.Len(t, attributesToBeSaved.GetPluginOverrides().GetOverrides(), 2) + for _, override := range attributesToBeSaved.GetPluginOverrides().GetOverrides() { + if override.GetTaskType() == python { + assert.EqualValues(t, []string{"plugin a"}, override.GetPluginId()) + } else if override.GetTaskType() == hive { + assert.EqualValues(t, []string{"plugin b"}, override.GetPluginId()) } else { - t.Errorf("Unexpected task type [%s] plugin override committed to db", override.TaskType) + t.Errorf("Unexpected task type [%s] plugin override committed to db", override.GetTaskType()) } } createOrUpdateCalled = true @@ -439,8 +439,8 @@ func TestUpdateProjectAttributes_CreateOrMerge(t *testing.T) { if err != nil { t.Fatal(err) } - assert.Len(t, attributesToBeSaved.GetPluginOverrides().Overrides, 1) - assert.True(t, proto.Equal(attributesToBeSaved.GetPluginOverrides().Overrides[0], &admin.PluginOverride{ + assert.Len(t, attributesToBeSaved.GetPluginOverrides().GetOverrides(), 1) + assert.True(t, 
proto.Equal(attributesToBeSaved.GetPluginOverrides().GetOverrides()[0], &admin.PluginOverride{ TaskType: python, PluginId: []string{"plugin a"}})) @@ -480,14 +480,14 @@ func TestUpdateProjectAttributes_CreateOrMerge(t *testing.T) { t.Fatal(err) } - assert.Len(t, attributesToBeSaved.GetPluginOverrides().Overrides, 2) - for _, override := range attributesToBeSaved.GetPluginOverrides().Overrides { - if override.TaskType == python { - assert.EqualValues(t, []string{"plugin a"}, override.PluginId) - } else if override.TaskType == hive { - assert.EqualValues(t, []string{"plugin b"}, override.PluginId) + assert.Len(t, attributesToBeSaved.GetPluginOverrides().GetOverrides(), 2) + for _, override := range attributesToBeSaved.GetPluginOverrides().GetOverrides() { + if override.GetTaskType() == python { + assert.EqualValues(t, []string{"plugin a"}, override.GetPluginId()) + } else if override.GetTaskType() == hive { + assert.EqualValues(t, []string{"plugin b"}, override.GetPluginId()) } else { - t.Errorf("Unexpected task type [%s] plugin override committed to db", override.TaskType) + t.Errorf("Unexpected task type [%s] plugin override committed to db", override.GetTaskType()) } } createOrUpdateCalled = true @@ -763,16 +763,16 @@ func TestListAllResources(t *testing.T) { ResourceType: admin.MatchableResource_CLUSTER_RESOURCE, }) assert.Nil(t, err) - assert.NotNil(t, response.Configurations) - assert.Len(t, response.Configurations, 2) + assert.NotNil(t, response.GetConfigurations()) + assert.Len(t, response.GetConfigurations(), 2) assert.True(t, proto.Equal(&admin.MatchableAttributesConfiguration{ Project: "projectA", Attributes: &projectAttributes, - }, response.Configurations[0])) + }, response.GetConfigurations()[0])) assert.True(t, proto.Equal(&admin.MatchableAttributesConfiguration{ Project: "projectB", Domain: "development", Workflow: "workflow", Attributes: &workflowAttributes, - }, response.Configurations[1])) + }, response.GetConfigurations()[1])) } diff --git 
a/flyteadmin/pkg/manager/impl/signal_manager.go b/flyteadmin/pkg/manager/impl/signal_manager.go index f98edae674..7da9dd5f68 100644 --- a/flyteadmin/pkg/manager/impl/signal_manager.go +++ b/flyteadmin/pkg/manager/impl/signal_manager.go @@ -33,9 +33,9 @@ type SignalManager struct { } func getSignalContext(ctx context.Context, identifier *core.SignalIdentifier) context.Context { - ctx = contextutils.WithProjectDomain(ctx, identifier.ExecutionId.Project, identifier.ExecutionId.Domain) - ctx = contextutils.WithWorkflowID(ctx, identifier.ExecutionId.Name) - return contextutils.WithSignalID(ctx, identifier.SignalId) + ctx = contextutils.WithProjectDomain(ctx, identifier.GetExecutionId().GetProject(), identifier.GetExecutionId().GetDomain()) + ctx = contextutils.WithWorkflowID(ctx, identifier.GetExecutionId().GetName()) + return contextutils.WithSignalID(ctx, identifier.GetSignalId()) } func (s *SignalManager) GetOrCreateSignal(ctx context.Context, request *admin.SignalGetOrCreateRequest) (*admin.Signal, error) { @@ -43,11 +43,11 @@ func (s *SignalManager) GetOrCreateSignal(ctx context.Context, request *admin.Si logger.Debugf(ctx, "invalid request [%+v]: %v", request, err) return nil, err } - ctx = getSignalContext(ctx, request.Id) + ctx = getSignalContext(ctx, request.GetId()) - signalModel, err := transformers.CreateSignalModel(request.Id, request.Type, nil) + signalModel, err := transformers.CreateSignalModel(request.GetId(), request.GetType(), nil) if err != nil { - logger.Errorf(ctx, "Failed to transform signal with id [%+v] and type [+%v] with err: %v", request.Id, request.Type, err) + logger.Errorf(ctx, "Failed to transform signal with id [%+v] and type [+%v] with err: %v", request.GetId(), request.GetType(), err) return nil, err } @@ -70,33 +70,33 @@ func (s *SignalManager) ListSignals(ctx context.Context, request *admin.SignalLi logger.Debugf(ctx, "ListSignals request [%+v] is invalid: %v", request, err) return nil, err } - ctx = getExecutionContext(ctx, 
request.WorkflowExecutionId) + ctx = getExecutionContext(ctx, request.GetWorkflowExecutionId()) - identifierFilters, err := util.GetWorkflowExecutionIdentifierFilters(ctx, request.WorkflowExecutionId, common.Signal) + identifierFilters, err := util.GetWorkflowExecutionIdentifierFilters(ctx, request.GetWorkflowExecutionId(), common.Signal) if err != nil { return nil, err } - filters, err := util.AddRequestFilters(request.Filters, common.Signal, identifierFilters) + filters, err := util.AddRequestFilters(request.GetFilters(), common.Signal, identifierFilters) if err != nil { return nil, err } - sortParameter, err := common.NewSortParameter(request.SortBy, models.SignalColumns) + sortParameter, err := common.NewSortParameter(request.GetSortBy(), models.SignalColumns) if err != nil { return nil, err } - offset, err := validation.ValidateToken(request.Token) + offset, err := validation.ValidateToken(request.GetToken()) if err != nil { return nil, errors.NewFlyteAdminErrorf(codes.InvalidArgument, - "invalid pagination token %s for ListSignals", request.Token) + "invalid pagination token %s for ListSignals", request.GetToken()) } signalModelList, err := s.db.SignalRepo().List(ctx, repoInterfaces.ListResourceInput{ InlineFilters: filters, Offset: offset, - Limit: int(request.Limit), + Limit: int(request.GetLimit()), SortParameter: sortParameter, }) if err != nil { @@ -111,7 +111,7 @@ func (s *SignalManager) ListSignals(ctx context.Context, request *admin.SignalLi return nil, err } var token string - if len(signalList) == int(request.Limit) { + if len(signalList) == int(request.GetLimit()) { token = strconv.Itoa(offset + len(signalList)) } return &admin.SignalList{ @@ -124,11 +124,11 @@ func (s *SignalManager) SetSignal(ctx context.Context, request *admin.SignalSetR if err := validation.ValidateSignalSetRequest(ctx, s.db, request); err != nil { return nil, err } - ctx = getSignalContext(ctx, request.Id) + ctx = getSignalContext(ctx, request.GetId()) - signalModel, err := 
transformers.CreateSignalModel(request.Id, nil, request.Value) + signalModel, err := transformers.CreateSignalModel(request.GetId(), nil, request.GetValue()) if err != nil { - logger.Errorf(ctx, "Failed to transform signal with id [%+v] and value [+%v] with err: %v", request.Id, request.Value, err) + logger.Errorf(ctx, "Failed to transform signal with id [%+v] and value [+%v] with err: %v", request.GetId(), request.GetValue(), err) return nil, err } diff --git a/flyteadmin/pkg/manager/impl/task_execution_manager.go b/flyteadmin/pkg/manager/impl/task_execution_manager.go index f8b8e12e21..801d612ab2 100644 --- a/flyteadmin/pkg/manager/impl/task_execution_manager.go +++ b/flyteadmin/pkg/manager/impl/task_execution_manager.go @@ -56,15 +56,15 @@ type TaskExecutionManager struct { } func getTaskExecutionContext(ctx context.Context, identifier *core.TaskExecutionIdentifier) context.Context { - ctx = getNodeExecutionContext(ctx, identifier.NodeExecutionId) - return contextutils.WithTaskID(ctx, fmt.Sprintf("%s-%v", identifier.TaskId.Name, identifier.RetryAttempt)) + ctx = getNodeExecutionContext(ctx, identifier.GetNodeExecutionId()) + return contextutils.WithTaskID(ctx, fmt.Sprintf("%s-%v", identifier.GetTaskId().GetName(), identifier.GetRetryAttempt())) } func (m *TaskExecutionManager) createTaskExecution( ctx context.Context, request *admin.TaskExecutionEventRequest) ( models.TaskExecution, error) { - nodeExecutionID := request.Event.ParentNodeExecutionId + nodeExecutionID := request.GetEvent().GetParentNodeExecutionId() nodeExecutionExists, err := m.db.NodeExecutionRepo().Exists(ctx, repoInterfaces.NodeExecutionResource{ NodeExecutionIdentifier: nodeExecutionID, }) @@ -88,19 +88,19 @@ func (m *TaskExecutionManager) createTaskExecution( StorageClient: m.storageClient, }) if err != nil { - logger.Debugf(ctx, "failed to transform task execution %+v into database model: %v", request.Event.TaskId, err) + logger.Debugf(ctx, "failed to transform task execution %+v into 
database model: %v", request.GetEvent().GetTaskId(), err) return models.TaskExecution{}, err } if err := m.db.TaskExecutionRepo().Create(ctx, *taskExecutionModel); err != nil { logger.Debugf(ctx, "Failed to create task execution with task id [%+v] with err %v", - request.Event.TaskId, err) + request.GetEvent().GetTaskId(), err) return models.TaskExecution{}, err } m.metrics.TaskExecutionsCreated.Inc() m.metrics.ClosureSizeBytes.Observe(float64(len(taskExecutionModel.Closure))) - logger.Debugf(ctx, "created task execution: %+v", request.Event.TaskId) + logger.Debugf(ctx, "created task execution: %+v", request.GetEvent().GetTaskId()) return *taskExecutionModel, nil } @@ -111,14 +111,14 @@ func (m *TaskExecutionManager) updateTaskExecutionModelState( err := transformers.UpdateTaskExecutionModel(ctx, request, existingTaskExecution, m.config.ApplicationConfiguration().GetRemoteDataConfig().InlineEventDataPolicy, m.storageClient) if err != nil { - logger.Debugf(ctx, "failed to update task execution model [%+v] with err: %v", request.Event.TaskId, err) + logger.Debugf(ctx, "failed to update task execution model [%+v] with err: %v", request.GetEvent().GetTaskId(), err) return models.TaskExecution{}, err } err = m.db.TaskExecutionRepo().Update(ctx, *existingTaskExecution) if err != nil { logger.Debugf(ctx, "Failed to update task execution with task id [%+v] and task execution model [%+v] with err %v", - request.Event.TaskId, existingTaskExecution, err) + request.GetEvent().GetTaskId(), existingTaskExecution, err) return models.TaskExecution{}, err } @@ -132,20 +132,20 @@ func (m *TaskExecutionManager) CreateTaskExecutionEvent(ctx context.Context, req return nil, err } - if err := validation.ValidateClusterForExecutionID(ctx, m.db, request.Event.ParentNodeExecutionId.ExecutionId, request.Event.ProducerId); err != nil { + if err := validation.ValidateClusterForExecutionID(ctx, m.db, request.GetEvent().GetParentNodeExecutionId().GetExecutionId(), 
request.GetEvent().GetProducerId()); err != nil { return nil, err } // Get the parent node execution, if none found a MissingEntityError will be returned - nodeExecutionID := request.Event.ParentNodeExecutionId + nodeExecutionID := request.GetEvent().GetParentNodeExecutionId() taskExecutionID := &core.TaskExecutionIdentifier{ - TaskId: request.Event.TaskId, + TaskId: request.GetEvent().GetTaskId(), NodeExecutionId: nodeExecutionID, - RetryAttempt: request.Event.RetryAttempt, + RetryAttempt: request.GetEvent().GetRetryAttempt(), } ctx = getTaskExecutionContext(ctx, taskExecutionID) logger.Debugf(ctx, "Received task execution event for [%+v] transitioning to phase [%v]", - taskExecutionID, request.Event.Phase) + taskExecutionID, request.GetEvent().GetPhase()) // See if the task execution exists // - if it does check if the new phase is applicable and then update @@ -166,20 +166,20 @@ func (m *TaskExecutionManager) CreateTaskExecutionEvent(ctx context.Context, req return &admin.TaskExecutionEventResponse{}, nil } - if taskExecutionModel.Phase == request.Event.Phase.String() && - taskExecutionModel.PhaseVersion >= request.Event.PhaseVersion { + if taskExecutionModel.Phase == request.GetEvent().GetPhase().String() && + taskExecutionModel.PhaseVersion >= request.GetEvent().GetPhaseVersion() { logger.Debugf(ctx, "have already recorded task execution phase %s (version: %d) for %v", - request.Event.Phase.String(), request.Event.PhaseVersion, taskExecutionID) + request.GetEvent().GetPhase().String(), request.GetEvent().GetPhaseVersion(), taskExecutionID) return nil, errors.NewFlyteAdminErrorf(codes.AlreadyExists, "have already recorded task execution phase %s (version: %d) for %v", - request.Event.Phase.String(), request.Event.PhaseVersion, taskExecutionID) + request.GetEvent().GetPhase().String(), request.GetEvent().GetPhaseVersion(), taskExecutionID) } currentPhase := core.TaskExecution_Phase(core.TaskExecution_Phase_value[taskExecutionModel.Phase]) if 
common.IsTaskExecutionTerminal(currentPhase) { // Cannot update a terminal execution. - curPhase := request.Event.Phase.String() - errorMsg := fmt.Sprintf("invalid phase change from %v to %v for task execution %v", taskExecutionModel.Phase, request.Event.Phase, taskExecutionID) + curPhase := request.GetEvent().GetPhase().String() + errorMsg := fmt.Sprintf("invalid phase change from %v to %v for task execution %v", taskExecutionModel.Phase, request.GetEvent().GetPhase(), taskExecutionID) logger.Warnf(ctx, errorMsg) return nil, errors.NewAlreadyInTerminalStateError(ctx, errorMsg, curPhase) } @@ -191,49 +191,49 @@ func (m *TaskExecutionManager) CreateTaskExecutionEvent(ctx context.Context, req return nil, err } - if request.Event.Phase == core.TaskExecution_RUNNING && request.Event.PhaseVersion == 0 { // TODO: need to be careful about missing inc/decs + if request.GetEvent().GetPhase() == core.TaskExecution_RUNNING && request.GetEvent().GetPhaseVersion() == 0 { // TODO: need to be careful about missing inc/decs m.metrics.ActiveTaskExecutions.Inc() - } else if common.IsTaskExecutionTerminal(request.Event.Phase) && request.Event.PhaseVersion == 0 { + } else if common.IsTaskExecutionTerminal(request.GetEvent().GetPhase()) && request.GetEvent().GetPhaseVersion() == 0 { m.metrics.ActiveTaskExecutions.Dec() - m.metrics.TaskExecutionsTerminated.Inc(contextutils.WithPhase(ctx, request.Event.Phase.String())) - if request.Event.GetOutputData() != nil { - m.metrics.TaskExecutionOutputBytes.Observe(float64(proto.Size(request.Event.GetOutputData()))) + m.metrics.TaskExecutionsTerminated.Inc(contextutils.WithPhase(ctx, request.GetEvent().GetPhase().String())) + if request.GetEvent().GetOutputData() != nil { + m.metrics.TaskExecutionOutputBytes.Observe(float64(proto.Size(request.GetEvent().GetOutputData()))) } } if err = m.notificationClient.Publish(ctx, proto.MessageName(request), request); err != nil { m.metrics.PublishEventError.Inc() - logger.Infof(ctx, "error publishing event 
[%+v] with err: [%v]", request.RequestId, err) + logger.Infof(ctx, "error publishing event [%+v] with err: [%v]", request.GetRequestId(), err) } go func() { ceCtx := context.TODO() if err := m.cloudEventsPublisher.Publish(ceCtx, proto.MessageName(request), request); err != nil { - logger.Errorf(ctx, "error publishing cloud event [%+v] with err: [%v]", request.RequestId, err) + logger.Errorf(ctx, "error publishing cloud event [%+v] with err: [%v]", request.GetRequestId(), err) } }() m.metrics.TaskExecutionEventsCreated.Inc() - logger.Debugf(ctx, "Successfully recorded task execution event [%v]", request.Event) + logger.Debugf(ctx, "Successfully recorded task execution event [%v]", request.GetEvent()) // TODO: we will want to return some scope information here soon! return &admin.TaskExecutionEventResponse{}, nil } func (m *TaskExecutionManager) GetTaskExecution( ctx context.Context, request *admin.TaskExecutionGetRequest) (*admin.TaskExecution, error) { - err := validation.ValidateTaskExecutionIdentifier(request.Id) + err := validation.ValidateTaskExecutionIdentifier(request.GetId()) if err != nil { - logger.Debugf(ctx, "Failed to validate GetTaskExecution [%+v] with err: %v", request.Id, err) + logger.Debugf(ctx, "Failed to validate GetTaskExecution [%+v] with err: %v", request.GetId(), err) return nil, err } - ctx = getTaskExecutionContext(ctx, request.Id) - taskExecutionModel, err := util.GetTaskExecutionModel(ctx, m.db, request.Id) + ctx = getTaskExecutionContext(ctx, request.GetId()) + taskExecutionModel, err := util.GetTaskExecutionModel(ctx, m.db, request.GetId()) if err != nil { return nil, err } taskExecution, err := transformers.FromTaskExecutionModel(*taskExecutionModel, transformers.DefaultExecutionTransformerOptions) if err != nil { - logger.Debugf(ctx, "Failed to transform task execution model [%+v] to proto: %v", request.Id, err) + logger.Debugf(ctx, "Failed to transform task execution model [%+v] to proto: %v", request.GetId(), err) return nil, err } 
return taskExecution, nil @@ -245,27 +245,27 @@ func (m *TaskExecutionManager) ListTaskExecutions( logger.Debugf(ctx, "ListTaskExecutions request [%+v] is invalid: %v", request, err) return nil, err } - ctx = getNodeExecutionContext(ctx, request.NodeExecutionId) + ctx = getNodeExecutionContext(ctx, request.GetNodeExecutionId()) - identifierFilters, err := util.GetNodeExecutionIdentifierFilters(ctx, request.NodeExecutionId, common.TaskExecution) + identifierFilters, err := util.GetNodeExecutionIdentifierFilters(ctx, request.GetNodeExecutionId(), common.TaskExecution) if err != nil { return nil, err } - filters, err := util.AddRequestFilters(request.Filters, common.TaskExecution, identifierFilters) + filters, err := util.AddRequestFilters(request.GetFilters(), common.TaskExecution, identifierFilters) if err != nil { return nil, err } - sortParameter, err := common.NewSortParameter(request.SortBy, models.TaskExecutionColumns) + sortParameter, err := common.NewSortParameter(request.GetSortBy(), models.TaskExecutionColumns) if err != nil { return nil, err } - offset, err := validation.ValidateToken(request.Token) + offset, err := validation.ValidateToken(request.GetToken()) if err != nil { return nil, errors.NewFlyteAdminErrorf(codes.InvalidArgument, - "invalid pagination token %s for ListTaskExecutions", request.Token) + "invalid pagination token %s for ListTaskExecutions", request.GetToken()) } joinTableEntities := make(map[common.Entity]bool) for _, filter := range filters { @@ -275,7 +275,7 @@ func (m *TaskExecutionManager) ListTaskExecutions( output, err := m.db.TaskExecutionRepo().List(ctx, repoInterfaces.ListResourceInput{ InlineFilters: filters, Offset: offset, - Limit: int(request.Limit), + Limit: int(request.GetLimit()), SortParameter: sortParameter, JoinTableEntities: joinTableEntities, }) @@ -292,7 +292,7 @@ func (m *TaskExecutionManager) ListTaskExecutions( return nil, err } var token string - if len(taskExecutionList) == int(request.Limit) { + if 
len(taskExecutionList) == int(request.GetLimit()) { token = strconv.Itoa(offset + len(taskExecutionList)) } return &admin.TaskExecutionList{ @@ -303,16 +303,16 @@ func (m *TaskExecutionManager) ListTaskExecutions( func (m *TaskExecutionManager) GetTaskExecutionData( ctx context.Context, request *admin.TaskExecutionGetDataRequest) (*admin.TaskExecutionGetDataResponse, error) { - if err := validation.ValidateTaskExecutionIdentifier(request.Id); err != nil { - logger.Debugf(ctx, "Invalid identifier [%+v]: %v", request.Id, err) + if err := validation.ValidateTaskExecutionIdentifier(request.GetId()); err != nil { + logger.Debugf(ctx, "Invalid identifier [%+v]: %v", request.GetId(), err) } - ctx = getTaskExecutionContext(ctx, request.Id) + ctx = getTaskExecutionContext(ctx, request.GetId()) taskExecution, err := m.GetTaskExecution(ctx, &admin.TaskExecutionGetRequest{ - Id: request.Id, + Id: request.GetId(), }) if err != nil { logger.Debugf(ctx, "Failed to get task execution with id [%+v] with err %v", - request.Id, err) + request.GetId(), err) return nil, err } @@ -322,7 +322,7 @@ func (m *TaskExecutionManager) GetTaskExecutionData( group.Go(func() error { var err error inputs, inputURLBlob, err = util.GetInputs(groupCtx, m.urlData, m.config.ApplicationConfiguration().GetRemoteDataConfig(), - m.storageClient, taskExecution.InputUri) + m.storageClient, taskExecution.GetInputUri()) return err }) @@ -331,7 +331,7 @@ func (m *TaskExecutionManager) GetTaskExecutionData( group.Go(func() error { var err error outputs, outputURLBlob, err = util.GetOutputs(groupCtx, m.urlData, m.config.ApplicationConfiguration().GetRemoteDataConfig(), - m.storageClient, taskExecution.Closure) + m.storageClient, taskExecution.GetClosure()) return err }) @@ -345,14 +345,14 @@ func (m *TaskExecutionManager) GetTaskExecutionData( Outputs: outputURLBlob, FullInputs: inputs, FullOutputs: outputs, - FlyteUrls: common.FlyteURLsFromTaskExecutionID(request.Id, false), + FlyteUrls: 
common.FlyteURLsFromTaskExecutionID(request.GetId(), false), } - m.metrics.TaskExecutionInputBytes.Observe(float64(response.Inputs.Bytes)) - if response.Outputs.Bytes > 0 { - m.metrics.TaskExecutionOutputBytes.Observe(float64(response.Outputs.Bytes)) - } else if response.FullOutputs != nil { - m.metrics.TaskExecutionOutputBytes.Observe(float64(proto.Size(response.FullOutputs))) + m.metrics.TaskExecutionInputBytes.Observe(float64(response.GetInputs().GetBytes())) + if response.GetOutputs().GetBytes() > 0 { + m.metrics.TaskExecutionOutputBytes.Observe(float64(response.GetOutputs().GetBytes())) + } else if response.GetFullOutputs() != nil { + m.metrics.TaskExecutionOutputBytes.Observe(float64(proto.Size(response.GetFullOutputs()))) } return response, nil } diff --git a/flyteadmin/pkg/manager/impl/task_execution_manager_test.go b/flyteadmin/pkg/manager/impl/task_execution_manager_test.go index 7e2a14131e..939086d63d 100644 --- a/flyteadmin/pkg/manager/impl/task_execution_manager_test.go +++ b/flyteadmin/pkg/manager/impl/task_execution_manager_test.go @@ -72,9 +72,9 @@ func addGetWorkflowExecutionCallback(repository interfaces.Repository) { func(ctx context.Context, input interfaces.Identifier) (models.Execution, error) { return models.Execution{ ExecutionKey: models.ExecutionKey{ - Project: sampleNodeExecID.ExecutionId.Project, - Domain: sampleNodeExecID.ExecutionId.Domain, - Name: sampleNodeExecID.ExecutionId.Name, + Project: sampleNodeExecID.GetExecutionId().GetProject(), + Domain: sampleNodeExecID.GetExecutionId().GetDomain(), + Name: sampleNodeExecID.GetExecutionId().GetName(), }, Cluster: "propeller", }, nil @@ -88,11 +88,11 @@ func addGetNodeExecutionCallback(repository interfaces.Repository) { func(ctx context.Context, input interfaces.NodeExecutionResource) (models.NodeExecution, error) { return models.NodeExecution{ NodeExecutionKey: models.NodeExecutionKey{ - NodeID: sampleNodeExecID.NodeId, + NodeID: sampleNodeExecID.GetNodeId(), ExecutionKey: 
models.ExecutionKey{ - Project: sampleNodeExecID.ExecutionId.Project, - Domain: sampleNodeExecID.ExecutionId.Domain, - Name: sampleNodeExecID.ExecutionId.Name, + Project: sampleNodeExecID.GetExecutionId().GetProject(), + Domain: sampleNodeExecID.GetExecutionId().GetDomain(), + Name: sampleNodeExecID.GetExecutionId().GetName(), }, }, }, nil @@ -105,10 +105,10 @@ func addGetTaskCallback(repository interfaces.Repository) { func(input interfaces.Identifier) (models.Task, error) { return models.Task{ TaskKey: models.TaskKey{ - Project: sampleTaskID.Project, - Domain: sampleTaskID.Domain, - Name: sampleTaskID.Name, - Version: sampleTaskID.Version, + Project: sampleTaskID.GetProject(), + Domain: sampleTaskID.GetDomain(), + Name: sampleTaskID.GetName(), + Version: sampleTaskID.GetVersion(), }, }, nil }, @@ -126,15 +126,15 @@ func TestCreateTaskEvent(t *testing.T) { repository.TaskExecutionRepo().(*repositoryMocks.MockTaskExecutionRepo).SetGetCallback( func(ctx context.Context, input interfaces.GetTaskExecutionInput) (models.TaskExecution, error) { getTaskCalled = true - assert.Equal(t, core.ResourceType_TASK, input.TaskExecutionID.TaskId.ResourceType) - assert.Equal(t, "task-id", input.TaskExecutionID.TaskId.Name) - assert.Equal(t, "project", input.TaskExecutionID.TaskId.Project) - assert.Equal(t, "domain", input.TaskExecutionID.TaskId.Domain) - assert.Equal(t, "task-v", input.TaskExecutionID.TaskId.Version) - assert.Equal(t, "node-id", input.TaskExecutionID.NodeExecutionId.NodeId) - assert.Equal(t, "project", input.TaskExecutionID.NodeExecutionId.ExecutionId.Project) - assert.Equal(t, "domain", input.TaskExecutionID.NodeExecutionId.ExecutionId.Domain) - assert.Equal(t, "name", input.TaskExecutionID.NodeExecutionId.ExecutionId.Name) + assert.Equal(t, core.ResourceType_TASK, input.TaskExecutionID.GetTaskId().GetResourceType()) + assert.Equal(t, "task-id", input.TaskExecutionID.GetTaskId().GetName()) + assert.Equal(t, "project", 
input.TaskExecutionID.GetTaskId().GetProject()) + assert.Equal(t, "domain", input.TaskExecutionID.GetTaskId().GetDomain()) + assert.Equal(t, "task-v", input.TaskExecutionID.GetTaskId().GetVersion()) + assert.Equal(t, "node-id", input.TaskExecutionID.GetNodeExecutionId().GetNodeId()) + assert.Equal(t, "project", input.TaskExecutionID.GetNodeExecutionId().GetExecutionId().GetProject()) + assert.Equal(t, "domain", input.TaskExecutionID.GetNodeExecutionId().GetExecutionId().GetDomain()) + assert.Equal(t, "name", input.TaskExecutionID.GetNodeExecutionId().GetExecutionId().GetName()) return models.TaskExecution{}, flyteAdminErrors.NewFlyteAdminError(codes.NotFound, "foo") }) @@ -153,17 +153,17 @@ func TestCreateTaskEvent(t *testing.T) { assert.Equal(t, models.TaskExecution{ TaskExecutionKey: models.TaskExecutionKey{ TaskKey: models.TaskKey{ - Project: sampleTaskID.Project, - Domain: sampleTaskID.Domain, - Name: sampleTaskID.Name, - Version: sampleTaskID.Version, + Project: sampleTaskID.GetProject(), + Domain: sampleTaskID.GetDomain(), + Name: sampleTaskID.GetName(), + Version: sampleTaskID.GetVersion(), }, NodeExecutionKey: models.NodeExecutionKey{ - NodeID: sampleNodeExecID.NodeId, + NodeID: sampleNodeExecID.GetNodeId(), ExecutionKey: models.ExecutionKey{ - Project: sampleNodeExecID.ExecutionId.Project, - Domain: sampleNodeExecID.ExecutionId.Domain, - Name: sampleNodeExecID.ExecutionId.Name, + Project: sampleNodeExecID.GetExecutionId().GetProject(), + Domain: sampleNodeExecID.GetExecutionId().GetDomain(), + Name: sampleNodeExecID.GetExecutionId().GetName(), }, }, RetryAttempt: &retryAttemptValue, @@ -219,17 +219,17 @@ func TestCreateTaskEvent_Update(t *testing.T) { return models.TaskExecution{ TaskExecutionKey: models.TaskExecutionKey{ TaskKey: models.TaskKey{ - Project: sampleTaskID.Project, - Domain: sampleTaskID.Domain, - Name: sampleTaskID.Name, - Version: sampleTaskID.Version, + Project: sampleTaskID.GetProject(), + Domain: sampleTaskID.GetDomain(), + Name: 
sampleTaskID.GetName(), + Version: sampleTaskID.GetVersion(), }, NodeExecutionKey: models.NodeExecutionKey{ - NodeID: sampleNodeExecID.NodeId, + NodeID: sampleNodeExecID.GetNodeId(), ExecutionKey: models.ExecutionKey{ - Project: sampleNodeExecID.ExecutionId.Project, - Domain: sampleNodeExecID.ExecutionId.Domain, - Name: sampleNodeExecID.ExecutionId.Name, + Project: sampleNodeExecID.GetExecutionId().GetProject(), + Domain: sampleNodeExecID.GetExecutionId().GetDomain(), + Name: sampleNodeExecID.GetExecutionId().GetName(), }, }, }, @@ -266,17 +266,17 @@ func TestCreateTaskEvent_Update(t *testing.T) { assert.EqualValues(t, models.TaskExecution{ TaskExecutionKey: models.TaskExecutionKey{ TaskKey: models.TaskKey{ - Project: sampleTaskID.Project, - Domain: sampleTaskID.Domain, - Name: sampleTaskID.Name, - Version: sampleTaskID.Version, + Project: sampleTaskID.GetProject(), + Domain: sampleTaskID.GetDomain(), + Name: sampleTaskID.GetName(), + Version: sampleTaskID.GetVersion(), }, NodeExecutionKey: models.NodeExecutionKey{ - NodeID: sampleNodeExecID.NodeId, + NodeID: sampleNodeExecID.GetNodeId(), ExecutionKey: models.ExecutionKey{ - Project: sampleNodeExecID.ExecutionId.Project, - Domain: sampleNodeExecID.ExecutionId.Domain, - Name: sampleNodeExecID.ExecutionId.Name, + Project: sampleNodeExecID.GetExecutionId().GetProject(), + Domain: sampleNodeExecID.GetExecutionId().GetDomain(), + Name: sampleNodeExecID.GetExecutionId().GetName(), }, }, }, @@ -368,17 +368,17 @@ func TestCreateTaskEvent_UpdateDatabaseError(t *testing.T) { return models.TaskExecution{ TaskExecutionKey: models.TaskExecutionKey{ TaskKey: models.TaskKey{ - Project: sampleTaskID.Project, - Domain: sampleTaskID.Domain, - Name: sampleTaskID.Name, - Version: sampleTaskID.Version, + Project: sampleTaskID.GetProject(), + Domain: sampleTaskID.GetDomain(), + Name: sampleTaskID.GetName(), + Version: sampleTaskID.GetVersion(), }, NodeExecutionKey: models.NodeExecutionKey{ - NodeID: sampleNodeExecID.NodeId, + NodeID: 
sampleNodeExecID.GetNodeId(), ExecutionKey: models.ExecutionKey{ - Project: sampleNodeExecID.ExecutionId.Project, - Domain: sampleNodeExecID.ExecutionId.Domain, - Name: sampleNodeExecID.ExecutionId.Name, + Project: sampleNodeExecID.GetExecutionId().GetProject(), + Domain: sampleNodeExecID.GetExecutionId().GetDomain(), + Name: sampleNodeExecID.GetExecutionId().GetName(), }, }, RetryAttempt: &retryAttemptValue, @@ -407,17 +407,17 @@ func TestCreateTaskEvent_UpdateTerminalEventError(t *testing.T) { return models.TaskExecution{ TaskExecutionKey: models.TaskExecutionKey{ TaskKey: models.TaskKey{ - Project: sampleTaskID.Project, - Domain: sampleTaskID.Domain, - Name: sampleTaskID.Name, - Version: sampleTaskID.Version, + Project: sampleTaskID.GetProject(), + Domain: sampleTaskID.GetDomain(), + Name: sampleTaskID.GetName(), + Version: sampleTaskID.GetVersion(), }, NodeExecutionKey: models.NodeExecutionKey{ - NodeID: sampleNodeExecID.NodeId, + NodeID: sampleNodeExecID.GetNodeId(), ExecutionKey: models.ExecutionKey{ - Project: sampleNodeExecID.ExecutionId.Project, - Domain: sampleNodeExecID.ExecutionId.Domain, - Name: sampleNodeExecID.ExecutionId.Name, + Project: sampleNodeExecID.GetExecutionId().GetProject(), + Domain: sampleNodeExecID.GetExecutionId().GetDomain(), + Name: sampleNodeExecID.GetExecutionId().GetName(), }, }, RetryAttempt: &retryAttemptValue, @@ -458,17 +458,17 @@ func TestCreateTaskEvent_PhaseVersionChange(t *testing.T) { return models.TaskExecution{ TaskExecutionKey: models.TaskExecutionKey{ TaskKey: models.TaskKey{ - Project: sampleTaskID.Project, - Domain: sampleTaskID.Domain, - Name: sampleTaskID.Name, - Version: sampleTaskID.Version, + Project: sampleTaskID.GetProject(), + Domain: sampleTaskID.GetDomain(), + Name: sampleTaskID.GetName(), + Version: sampleTaskID.GetVersion(), }, NodeExecutionKey: models.NodeExecutionKey{ - NodeID: sampleNodeExecID.NodeId, + NodeID: sampleNodeExecID.GetNodeId(), ExecutionKey: models.ExecutionKey{ - Project: 
sampleNodeExecID.ExecutionId.Project, - Domain: sampleNodeExecID.ExecutionId.Domain, - Name: sampleNodeExecID.ExecutionId.Name, + Project: sampleNodeExecID.GetExecutionId().GetProject(), + Domain: sampleNodeExecID.GetExecutionId().GetDomain(), + Name: sampleNodeExecID.GetExecutionId().GetName(), }, }, }, @@ -526,23 +526,23 @@ func TestGetTaskExecution(t *testing.T) { repository.TaskExecutionRepo().(*repositoryMocks.MockTaskExecutionRepo).SetGetCallback( func(ctx context.Context, input interfaces.GetTaskExecutionInput) (models.TaskExecution, error) { getTaskCalled = true - assert.Equal(t, sampleTaskID, input.TaskExecutionID.TaskId) - assert.Equal(t, sampleNodeExecID, input.TaskExecutionID.NodeExecutionId) - assert.Equal(t, uint32(1), input.TaskExecutionID.RetryAttempt) + assert.Equal(t, sampleTaskID, input.TaskExecutionID.GetTaskId()) + assert.Equal(t, sampleNodeExecID, input.TaskExecutionID.GetNodeExecutionId()) + assert.Equal(t, uint32(1), input.TaskExecutionID.GetRetryAttempt()) return models.TaskExecution{ TaskExecutionKey: models.TaskExecutionKey{ TaskKey: models.TaskKey{ - Project: sampleTaskID.Project, - Domain: sampleTaskID.Domain, - Name: sampleTaskID.Name, - Version: sampleTaskID.Version, + Project: sampleTaskID.GetProject(), + Domain: sampleTaskID.GetDomain(), + Name: sampleTaskID.GetName(), + Version: sampleTaskID.GetVersion(), }, NodeExecutionKey: models.NodeExecutionKey{ - NodeID: sampleNodeExecID.NodeId, + NodeID: sampleNodeExecID.GetNodeId(), ExecutionKey: models.ExecutionKey{ - Project: sampleNodeExecID.ExecutionId.Project, - Domain: sampleNodeExecID.ExecutionId.Domain, - Name: sampleNodeExecID.ExecutionId.Name, + Project: sampleNodeExecID.GetExecutionId().GetProject(), + Domain: sampleNodeExecID.GetExecutionId().GetDomain(), + Name: sampleNodeExecID.GetExecutionId().GetName(), }, }, RetryAttempt: &retryAttemptValue, @@ -581,17 +581,17 @@ func TestGetTaskExecution_TransformerError(t *testing.T) { return models.TaskExecution{ TaskExecutionKey: 
models.TaskExecutionKey{ TaskKey: models.TaskKey{ - Project: sampleTaskID.Project, - Domain: sampleTaskID.Domain, - Name: sampleTaskID.Name, - Version: sampleTaskID.Version, + Project: sampleTaskID.GetProject(), + Domain: sampleTaskID.GetDomain(), + Name: sampleTaskID.GetName(), + Version: sampleTaskID.GetVersion(), }, NodeExecutionKey: models.NodeExecutionKey{ - NodeID: sampleNodeExecID.NodeId, + NodeID: sampleNodeExecID.GetNodeId(), ExecutionKey: models.ExecutionKey{ - Project: sampleNodeExecID.ExecutionId.Project, - Domain: sampleNodeExecID.ExecutionId.Domain, - Name: sampleNodeExecID.ExecutionId.Name, + Project: sampleNodeExecID.GetExecutionId().GetProject(), + Domain: sampleNodeExecID.GetExecutionId().GetDomain(), + Name: sampleNodeExecID.GetExecutionId().GetName(), }, }, RetryAttempt: &retryAttemptValue, @@ -752,7 +752,7 @@ func TestListTaskExecutions(t *testing.T) { }, InputUri: "input-uri.pb", Closure: expectedClosure, - }, taskExecutions.TaskExecutions[0])) + }, taskExecutions.GetTaskExecutions()[0])) assert.True(t, proto.Equal(&admin.TaskExecution{ Id: &core.TaskExecutionIdentifier{ RetryAttempt: secondRetryAttempt, @@ -774,7 +774,7 @@ func TestListTaskExecutions(t *testing.T) { }, InputUri: "input-uri2.pb", Closure: expectedClosure, - }, taskExecutions.TaskExecutions[1])) + }, taskExecutions.GetTaskExecutions()[1])) } func TestListTaskExecutions_Filters(t *testing.T) { @@ -925,7 +925,7 @@ func TestListTaskExecutions_Filters(t *testing.T) { }, InputUri: "input-uri.pb", Closure: expectedClosure, - }, taskExecutions.TaskExecutions[0])) + }, taskExecutions.GetTaskExecutions()[0])) assert.True(t, proto.Equal(&admin.TaskExecution{ Id: &core.TaskExecutionIdentifier{ RetryAttempt: secondRetryAttempt, @@ -947,7 +947,7 @@ func TestListTaskExecutions_Filters(t *testing.T) { }, InputUri: "input-uri2.pb", Closure: expectedClosure, - }, taskExecutions.TaskExecutions[1])) + }, taskExecutions.GetTaskExecutions()[1])) } func TestListTaskExecutions_NoFilters(t *testing.T) 
{ @@ -1049,17 +1049,17 @@ func TestGetTaskExecutionData(t *testing.T) { return models.TaskExecution{ TaskExecutionKey: models.TaskExecutionKey{ TaskKey: models.TaskKey{ - Project: sampleTaskID.Project, - Domain: sampleTaskID.Domain, - Name: sampleTaskID.Name, - Version: sampleTaskID.Version, + Project: sampleTaskID.GetProject(), + Domain: sampleTaskID.GetDomain(), + Name: sampleTaskID.GetName(), + Version: sampleTaskID.GetVersion(), }, NodeExecutionKey: models.NodeExecutionKey{ - NodeID: sampleNodeExecID.NodeId, + NodeID: sampleNodeExecID.GetNodeId(), ExecutionKey: models.ExecutionKey{ - Project: sampleNodeExecID.ExecutionId.Project, - Domain: sampleNodeExecID.ExecutionId.Domain, - Name: sampleNodeExecID.ExecutionId.Name, + Project: sampleNodeExecID.GetExecutionId().GetProject(), + Domain: sampleNodeExecID.GetExecutionId().GetDomain(), + Name: sampleNodeExecID.GetExecutionId().GetName(), }, }, RetryAttempt: &retryAttemptValue, diff --git a/flyteadmin/pkg/manager/impl/task_manager.go b/flyteadmin/pkg/manager/impl/task_manager.go index 157bcab5cd..7d903e98fb 100644 --- a/flyteadmin/pkg/manager/impl/task_manager.go +++ b/flyteadmin/pkg/manager/impl/task_manager.go @@ -44,44 +44,44 @@ type TaskManager struct { } func getTaskContext(ctx context.Context, identifier *core.Identifier) context.Context { - ctx = contextutils.WithProjectDomain(ctx, identifier.Project, identifier.Domain) - return contextutils.WithTaskID(ctx, identifier.Name) + ctx = contextutils.WithProjectDomain(ctx, identifier.GetProject(), identifier.GetDomain()) + return contextutils.WithTaskID(ctx, identifier.GetName()) } func setDefaults(request *admin.TaskCreateRequest) (*admin.TaskCreateRequest, error) { - if request.Id == nil { + if request.GetId() == nil { return request, errors.NewFlyteAdminError(codes.InvalidArgument, "missing identifier for TaskCreateRequest") } - request.Spec.Template.Id = request.Id + request.Spec.Template.Id = request.GetId() return request, nil } func (t *TaskManager) 
CreateTask( ctx context.Context, request *admin.TaskCreateRequest) (*admin.TaskCreateResponse, error) { - platformTaskResources := util.GetTaskResources(ctx, request.Id, t.resourceManager, t.config.TaskResourceConfiguration()) + platformTaskResources := util.GetTaskResources(ctx, request.GetId(), t.resourceManager, t.config.TaskResourceConfiguration()) if err := validation.ValidateTask(ctx, request, t.db, platformTaskResources, t.config.WhitelistConfiguration(), t.config.ApplicationConfiguration()); err != nil { - logger.Debugf(ctx, "Task [%+v] failed validation with err: %v", request.Id, err) + logger.Debugf(ctx, "Task [%+v] failed validation with err: %v", request.GetId(), err) return nil, err } - ctx = getTaskContext(ctx, request.Id) + ctx = getTaskContext(ctx, request.GetId()) finalizedRequest, err := setDefaults(request) if err != nil { return nil, err } // Compile task and store the compiled version in the database. - compiledTask, err := t.compiler.CompileTask(finalizedRequest.Spec.Template) + compiledTask, err := t.compiler.CompileTask(finalizedRequest.GetSpec().GetTemplate()) if err != nil { - logger.Debugf(ctx, "Failed to compile task with id [%+v] with err %v", request.Id, err) + logger.Debugf(ctx, "Failed to compile task with id [%+v] with err %v", request.GetId(), err) return nil, err } createdAt, err := ptypes.TimestampProto(time.Now()) if err != nil { return nil, errors.NewFlyteAdminErrorf(codes.Internal, - "Failed to serialize CreatedAt: %v when creating task: %+v", err, request.Id) + "Failed to serialize CreatedAt: %v when creating task: %+v", err, request.GetId()) } taskDigest, err := util.GetTaskDigest(ctx, compiledTask) if err != nil { @@ -89,7 +89,7 @@ func (t *TaskManager) CreateTask( return nil, err } // See if a task exists and confirm whether it's an identical task or one that with a separate definition. 
- existingTaskModel, err := util.GetTaskModel(ctx, t.db, request.Spec.Template.Id) + existingTaskModel, err := util.GetTaskModel(ctx, t.db, request.GetSpec().GetTemplate().GetId()) if err == nil { if bytes.Equal(taskDigest, existingTaskModel.Digest) { return nil, errors.NewTaskExistsIdenticalStructureError(ctx, request) @@ -99,7 +99,7 @@ func (t *TaskManager) CreateTask( logger.Errorf(ctx, "failed to transform task from task model") return nil, transformerErr } - return nil, errors.NewTaskExistsDifferentStructureError(ctx, request, existingTask.Closure.GetCompiledTask(), compiledTask) + return nil, errors.NewTaskExistsDifferentStructureError(ctx, request, existingTask.GetClosure().GetCompiledTask(), compiledTask) } taskModel, err := transformers.CreateTaskModel(finalizedRequest, &admin.TaskClosure{ CompiledTask: compiledTask, @@ -111,10 +111,10 @@ func (t *TaskManager) CreateTask( return nil, err } - descriptionModel, err := transformers.CreateDescriptionEntityModel(request.Spec.Description, request.Id) + descriptionModel, err := transformers.CreateDescriptionEntityModel(request.GetSpec().GetDescription(), request.GetId()) if err != nil { logger.Errorf(ctx, - "Failed to transform description model [%+v] with err: %v", request.Spec.Description, err) + "Failed to transform description model [%+v] with err: %v", request.GetSpec().GetDescription(), err) return nil, err } if descriptionModel != nil { @@ -122,15 +122,15 @@ func (t *TaskManager) CreateTask( } err = t.db.TaskRepo().Create(ctx, taskModel, descriptionModel) if err != nil { - logger.Debugf(ctx, "Failed to create task model with id [%+v] with err %v", request.Id, err) + logger.Debugf(ctx, "Failed to create task model with id [%+v] with err %v", request.GetId(), err) return nil, err } t.metrics.ClosureSizeBytes.Observe(float64(len(taskModel.Closure))) - if finalizedRequest.Spec.Template.Metadata != nil { + if finalizedRequest.GetSpec().GetTemplate().GetMetadata() != nil { contextWithRuntimeMeta := 
context.WithValue( - ctx, common.RuntimeTypeKey, finalizedRequest.Spec.Template.Metadata.Runtime.Type.String()) + ctx, common.RuntimeTypeKey, finalizedRequest.GetSpec().GetTemplate().GetMetadata().GetRuntime().GetType().String()) contextWithRuntimeMeta = context.WithValue( - contextWithRuntimeMeta, common.RuntimeVersionKey, finalizedRequest.Spec.Template.Metadata.Runtime.Version) + contextWithRuntimeMeta, common.RuntimeVersionKey, finalizedRequest.GetSpec().GetTemplate().GetMetadata().GetRuntime().GetVersion()) t.metrics.Registered.Inc(contextWithRuntimeMeta) } @@ -138,13 +138,13 @@ func (t *TaskManager) CreateTask( } func (t *TaskManager) GetTask(ctx context.Context, request *admin.ObjectGetRequest) (*admin.Task, error) { - if err := validation.ValidateIdentifier(request.Id, common.Task); err != nil { - logger.Debugf(ctx, "invalid identifier [%+v]: %v", request.Id, err) + if err := validation.ValidateIdentifier(request.GetId(), common.Task); err != nil { + logger.Debugf(ctx, "invalid identifier [%+v]: %v", request.GetId(), err) } - ctx = getTaskContext(ctx, request.Id) - task, err := util.GetTask(ctx, t.db, request.Id) + ctx = getTaskContext(ctx, request.GetId()) + task, err := util.GetTask(ctx, t.db, request.GetId()) if err != nil { - logger.Debugf(ctx, "Failed to get task with id [%+v] with err %v", err, request.Id) + logger.Debugf(ctx, "Failed to get task with id [%+v] with err %v", err, request.GetId()) return nil, err } return task, nil @@ -156,13 +156,13 @@ func (t *TaskManager) ListTasks(ctx context.Context, request *admin.ResourceList logger.Debugf(ctx, "Invalid request [%+v]: %v", request, err) return nil, err } - ctx = contextutils.WithProjectDomain(ctx, request.Id.Project, request.Id.Domain) - ctx = contextutils.WithTaskID(ctx, request.Id.Name) + ctx = contextutils.WithProjectDomain(ctx, request.GetId().GetProject(), request.GetId().GetDomain()) + ctx = contextutils.WithTaskID(ctx, request.GetId().GetName()) spec := util.FilterSpec{ - Project: 
request.Id.Project, - Domain: request.Id.Domain, - Name: request.Id.Name, - RequestFilters: request.Filters, + Project: request.GetId().GetProject(), + Domain: request.GetId().GetDomain(), + Name: request.GetId().GetName(), + RequestFilters: request.GetFilters(), } filters, err := util.GetDbFilters(spec, common.Task) @@ -170,26 +170,26 @@ func (t *TaskManager) ListTasks(ctx context.Context, request *admin.ResourceList return nil, err } - sortParameter, err := common.NewSortParameter(request.SortBy, models.TaskColumns) + sortParameter, err := common.NewSortParameter(request.GetSortBy(), models.TaskColumns) if err != nil { return nil, err } - offset, err := validation.ValidateToken(request.Token) + offset, err := validation.ValidateToken(request.GetToken()) if err != nil { return nil, errors.NewFlyteAdminErrorf(codes.InvalidArgument, - "invalid pagination token %s for ListTasks", request.Token) + "invalid pagination token %s for ListTasks", request.GetToken()) } // And finally, query the database listTasksInput := repoInterfaces.ListResourceInput{ - Limit: int(request.Limit), + Limit: int(request.GetLimit()), Offset: offset, InlineFilters: filters, SortParameter: sortParameter, } output, err := t.db.TaskRepo().List(ctx, listTasksInput) if err != nil { - logger.Debugf(ctx, "Failed to list tasks with id [%+v] with err %v", request.Id, err) + logger.Debugf(ctx, "Failed to list tasks with id [%+v] with err %v", request.GetId(), err) return nil, err } taskList, err := transformers.FromTaskModels(output.Tasks) @@ -200,7 +200,7 @@ func (t *TaskManager) ListTasks(ctx context.Context, request *admin.ResourceList } var token string - if len(taskList) == int(request.Limit) { + if len(taskList) == int(request.GetLimit()) { token = strconv.Itoa(offset + len(taskList)) } return &admin.TaskList{ @@ -217,27 +217,27 @@ func (t *TaskManager) ListUniqueTaskIdentifiers(ctx context.Context, request *ad logger.Debugf(ctx, "invalid request [%+v]: %v", request, err) return nil, err } - ctx 
= contextutils.WithProjectDomain(ctx, request.Project, request.Domain) + ctx = contextutils.WithProjectDomain(ctx, request.GetProject(), request.GetDomain()) filters, err := util.GetDbFilters(util.FilterSpec{ - Project: request.Project, - Domain: request.Domain, + Project: request.GetProject(), + Domain: request.GetDomain(), }, common.Task) if err != nil { return nil, err } - sortParameter, err := common.NewSortParameter(request.SortBy, models.TaskColumns) + sortParameter, err := common.NewSortParameter(request.GetSortBy(), models.TaskColumns) if err != nil { return nil, err } - offset, err := validation.ValidateToken(request.Token) + offset, err := validation.ValidateToken(request.GetToken()) if err != nil { return nil, errors.NewFlyteAdminErrorf(codes.InvalidArgument, - "invalid pagination token %s for ListUniqueTaskIdentifiers", request.Token) + "invalid pagination token %s for ListUniqueTaskIdentifiers", request.GetToken()) } listTasksInput := repoInterfaces.ListResourceInput{ - Limit: int(request.Limit), + Limit: int(request.GetLimit()), Offset: offset, InlineFilters: filters, SortParameter: sortParameter, @@ -246,13 +246,13 @@ func (t *TaskManager) ListUniqueTaskIdentifiers(ctx context.Context, request *ad output, err := t.db.TaskRepo().ListTaskIdentifiers(ctx, listTasksInput) if err != nil { logger.Debugf(ctx, "Failed to list tasks ids with project: %s and domain: %s with err %v", - request.Project, request.Domain, err) + request.GetProject(), request.GetDomain(), err) return nil, err } idList := transformers.FromTaskModelsToIdentifiers(output.Tasks) var token string - if len(idList) == int(request.Limit) { + if len(idList) == int(request.GetLimit()) { token = strconv.Itoa(offset + len(idList)) } return &admin.NamedEntityIdentifierList{ diff --git a/flyteadmin/pkg/manager/impl/task_manager_test.go b/flyteadmin/pkg/manager/impl/task_manager_test.go index 4308fc2626..1301444ceb 100644 --- a/flyteadmin/pkg/manager/impl/task_manager_test.go +++ 
b/flyteadmin/pkg/manager/impl/task_manager_test.go @@ -172,11 +172,11 @@ func TestGetTask(t *testing.T) { Id: &taskIdentifier, }) assert.NoError(t, err) - assert.Equal(t, "project", task.Id.Project) - assert.Equal(t, "domain", task.Id.Domain) - assert.Equal(t, "name", task.Id.Name) - assert.Equal(t, "version", task.Id.Version) - assert.True(t, proto.Equal(testutils.GetTaskClosure(), task.Closure)) + assert.Equal(t, "project", task.GetId().GetProject()) + assert.Equal(t, "domain", task.GetId().GetDomain()) + assert.Equal(t, "name", task.GetId().GetName()) + assert.Equal(t, "version", task.GetId().GetVersion()) + assert.True(t, proto.Equal(testutils.GetTaskClosure(), task.GetClosure())) } func TestGetTask_DatabaseError(t *testing.T) { @@ -287,18 +287,18 @@ func TestListTasks(t *testing.T) { }) assert.NoError(t, err) assert.NotNil(t, taskList) - assert.Len(t, taskList.Tasks, 2) + assert.Len(t, taskList.GetTasks(), 2) - for idx, task := range taskList.Tasks { - assert.Equal(t, projectValue, task.Id.Project) - assert.Equal(t, domainValue, task.Id.Domain) - assert.Equal(t, nameValue, task.Id.Name) - assert.Equal(t, fmt.Sprintf("version %v", idx), task.Id.Version) + for idx, task := range taskList.GetTasks() { + assert.Equal(t, projectValue, task.GetId().GetProject()) + assert.Equal(t, domainValue, task.GetId().GetDomain()) + assert.Equal(t, nameValue, task.GetId().GetName()) + assert.Equal(t, fmt.Sprintf("version %v", idx), task.GetId().GetVersion()) assert.True(t, proto.Equal(&admin.TaskClosure{ CreatedAt: testutils.MockCreatedAtProto, - }, task.Closure)) + }, task.GetClosure())) } - assert.Equal(t, "2", taskList.Token) + assert.Equal(t, "2", taskList.GetToken()) } func TestListTasks_MissingParameters(t *testing.T) { @@ -401,6 +401,6 @@ func TestListUniqueTaskIdentifiers(t *testing.T) { }) assert.NoError(t, err) - assert.Equal(t, 2, len(resp.Entities)) - assert.Empty(t, resp.Token) + assert.Equal(t, 2, len(resp.GetEntities())) + assert.Empty(t, resp.GetToken()) } diff 
--git a/flyteadmin/pkg/manager/impl/testutils/mock_closures.go b/flyteadmin/pkg/manager/impl/testutils/mock_closures.go index 6554bd6403..945310daf1 100644 --- a/flyteadmin/pkg/manager/impl/testutils/mock_closures.go +++ b/flyteadmin/pkg/manager/impl/testutils/mock_closures.go @@ -17,7 +17,7 @@ var MockCreatedAtProto, _ = ptypes.TimestampProto(MockCreatedAtValue) func GetTaskClosure() *admin.TaskClosure { return &admin.TaskClosure{ CompiledTask: &core.CompiledTask{ - Template: GetValidTaskRequest().Spec.Template, + Template: GetValidTaskRequest().GetSpec().GetTemplate(), }, CreatedAt: MockCreatedAtProto, } @@ -32,11 +32,11 @@ func GetWorkflowClosure() *admin.WorkflowClosure { return &admin.WorkflowClosure{ CompiledWorkflow: &core.CompiledWorkflowClosure{ Primary: &core.CompiledWorkflow{ - Template: GetWorkflowRequest().Spec.Template, + Template: GetWorkflowRequest().GetSpec().GetTemplate(), }, Tasks: []*core.CompiledTask{ { - Template: GetValidTaskRequest().Spec.Template, + Template: GetValidTaskRequest().GetSpec().GetTemplate(), }, }, }, diff --git a/flyteadmin/pkg/manager/impl/testutils/mock_requests.go b/flyteadmin/pkg/manager/impl/testutils/mock_requests.go index b3d01897f1..64ab792220 100644 --- a/flyteadmin/pkg/manager/impl/testutils/mock_requests.go +++ b/flyteadmin/pkg/manager/impl/testutils/mock_requests.go @@ -328,10 +328,10 @@ func GetSampleLpSpecForTest() *admin.LaunchPlanSpec { } func GetWorkflowRequestInterfaceBytes() []byte { - bytes, _ := proto.Marshal(GetWorkflowRequest().Spec.Template.Interface) + bytes, _ := proto.Marshal(GetWorkflowRequest().GetSpec().GetTemplate().GetInterface()) return bytes } func GetWorkflowRequestInterface() *core.TypedInterface { - return GetWorkflowRequest().Spec.Template.Interface + return GetWorkflowRequest().GetSpec().GetTemplate().GetInterface() } diff --git a/flyteadmin/pkg/manager/impl/util/data.go b/flyteadmin/pkg/manager/impl/util/data.go index d6fe14af2e..1827cfd167 100644 --- 
a/flyteadmin/pkg/manager/impl/util/data.go +++ b/flyteadmin/pkg/manager/impl/util/data.go @@ -21,7 +21,7 @@ const ( func shouldFetchData(config *runtimeInterfaces.RemoteDataConfig, urlBlob *admin.UrlBlob) bool { return config.Scheme == common.Local || config.Scheme == common.None || config.MaxSizeInBytes == 0 || - urlBlob.Bytes < config.MaxSizeInBytes + urlBlob.GetBytes() < config.MaxSizeInBytes } func shouldFetchOutputData(config *runtimeInterfaces.RemoteDataConfig, urlBlob *admin.UrlBlob, outputURI string) bool { diff --git a/flyteadmin/pkg/manager/impl/util/digests.go b/flyteadmin/pkg/manager/impl/util/digests.go index 2846490f71..6fd31273c2 100644 --- a/flyteadmin/pkg/manager/impl/util/digests.go +++ b/flyteadmin/pkg/manager/impl/util/digests.go @@ -17,9 +17,9 @@ func GetLaunchPlanDigest(ctx context.Context, launchPlan *admin.LaunchPlan) ([]b launchPlanDigest, err := pbhash.ComputeHash(ctx, launchPlan) if err != nil { logger.Warningf(ctx, "failed to hash launch plan [%+v] to digest with err %v", - launchPlan.Id, err) + launchPlan.GetId(), err) return nil, errors.NewFlyteAdminErrorf(codes.Internal, - "failed to hash launch plan [%+v] to digest with err %v", launchPlan.Id, err) + "failed to hash launch plan [%+v] to digest with err %v", launchPlan.GetId(), err) } return launchPlanDigest, nil @@ -30,9 +30,9 @@ func GetTaskDigest(ctx context.Context, task *core.CompiledTask) ([]byte, error) taskDigest, err := pbhash.ComputeHash(ctx, task) if err != nil { logger.Warningf(ctx, "failed to hash task [%+v] to digest with err %v", - task.Template.Id, err) + task.GetTemplate().GetId(), err) return nil, errors.NewFlyteAdminErrorf(codes.Internal, - "failed to hash task [%+v] to digest with err %v", task.Template.Id, err) + "failed to hash task [%+v] to digest with err %v", task.GetTemplate().GetId(), err) } return taskDigest, nil @@ -43,9 +43,9 @@ func GetWorkflowDigest(ctx context.Context, workflowClosure *core.CompiledWorkfl workflowDigest, err := pbhash.ComputeHash(ctx, 
workflowClosure) if err != nil { logger.Warningf(ctx, "failed to hash workflow [%+v] to digest with err %v", - workflowClosure.Primary.Template.Id, err) + workflowClosure.GetPrimary().GetTemplate().GetId(), err) return nil, errors.NewFlyteAdminErrorf(codes.Internal, - "failed to hash workflow [%+v] to digest with err %v", workflowClosure.Primary.Template.Id, err) + "failed to hash workflow [%+v] to digest with err %v", workflowClosure.GetPrimary().GetTemplate().GetId(), err) } return workflowDigest, nil diff --git a/flyteadmin/pkg/manager/impl/util/digests_test.go b/flyteadmin/pkg/manager/impl/util/digests_test.go index ee3ea93d19..870fbd4cbd 100644 --- a/flyteadmin/pkg/manager/impl/util/digests_test.go +++ b/flyteadmin/pkg/manager/impl/util/digests_test.go @@ -149,7 +149,7 @@ func TestGetWorkflowDigest_Unequal(t *testing.T) { workflowWithDifferentNodes, err := getCompiledWorkflow() assert.Nil(t, err) workflowWithDifferentNodes.Primary.Template.Nodes = append( - workflowWithDifferentNodes.Primary.Template.Nodes, &core.Node{ + workflowWithDifferentNodes.GetPrimary().GetTemplate().GetNodes(), &core.Node{ Id: "unexpected", }) workflowDigest, err := GetWorkflowDigest(context.Background(), workflowWithDifferentNodes) diff --git a/flyteadmin/pkg/manager/impl/util/filters.go b/flyteadmin/pkg/manager/impl/util/filters.go index 377dcdab51..b6426a3852 100644 --- a/flyteadmin/pkg/manager/impl/util/filters.go +++ b/flyteadmin/pkg/manager/impl/util/filters.go @@ -274,28 +274,28 @@ func GetWorkflowExecutionIdentifierFilters( ctx context.Context, workflowExecutionIdentifier *core.WorkflowExecutionIdentifier, entity common.Entity) ([]common.InlineFilter, error) { identifierFilters := make([]common.InlineFilter, 3) identifierProjectFilter, err := GetSingleValueEqualityFilter( - entity, shared.Project, workflowExecutionIdentifier.Project) + entity, shared.Project, workflowExecutionIdentifier.GetProject()) if err != nil { logger.Warningf(ctx, "Failed to create execution identifier 
filter for project: %s with identifier [%+v]", - workflowExecutionIdentifier.Project, workflowExecutionIdentifier) + workflowExecutionIdentifier.GetProject(), workflowExecutionIdentifier) return nil, err } identifierFilters[0] = identifierProjectFilter identifierDomainFilter, err := GetSingleValueEqualityFilter( - entity, shared.Domain, workflowExecutionIdentifier.Domain) + entity, shared.Domain, workflowExecutionIdentifier.GetDomain()) if err != nil { logger.Warningf(ctx, "Failed to create execution identifier filter for domain: %s with identifier [%+v]", - workflowExecutionIdentifier.Domain, workflowExecutionIdentifier) + workflowExecutionIdentifier.GetDomain(), workflowExecutionIdentifier) return nil, err } identifierFilters[1] = identifierDomainFilter identifierNameFilter, err := GetSingleValueEqualityFilter( - entity, shared.Name, workflowExecutionIdentifier.Name) + entity, shared.Name, workflowExecutionIdentifier.GetName()) if err != nil { logger.Warningf(ctx, "Failed to create execution identifier filter for domain: %s with identifier [%+v]", - workflowExecutionIdentifier.Name, workflowExecutionIdentifier) + workflowExecutionIdentifier.GetName(), workflowExecutionIdentifier) return nil, err } identifierFilters[2] = identifierNameFilter @@ -306,15 +306,15 @@ func GetWorkflowExecutionIdentifierFilters( func GetNodeExecutionIdentifierFilters( ctx context.Context, nodeExecutionIdentifier *core.NodeExecutionIdentifier, entity common.Entity) ([]common.InlineFilter, error) { workflowExecutionIdentifierFilters, err := - GetWorkflowExecutionIdentifierFilters(ctx, nodeExecutionIdentifier.ExecutionId, entity) + GetWorkflowExecutionIdentifierFilters(ctx, nodeExecutionIdentifier.GetExecutionId(), entity) if err != nil { return nil, err } nodeIDFilter, err := GetSingleValueEqualityFilter( - entity, shared.NodeID, nodeExecutionIdentifier.NodeId) + entity, shared.NodeID, nodeExecutionIdentifier.GetNodeId()) if err != nil { logger.Warningf(ctx, "Failed to create node 
execution identifier filter for node id: %s with identifier [%+v]", - nodeExecutionIdentifier.NodeId, nodeExecutionIdentifier) + nodeExecutionIdentifier.GetNodeId(), nodeExecutionIdentifier) } return append(workflowExecutionIdentifierFilters, nodeIDFilter), nil } diff --git a/flyteadmin/pkg/manager/impl/util/resources.go b/flyteadmin/pkg/manager/impl/util/resources.go index 79aadb61b2..cd92bb671d 100644 --- a/flyteadmin/pkg/manager/impl/util/resources.go +++ b/flyteadmin/pkg/manager/impl/util/resources.go @@ -31,16 +31,16 @@ func getTaskResourcesAsSet(ctx context.Context, identifier *core.Identifier, result := runtimeInterfaces.TaskResourceSet{} for _, entry := range resourceEntries { - switch entry.Name { + switch entry.GetName() { case core.Resources_CPU: - result.CPU = parseQuantityNoError(ctx, identifier.String(), fmt.Sprintf("%v.cpu", resourceName), entry.Value) + result.CPU = parseQuantityNoError(ctx, identifier.String(), fmt.Sprintf("%v.cpu", resourceName), entry.GetValue()) case core.Resources_MEMORY: - result.Memory = parseQuantityNoError(ctx, identifier.String(), fmt.Sprintf("%v.memory", resourceName), entry.Value) + result.Memory = parseQuantityNoError(ctx, identifier.String(), fmt.Sprintf("%v.memory", resourceName), entry.GetValue()) case core.Resources_EPHEMERAL_STORAGE: result.EphemeralStorage = parseQuantityNoError(ctx, identifier.String(), - fmt.Sprintf("%v.ephemeral storage", resourceName), entry.Value) + fmt.Sprintf("%v.ephemeral storage", resourceName), entry.GetValue()) case core.Resources_GPU: - result.GPU = parseQuantityNoError(ctx, identifier.String(), "gpu", entry.Value) + result.GPU = parseQuantityNoError(ctx, identifier.String(), "gpu", entry.GetValue()) } } @@ -50,28 +50,28 @@ func getTaskResourcesAsSet(ctx context.Context, identifier *core.Identifier, // GetCompleteTaskResourceRequirements parses the resource requests and limits from the `TaskTemplate` Container. 
func GetCompleteTaskResourceRequirements(ctx context.Context, identifier *core.Identifier, task *core.CompiledTask) workflowengineInterfaces.TaskResources { return workflowengineInterfaces.TaskResources{ - Defaults: getTaskResourcesAsSet(ctx, identifier, task.GetTemplate().GetContainer().Resources.Requests, "requests"), - Limits: getTaskResourcesAsSet(ctx, identifier, task.GetTemplate().GetContainer().Resources.Limits, "limits"), + Defaults: getTaskResourcesAsSet(ctx, identifier, task.GetTemplate().GetContainer().GetResources().GetRequests(), "requests"), + Limits: getTaskResourcesAsSet(ctx, identifier, task.GetTemplate().GetContainer().GetResources().GetLimits(), "limits"), } } // fromAdminProtoTaskResourceSpec parses the flyteidl `TaskResourceSpec` message into a `TaskResourceSet`. func fromAdminProtoTaskResourceSpec(ctx context.Context, spec *admin.TaskResourceSpec) runtimeInterfaces.TaskResourceSet { result := runtimeInterfaces.TaskResourceSet{} - if len(spec.Cpu) > 0 { - result.CPU = parseQuantityNoError(ctx, "project", "cpu", spec.Cpu) + if len(spec.GetCpu()) > 0 { + result.CPU = parseQuantityNoError(ctx, "project", "cpu", spec.GetCpu()) } - if len(spec.Memory) > 0 { - result.Memory = parseQuantityNoError(ctx, "project", "memory", spec.Memory) + if len(spec.GetMemory()) > 0 { + result.Memory = parseQuantityNoError(ctx, "project", "memory", spec.GetMemory()) } - if len(spec.EphemeralStorage) > 0 { - result.EphemeralStorage = parseQuantityNoError(ctx, "project", "ephemeral storage", spec.EphemeralStorage) + if len(spec.GetEphemeralStorage()) > 0 { + result.EphemeralStorage = parseQuantityNoError(ctx, "project", "ephemeral storage", spec.GetEphemeralStorage()) } - if len(spec.Gpu) > 0 { - result.GPU = parseQuantityNoError(ctx, "project", "gpu", spec.Gpu) + if len(spec.GetGpu()) > 0 { + result.GPU = parseQuantityNoError(ctx, "project", "gpu", spec.GetGpu()) } return result @@ -86,14 +86,14 @@ func GetTaskResources(ctx context.Context, id *core.Identifier, 
resourceManager request := interfaces.ResourceRequest{ ResourceType: admin.MatchableResource_TASK_RESOURCE, } - if id != nil && len(id.Project) > 0 { - request.Project = id.Project + if id != nil && len(id.GetProject()) > 0 { + request.Project = id.GetProject() } - if id != nil && len(id.Domain) > 0 { - request.Domain = id.Domain + if id != nil && len(id.GetDomain()) > 0 { + request.Domain = id.GetDomain() } - if id != nil && id.ResourceType == core.ResourceType_WORKFLOW && len(id.Name) > 0 { - request.Workflow = id.Name + if id != nil && id.GetResourceType() == core.ResourceType_WORKFLOW && len(id.GetName()) > 0 { + request.Workflow = id.GetName() } resource, err := resourceManager.GetResource(ctx, request) @@ -105,8 +105,8 @@ func GetTaskResources(ctx context.Context, id *core.Identifier, resourceManager logger.Debugf(ctx, "Assigning task requested resources for [%+v]", id) var taskResourceAttributes = workflowengineInterfaces.TaskResources{} if resource != nil && resource.Attributes != nil && resource.Attributes.GetTaskResourceAttributes() != nil { - taskResourceAttributes.Defaults = fromAdminProtoTaskResourceSpec(ctx, resource.Attributes.GetTaskResourceAttributes().Defaults) - taskResourceAttributes.Limits = fromAdminProtoTaskResourceSpec(ctx, resource.Attributes.GetTaskResourceAttributes().Limits) + taskResourceAttributes.Defaults = fromAdminProtoTaskResourceSpec(ctx, resource.Attributes.GetTaskResourceAttributes().GetDefaults()) + taskResourceAttributes.Limits = fromAdminProtoTaskResourceSpec(ctx, resource.Attributes.GetTaskResourceAttributes().GetLimits()) } else { taskResourceAttributes = workflowengineInterfaces.TaskResources{ Defaults: taskResourceConfig.GetDefaults(), diff --git a/flyteadmin/pkg/manager/impl/util/resources_test.go b/flyteadmin/pkg/manager/impl/util/resources_test.go index c163b44e0c..932792f307 100644 --- a/flyteadmin/pkg/manager/impl/util/resources_test.go +++ b/flyteadmin/pkg/manager/impl/util/resources_test.go @@ -44,9 +44,9 @@ func 
TestGetTaskResources(t *testing.T) { resourceManager.GetResourceFunc = func(ctx context.Context, request managerInterfaces.ResourceRequest) (*managerInterfaces.ResourceResponse, error) { assert.EqualValues(t, request, managerInterfaces.ResourceRequest{ - Project: workflowIdentifier.Project, - Domain: workflowIdentifier.Domain, - Workflow: workflowIdentifier.Name, + Project: workflowIdentifier.GetProject(), + Domain: workflowIdentifier.GetDomain(), + Workflow: workflowIdentifier.GetName(), ResourceType: admin.MatchableResource_TASK_RESOURCE, }) return &managerInterfaces.ResourceResponse{}, nil @@ -73,9 +73,9 @@ func TestGetTaskResources(t *testing.T) { resourceManager.GetResourceFunc = func(ctx context.Context, request managerInterfaces.ResourceRequest) (*managerInterfaces.ResourceResponse, error) { assert.EqualValues(t, request, managerInterfaces.ResourceRequest{ - Project: workflowIdentifier.Project, - Domain: workflowIdentifier.Domain, - Workflow: workflowIdentifier.Name, + Project: workflowIdentifier.GetProject(), + Domain: workflowIdentifier.GetDomain(), + Workflow: workflowIdentifier.GetName(), ResourceType: admin.MatchableResource_TASK_RESOURCE, }) return &managerInterfaces.ResourceResponse{ diff --git a/flyteadmin/pkg/manager/impl/util/shared.go b/flyteadmin/pkg/manager/impl/util/shared.go index 8402451200..690ad32fa3 100644 --- a/flyteadmin/pkg/manager/impl/util/shared.go +++ b/flyteadmin/pkg/manager/impl/util/shared.go @@ -22,8 +22,8 @@ import ( ) func GetExecutionName(request *admin.ExecutionCreateRequest) string { - if request.Name != "" { - return request.Name + if request.GetName() != "" { + return request.GetName() } return common.GetExecutionName(time.Now().UnixNano()) } @@ -46,10 +46,10 @@ func GetTask(ctx context.Context, repo repoInterfaces.Repository, identifier *co func GetWorkflowModel( ctx context.Context, repo repoInterfaces.Repository, identifier *core.Identifier) (models.Workflow, error) { workflowModel, err := 
(repo).WorkflowRepo().Get(ctx, repoInterfaces.Identifier{ - Project: identifier.Project, - Domain: identifier.Domain, - Name: identifier.Name, - Version: identifier.Version, + Project: identifier.GetProject(), + Domain: identifier.GetDomain(), + Name: identifier.GetName(), + Version: identifier.GetVersion(), }) if err != nil { return models.Workflow{}, err @@ -87,7 +87,7 @@ func GetWorkflow( if err != nil { return nil, err } - closure.CreatedAt = workflow.Closure.CreatedAt + closure.CreatedAt = workflow.GetClosure().GetCreatedAt() workflow.Closure = closure return &workflow, nil } @@ -95,10 +95,10 @@ func GetWorkflow( func GetLaunchPlanModel( ctx context.Context, repo repoInterfaces.Repository, identifier *core.Identifier) (models.LaunchPlan, error) { launchPlanModel, err := (repo).LaunchPlanRepo().Get(ctx, repoInterfaces.Identifier{ - Project: identifier.Project, - Domain: identifier.Domain, - Name: identifier.Name, - Version: identifier.Version, + Project: identifier.GetProject(), + Domain: identifier.GetDomain(), + Name: identifier.GetName(), + Version: identifier.GetVersion(), }) if err != nil { return models.LaunchPlan{}, err @@ -119,9 +119,9 @@ func GetNamedEntityModel( ctx context.Context, repo repoInterfaces.Repository, resourceType core.ResourceType, identifier *admin.NamedEntityIdentifier) (models.NamedEntity, error) { metadataModel, err := (repo).NamedEntityRepo().Get(ctx, repoInterfaces.GetNamedEntityInput{ ResourceType: resourceType, - Project: identifier.Project, - Domain: identifier.Domain, - Name: identifier.Name, + Project: identifier.GetProject(), + Domain: identifier.GetDomain(), + Name: identifier.GetName(), }) if err != nil { return models.NamedEntity{}, err @@ -142,11 +142,11 @@ func GetNamedEntity( func GetDescriptionEntityModel( ctx context.Context, repo repoInterfaces.Repository, identifier *core.Identifier) (models.DescriptionEntity, error) { descriptionEntityModel, err := (repo).DescriptionEntityRepo().Get(ctx, 
repoInterfaces.GetDescriptionEntityInput{ - ResourceType: identifier.ResourceType, - Project: identifier.Project, - Domain: identifier.Domain, - Name: identifier.Name, - Version: identifier.Version, + ResourceType: identifier.GetResourceType(), + Project: identifier.GetProject(), + Domain: identifier.GetDomain(), + Name: identifier.GetName(), + Version: identifier.GetVersion(), }) if err != nil { return models.DescriptionEntity{}, err @@ -211,9 +211,9 @@ func GetExecutionModel( ctx context.Context, repo repoInterfaces.Repository, identifier *core.WorkflowExecutionIdentifier) ( *models.Execution, error) { executionModel, err := repo.ExecutionRepo().Get(ctx, repoInterfaces.Identifier{ - Project: identifier.Project, - Domain: identifier.Domain, - Name: identifier.Name, + Project: identifier.GetProject(), + Domain: identifier.GetDomain(), + Name: identifier.GetName(), }) if err != nil { return nil, err @@ -236,10 +236,10 @@ func GetNodeExecutionModel(ctx context.Context, repo repoInterfaces.Repository, func GetTaskModel(ctx context.Context, repo repoInterfaces.Repository, taskIdentifier *core.Identifier) ( *models.Task, error) { taskModel, err := repo.TaskRepo().Get(ctx, repoInterfaces.Identifier{ - Project: taskIdentifier.Project, - Domain: taskIdentifier.Domain, - Name: taskIdentifier.Name, - Version: taskIdentifier.Version, + Project: taskIdentifier.GetProject(), + Domain: taskIdentifier.GetDomain(), + Name: taskIdentifier.GetName(), + Version: taskIdentifier.GetVersion(), }) if err != nil { @@ -305,15 +305,15 @@ func MergeIntoExecConfig(workflowExecConfig *admin.WorkflowExecutionConfig, spec // Hence we do a deep check in the following conditions before assignment if (workflowExecConfig.GetRawOutputDataConfig() == nil || len(workflowExecConfig.GetRawOutputDataConfig().GetOutputLocationPrefix()) == 0) && - (spec.GetRawOutputDataConfig() != nil && len(spec.GetRawOutputDataConfig().OutputLocationPrefix) > 0) { + (spec.GetRawOutputDataConfig() != nil && 
len(spec.GetRawOutputDataConfig().GetOutputLocationPrefix()) > 0) { workflowExecConfig.RawOutputDataConfig = spec.GetRawOutputDataConfig() } - if (workflowExecConfig.GetLabels() == nil || len(workflowExecConfig.GetLabels().Values) == 0) && - (spec.GetLabels() != nil && len(spec.GetLabels().Values) > 0) { + if (workflowExecConfig.GetLabels() == nil || len(workflowExecConfig.GetLabels().GetValues()) == 0) && + (spec.GetLabels() != nil && len(spec.GetLabels().GetValues()) > 0) { workflowExecConfig.Labels = spec.GetLabels() } - if (workflowExecConfig.GetAnnotations() == nil || len(workflowExecConfig.GetAnnotations().Values) == 0) && - (spec.GetAnnotations() != nil && len(spec.GetAnnotations().Values) > 0) { + if (workflowExecConfig.GetAnnotations() == nil || len(workflowExecConfig.GetAnnotations().GetValues()) == 0) && + (spec.GetAnnotations() != nil && len(spec.GetAnnotations().GetValues()) > 0) { workflowExecConfig.Annotations = spec.GetAnnotations() } @@ -325,8 +325,8 @@ func MergeIntoExecConfig(workflowExecConfig *admin.WorkflowExecutionConfig, spec workflowExecConfig.OverwriteCache = spec.GetOverwriteCache() } - if (workflowExecConfig.GetEnvs() == nil || len(workflowExecConfig.GetEnvs().Values) == 0) && - (spec.GetEnvs() != nil && len(spec.GetEnvs().Values) > 0) { + if (workflowExecConfig.GetEnvs() == nil || len(workflowExecConfig.GetEnvs().GetValues()) == 0) && + (spec.GetEnvs() != nil && len(spec.GetEnvs().GetValues()) > 0) { workflowExecConfig.Envs = spec.GetEnvs() } diff --git a/flyteadmin/pkg/manager/impl/util/shared_test.go b/flyteadmin/pkg/manager/impl/util/shared_test.go index b9b296971e..09cb172638 100644 --- a/flyteadmin/pkg/manager/impl/util/shared_test.go +++ b/flyteadmin/pkg/manager/impl/util/shared_test.go @@ -81,10 +81,10 @@ func TestGetTask(t *testing.T) { }) assert.NoError(t, err) assert.NotNil(t, task) - assert.Equal(t, project, task.Id.Project) - assert.Equal(t, domain, task.Id.Domain) - assert.Equal(t, name, task.Id.Name) - assert.Equal(t, 
version, task.Id.Version) + assert.Equal(t, project, task.GetId().GetProject()) + assert.Equal(t, domain, task.GetId().GetDomain()) + assert.Equal(t, name, task.GetId().GetName()) + assert.Equal(t, version, task.GetId().GetVersion()) } func TestGetTask_DatabaseError(t *testing.T) { @@ -326,10 +326,10 @@ func TestGetLaunchPlan(t *testing.T) { }) assert.Nil(t, err) assert.NotNil(t, launchPlan) - assert.Equal(t, project, launchPlan.Id.Project) - assert.Equal(t, domain, launchPlan.Id.Domain) - assert.Equal(t, name, launchPlan.Id.Name) - assert.Equal(t, version, launchPlan.Id.Version) + assert.Equal(t, project, launchPlan.GetId().GetProject()) + assert.Equal(t, domain, launchPlan.GetId().GetDomain()) + assert.Equal(t, name, launchPlan.GetId().GetName()) + assert.Equal(t, version, launchPlan.GetId().GetVersion()) } func TestGetLaunchPlan_TransformerError(t *testing.T) { @@ -443,11 +443,11 @@ func TestGetNamedEntity(t *testing.T) { }) assert.Nil(t, err) assert.NotNil(t, entity) - assert.Equal(t, project, entity.Id.Project) - assert.Equal(t, domain, entity.Id.Domain) - assert.Equal(t, name, entity.Id.Name) - assert.Equal(t, description, entity.Metadata.Description) - assert.Equal(t, resourceType, entity.ResourceType) + assert.Equal(t, project, entity.GetId().GetProject()) + assert.Equal(t, domain, entity.GetId().GetDomain()) + assert.Equal(t, name, entity.GetId().GetName()) + assert.Equal(t, description, entity.GetMetadata().GetDescription()) + assert.Equal(t, resourceType, entity.GetResourceType()) } func TestGetActiveLaunchPlanVersionFilters(t *testing.T) { @@ -505,7 +505,7 @@ func TestGetMatchableResource(t *testing.T) { } mr, err := GetMatchableResource(context.Background(), resourceManager, resourceType, project, domain, "") - assert.Equal(t, int32(12), mr.Attributes.GetWorkflowExecutionConfig().MaxParallelism) + assert.Equal(t, int32(12), mr.Attributes.GetWorkflowExecutionConfig().GetMaxParallelism()) assert.Nil(t, err) }) t.Run("successful fetch workflow matchable", 
func(t *testing.T) { @@ -530,7 +530,7 @@ func TestGetMatchableResource(t *testing.T) { } mr, err := GetMatchableResource(context.Background(), resourceManager, resourceType, project, domain, workflow) - assert.Equal(t, int32(12), mr.Attributes.GetWorkflowExecutionConfig().MaxParallelism) + assert.Equal(t, int32(12), mr.Attributes.GetWorkflowExecutionConfig().GetMaxParallelism()) assert.Nil(t, err) }) @@ -614,7 +614,7 @@ func TestGetDescriptionEntity(t *testing.T) { }) assert.Nil(t, err) assert.NotNil(t, entity) - assert.Equal(t, "hello world", entity.ShortDescription) + assert.Equal(t, "hello world", entity.GetShortDescription()) }) t.Run("Failed to get DescriptionEntity", func(t *testing.T) { diff --git a/flyteadmin/pkg/manager/impl/util/single_task_execution.go b/flyteadmin/pkg/manager/impl/util/single_task_execution.go index 036610a9ec..ff7a8b70ce 100644 --- a/flyteadmin/pkg/manager/impl/util/single_task_execution.go +++ b/flyteadmin/pkg/manager/impl/util/single_task_execution.go @@ -48,8 +48,8 @@ func generateWorkflowNameFromTask(taskName string) string { } func generateBindings(outputs *core.VariableMap, nodeID string) []*core.Binding { - bindings := make([]*core.Binding, 0, len(outputs.Variables)) - for key := range outputs.Variables { + bindings := make([]*core.Binding, 0, len(outputs.GetVariables())) + for key := range outputs.GetVariables() { binding := &core.Binding{ Var: key, Binding: &core.BindingData{ @@ -73,16 +73,16 @@ func CreateOrGetWorkflowModel( task *admin.Task) (*models.Workflow, error) { workflowIdentifier := core.Identifier{ ResourceType: core.ResourceType_WORKFLOW, - Project: taskIdentifier.Project, - Domain: taskIdentifier.Domain, - Name: generateWorkflowNameFromTask(taskIdentifier.Name), - Version: taskIdentifier.Version, + Project: taskIdentifier.GetProject(), + Domain: taskIdentifier.GetDomain(), + Name: generateWorkflowNameFromTask(taskIdentifier.GetName()), + Version: taskIdentifier.GetVersion(), } workflowModel, err := 
db.WorkflowRepo().Get(ctx, repositoryInterfaces.Identifier{ - Project: workflowIdentifier.Project, - Domain: workflowIdentifier.Domain, - Name: workflowIdentifier.Name, - Version: workflowIdentifier.Version, + Project: workflowIdentifier.GetProject(), + Domain: workflowIdentifier.GetDomain(), + Name: workflowIdentifier.GetName(), + Version: workflowIdentifier.GetVersion(), }) var retryStrategy *core.RetryStrategy @@ -100,15 +100,15 @@ func CreateOrGetWorkflowModel( workflowSpec := admin.WorkflowSpec{ Template: &core.WorkflowTemplate{ Id: &workflowIdentifier, - Interface: task.Closure.CompiledTask.Template.Interface, + Interface: task.GetClosure().GetCompiledTask().GetTemplate().GetInterface(), Nodes: []*core.Node{ { - Id: generateNodeNameFromTask(taskIdentifier.Name), + Id: generateNodeNameFromTask(taskIdentifier.GetName()), Metadata: &core.NodeMetadata{ - Name: generateNodeNameFromTask(taskIdentifier.Name), + Name: generateNodeNameFromTask(taskIdentifier.GetName()), Retries: retryStrategy, }, - Inputs: generateBindings(task.Closure.CompiledTask.Template.Interface.Inputs, noInputNodeID), + Inputs: generateBindings(task.GetClosure().GetCompiledTask().GetTemplate().GetInterface().GetInputs(), noInputNodeID), Target: &core.Node_TaskNode{ TaskNode: &core.TaskNode{ Reference: &core.TaskNode_ReferenceId{ @@ -119,7 +119,7 @@ func CreateOrGetWorkflowModel( }, }, - Outputs: generateBindings(task.Closure.CompiledTask.Template.Interface.Outputs, generateNodeNameFromTask(taskIdentifier.Name)), + Outputs: generateBindings(task.GetClosure().GetCompiledTask().GetTemplate().GetInterface().GetOutputs(), generateNodeNameFromTask(taskIdentifier.GetName())), }, } @@ -138,9 +138,9 @@ func CreateOrGetWorkflowModel( _, err = namedEntityManager.UpdateNamedEntity(ctx, &admin.NamedEntityUpdateRequest{ ResourceType: core.ResourceType_WORKFLOW, Id: &admin.NamedEntityIdentifier{ - Project: workflowIdentifier.Project, - Domain: workflowIdentifier.Domain, - Name: workflowIdentifier.Name, + 
Project: workflowIdentifier.GetProject(), + Domain: workflowIdentifier.GetDomain(), + Name: workflowIdentifier.GetName(), }, Metadata: &admin.NamedEntityMetadata{State: admin.NamedEntityState_SYSTEM_GENERATED}, }) @@ -149,10 +149,10 @@ func CreateOrGetWorkflowModel( return nil, err } workflowModel, err = db.WorkflowRepo().Get(ctx, repositoryInterfaces.Identifier{ - Project: workflowIdentifier.Project, - Domain: workflowIdentifier.Domain, - Name: workflowIdentifier.Name, - Version: workflowIdentifier.Version, + Project: workflowIdentifier.GetProject(), + Domain: workflowIdentifier.GetDomain(), + Name: workflowIdentifier.GetName(), + Version: workflowIdentifier.GetVersion(), }) if err != nil { // This is unexpected - at this point we've successfully just created the skeleton workflow. @@ -171,10 +171,10 @@ func CreateOrGetLaunchPlan(ctx context.Context, var err error launchPlanIdentifier := &core.Identifier{ ResourceType: core.ResourceType_LAUNCH_PLAN, - Project: taskIdentifier.Project, - Domain: taskIdentifier.Domain, - Name: generateWorkflowNameFromTask(taskIdentifier.Name), - Version: taskIdentifier.Version, + Project: taskIdentifier.GetProject(), + Domain: taskIdentifier.GetDomain(), + Name: generateWorkflowNameFromTask(taskIdentifier.GetName()), + Version: taskIdentifier.GetVersion(), } launchPlan, err = GetLaunchPlan(ctx, db, launchPlanIdentifier) if err != nil { @@ -188,29 +188,29 @@ func CreateOrGetLaunchPlan(ctx context.Context, Spec: &admin.LaunchPlanSpec{ WorkflowId: &core.Identifier{ ResourceType: core.ResourceType_WORKFLOW, - Project: taskIdentifier.Project, - Domain: taskIdentifier.Domain, - Name: taskIdentifier.Name, - Version: taskIdentifier.Version, + Project: taskIdentifier.GetProject(), + Domain: taskIdentifier.GetDomain(), + Name: taskIdentifier.GetName(), + Version: taskIdentifier.GetVersion(), }, EntityMetadata: &admin.LaunchPlanMetadata{}, DefaultInputs: &core.ParameterMap{}, FixedInputs: &core.LiteralMap{}, Labels: &admin.Labels{}, 
Annotations: &admin.Annotations{}, - AuthRole: spec.AuthRole, - SecurityContext: spec.SecurityContext, + AuthRole: spec.GetAuthRole(), + SecurityContext: spec.GetSecurityContext(), }, } if err := validation.ValidateLaunchPlan(ctx, generatedCreateLaunchPlanReq, db, config.ApplicationConfiguration(), workflowInterface); err != nil { logger.Debugf(ctx, "could not create launch plan: %+v, request failed validation with err: %v", taskIdentifier, err) return nil, err } - transformedLaunchPlan := transformers.CreateLaunchPlan(generatedCreateLaunchPlanReq, workflowInterface.Outputs) + transformedLaunchPlan := transformers.CreateLaunchPlan(generatedCreateLaunchPlanReq, workflowInterface.GetOutputs()) launchPlan = transformedLaunchPlan launchPlanDigest, err := GetLaunchPlanDigest(ctx, launchPlan) if err != nil { - logger.Errorf(ctx, "failed to compute launch plan digest for [%+v] with err: %v", launchPlan.Id, err) + logger.Errorf(ctx, "failed to compute launch plan digest for [%+v] with err: %v", launchPlan.GetId(), err) return nil, err } launchPlanModel, err := @@ -218,7 +218,7 @@ func CreateOrGetLaunchPlan(ctx context.Context, if err != nil { logger.Errorf(ctx, "Failed to transform launch plan model [%+v], and workflow outputs [%+v] with err: %v", - taskIdentifier, workflowInterface.Outputs, err) + taskIdentifier, workflowInterface.GetOutputs(), err) return nil, err } err = db.LaunchPlanRepo().Create(ctx, launchPlanModel) diff --git a/flyteadmin/pkg/manager/impl/util/single_task_execution_test.go b/flyteadmin/pkg/manager/impl/util/single_task_execution_test.go index 13ed4a945d..d0aff9edef 100644 --- a/flyteadmin/pkg/manager/impl/util/single_task_execution_test.go +++ b/flyteadmin/pkg/manager/impl/util/single_task_execution_test.go @@ -88,13 +88,13 @@ func TestCreateOrGetWorkflowModel(t *testing.T) { mockNamedEntityManager := managerMocks.NamedEntityManager{} mockNamedEntityManager.UpdateNamedEntityFunc = func(ctx context.Context, request *admin.NamedEntityUpdateRequest) 
(*admin.NamedEntityUpdateResponse, error) { - assert.Equal(t, request.ResourceType, core.ResourceType_WORKFLOW) - assert.True(t, proto.Equal(request.Id, &admin.NamedEntityIdentifier{ + assert.Equal(t, request.GetResourceType(), core.ResourceType_WORKFLOW) + assert.True(t, proto.Equal(request.GetId(), &admin.NamedEntityIdentifier{ Project: "flytekit", Domain: "production", Name: ".flytegen.app.workflows.MyWorkflow.my_task", - }), fmt.Sprintf("%+v", request.Id)) - assert.True(t, proto.Equal(request.Metadata, &admin.NamedEntityMetadata{ + }), fmt.Sprintf("%+v", request.GetId())) + assert.True(t, proto.Equal(request.GetMetadata(), &admin.NamedEntityMetadata{ State: admin.NamedEntityState_SYSTEM_GENERATED, })) return &admin.NamedEntityUpdateResponse{}, nil @@ -102,13 +102,13 @@ func TestCreateOrGetWorkflowModel(t *testing.T) { mockWorkflowManager := managerMocks.MockWorkflowManager{} mockWorkflowManager.SetCreateCallback(func(ctx context.Context, request *admin.WorkflowCreateRequest) (*admin.WorkflowCreateResponse, error) { - assert.True(t, proto.Equal(request.Id, &core.Identifier{ + assert.True(t, proto.Equal(request.GetId(), &core.Identifier{ ResourceType: core.ResourceType_WORKFLOW, Project: "flytekit", Domain: "production", Name: ".flytegen.app.workflows.MyWorkflow.my_task", Version: "12345", - }), fmt.Sprintf("%+v", request.Id)) + }), fmt.Sprintf("%+v", request.GetId())) assert.Len(t, request.GetSpec().GetTemplate().GetNodes(), 1) assert.Equal(t, request.GetSpec().GetTemplate().GetNodes()[0].GetMetadata().GetRetries().GetRetries(), uint32(2)) @@ -220,13 +220,13 @@ func TestCreateOrGetLaunchPlan(t *testing.T) { mockNamedEntityManager := managerMocks.NamedEntityManager{} mockNamedEntityManager.UpdateNamedEntityFunc = func(ctx context.Context, request *admin.NamedEntityUpdateRequest) (*admin.NamedEntityUpdateResponse, error) { - assert.Equal(t, request.ResourceType, core.ResourceType_LAUNCH_PLAN) - assert.True(t, proto.Equal(request.Id, &admin.NamedEntityIdentifier{ + 
assert.Equal(t, request.GetResourceType(), core.ResourceType_LAUNCH_PLAN) + assert.True(t, proto.Equal(request.GetId(), &admin.NamedEntityIdentifier{ Project: "flytekit", Domain: "production", Name: ".flytegen.app.workflows.MyWorkflow.my_task", - }), fmt.Sprintf("%+v", request.Id)) - assert.True(t, proto.Equal(request.Metadata, &admin.NamedEntityMetadata{ + }), fmt.Sprintf("%+v", request.GetId())) + assert.True(t, proto.Equal(request.GetMetadata(), &admin.NamedEntityMetadata{ State: admin.NamedEntityState_SYSTEM_GENERATED, })) return &admin.NamedEntityUpdateResponse{}, nil @@ -256,7 +256,7 @@ func TestCreateOrGetLaunchPlan(t *testing.T) { Domain: "production", Name: ".flytegen.app.workflows.MyWorkflow.my_task", Version: "12345", - }, launchPlan.Id)) - assert.True(t, proto.Equal(launchPlan.Closure.ExpectedOutputs, workflowInterface.Outputs)) - assert.True(t, proto.Equal(launchPlan.Spec.AuthRole, spec.AuthRole)) + }, launchPlan.GetId())) + assert.True(t, proto.Equal(launchPlan.GetClosure().GetExpectedOutputs(), workflowInterface.GetOutputs())) + assert.True(t, proto.Equal(launchPlan.GetSpec().GetAuthRole(), spec.GetAuthRole())) } diff --git a/flyteadmin/pkg/manager/impl/validation/attributes_validator.go b/flyteadmin/pkg/manager/impl/validation/attributes_validator.go index bfaccd80a1..99929513b5 100644 --- a/flyteadmin/pkg/manager/impl/validation/attributes_validator.go +++ b/flyteadmin/pkg/manager/impl/validation/attributes_validator.go @@ -42,15 +42,15 @@ func ValidateProjectDomainAttributesUpdateRequest(ctx context.Context, db repositoryInterfaces.Repository, config runtimeInterfaces.ApplicationConfiguration, request *admin.ProjectDomainAttributesUpdateRequest) ( admin.MatchableResource, error) { - if request.Attributes == nil { + if request.GetAttributes() == nil { return defaultMatchableResource, shared.GetMissingArgumentError(shared.Attributes) } - if err := ValidateProjectAndDomain(ctx, db, config, request.Attributes.Project, request.Attributes.Domain); err 
!= nil { + if err := ValidateProjectAndDomain(ctx, db, config, request.GetAttributes().GetProject(), request.GetAttributes().GetDomain()); err != nil { return defaultMatchableResource, err } - return validateMatchingAttributes(request.Attributes.MatchingAttributes, - fmt.Sprintf("%s-%s", request.Attributes.Project, request.Attributes.Domain)) + return validateMatchingAttributes(request.GetAttributes().GetMatchingAttributes(), + fmt.Sprintf("%s-%s", request.GetAttributes().GetProject(), request.GetAttributes().GetDomain())) } func ValidateProjectAttributesUpdateRequest(ctx context.Context, @@ -58,19 +58,19 @@ func ValidateProjectAttributesUpdateRequest(ctx context.Context, request *admin.ProjectAttributesUpdateRequest) ( admin.MatchableResource, error) { - if request.Attributes == nil { + if request.GetAttributes() == nil { return defaultMatchableResource, shared.GetMissingArgumentError(shared.Attributes) } - if err := ValidateProjectForUpdate(ctx, db, request.Attributes.Project); err != nil { + if err := ValidateProjectForUpdate(ctx, db, request.GetAttributes().GetProject()); err != nil { return defaultMatchableResource, err } - return validateMatchingAttributes(request.Attributes.MatchingAttributes, request.Attributes.Project) + return validateMatchingAttributes(request.GetAttributes().GetMatchingAttributes(), request.GetAttributes().GetProject()) } func ValidateProjectDomainAttributesGetRequest(ctx context.Context, db repositoryInterfaces.Repository, config runtimeInterfaces.ApplicationConfiguration, request *admin.ProjectDomainAttributesGetRequest) error { - if err := ValidateProjectAndDomain(ctx, db, config, request.Project, request.Domain); err != nil { + if err := ValidateProjectAndDomain(ctx, db, config, request.GetProject(), request.GetDomain()); err != nil { return err } @@ -79,7 +79,7 @@ func ValidateProjectDomainAttributesGetRequest(ctx context.Context, db repositor func ValidateProjectDomainAttributesDeleteRequest(ctx context.Context, db 
repositoryInterfaces.Repository, config runtimeInterfaces.ApplicationConfiguration, request *admin.ProjectDomainAttributesDeleteRequest) error { - if err := ValidateProjectAndDomain(ctx, db, config, request.Project, request.Domain); err != nil { + if err := ValidateProjectAndDomain(ctx, db, config, request.GetProject(), request.GetDomain()); err != nil { return err } @@ -89,26 +89,26 @@ func ValidateProjectDomainAttributesDeleteRequest(ctx context.Context, db reposi func ValidateWorkflowAttributesUpdateRequest(ctx context.Context, db repositoryInterfaces.Repository, config runtimeInterfaces.ApplicationConfiguration, request *admin.WorkflowAttributesUpdateRequest) ( admin.MatchableResource, error) { - if request.Attributes == nil { + if request.GetAttributes() == nil { return defaultMatchableResource, shared.GetMissingArgumentError(shared.Attributes) } - if err := ValidateProjectAndDomain(ctx, db, config, request.Attributes.Project, request.Attributes.Domain); err != nil { + if err := ValidateProjectAndDomain(ctx, db, config, request.GetAttributes().GetProject(), request.GetAttributes().GetDomain()); err != nil { return defaultMatchableResource, err } - if err := ValidateEmptyStringField(request.Attributes.Workflow, shared.Name); err != nil { + if err := ValidateEmptyStringField(request.GetAttributes().GetWorkflow(), shared.Name); err != nil { return defaultMatchableResource, err } - return validateMatchingAttributes(request.Attributes.MatchingAttributes, - fmt.Sprintf("%s-%s-%s", request.Attributes.Project, request.Attributes.Domain, request.Attributes.Workflow)) + return validateMatchingAttributes(request.GetAttributes().GetMatchingAttributes(), + fmt.Sprintf("%s-%s-%s", request.GetAttributes().GetProject(), request.GetAttributes().GetDomain(), request.GetAttributes().GetWorkflow())) } func ValidateWorkflowAttributesGetRequest(ctx context.Context, db repositoryInterfaces.Repository, config runtimeInterfaces.ApplicationConfiguration, request 
*admin.WorkflowAttributesGetRequest) error { - if err := ValidateProjectAndDomain(ctx, db, config, request.Project, request.Domain); err != nil { + if err := ValidateProjectAndDomain(ctx, db, config, request.GetProject(), request.GetDomain()); err != nil { return err } - if err := ValidateEmptyStringField(request.Workflow, shared.Name); err != nil { + if err := ValidateEmptyStringField(request.GetWorkflow(), shared.Name); err != nil { return err } @@ -117,10 +117,10 @@ func ValidateWorkflowAttributesGetRequest(ctx context.Context, db repositoryInte func ValidateWorkflowAttributesDeleteRequest(ctx context.Context, db repositoryInterfaces.Repository, config runtimeInterfaces.ApplicationConfiguration, request *admin.WorkflowAttributesDeleteRequest) error { - if err := ValidateProjectAndDomain(ctx, db, config, request.Project, request.Domain); err != nil { + if err := ValidateProjectAndDomain(ctx, db, config, request.GetProject(), request.GetDomain()); err != nil { return err } - if err := ValidateEmptyStringField(request.Workflow, shared.Name); err != nil { + if err := ValidateEmptyStringField(request.GetWorkflow(), shared.Name); err != nil { return err } @@ -128,7 +128,7 @@ func ValidateWorkflowAttributesDeleteRequest(ctx context.Context, db repositoryI } func ValidateListAllMatchableAttributesRequest(request *admin.ListMatchableAttributesRequest) error { - if _, ok := admin.MatchableResource_name[int32(request.ResourceType)]; !ok { + if _, ok := admin.MatchableResource_name[int32(request.GetResourceType())]; !ok { return shared.GetInvalidArgumentError(shared.ResourceType) } return nil diff --git a/flyteadmin/pkg/manager/impl/validation/execution_validator.go b/flyteadmin/pkg/manager/impl/validation/execution_validator.go index e776650364..2d852c5a97 100644 --- a/flyteadmin/pkg/manager/impl/validation/execution_validator.go +++ b/flyteadmin/pkg/manager/impl/validation/execution_validator.go @@ -28,47 +28,47 @@ var acceptedReferenceLaunchTypes = 
map[core.ResourceType]interface{}{ func ValidateExecutionRequest(ctx context.Context, request *admin.ExecutionCreateRequest, db repositoryInterfaces.Repository, config runtimeInterfaces.ApplicationConfiguration) error { - if err := ValidateEmptyStringField(request.Project, shared.Project); err != nil { + if err := ValidateEmptyStringField(request.GetProject(), shared.Project); err != nil { return err } - if err := ValidateEmptyStringField(request.Domain, shared.Domain); err != nil { + if err := ValidateEmptyStringField(request.GetDomain(), shared.Domain); err != nil { return err } - if request.Name != "" { - if err := CheckValidExecutionID(strings.ToLower(request.Name), shared.Name); err != nil { + if request.GetName() != "" { + if err := CheckValidExecutionID(strings.ToLower(request.GetName()), shared.Name); err != nil { return err } } - if len(request.Name) > allowedExecutionNameLength { + if len(request.GetName()) > allowedExecutionNameLength { return errors.NewFlyteAdminErrorf(codes.InvalidArgument, "name for ExecutionCreateRequest [%+v] exceeded allowed length %d", request, allowedExecutionNameLength) } - if err := ValidateProjectAndDomain(ctx, db, config, request.Project, request.Domain); err != nil { + if err := ValidateProjectAndDomain(ctx, db, config, request.GetProject(), request.GetDomain()); err != nil { return err } - if request.Spec == nil { + if request.GetSpec() == nil { return shared.GetMissingArgumentError(shared.Spec) } // TODO(katrogan): Change the name of Spec.LaunchPlan to something more generic to permit reference Tasks. 
// https://github.com/flyteorg/flyte/issues/262 - if err := ValidateIdentifierFieldsSet(request.Spec.LaunchPlan); err != nil { + if err := ValidateIdentifierFieldsSet(request.GetSpec().GetLaunchPlan()); err != nil { return err } - if _, ok := acceptedReferenceLaunchTypes[request.Spec.LaunchPlan.ResourceType]; !ok { + if _, ok := acceptedReferenceLaunchTypes[request.GetSpec().GetLaunchPlan().GetResourceType()]; !ok { return errors.NewFlyteAdminErrorf(codes.InvalidArgument, "Invalid reference entity resource type [%v], only [%+v] allowed", - request.Spec.LaunchPlan.ResourceType, acceptedReferenceLaunchTypes) + request.GetSpec().GetLaunchPlan().GetResourceType(), acceptedReferenceLaunchTypes) } - if err := validateLiteralMap(request.Inputs, shared.Inputs); err != nil { + if err := validateLiteralMap(request.GetInputs(), shared.Inputs); err != nil { return err } - if request.Spec.GetNotifications() != nil { - if err := validateNotifications(request.Spec.GetNotifications().Notifications); err != nil { + if request.GetSpec().GetNotifications() != nil { + if err := validateNotifications(request.GetSpec().GetNotifications().GetNotifications()); err != nil { return err } } - if err := validateLabels(request.Spec.Labels); err != nil { + if err := validateLabels(request.GetSpec().GetLabels()); err != nil { return err } return nil @@ -135,14 +135,14 @@ func CheckValidExecutionID(executionID, fieldName string) error { } func ValidateCreateWorkflowEventRequest(request *admin.WorkflowExecutionEventRequest, maxOutputSizeInBytes int64) error { - if request.Event == nil { + if request.GetEvent() == nil { return errors.NewFlyteAdminErrorf(codes.InvalidArgument, "Workflow event handler was called without event") - } else if request.Event.ExecutionId == nil { + } else if request.GetEvent().GetExecutionId() == nil { return errors.NewFlyteAdminErrorf(codes.InvalidArgument, - "Workflow event handler request event doesn't have an execution id - %v", request.Event) + "Workflow event handler 
request event doesn't have an execution id - %v", request.GetEvent()) } - if err := ValidateOutputData(request.Event.GetOutputData(), maxOutputSizeInBytes); err != nil { + if err := ValidateOutputData(request.GetEvent().GetOutputData(), maxOutputSizeInBytes); err != nil { return err } return nil @@ -152,13 +152,13 @@ func ValidateWorkflowExecutionIdentifier(identifier *core.WorkflowExecutionIdent if identifier == nil { return shared.GetMissingArgumentError(shared.ID) } - if err := ValidateEmptyStringField(identifier.Project, shared.Project); err != nil { + if err := ValidateEmptyStringField(identifier.GetProject(), shared.Project); err != nil { return err } - if err := ValidateEmptyStringField(identifier.Domain, shared.Domain); err != nil { + if err := ValidateEmptyStringField(identifier.GetDomain(), shared.Domain); err != nil { return err } - if err := ValidateEmptyStringField(identifier.Name, shared.Name); err != nil { + if err := ValidateEmptyStringField(identifier.GetName(), shared.Name); err != nil { return err } return nil diff --git a/flyteadmin/pkg/manager/impl/validation/execution_validator_test.go b/flyteadmin/pkg/manager/impl/validation/execution_validator_test.go index 90858a008c..fcca3b0316 100644 --- a/flyteadmin/pkg/manager/impl/validation/execution_validator_test.go +++ b/flyteadmin/pkg/manager/impl/validation/execution_validator_test.go @@ -90,9 +90,9 @@ func TestGetExecutionInputs(t *testing.T) { lpRequest := testutils.GetLaunchPlanRequest() actualInputs, err := CheckAndFetchInputsForExecution( - executionRequest.Inputs, - lpRequest.Spec.FixedInputs, - lpRequest.Spec.DefaultInputs, + executionRequest.GetInputs(), + lpRequest.GetSpec().GetFixedInputs(), + lpRequest.GetSpec().GetDefaultInputs(), ) expectedMap := &core.LiteralMap{ Literals: map[string]*core.Literal{ @@ -123,9 +123,9 @@ func TestGetExecutionWithOffloadedInputs(t *testing.T) { lpRequest := testutils.GetLaunchPlanRequest() actualInputs, err := CheckAndFetchInputsForExecution( - 
executionRequest.Inputs, - lpRequest.Spec.FixedInputs, - lpRequest.Spec.DefaultInputs, + executionRequest.GetInputs(), + lpRequest.GetSpec().GetFixedInputs(), + lpRequest.GetSpec().GetDefaultInputs(), ) expectedMap := core.LiteralMap{ Literals: map[string]*core.Literal{ @@ -135,8 +135,8 @@ func TestGetExecutionWithOffloadedInputs(t *testing.T) { } assert.Nil(t, err) assert.NotNil(t, actualInputs) - assert.EqualValues(t, expectedMap.GetLiterals()["foo"], actualInputs.Literals["foo"]) - assert.EqualValues(t, expectedMap.GetLiterals()["bar"], actualInputs.Literals["bar"]) + assert.EqualValues(t, expectedMap.GetLiterals()["foo"], actualInputs.GetLiterals()["foo"]) + assert.EqualValues(t, expectedMap.GetLiterals()["bar"], actualInputs.GetLiterals()["bar"]) } func TestValidateExecInputsWrongType(t *testing.T) { @@ -148,9 +148,9 @@ func TestValidateExecInputsWrongType(t *testing.T) { }, } _, err := CheckAndFetchInputsForExecution( - executionRequest.Inputs, - lpRequest.Spec.FixedInputs, - lpRequest.Spec.DefaultInputs, + executionRequest.GetInputs(), + lpRequest.GetSpec().GetFixedInputs(), + lpRequest.GetSpec().GetDefaultInputs(), ) utils.AssertEqualWithSanitizedRegex(t, "invalid foo input wrong type. 
Expected simple:STRING, but got literal scalar: {primitive:{integer:1}}", err.Error()) } @@ -165,9 +165,9 @@ func TestValidateExecInputsExtraInputs(t *testing.T) { }, } _, err := CheckAndFetchInputsForExecution( - executionRequest.Inputs, - lpRequest.Spec.FixedInputs, - lpRequest.Spec.DefaultInputs, + executionRequest.GetInputs(), + lpRequest.GetSpec().GetFixedInputs(), + lpRequest.GetSpec().GetDefaultInputs(), ) assert.EqualError(t, err, "invalid input foo-extra") } @@ -182,9 +182,9 @@ func TestValidateExecInputsOverrideFixed(t *testing.T) { }, } _, err := CheckAndFetchInputsForExecution( - executionRequest.Inputs, - lpRequest.Spec.FixedInputs, - lpRequest.Spec.DefaultInputs, + executionRequest.GetInputs(), + lpRequest.GetSpec().GetFixedInputs(), + lpRequest.GetSpec().GetDefaultInputs(), ) assert.EqualError(t, err, "invalid input bar") } @@ -194,9 +194,9 @@ func TestValidateExecEmptyInputs(t *testing.T) { lpRequest := testutils.GetLaunchPlanRequest() executionRequest.Inputs = nil actualInputs, err := CheckAndFetchInputsForExecution( - executionRequest.Inputs, - lpRequest.Spec.FixedInputs, - lpRequest.Spec.DefaultInputs, + executionRequest.GetInputs(), + lpRequest.GetSpec().GetFixedInputs(), + lpRequest.GetSpec().GetDefaultInputs(), ) expectedMap := &core.LiteralMap{ Literals: map[string]*core.Literal{ diff --git a/flyteadmin/pkg/manager/impl/validation/launch_plan_validator.go b/flyteadmin/pkg/manager/impl/validation/launch_plan_validator.go index a65d140633..0168bb066c 100644 --- a/flyteadmin/pkg/manager/impl/validation/launch_plan_validator.go +++ b/flyteadmin/pkg/manager/impl/validation/launch_plan_validator.go @@ -19,36 +19,36 @@ import ( func ValidateLaunchPlan(ctx context.Context, request *admin.LaunchPlanCreateRequest, db repositoryInterfaces.Repository, config runtimeInterfaces.ApplicationConfiguration, workflowInterface *core.TypedInterface) error { - if err := ValidateIdentifier(request.Id, common.LaunchPlan); err != nil { + if err := 
ValidateIdentifier(request.GetId(), common.LaunchPlan); err != nil { return err } - if err := ValidateProjectAndDomain(ctx, db, config, request.Id.Project, request.Id.Domain); err != nil { + if err := ValidateProjectAndDomain(ctx, db, config, request.GetId().GetProject(), request.GetId().GetDomain()); err != nil { return err } - if request.Spec == nil { + if request.GetSpec() == nil { return shared.GetMissingArgumentError(shared.Spec) } - if err := ValidateIdentifier(request.Spec.WorkflowId, common.Workflow); err != nil { + if err := ValidateIdentifier(request.GetSpec().GetWorkflowId(), common.Workflow); err != nil { return err } - if err := validateLabels(request.Spec.Labels); err != nil { + if err := validateLabels(request.GetSpec().GetLabels()); err != nil { return err } - if err := validateLiteralMap(request.Spec.FixedInputs, shared.FixedInputs); err != nil { + if err := validateLiteralMap(request.GetSpec().GetFixedInputs(), shared.FixedInputs); err != nil { return err } if config.GetTopLevelConfig().FeatureGates.EnableArtifacts { - if err := validateParameterMapAllowArtifacts(request.Spec.DefaultInputs, shared.DefaultInputs); err != nil { + if err := validateParameterMapAllowArtifacts(request.GetSpec().GetDefaultInputs(), shared.DefaultInputs); err != nil { return err } } else { - if err := validateParameterMapDisableArtifacts(request.Spec.DefaultInputs, shared.DefaultInputs); err != nil { + if err := validateParameterMapDisableArtifacts(request.GetSpec().GetDefaultInputs(), shared.DefaultInputs); err != nil { return err } } - expectedInputs, err := checkAndFetchExpectedInputForLaunchPlan(workflowInterface.GetInputs(), request.Spec.FixedInputs, request.Spec.DefaultInputs) + expectedInputs, err := checkAndFetchExpectedInputForLaunchPlan(workflowInterface.GetInputs(), request.GetSpec().GetFixedInputs(), request.GetSpec().GetDefaultInputs()) if err != nil { return err } @@ -58,8 +58,8 @@ func ValidateLaunchPlan(ctx context.Context, // Augment default inputs with 
the unbound workflow inputs. request.Spec.DefaultInputs = expectedInputs - if request.Spec.EntityMetadata != nil { - if err := validateNotifications(request.Spec.EntityMetadata.Notifications); err != nil { + if request.GetSpec().GetEntityMetadata() != nil { + if err := validateNotifications(request.GetSpec().GetEntityMetadata().GetNotifications()); err != nil { return err } if request.GetSpec().GetEntityMetadata().GetLaunchConditions() != nil { @@ -74,7 +74,7 @@ func ValidateLaunchPlan(ctx context.Context, func validateSchedule(request *admin.LaunchPlanCreateRequest, expectedInputs *core.ParameterMap) error { schedule := request.GetSpec().GetEntityMetadata().GetSchedule() if schedule.GetCronExpression() != "" || schedule.GetRate() != nil || schedule.GetCronSchedule() != nil { - for key, value := range expectedInputs.Parameters { + for key, value := range expectedInputs.GetParameters() { if value.GetRequired() && key != schedule.GetKickoffTimeInputArg() { return errors.NewFlyteAdminErrorf( codes.InvalidArgument, @@ -82,7 +82,7 @@ func validateSchedule(request *admin.LaunchPlanCreateRequest, expectedInputs *co } } if schedule.GetKickoffTimeInputArg() != "" { - if param, ok := expectedInputs.Parameters[schedule.GetKickoffTimeInputArg()]; !ok { + if param, ok := expectedInputs.GetParameters()[schedule.GetKickoffTimeInputArg()]; !ok { return errors.NewFlyteAdminErrorf( codes.InvalidArgument, "Cannot create a schedule with a KickoffTimeInputArg that does not point to a free input. 
[%v] is not free or does not exist.", schedule.GetKickoffTimeInputArg()) @@ -125,7 +125,7 @@ func checkAndFetchExpectedInputForLaunchPlan( } // If there are no inputs that the workflow requires, there should be none at launch plan as well - if workflowVariableMap == nil || len(workflowVariableMap.Variables) == 0 { + if workflowVariableMap == nil || len(workflowVariableMap.GetVariables()) == 0 { if len(defaultInputMap) > 0 { return nil, errors.NewFlyteAdminErrorf(codes.InvalidArgument, "invalid launch plan default inputs, expected none but found %d", len(defaultInputMap)) @@ -139,7 +139,7 @@ func checkAndFetchExpectedInputForLaunchPlan( }, nil } - workflowExpectedInputMap = workflowVariableMap.Variables + workflowExpectedInputMap = workflowVariableMap.GetVariables() for name, defaultInput := range defaultInputMap { value, ok := workflowExpectedInputMap[name] if !ok { diff --git a/flyteadmin/pkg/manager/impl/validation/launch_plan_validator_test.go b/flyteadmin/pkg/manager/impl/validation/launch_plan_validator_test.go index d5757704da..5ae9101746 100644 --- a/flyteadmin/pkg/manager/impl/validation/launch_plan_validator_test.go +++ b/flyteadmin/pkg/manager/impl/validation/launch_plan_validator_test.go @@ -20,7 +20,7 @@ const ( var lpApplicationConfig = testutils.GetApplicationConfigWithDefaultDomains() func getWorkflowInterface() *core.TypedInterface { - return testutils.GetSampleWorkflowSpecForTest().Template.Interface + return testutils.GetSampleWorkflowSpecForTest().GetTemplate().GetInterface() } func TestValidateLpEmptyProject(t *testing.T) { diff --git a/flyteadmin/pkg/manager/impl/validation/named_entity_validator.go b/flyteadmin/pkg/manager/impl/validation/named_entity_validator.go index e9af05f527..3b8fb6963e 100644 --- a/flyteadmin/pkg/manager/impl/validation/named_entity_validator.go +++ b/flyteadmin/pkg/manager/impl/validation/named_entity_validator.go @@ -13,46 +13,46 @@ import ( var archivableResourceTypes = 
sets.NewInt32(int32(core.ResourceType_WORKFLOW), int32(core.ResourceType_TASK), int32(core.ResourceType_LAUNCH_PLAN)) func ValidateNamedEntityGetRequest(request *admin.NamedEntityGetRequest) error { - if err := ValidateResourceType(request.ResourceType); err != nil { + if err := ValidateResourceType(request.GetResourceType()); err != nil { return err } - if err := ValidateNamedEntityIdentifier(request.Id); err != nil { + if err := ValidateNamedEntityIdentifier(request.GetId()); err != nil { return err } return nil } func ValidateNamedEntityUpdateRequest(request *admin.NamedEntityUpdateRequest) error { - if err := ValidateResourceType(request.ResourceType); err != nil { + if err := ValidateResourceType(request.GetResourceType()); err != nil { return err } - if err := ValidateNamedEntityIdentifier(request.Id); err != nil { + if err := ValidateNamedEntityIdentifier(request.GetId()); err != nil { return err } - if request.Metadata == nil { + if request.GetMetadata() == nil { return shared.GetMissingArgumentError(shared.Metadata) } // Only tasks and workflow resources can be modified from the default state. 
- if request.Metadata.State != admin.NamedEntityState_NAMED_ENTITY_ACTIVE && - !archivableResourceTypes.Has(int32(request.ResourceType)) { + if request.GetMetadata().GetState() != admin.NamedEntityState_NAMED_ENTITY_ACTIVE && + !archivableResourceTypes.Has(int32(request.GetResourceType())) { return errors.NewFlyteAdminErrorf(codes.InvalidArgument, - "Resource [%s] cannot have its state updated", request.ResourceType.String()) + "Resource [%s] cannot have its state updated", request.GetResourceType().String()) } return nil } func ValidateNamedEntityListRequest(request *admin.NamedEntityListRequest) error { - if err := ValidateEmptyStringField(request.Project, shared.Project); err != nil { + if err := ValidateEmptyStringField(request.GetProject(), shared.Project); err != nil { return err } - if err := ValidateEmptyStringField(request.Domain, shared.Domain); err != nil { + if err := ValidateEmptyStringField(request.GetDomain(), shared.Domain); err != nil { return err } - if err := ValidateResourceType(request.ResourceType); err != nil { + if err := ValidateResourceType(request.GetResourceType()); err != nil { return err } - if err := ValidateLimit(request.Limit); err != nil { + if err := ValidateLimit(request.GetLimit()); err != nil { return err } return nil diff --git a/flyteadmin/pkg/manager/impl/validation/node_execution_validator.go b/flyteadmin/pkg/manager/impl/validation/node_execution_validator.go index 9ac920d143..c48dde85cd 100644 --- a/flyteadmin/pkg/manager/impl/validation/node_execution_validator.go +++ b/flyteadmin/pkg/manager/impl/validation/node_execution_validator.go @@ -11,66 +11,66 @@ func ValidateNodeExecutionIdentifier(identifier *core.NodeExecutionIdentifier) e if identifier == nil { return shared.GetMissingArgumentError(shared.ID) } - if identifier.ExecutionId == nil { + if identifier.GetExecutionId() == nil { return shared.GetMissingArgumentError(shared.ExecutionID) } - if identifier.NodeId == "" { + if identifier.GetNodeId() == "" { return 
shared.GetMissingArgumentError(shared.NodeID) } - return ValidateWorkflowExecutionIdentifier(identifier.ExecutionId) + return ValidateWorkflowExecutionIdentifier(identifier.GetExecutionId()) } // Validates that NodeExecutionEventRequests handled by admin include a valid node execution identifier. // In the case the event specifies a DynamicWorkflow in the TaskNodeMetadata, this method also validates the contents of // the dynamic workflow. func ValidateNodeExecutionEventRequest(request *admin.NodeExecutionEventRequest, maxOutputSizeInBytes int64) error { - if request.Event == nil { + if request.GetEvent() == nil { return shared.GetMissingArgumentError(shared.Event) } - err := ValidateNodeExecutionIdentifier(request.Event.Id) + err := ValidateNodeExecutionIdentifier(request.GetEvent().GetId()) if err != nil { return err } - if request.Event.GetTaskNodeMetadata() != nil && request.Event.GetTaskNodeMetadata().DynamicWorkflow != nil { - dynamicWorkflowNodeMetadata := request.Event.GetTaskNodeMetadata().DynamicWorkflow - if err := ValidateIdentifier(dynamicWorkflowNodeMetadata.Id, common.Workflow); err != nil { + if request.GetEvent().GetTaskNodeMetadata() != nil && request.GetEvent().GetTaskNodeMetadata().GetDynamicWorkflow() != nil { + dynamicWorkflowNodeMetadata := request.GetEvent().GetTaskNodeMetadata().GetDynamicWorkflow() + if err := ValidateIdentifier(dynamicWorkflowNodeMetadata.GetId(), common.Workflow); err != nil { return err } - if dynamicWorkflowNodeMetadata.CompiledWorkflow == nil { + if dynamicWorkflowNodeMetadata.GetCompiledWorkflow() == nil { return shared.GetMissingArgumentError("compiled dynamic workflow") } - if dynamicWorkflowNodeMetadata.CompiledWorkflow.Primary == nil { + if dynamicWorkflowNodeMetadata.GetCompiledWorkflow().GetPrimary() == nil { return shared.GetMissingArgumentError("primary dynamic workflow") } - if dynamicWorkflowNodeMetadata.CompiledWorkflow.Primary.Template == nil { + if 
dynamicWorkflowNodeMetadata.GetCompiledWorkflow().GetPrimary().GetTemplate() == nil { return shared.GetMissingArgumentError("primary dynamic workflow template") } - if err := ValidateIdentifier(dynamicWorkflowNodeMetadata.CompiledWorkflow.Primary.Template.Id, common.Workflow); err != nil { + if err := ValidateIdentifier(dynamicWorkflowNodeMetadata.GetCompiledWorkflow().GetPrimary().GetTemplate().GetId(), common.Workflow); err != nil { return err } } - if err := ValidateOutputData(request.Event.GetOutputData(), maxOutputSizeInBytes); err != nil { + if err := ValidateOutputData(request.GetEvent().GetOutputData(), maxOutputSizeInBytes); err != nil { return err } return nil } func ValidateNodeExecutionListRequest(request *admin.NodeExecutionListRequest) error { - if err := ValidateWorkflowExecutionIdentifier(request.WorkflowExecutionId); err != nil { + if err := ValidateWorkflowExecutionIdentifier(request.GetWorkflowExecutionId()); err != nil { return shared.GetMissingArgumentError(shared.ExecutionID) } - if err := ValidateLimit(request.Limit); err != nil { + if err := ValidateLimit(request.GetLimit()); err != nil { return err } return nil } func ValidateNodeExecutionForTaskListRequest(request *admin.NodeExecutionForTaskListRequest) error { - if err := ValidateTaskExecutionIdentifier(request.TaskExecutionId); err != nil { + if err := ValidateTaskExecutionIdentifier(request.GetTaskExecutionId()); err != nil { return err } - if err := ValidateLimit(request.Limit); err != nil { + if err := ValidateLimit(request.GetLimit()); err != nil { return err } return nil diff --git a/flyteadmin/pkg/manager/impl/validation/notifications_validator.go b/flyteadmin/pkg/manager/impl/validation/notifications_validator.go index 8b8f9a68e7..6dfad3a166 100644 --- a/flyteadmin/pkg/manager/impl/validation/notifications_validator.go +++ b/flyteadmin/pkg/manager/impl/validation/notifications_validator.go @@ -23,22 +23,22 @@ func validateNotifications(notifications []*admin.Notification) error { 
for _, notif := range notifications { switch { case notif.GetEmail() != nil: - if err := validateRecipientsEmail(notif.GetEmail().RecipientsEmail); err != nil { + if err := validateRecipientsEmail(notif.GetEmail().GetRecipientsEmail()); err != nil { return err } case notif.GetSlack() != nil: - if err := validateRecipientsEmail(notif.GetSlack().RecipientsEmail); err != nil { + if err := validateRecipientsEmail(notif.GetSlack().GetRecipientsEmail()); err != nil { return err } case notif.GetPagerDuty() != nil: - if err := validateRecipientsEmail(notif.GetPagerDuty().RecipientsEmail); err != nil { + if err := validateRecipientsEmail(notif.GetPagerDuty().GetRecipientsEmail()); err != nil { return err } default: return shared.GetInvalidArgumentError("notification type") } - for _, phase := range notif.Phases { + for _, phase := range notif.GetPhases() { if !common.IsExecutionTerminal(phase) { return shared.GetInvalidArgumentError("phase") } diff --git a/flyteadmin/pkg/manager/impl/validation/project_validator.go b/flyteadmin/pkg/manager/impl/validation/project_validator.go index 76bab900c1..fbdd6a0ca2 100644 --- a/flyteadmin/pkg/manager/impl/validation/project_validator.go +++ b/flyteadmin/pkg/manager/impl/validation/project_validator.go @@ -21,40 +21,40 @@ const maxDescriptionLength = 300 const maxLabelArrayLength = 16 func ValidateProjectRegisterRequest(request *admin.ProjectRegisterRequest) error { - if request.Project == nil { + if request.GetProject() == nil { return shared.GetMissingArgumentError(shared.Project) } - project := request.Project - if err := ValidateEmptyStringField(project.Name, projectName); err != nil { + project := request.GetProject() + if err := ValidateEmptyStringField(project.GetName(), projectName); err != nil { return err } return ValidateProject(project) } func ValidateProjectGetRequest(request *admin.ProjectGetRequest) error { - if err := ValidateEmptyStringField(request.Id, projectID); err != nil { + if err := 
ValidateEmptyStringField(request.GetId(), projectID); err != nil { return err } return nil } func ValidateProject(project *admin.Project) error { - if err := ValidateEmptyStringField(project.Id, projectID); err != nil { + if err := ValidateEmptyStringField(project.GetId(), projectID); err != nil { return err } - if err := validateLabels(project.Labels); err != nil { + if err := validateLabels(project.GetLabels()); err != nil { return err } - if errs := validation.IsDNS1123Label(project.Id); len(errs) > 0 { - return errors.NewFlyteAdminErrorf(codes.InvalidArgument, "invalid project id [%s]: %v", project.Id, errs) + if errs := validation.IsDNS1123Label(project.GetId()); len(errs) > 0 { + return errors.NewFlyteAdminErrorf(codes.InvalidArgument, "invalid project id [%s]: %v", project.GetId(), errs) } - if err := ValidateMaxLengthStringField(project.Name, projectName, maxNameLength); err != nil { + if err := ValidateMaxLengthStringField(project.GetName(), projectName, maxNameLength); err != nil { return err } - if err := ValidateMaxLengthStringField(project.Description, projectDescription, maxDescriptionLength); err != nil { + if err := ValidateMaxLengthStringField(project.GetDescription(), projectDescription, maxDescriptionLength); err != nil { return err } - if project.Domains != nil { + if project.GetDomains() != nil { return errors.NewFlyteAdminError(codes.InvalidArgument, "Domains are currently only set system wide. Please retry without domains included in your request.") } diff --git a/flyteadmin/pkg/manager/impl/validation/shared_execution.go b/flyteadmin/pkg/manager/impl/validation/shared_execution.go index 07e2a26fb0..1ee17d1b8c 100644 --- a/flyteadmin/pkg/manager/impl/validation/shared_execution.go +++ b/flyteadmin/pkg/manager/impl/validation/shared_execution.go @@ -14,9 +14,9 @@ import ( // ValidateClusterForExecutionID validates that the execution denoted by executionId is recorded as executing on `cluster`. 
func ValidateClusterForExecutionID(ctx context.Context, db repoInterfaces.Repository, executionID *core.WorkflowExecutionIdentifier, clusterInEvent string) error { workflowExecution, err := db.ExecutionRepo().Get(ctx, repoInterfaces.Identifier{ - Project: executionID.Project, - Domain: executionID.Domain, - Name: executionID.Name, + Project: executionID.GetProject(), + Domain: executionID.GetDomain(), + Name: executionID.GetName(), }) if err != nil { logger.Debugf(ctx, "Failed to find existing execution with id [%+v] with err: %v", executionID, err) diff --git a/flyteadmin/pkg/manager/impl/validation/signal_validator.go b/flyteadmin/pkg/manager/impl/validation/signal_validator.go index 7412ec0d2f..e7ac9a7133 100644 --- a/flyteadmin/pkg/manager/impl/validation/signal_validator.go +++ b/flyteadmin/pkg/manager/impl/validation/signal_validator.go @@ -15,13 +15,13 @@ import ( ) func ValidateSignalGetOrCreateRequest(ctx context.Context, request *admin.SignalGetOrCreateRequest) error { - if request.Id == nil { + if request.GetId() == nil { return shared.GetMissingArgumentError("id") } - if err := ValidateSignalIdentifier(request.Id); err != nil { + if err := ValidateSignalIdentifier(request.GetId()); err != nil { return err } - if request.Type == nil { + if request.GetType() == nil { return shared.GetMissingArgumentError("type") } @@ -29,39 +29,39 @@ func ValidateSignalGetOrCreateRequest(ctx context.Context, request *admin.Signal } func ValidateSignalIdentifier(identifier *core.SignalIdentifier) error { - if identifier.ExecutionId == nil { + if identifier.GetExecutionId() == nil { return shared.GetMissingArgumentError(shared.ExecutionID) } - if identifier.SignalId == "" { + if identifier.GetSignalId() == "" { return shared.GetMissingArgumentError("signal_id") } - return ValidateWorkflowExecutionIdentifier(identifier.ExecutionId) + return ValidateWorkflowExecutionIdentifier(identifier.GetExecutionId()) } func ValidateSignalListRequest(ctx context.Context, request 
*admin.SignalListRequest) error { - if err := ValidateWorkflowExecutionIdentifier(request.WorkflowExecutionId); err != nil { + if err := ValidateWorkflowExecutionIdentifier(request.GetWorkflowExecutionId()); err != nil { return shared.GetMissingArgumentError(shared.ExecutionID) } - if err := ValidateLimit(request.Limit); err != nil { + if err := ValidateLimit(request.GetLimit()); err != nil { return err } return nil } func ValidateSignalSetRequest(ctx context.Context, db repositoryInterfaces.Repository, request *admin.SignalSetRequest) error { - if request.Id == nil { + if request.GetId() == nil { return shared.GetMissingArgumentError("id") } - if err := ValidateSignalIdentifier(request.Id); err != nil { + if err := ValidateSignalIdentifier(request.GetId()); err != nil { return err } - if request.Value == nil { + if request.GetValue() == nil { return shared.GetMissingArgumentError("value") } // validate that signal value matches type of existing signal - signalModel, err := transformers.CreateSignalModel(request.Id, nil, nil) + signalModel, err := transformers.CreateSignalModel(request.GetId(), nil, nil) if err != nil { return nil } @@ -75,10 +75,10 @@ func ValidateSignalSetRequest(ctx context.Context, db repositoryInterfaces.Repos if err != nil { return err } - if !propellervalidators.IsInstance(request.Value, lookupSignal.Type) { + if !propellervalidators.IsInstance(request.GetValue(), lookupSignal.GetType()) { return errors.NewFlyteAdminErrorf(codes.InvalidArgument, "requested signal value [%v] is not castable to existing signal type [%v]", - request.Value, lookupSignalModel.Type) + request.GetValue(), lookupSignalModel.Type) } return nil diff --git a/flyteadmin/pkg/manager/impl/validation/task_execution_validator.go b/flyteadmin/pkg/manager/impl/validation/task_execution_validator.go index dfe80541b0..dee4b86c3b 100644 --- a/flyteadmin/pkg/manager/impl/validation/task_execution_validator.go +++ 
b/flyteadmin/pkg/manager/impl/validation/task_execution_validator.go @@ -8,20 +8,20 @@ import ( ) func ValidateTaskExecutionRequest(request *admin.TaskExecutionEventRequest, maxOutputSizeInBytes int64) error { - if request.Event == nil { + if request.GetEvent() == nil { return shared.GetMissingArgumentError(shared.Event) } - if request.Event.OccurredAt == nil { + if request.GetEvent().GetOccurredAt() == nil { return shared.GetMissingArgumentError(shared.OccurredAt) } - if err := ValidateOutputData(request.Event.GetOutputData(), maxOutputSizeInBytes); err != nil { + if err := ValidateOutputData(request.GetEvent().GetOutputData(), maxOutputSizeInBytes); err != nil { return err } return ValidateTaskExecutionIdentifier(&core.TaskExecutionIdentifier{ - TaskId: request.Event.TaskId, - NodeExecutionId: request.Event.ParentNodeExecutionId, - RetryAttempt: request.Event.RetryAttempt, + TaskId: request.GetEvent().GetTaskId(), + NodeExecutionId: request.GetEvent().GetParentNodeExecutionId(), + RetryAttempt: request.GetEvent().GetRetryAttempt(), }) } @@ -29,19 +29,19 @@ func ValidateTaskExecutionIdentifier(identifier *core.TaskExecutionIdentifier) e if identifier == nil { return shared.GetMissingArgumentError(shared.ID) } - if identifier.NodeExecutionId == nil { + if identifier.GetNodeExecutionId() == nil { return shared.GetMissingArgumentError(shared.NodeExecutionID) } - if err := ValidateNodeExecutionIdentifier(identifier.NodeExecutionId); err != nil { + if err := ValidateNodeExecutionIdentifier(identifier.GetNodeExecutionId()); err != nil { return err } - if identifier.TaskId == nil { + if identifier.GetTaskId() == nil { return shared.GetMissingArgumentError(shared.TaskID) } - if err := ValidateIdentifier(identifier.TaskId, common.Task); err != nil { + if err := ValidateIdentifier(identifier.GetTaskId(), common.Task); err != nil { return err } @@ -49,10 +49,10 @@ func ValidateTaskExecutionIdentifier(identifier *core.TaskExecutionIdentifier) e } func 
ValidateTaskExecutionListRequest(request *admin.TaskExecutionListRequest) error { - if err := ValidateNodeExecutionIdentifier(request.NodeExecutionId); err != nil { + if err := ValidateNodeExecutionIdentifier(request.GetNodeExecutionId()); err != nil { return err } - if err := ValidateLimit(request.Limit); err != nil { + if err := ValidateLimit(request.GetLimit()); err != nil { return err } return nil diff --git a/flyteadmin/pkg/manager/impl/validation/task_validator.go b/flyteadmin/pkg/manager/impl/validation/task_validator.go index 0f0f86fb0b..991048d97e 100644 --- a/flyteadmin/pkg/manager/impl/validation/task_validator.go +++ b/flyteadmin/pkg/manager/impl/validation/task_validator.go @@ -26,17 +26,17 @@ var whitelistedTaskErr = errors.NewFlyteAdminErrorf(codes.InvalidArgument, "task // This is called for a task with a non-nil container. func validateContainer(task *core.TaskTemplate, platformTaskResources workflowengineInterfaces.TaskResources) error { - if err := ValidateEmptyStringField(task.GetContainer().Image, shared.Image); err != nil { + if err := ValidateEmptyStringField(task.GetContainer().GetImage(), shared.Image); err != nil { return err } - if task.GetContainer().Resources == nil { + if task.GetContainer().GetResources() == nil { return nil } - if err := validateTaskResources(task.Id, platformTaskResources.Limits, task.GetContainer().Resources.Requests, - task.GetContainer().Resources.Limits); err != nil { + if err := validateTaskResources(task.GetId(), platformTaskResources.Limits, task.GetContainer().GetResources().GetRequests(), + task.GetContainer().GetResources().GetLimits()); err != nil { logger.Debugf(context.Background(), "encountered errors validating task resources for [%+v]: %v", - task.Id, err) + task.GetId(), err) return err } return nil @@ -44,23 +44,23 @@ func validateContainer(task *core.TaskTemplate, platformTaskResources workflowen // This is called for a task with a non-nil k8s pod. 
func validateK8sPod(task *core.TaskTemplate, platformTaskResources workflowengineInterfaces.TaskResources) error { - if task.GetK8SPod().PodSpec == nil { + if task.GetK8SPod().GetPodSpec() == nil { return errors.NewFlyteAdminErrorf(codes.InvalidArgument, "invalid TaskSpecification, pod tasks should specify their target as a K8sPod with a defined pod spec") } var podSpec corev1.PodSpec - if err := utils.UnmarshalStructToObj(task.GetK8SPod().PodSpec, &podSpec); err != nil { + if err := utils.UnmarshalStructToObj(task.GetK8SPod().GetPodSpec(), &podSpec); err != nil { logger.Debugf(context.Background(), "failed to unmarshal k8s podspec [%+v]: %v", - task.GetK8SPod().PodSpec, err) + task.GetK8SPod().GetPodSpec(), err) return err } platformTaskResourceLimits := taskResourceSetToMap(platformTaskResources.Limits) for _, container := range podSpec.Containers { - err := validateResource(task.Id, resourceListToQuantity(container.Resources.Requests), + err := validateResource(task.GetId(), resourceListToQuantity(container.Resources.Requests), resourceListToQuantity(container.Resources.Limits), platformTaskResourceLimits) if err != nil { logger.Debugf(context.Background(), "encountered errors validating task resources for [%+v]: %v", - task.Id, err) + task.GetId(), err) return err } } @@ -69,7 +69,7 @@ func validateK8sPod(task *core.TaskTemplate, platformTaskResources workflowengin } func validateRuntimeMetadata(metadata *core.RuntimeMetadata) error { - if err := ValidateEmptyStringField(metadata.Version, shared.RuntimeVersion); err != nil { + if err := ValidateEmptyStringField(metadata.GetVersion(), shared.RuntimeVersion); err != nil { return err } return nil @@ -78,21 +78,21 @@ func validateRuntimeMetadata(metadata *core.RuntimeMetadata) error { func validateTaskTemplate(taskID *core.Identifier, task *core.TaskTemplate, platformTaskResources workflowengineInterfaces.TaskResources, whitelistConfig runtime.WhitelistConfiguration) error { - if err := 
ValidateEmptyStringField(task.Type, shared.Type); err != nil { + if err := ValidateEmptyStringField(task.GetType(), shared.Type); err != nil { return err } - if err := validateTaskType(taskID, task.Type, whitelistConfig); err != nil { + if err := validateTaskType(taskID, task.GetType(), whitelistConfig); err != nil { return err } - if task.Metadata == nil { + if task.GetMetadata() == nil { return shared.GetMissingArgumentError(shared.Metadata) } - if task.Metadata.Runtime != nil { - if err := validateRuntimeMetadata(task.Metadata.Runtime); err != nil { + if task.GetMetadata().GetRuntime() != nil { + if err := validateRuntimeMetadata(task.GetMetadata().GetRuntime()); err != nil { return err } } - if task.Interface == nil { + if task.GetInterface() == nil { // The actual interface proto has nothing to validate. return shared.GetMissingArgumentError(shared.TypedInterface) } @@ -110,16 +110,16 @@ func ValidateTask( ctx context.Context, request *admin.TaskCreateRequest, db repositoryInterfaces.Repository, platformTaskResources workflowengineInterfaces.TaskResources, whitelistConfig runtime.WhitelistConfiguration, applicationConfig runtime.ApplicationConfiguration) error { - if err := ValidateIdentifier(request.Id, common.Task); err != nil { + if err := ValidateIdentifier(request.GetId(), common.Task); err != nil { return err } - if err := ValidateProjectAndDomain(ctx, db, applicationConfig, request.Id.Project, request.Id.Domain); err != nil { + if err := ValidateProjectAndDomain(ctx, db, applicationConfig, request.GetId().GetProject(), request.GetId().GetDomain()); err != nil { return err } - if request.Spec == nil || request.Spec.Template == nil { + if request.GetSpec() == nil || request.GetSpec().GetTemplate() == nil { return shared.GetMissingArgumentError(shared.Spec) } - return validateTaskTemplate(request.Id, request.Spec.Template, platformTaskResources, whitelistConfig) + return validateTaskTemplate(request.GetId(), request.GetSpec().GetTemplate(), 
platformTaskResources, whitelistConfig) } func taskResourceSetToMap( @@ -143,18 +143,18 @@ func taskResourceSetToMap( func addResourceEntryToMap( identifier *core.Identifier, entry *core.Resources_ResourceEntry, resourceEntries *map[core.Resources_ResourceName]resource.Quantity) error { - if _, ok := (*resourceEntries)[entry.Name]; ok { + if _, ok := (*resourceEntries)[entry.GetName()]; ok { return errors.NewFlyteAdminErrorf(codes.InvalidArgument, - "can't specify %v limit for task [%+v] multiple times", entry.Name, identifier) + "can't specify %v limit for task [%+v] multiple times", entry.GetName(), identifier) } - quantity, err := resource.ParseQuantity(entry.Value) + quantity, err := resource.ParseQuantity(entry.GetValue()) if err != nil { return errors.NewFlyteAdminErrorf(codes.InvalidArgument, "Parsing of %v request failed for value %v - reason %v. "+ "Please follow K8s conventions for resources "+ - "https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/", entry.Name, entry.Value, err) + "https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/", entry.GetName(), entry.GetValue(), err) } - (*resourceEntries)[entry.Name] = quantity + (*resourceEntries)[entry.GetName()] = quantity return nil } @@ -184,7 +184,7 @@ func requestedResourcesToQuantity( var requestedToQuantity = make(map[core.Resources_ResourceName]resource.Quantity) for _, limitEntry := range resources { - switch limitEntry.Name { + switch limitEntry.GetName() { case core.Resources_CPU: fallthrough case core.Resources_MEMORY: @@ -199,7 +199,7 @@ func requestedResourcesToQuantity( } if !isWholeNumber(requestedToQuantity[core.Resources_GPU]) { return nil, errors.NewFlyteAdminErrorf(codes.InvalidArgument, - "gpu for [%+v] must be a whole number, got: %s instead", identifier, limitEntry.Value) + "gpu for [%+v] must be a whole number, got: %s instead", identifier, limitEntry.GetValue()) } case core.Resources_EPHEMERAL_STORAGE: err := 
addResourceEntryToMap(identifier, limitEntry, &requestedToQuantity) @@ -252,15 +252,14 @@ func validateResource(identifier *core.Identifier, requestedResourceDefaults, if ok && platformLimitOk && limitQuantity.Value() > platformLimit.Value() { // Also check that the requested limit is less than the platform task limit. return errors.NewFlyteAdminErrorf(codes.InvalidArgument, - "Requested %v limit [%v] is greater than current limit set in the platform configuration"+ - " [%v]. Please contact Flyte Admins to change these limits or consult the configuration", + "Requested %v limit [%v] is greater than current limit set in the platform configuration [%v]. Please contact Flyte Admins to change these limits or consult the configuration", resourceName, limitQuantity.String(), platformLimit.String()) } if platformLimitOk && defaultQuantity.Value() > platformTaskResourceLimits[resourceName].Value() { // Also check that the requested limit is less than the platform task limit. return errors.NewFlyteAdminErrorf(codes.InvalidArgument, - "Requested %v default [%v] is greater than current limit set in the platform configuration"+ - " [%v]. Please contact Flyte Admins to change these limits or consult the configuration", + "Requested %v default [%v] is greater than current limit set in the platform configuration [%v]. Please contact Flyte Admins to change these limits or consult the configuration", + resourceName, defaultQuantity.String(), platformTaskResourceLimits[resourceName].String()) } case core.Resources_GPU: @@ -273,8 +272,7 @@ func validateResource(identifier *core.Identifier, requestedResourceDefaults, platformLimit, platformLimitOk := platformTaskResourceLimits[resourceName] if platformLimitOk && defaultQuantity.Value() > platformLimit.Value() { return errors.NewFlyteAdminErrorf(codes.InvalidArgument, - "Requested %v default [%v] is greater than current limit set in the platform configuration"+ - " [%v]. 
Please contact Flyte Admins to change these limits or consult the configuration", + "Requested %v default [%v] is greater than current limit set in the platform configuration [%v]. Please contact Flyte Admins to change these limits or consult the configuration", resourceName, defaultQuantity.String(), platformLimit.String()) } } @@ -295,14 +293,14 @@ func validateTaskType(taskID *core.Identifier, taskType string, whitelistConfig if scope.Project == "" { // All projects whitelisted return nil - } else if scope.Project != taskID.Project { + } else if scope.Project != taskID.GetProject() { continue } // We have a potential match! Verify that this task type is approved given the specificity of the whitelist. if scope.Domain == "" { // All domains for this project are whitelisted return nil - } else if scope.Domain == taskID.Domain { + } else if scope.Domain == taskID.GetDomain() { return nil } diff --git a/flyteadmin/pkg/manager/impl/validation/validation.go b/flyteadmin/pkg/manager/impl/validation/validation.go index 1535eb4a79..5dd73793e4 100644 --- a/flyteadmin/pkg/manager/impl/validation/validation.go +++ b/flyteadmin/pkg/manager/impl/validation/validation.go @@ -50,10 +50,10 @@ func ValidateMaxMapLengthField(m map[string]string, fieldName string, limit int) } func validateLabels(labels *admin.Labels) error { - if labels == nil || len(labels.Values) == 0 { + if labels == nil || len(labels.GetValues()) == 0 { return nil } - if err := ValidateMaxMapLengthField(labels.Values, "labels", maxLabelArrayLength); err != nil { + if err := ValidateMaxMapLengthField(labels.GetValues(), "labels", maxLabelArrayLength); err != nil { return err } if err := validateLabelsAlphanumeric(labels); err != nil { @@ -65,7 +65,7 @@ func validateLabels(labels *admin.Labels) error { // Given an admin.Labels, checks if the labels exist or not and if it does, checks if the labels are K8s compliant, // i.e. 
alphanumeric + - and _ func validateLabelsAlphanumeric(labels *admin.Labels) error { - for key, value := range labels.Values { + for key, value := range labels.GetValues() { if errs := validation.IsQualifiedName(key); len(errs) > 0 { return errors.NewFlyteAdminErrorf(codes.InvalidArgument, "invalid label key [%s]: %v", key, errs) } @@ -80,16 +80,16 @@ func ValidateIdentifierFieldsSet(id *core.Identifier) error { if id == nil { return shared.GetMissingArgumentError(shared.ID) } - if err := ValidateEmptyStringField(id.Project, shared.Project); err != nil { + if err := ValidateEmptyStringField(id.GetProject(), shared.Project); err != nil { return err } - if err := ValidateEmptyStringField(id.Domain, shared.Domain); err != nil { + if err := ValidateEmptyStringField(id.GetDomain(), shared.Domain); err != nil { return err } - if err := ValidateEmptyStringField(id.Name, shared.Name); err != nil { + if err := ValidateEmptyStringField(id.GetName(), shared.Name); err != nil { return err } - if err := ValidateEmptyStringField(id.Version, shared.Version); err != nil { + if err := ValidateEmptyStringField(id.GetVersion(), shared.Version); err != nil { return err } return nil @@ -100,10 +100,10 @@ func ValidateIdentifier(id *core.Identifier, expectedType common.Entity) error { if id == nil { return shared.GetMissingArgumentError(shared.ID) } - if entityToResourceType[expectedType] != id.ResourceType { + if entityToResourceType[expectedType] != id.GetResourceType() { return errors.NewFlyteAdminErrorf(codes.InvalidArgument, "unexpected resource type %s for identifier [%+v], expected %s instead", - strings.ToLower(id.ResourceType.String()), id, strings.ToLower(entityToResourceType[expectedType].String())) + strings.ToLower(id.GetResourceType().String()), id, strings.ToLower(entityToResourceType[expectedType].String())) } return ValidateIdentifierFieldsSet(id) } @@ -113,13 +113,13 @@ func ValidateNamedEntityIdentifier(id *admin.NamedEntityIdentifier) error { if id == nil { return 
shared.GetMissingArgumentError(shared.ID) } - if err := ValidateEmptyStringField(id.Project, shared.Project); err != nil { + if err := ValidateEmptyStringField(id.GetProject(), shared.Project); err != nil { return err } - if err := ValidateEmptyStringField(id.Domain, shared.Domain); err != nil { + if err := ValidateEmptyStringField(id.GetDomain(), shared.Domain); err != nil { return err } - if err := ValidateEmptyStringField(id.Name, shared.Name); err != nil { + if err := ValidateEmptyStringField(id.GetName(), shared.Name); err != nil { return err } return nil @@ -144,92 +144,92 @@ func ValidateVersion(version string) error { } func ValidateResourceListRequest(request *admin.ResourceListRequest) error { - if request.Id == nil { + if request.GetId() == nil { return shared.GetMissingArgumentError(shared.ID) } - if err := ValidateEmptyStringField(request.Id.Project, shared.Project); err != nil { + if err := ValidateEmptyStringField(request.GetId().GetProject(), shared.Project); err != nil { return err } - if err := ValidateEmptyStringField(request.Id.Domain, shared.Domain); err != nil { + if err := ValidateEmptyStringField(request.GetId().GetDomain(), shared.Domain); err != nil { return err } - if err := ValidateLimit(request.Limit); err != nil { + if err := ValidateLimit(request.GetLimit()); err != nil { return err } return nil } func ValidateDescriptionEntityListRequest(request *admin.DescriptionEntityListRequest) error { - if request.Id == nil { + if request.GetId() == nil { return shared.GetMissingArgumentError(shared.ID) } - if err := ValidateEmptyStringField(request.Id.Project, shared.Project); err != nil { + if err := ValidateEmptyStringField(request.GetId().GetProject(), shared.Project); err != nil { return err } - if err := ValidateEmptyStringField(request.Id.Domain, shared.Domain); err != nil { + if err := ValidateEmptyStringField(request.GetId().GetDomain(), shared.Domain); err != nil { return err } - if err := ValidateEmptyStringField(request.Id.Name, 
shared.Name); err != nil { + if err := ValidateEmptyStringField(request.GetId().GetName(), shared.Name); err != nil { return err } - if err := ValidateLimit(request.Limit); err != nil { + if err := ValidateLimit(request.GetLimit()); err != nil { return err } return nil } func ValidateActiveLaunchPlanRequest(request *admin.ActiveLaunchPlanRequest) error { - if err := ValidateEmptyStringField(request.Id.Project, shared.Project); err != nil { + if err := ValidateEmptyStringField(request.GetId().GetProject(), shared.Project); err != nil { return err } - if err := ValidateEmptyStringField(request.Id.Domain, shared.Domain); err != nil { + if err := ValidateEmptyStringField(request.GetId().GetDomain(), shared.Domain); err != nil { return err } - if err := ValidateEmptyStringField(request.Id.Name, shared.Name); err != nil { + if err := ValidateEmptyStringField(request.GetId().GetName(), shared.Name); err != nil { return err } return nil } func ValidateActiveLaunchPlanListRequest(request *admin.ActiveLaunchPlanListRequest) error { - if err := ValidateEmptyStringField(request.Project, shared.Project); err != nil { + if err := ValidateEmptyStringField(request.GetProject(), shared.Project); err != nil { return err } - if err := ValidateEmptyStringField(request.Domain, shared.Domain); err != nil { + if err := ValidateEmptyStringField(request.GetDomain(), shared.Domain); err != nil { return err } - if err := ValidateLimit(request.Limit); err != nil { + if err := ValidateLimit(request.GetLimit()); err != nil { return err } return nil } func ValidateNamedEntityIdentifierListRequest(request *admin.NamedEntityIdentifierListRequest) error { - if err := ValidateEmptyStringField(request.Project, shared.Project); err != nil { + if err := ValidateEmptyStringField(request.GetProject(), shared.Project); err != nil { return err } - if err := ValidateEmptyStringField(request.Domain, shared.Domain); err != nil { + if err := ValidateEmptyStringField(request.GetDomain(), shared.Domain); err != 
nil { return err } - if err := ValidateLimit(request.Limit); err != nil { + if err := ValidateLimit(request.GetLimit()); err != nil { return err } return nil } func ValidateDescriptionEntityGetRequest(request *admin.ObjectGetRequest) error { - if err := ValidateResourceType(request.Id.ResourceType); err != nil { + if err := ValidateResourceType(request.GetId().GetResourceType()); err != nil { return err } - if err := ValidateIdentifierFieldsSet(request.Id); err != nil { + if err := ValidateIdentifierFieldsSet(request.GetId()); err != nil { return err } return nil } func validateLiteralMap(inputMap *core.LiteralMap, fieldName string) error { - if inputMap != nil && len(inputMap.Literals) > 0 { - for name, fixedInput := range inputMap.Literals { + if inputMap != nil && len(inputMap.GetLiterals()) > 0 { + for name, fixedInput := range inputMap.GetLiterals() { if name == "" { return errors.NewFlyteAdminErrorf(codes.InvalidArgument, "missing key in %s", fieldName) } @@ -251,8 +251,8 @@ func validateParameterMapAllowArtifacts(inputMap *core.ParameterMap, fieldName s } func validateParameterMapDisableArtifacts(inputMap *core.ParameterMap, fieldName string) error { - if inputMap != nil && len(inputMap.Parameters) > 0 { - for name, defaultInput := range inputMap.Parameters { + if inputMap != nil && len(inputMap.GetParameters()) > 0 { + for name, defaultInput := range inputMap.GetParameters() { if defaultInput.GetArtifactQuery() != nil { return errors.NewFlyteAdminErrorf(codes.InvalidArgument, "artifact mode not enabled but query found %s %s", fieldName, name) } @@ -262,8 +262,8 @@ func validateParameterMapDisableArtifacts(inputMap *core.ParameterMap, fieldName } func validateParameterMap(inputMap *core.ParameterMap, fieldName string) error { - if inputMap != nil && len(inputMap.Parameters) > 0 { - for name, defaultInput := range inputMap.Parameters { + if inputMap != nil && len(inputMap.GetParameters()) > 0 { + for name, defaultInput := range inputMap.GetParameters() { if 
name == "" { return errors.NewFlyteAdminErrorf(codes.InvalidArgument, "missing key in %s", fieldName) } @@ -341,7 +341,7 @@ func ValidateDatetime(literal *core.Literal) error { err := timestamp.CheckValid() if err != nil { - return errors.NewFlyteAdminErrorf(codes.InvalidArgument, err.Error()) + return errors.NewFlyteAdminErrorf(codes.InvalidArgument, err.Error()) //nolint } return nil } diff --git a/flyteadmin/pkg/manager/impl/validation/workflow_validator.go b/flyteadmin/pkg/manager/impl/validation/workflow_validator.go index d5d2681375..7a5f36e78b 100644 --- a/flyteadmin/pkg/manager/impl/validation/workflow_validator.go +++ b/flyteadmin/pkg/manager/impl/validation/workflow_validator.go @@ -22,13 +22,13 @@ const numSystemNodes = 2 // A workflow graph always has a start and end node inj func ValidateWorkflow( ctx context.Context, request *admin.WorkflowCreateRequest, db repositoryInterfaces.Repository, config runtime.ApplicationConfiguration) error { - if err := ValidateIdentifier(request.Id, common.Workflow); err != nil { + if err := ValidateIdentifier(request.GetId(), common.Workflow); err != nil { return err } - if err := ValidateProjectAndDomain(ctx, db, config, request.Id.Project, request.Id.Domain); err != nil { + if err := ValidateProjectAndDomain(ctx, db, config, request.GetId().GetProject(), request.GetId().GetDomain()); err != nil { return err } - if request.Spec == nil || request.Spec.Template == nil { + if request.GetSpec() == nil || request.GetSpec().GetTemplate() == nil { return shared.GetMissingArgumentError(shared.Spec) } return nil @@ -47,12 +47,12 @@ func ValidateCompiledWorkflow(identifier *core.Identifier, workflow *admin.Workf // Treat this is unset. There is no limit to compare against. 
return nil } - if workflow.CompiledWorkflow == nil || workflow.CompiledWorkflow.Primary == nil || - workflow.CompiledWorkflow.Primary.Template == nil || workflow.CompiledWorkflow.Primary.Template.Nodes == nil { + if workflow.GetCompiledWorkflow() == nil || workflow.GetCompiledWorkflow().GetPrimary() == nil || + workflow.GetCompiledWorkflow().GetPrimary().GetTemplate() == nil || workflow.GetCompiledWorkflow().GetPrimary().GetTemplate().GetNodes() == nil { logger.Warningf(context.Background(), "workflow [%+v] did not have any primary nodes", identifier) return nil } - numUserNodes := len(workflow.CompiledWorkflow.Primary.Template.Nodes) - numSystemNodes + numUserNodes := len(workflow.GetCompiledWorkflow().GetPrimary().GetTemplate().GetNodes()) - numSystemNodes if numUserNodes > config.GetWorkflowNodeLimit() { return errors.NewFlyteAdminErrorf(codes.InvalidArgument, "number of nodes in workflow [%+v] exceeds limit (%v > %v)", identifier, diff --git a/flyteadmin/pkg/manager/impl/version_manager_test.go b/flyteadmin/pkg/manager/impl/version_manager_test.go index 5cea4a0b15..7b5b5c9bac 100644 --- a/flyteadmin/pkg/manager/impl/version_manager_test.go +++ b/flyteadmin/pkg/manager/impl/version_manager_test.go @@ -24,7 +24,7 @@ func TestVersionManager_GetVersion(t *testing.T) { v, err := vmanager.GetVersion(context.Background(), &admin.GetVersionRequest{}) assert.Nil(t, err) - assert.Equal(t, v.ControlPlaneVersion.BuildTime, buildTime) - assert.Equal(t, v.ControlPlaneVersion.Build, build) - assert.Equal(t, v.ControlPlaneVersion.Version, appversion) + assert.Equal(t, v.GetControlPlaneVersion().GetBuildTime(), buildTime) + assert.Equal(t, v.GetControlPlaneVersion().GetBuild(), build) + assert.Equal(t, v.GetControlPlaneVersion().GetVersion(), appversion) } diff --git a/flyteadmin/pkg/manager/impl/workflow_manager.go b/flyteadmin/pkg/manager/impl/workflow_manager.go index d3bfdc67dd..b99de8773f 100644 --- a/flyteadmin/pkg/manager/impl/workflow_manager.go +++ 
b/flyteadmin/pkg/manager/impl/workflow_manager.go @@ -48,26 +48,26 @@ type WorkflowManager struct { } func getWorkflowContext(ctx context.Context, identifier *core.Identifier) context.Context { - ctx = contextutils.WithProjectDomain(ctx, identifier.Project, identifier.Domain) - return contextutils.WithWorkflowID(ctx, identifier.Name) + ctx = contextutils.WithProjectDomain(ctx, identifier.GetProject(), identifier.GetDomain()) + return contextutils.WithWorkflowID(ctx, identifier.GetName()) } func (w *WorkflowManager) setDefaults(request *admin.WorkflowCreateRequest) (*admin.WorkflowCreateRequest, error) { // TODO: Also add environment and configuration defaults once those have been determined. - if request.Id == nil { + if request.GetId() == nil { return request, errors.NewFlyteAdminError(codes.InvalidArgument, "missing identifier for WorkflowCreateRequest") } - request.Spec.Template.Id = request.Id + request.Spec.Template.Id = request.GetId() return request, nil } func (w *WorkflowManager) getCompiledWorkflow( ctx context.Context, request *admin.WorkflowCreateRequest) (*admin.WorkflowClosure, error) { - reqs, err := w.compiler.GetRequirements(request.Spec.Template, request.Spec.SubWorkflows) + reqs, err := w.compiler.GetRequirements(request.GetSpec().GetTemplate(), request.GetSpec().GetSubWorkflows()) if err != nil { w.metrics.CompilationFailures.Inc() logger.Errorf(ctx, "Failed to get workflow requirements for template [%+v] with err %v", - request.Spec.Template, err) + request.GetSpec().GetTemplate(), err) return &admin.WorkflowClosure{}, err } @@ -76,10 +76,10 @@ func (w *WorkflowManager) getCompiledWorkflow( task, err := util.GetTask(ctx, w.db, taskID) if err != nil { logger.Debugf(ctx, "Failed to get task with id [%+v] when compiling workflow with id [%+v] with err %v", - taskID, request.Id, err) + taskID, request.GetId(), err) return &admin.WorkflowClosure{}, err } - tasks[idx] = task.Closure.CompiledTask + tasks[idx] = task.GetClosure().GetCompiledTask() } 
var launchPlans = make([]compiler.InterfaceProvider, len(reqs.GetRequiredLaunchPlanIds())) @@ -88,7 +88,7 @@ func (w *WorkflowManager) getCompiledWorkflow( launchPlanModel, err = util.GetLaunchPlanModel(ctx, w.db, launchPlanID) if err != nil { logger.Debugf(ctx, "Failed to get launch plan with id [%+v] when compiling workflow with id [%+v] with err %v", - launchPlanID, request.Id, err) + launchPlanID, request.GetId(), err) return &admin.WorkflowClosure{}, err } var launchPlanInterfaceProvider workflowengine.InterfaceProvider @@ -101,16 +101,16 @@ func (w *WorkflowManager) getCompiledWorkflow( launchPlans[idx] = launchPlanInterfaceProvider } - closure, err := w.compiler.CompileWorkflow(request.Spec.Template, request.Spec.SubWorkflows, tasks, launchPlans) + closure, err := w.compiler.CompileWorkflow(request.GetSpec().GetTemplate(), request.GetSpec().GetSubWorkflows(), tasks, launchPlans) if err != nil { w.metrics.CompilationFailures.Inc() - logger.Debugf(ctx, "Failed to compile workflow with id [%+v] with err %v", request.Id, err) + logger.Debugf(ctx, "Failed to compile workflow with id [%+v] with err %v", request.GetId(), err) return &admin.WorkflowClosure{}, err } createdAt, err := ptypes.TimestampProto(time.Now()) if err != nil { return &admin.WorkflowClosure{}, errors.NewFlyteAdminErrorf(codes.Internal, - "Failed to serialize CreatedAt: %v when saving compiled workflow %+v", err, request.Id) + "Failed to serialize CreatedAt: %v when saving compiled workflow %+v", err, request.GetId()) } return &admin.WorkflowClosure{ CompiledWorkflow: closure, @@ -121,10 +121,10 @@ func (w *WorkflowManager) getCompiledWorkflow( func (w *WorkflowManager) createDataReference( ctx context.Context, identifier *core.Identifier) (storage.DataReference, error) { nestedSubKeys := []string{ - identifier.Project, - identifier.Domain, - identifier.Name, - identifier.Version, + identifier.GetProject(), + identifier.GetDomain(), + identifier.GetName(), + identifier.GetVersion(), } nestedKeys 
:= append(w.storagePrefix, nestedSubKeys...) return w.storageClient.ConstructReference(ctx, w.storageClient.GetBaseContainerFQN(ctx), nestedKeys...) @@ -136,10 +136,10 @@ func (w *WorkflowManager) CreateWorkflow( if err := validation.ValidateWorkflow(ctx, request, w.db, w.config.ApplicationConfiguration()); err != nil { return nil, err } - ctx = getWorkflowContext(ctx, request.Id) + ctx = getWorkflowContext(ctx, request.GetId()) finalizedRequest, err := w.setDefaults(request) if err != nil { - logger.Debugf(ctx, "Failed to set defaults for workflow with id [%+v] with err %v", request.Id, err) + logger.Debugf(ctx, "Failed to set defaults for workflow with id [%+v] with err %v", request.GetId(), err) return nil, err } // Validate that the workflow compiles. @@ -147,21 +147,21 @@ func (w *WorkflowManager) CreateWorkflow( if err != nil { logger.Errorf(ctx, "Failed to compile workflow with err: %v", err) return nil, errors.NewFlyteAdminErrorf(codes.InvalidArgument, - "failed to compile workflow for [%+v] with err: %v", request.Id, err) + "failed to compile workflow for [%+v] with err: %v", request.GetId(), err) } err = validation.ValidateCompiledWorkflow( - request.Id, workflowClosure, w.config.RegistrationValidationConfiguration()) + request.GetId(), workflowClosure, w.config.RegistrationValidationConfiguration()) if err != nil { return nil, err } - workflowDigest, err := util.GetWorkflowDigest(ctx, workflowClosure.CompiledWorkflow) + workflowDigest, err := util.GetWorkflowDigest(ctx, workflowClosure.GetCompiledWorkflow()) if err != nil { logger.Errorf(ctx, "failed to compute workflow digest with err %v", err) return nil, err } // Assert that a matching workflow doesn't already exist before uploading the workflow closure. - existingWorkflowModel, err := util.GetWorkflowModel(ctx, w.db, request.Id) + existingWorkflowModel, err := util.GetWorkflowModel(ctx, w.db, request.GetId()) // Check that no identical or conflicting workflows exist. 
if err == nil { // A workflow's structure is uniquely defined by its collection of nodes. @@ -174,29 +174,29 @@ func (w *WorkflowManager) CreateWorkflow( return nil, transformerErr } // A workflow exists with different structure - return nil, errors.NewWorkflowExistsDifferentStructureError(ctx, request, existingWorkflow.Closure.GetCompiledWorkflow(), workflowClosure.GetCompiledWorkflow()) + return nil, errors.NewWorkflowExistsDifferentStructureError(ctx, request, existingWorkflow.GetClosure().GetCompiledWorkflow(), workflowClosure.GetCompiledWorkflow()) } else if flyteAdminError, ok := err.(errors.FlyteAdminError); !ok || flyteAdminError.Code() != codes.NotFound { logger.Debugf(ctx, "Failed to get workflow for comparison in CreateWorkflow with ID [%+v] with err %v", - request.Id, err) + request.GetId(), err) return nil, err } - remoteClosureDataRef, err := w.createDataReference(ctx, request.Spec.Template.Id) + remoteClosureDataRef, err := w.createDataReference(ctx, request.GetSpec().GetTemplate().GetId()) if err != nil { logger.Infof(ctx, "failed to construct data reference for workflow closure with id [%+v] with err %v", - request.Id, err) + request.GetId(), err) return nil, errors.NewFlyteAdminErrorf(codes.Internal, - "failed to construct data reference for workflow closure with id [%+v] and err %v", request.Id, err) + "failed to construct data reference for workflow closure with id [%+v] and err %v", request.GetId(), err) } err = w.storageClient.WriteProtobuf(ctx, remoteClosureDataRef, defaultStorageOptions, workflowClosure) if err != nil { logger.Infof(ctx, "failed to write marshaled workflow with id [%+v] to storage %s with err %v and base container: %s", - request.Id, remoteClosureDataRef.String(), err, w.storageClient.GetBaseContainerFQN(ctx)) + request.GetId(), remoteClosureDataRef.String(), err, w.storageClient.GetBaseContainerFQN(ctx)) return nil, errors.NewFlyteAdminErrorf(codes.Internal, "failed to write marshaled workflow [%+v] to storage %s with err 
%v and base container: %s", - request.Id, remoteClosureDataRef.String(), err, w.storageClient.GetBaseContainerFQN(ctx)) + request.GetId(), remoteClosureDataRef.String(), err, w.storageClient.GetBaseContainerFQN(ctx)) } // Save the workflow & its reference to the offloaded, compiled workflow in the database. workflowModel, err := transformers.CreateWorkflowModel( @@ -207,17 +207,17 @@ func (w *WorkflowManager) CreateWorkflow( finalizedRequest, remoteClosureDataRef.String(), err) return nil, err } - descriptionModel, err := transformers.CreateDescriptionEntityModel(request.Spec.Description, request.Id) + descriptionModel, err := transformers.CreateDescriptionEntityModel(request.GetSpec().GetDescription(), request.GetId()) if err != nil { logger.Errorf(ctx, - "Failed to transform description model [%+v] with err: %v", request.Spec.Description, err) + "Failed to transform description model [%+v] with err: %v", request.GetSpec().GetDescription(), err) return nil, err } if descriptionModel != nil { workflowModel.ShortDescription = descriptionModel.ShortDescription } if err = w.db.WorkflowRepo().Create(ctx, workflowModel, descriptionModel); err != nil { - logger.Infof(ctx, "Failed to create workflow model [%+v] with err %v", request.Id, err) + logger.Infof(ctx, "Failed to create workflow model [%+v] with err %v", request.GetId(), err) return nil, err } w.metrics.TypedInterfaceSizeBytes.Observe(float64(len(workflowModel.TypedInterface))) @@ -226,14 +226,14 @@ func (w *WorkflowManager) CreateWorkflow( } func (w *WorkflowManager) GetWorkflow(ctx context.Context, request *admin.ObjectGetRequest) (*admin.Workflow, error) { - if err := validation.ValidateIdentifier(request.Id, common.Workflow); err != nil { - logger.Debugf(ctx, "invalid identifier [%+v]: %v", request.Id, err) + if err := validation.ValidateIdentifier(request.GetId(), common.Workflow); err != nil { + logger.Debugf(ctx, "invalid identifier [%+v]: %v", request.GetId(), err) return nil, err } - ctx = 
getWorkflowContext(ctx, request.Id) - workflow, err := util.GetWorkflow(ctx, w.db, w.storageClient, request.Id) + ctx = getWorkflowContext(ctx, request.GetId()) + workflow, err := util.GetWorkflow(ctx, w.db, w.storageClient, request.GetId()) if err != nil { - logger.Infof(ctx, "Failed to get workflow with id [%+v] with err %v", request.Id, err) + logger.Infof(ctx, "Failed to get workflow with id [%+v] with err %v", request.GetId(), err) return nil, err } return workflow, nil @@ -246,37 +246,37 @@ func (w *WorkflowManager) ListWorkflows( if err := validation.ValidateResourceListRequest(request); err != nil { return nil, err } - ctx = contextutils.WithProjectDomain(ctx, request.Id.Project, request.Id.Domain) - ctx = contextutils.WithWorkflowID(ctx, request.Id.Name) + ctx = contextutils.WithProjectDomain(ctx, request.GetId().GetProject(), request.GetId().GetDomain()) + ctx = contextutils.WithWorkflowID(ctx, request.GetId().GetName()) filters, err := util.GetDbFilters(util.FilterSpec{ - Project: request.Id.Project, - Domain: request.Id.Domain, - Name: request.Id.Name, - RequestFilters: request.Filters, + Project: request.GetId().GetProject(), + Domain: request.GetId().GetDomain(), + Name: request.GetId().GetName(), + RequestFilters: request.GetFilters(), }, common.Workflow) if err != nil { return nil, err } - sortParameter, err := common.NewSortParameter(request.SortBy, models.WorkflowColumns) + sortParameter, err := common.NewSortParameter(request.GetSortBy(), models.WorkflowColumns) if err != nil { return nil, err } - offset, err := validation.ValidateToken(request.Token) + offset, err := validation.ValidateToken(request.GetToken()) if err != nil { return nil, errors.NewFlyteAdminErrorf(codes.InvalidArgument, - "invalid pagination token %s for ListWorkflows", request.Token) + "invalid pagination token %s for ListWorkflows", request.GetToken()) } listWorkflowsInput := repoInterfaces.ListResourceInput{ - Limit: int(request.Limit), + Limit: int(request.GetLimit()), 
Offset: offset, InlineFilters: filters, SortParameter: sortParameter, } output, err := w.db.WorkflowRepo().List(ctx, listWorkflowsInput) if err != nil { - logger.Debugf(ctx, "Failed to list workflows with [%+v] with err %v", request.Id, err) + logger.Debugf(ctx, "Failed to list workflows with [%+v] with err %v", request.GetId(), err) return nil, err } workflowList, err := transformers.FromWorkflowModels(output.Workflows) @@ -286,7 +286,7 @@ func (w *WorkflowManager) ListWorkflows( return nil, err } var token string - if len(output.Workflows) == int(request.Limit) { + if len(output.Workflows) == int(request.GetLimit()) { token = strconv.Itoa(offset + len(output.Workflows)) } return &admin.WorkflowList{ @@ -301,28 +301,28 @@ func (w *WorkflowManager) ListWorkflowIdentifiers(ctx context.Context, request * logger.Debugf(ctx, "invalid request [%+v]: %v", request, err) return nil, err } - ctx = contextutils.WithProjectDomain(ctx, request.Project, request.Domain) + ctx = contextutils.WithProjectDomain(ctx, request.GetProject(), request.GetDomain()) filters, err := util.GetDbFilters(util.FilterSpec{ - Project: request.Project, - Domain: request.Domain, + Project: request.GetProject(), + Domain: request.GetDomain(), }, common.Workflow) if err != nil { return nil, err } - sortParameter, err := common.NewSortParameter(request.SortBy, models.WorkflowColumns) + sortParameter, err := common.NewSortParameter(request.GetSortBy(), models.WorkflowColumns) if err != nil { return nil, err } - offset, err := validation.ValidateToken(request.Token) + offset, err := validation.ValidateToken(request.GetToken()) if err != nil { return nil, errors.NewFlyteAdminErrorf(codes.InvalidArgument, - "invalid pagination token %s for ListWorkflowIdentifiers", request.Token) + "invalid pagination token %s for ListWorkflowIdentifiers", request.GetToken()) } listWorkflowsInput := repoInterfaces.ListResourceInput{ - Limit: int(request.Limit), + Limit: int(request.GetLimit()), Offset: offset, 
InlineFilters: filters, SortParameter: sortParameter, @@ -331,12 +331,12 @@ func (w *WorkflowManager) ListWorkflowIdentifiers(ctx context.Context, request * output, err := w.db.WorkflowRepo().ListIdentifiers(ctx, listWorkflowsInput) if err != nil { logger.Debugf(ctx, "Failed to list workflow ids with project: %s and domain: %s with err %v", - request.Project, request.Domain, err) + request.GetProject(), request.GetDomain(), err) return nil, err } var token string - if len(output.Workflows) == int(request.Limit) { + if len(output.Workflows) == int(request.GetLimit()) { token = strconv.Itoa(offset + len(output.Workflows)) } entities := transformers.FromWorkflowModelsToIdentifiers(output.Workflows) diff --git a/flyteadmin/pkg/manager/impl/workflow_manager_test.go b/flyteadmin/pkg/manager/impl/workflow_manager_test.go index 8072453bbd..280624a956 100644 --- a/flyteadmin/pkg/manager/impl/workflow_manager_test.go +++ b/flyteadmin/pkg/manager/impl/workflow_manager_test.go @@ -128,7 +128,7 @@ func TestSetWorkflowDefaults(t *testing.T) { request := testutils.GetWorkflowRequest() finalizedRequest, err := workflowManager.(*WorkflowManager).setDefaults(request) assert.NoError(t, err) - assert.True(t, proto.Equal(workflowIdentifier, finalizedRequest.Spec.Template.Id)) + assert.True(t, proto.Equal(workflowIdentifier, finalizedRequest.GetSpec().GetTemplate().GetId())) } func TestCreateWorkflow(t *testing.T) { @@ -309,12 +309,12 @@ func TestGetWorkflow(t *testing.T) { Id: workflowIdentifier, }) assert.NoError(t, err) - assert.Equal(t, "project", workflow.Id.Project) - assert.Equal(t, "domain", workflow.Id.Domain) - assert.Equal(t, "name", workflow.Id.Name) - assert.Equal(t, "version", workflow.Id.Version) - assert.True(t, proto.Equal(testutils.GetWorkflowClosure(), workflow.Closure), - "%+v !=\n %+v", testutils.GetWorkflowClosure(), workflow.Closure) + assert.Equal(t, "project", workflow.GetId().GetProject()) + assert.Equal(t, "domain", workflow.GetId().GetDomain()) + 
assert.Equal(t, "name", workflow.GetId().GetName()) + assert.Equal(t, "version", workflow.GetId().GetVersion()) + assert.True(t, proto.Equal(testutils.GetWorkflowClosure(), workflow.GetClosure()), + "%+v !=\n %+v", testutils.GetWorkflowClosure(), workflow.GetClosure()) } func TestGetWorkflow_DatabaseError(t *testing.T) { @@ -450,13 +450,13 @@ func TestListWorkflows(t *testing.T) { }) assert.NoError(t, err) assert.NotNil(t, workflowList) - assert.Len(t, workflowList.Workflows, 2) + assert.Len(t, workflowList.GetWorkflows(), 2) - for idx, workflow := range workflowList.Workflows { - assert.Equal(t, projectValue, workflow.Id.Project) - assert.Equal(t, domainValue, workflow.Id.Domain) - assert.Equal(t, nameValue, workflow.Id.Name) - assert.Equal(t, fmt.Sprintf("version %v", idx), workflow.Id.Version) + for idx, workflow := range workflowList.GetWorkflows() { + assert.Equal(t, projectValue, workflow.GetId().GetProject()) + assert.Equal(t, domainValue, workflow.GetId().GetDomain()) + assert.Equal(t, nameValue, workflow.GetId().GetName()) + assert.Equal(t, fmt.Sprintf("version %v", idx), workflow.GetId().GetVersion()) assert.True(t, proto.Equal(&admin.WorkflowClosure{ CreatedAt: testutils.MockCreatedAtProto, CompiledWorkflow: &core.CompiledWorkflowClosure{ @@ -466,9 +466,9 @@ func TestListWorkflows(t *testing.T) { }, }, }, - }, workflow.Closure)) + }, workflow.GetClosure())) } - assert.Empty(t, workflowList.Token) + assert.Empty(t, workflowList.GetToken()) } func TestListWorkflows_MissingParameters(t *testing.T) { @@ -584,11 +584,11 @@ func TestWorkflowManager_ListWorkflowIdentifiers(t *testing.T) { }) assert.NoError(t, err) assert.NotNil(t, workflowList) - assert.Len(t, workflowList.Entities, 2) + assert.Len(t, workflowList.GetEntities(), 2) - for _, entity := range workflowList.Entities { - assert.Equal(t, projectValue, entity.Project) - assert.Equal(t, domainValue, entity.Domain) - assert.Equal(t, nameValue, entity.Name) + for _, entity := range 
workflowList.GetEntities() { + assert.Equal(t, projectValue, entity.GetProject()) + assert.Equal(t, domainValue, entity.GetDomain()) + assert.Equal(t, nameValue, entity.GetName()) } } diff --git a/flyteadmin/pkg/repositories/gormimpl/common.go b/flyteadmin/pkg/repositories/gormimpl/common.go index b103ef0e43..7f4d4f370a 100644 --- a/flyteadmin/pkg/repositories/gormimpl/common.go +++ b/flyteadmin/pkg/repositories/gormimpl/common.go @@ -52,17 +52,14 @@ var entityToTableName = map[common.Entity]string{ } var innerJoinExecToNodeExec = fmt.Sprintf( - "INNER JOIN %[1]s ON %[2]s.execution_project = %[1]s.execution_project AND "+ - "%[2]s.execution_domain = %[1]s.execution_domain AND %[2]s.execution_name = %[1]s.execution_name", + "INNER JOIN %[1]s ON %[2]s.execution_project = %[1]s.execution_project AND %[2]s.execution_domain = %[1]s.execution_domain AND %[2]s.execution_name = %[1]s.execution_name", executionTableName, nodeExecutionTableName) var innerJoinExecToTaskExec = fmt.Sprintf( - "INNER JOIN %[1]s ON %[2]s.execution_project = %[1]s.execution_project AND "+ - "%[2]s.execution_domain = %[1]s.execution_domain AND %[2]s.execution_name = %[1]s.execution_name", + "INNER JOIN %[1]s ON %[2]s.execution_project = %[1]s.execution_project AND %[2]s.execution_domain = %[1]s.execution_domain AND %[2]s.execution_name = %[1]s.execution_name", executionTableName, taskExecutionTableName) var innerJoinNodeExecToTaskExec = fmt.Sprintf( - "INNER JOIN %[1]s ON %s.node_id = %[1]s.node_id AND %[2]s.execution_project = %[1]s.execution_project AND "+ - "%[2]s.execution_domain = %[1]s.execution_domain AND %[2]s.execution_name = %[1]s.execution_name", + "INNER JOIN %[1]s ON %s.node_id = %[1]s.node_id AND %[2]s.execution_project = %[1]s.execution_project AND %[2]s.execution_domain = %[1]s.execution_domain AND %[2]s.execution_name = %[1]s.execution_name", nodeExecutionTableName, taskExecutionTableName) // Because dynamic tasks do NOT necessarily register static task definitions, we use a left 
join to not exclude diff --git a/flyteadmin/pkg/repositories/gormimpl/node_execution_repo.go b/flyteadmin/pkg/repositories/gormimpl/node_execution_repo.go index b1772862dc..f39c6df554 100644 --- a/flyteadmin/pkg/repositories/gormimpl/node_execution_repo.go +++ b/flyteadmin/pkg/repositories/gormimpl/node_execution_repo.go @@ -36,11 +36,11 @@ func (r *NodeExecutionRepo) Get(ctx context.Context, input interfaces.NodeExecut timer := r.metrics.GetDuration.Start() tx := r.db.WithContext(ctx).Where(&models.NodeExecution{ NodeExecutionKey: models.NodeExecutionKey{ - NodeID: input.NodeExecutionIdentifier.NodeId, + NodeID: input.NodeExecutionIdentifier.GetNodeId(), ExecutionKey: models.ExecutionKey{ - Project: input.NodeExecutionIdentifier.ExecutionId.Project, - Domain: input.NodeExecutionIdentifier.ExecutionId.Domain, - Name: input.NodeExecutionIdentifier.ExecutionId.Name, + Project: input.NodeExecutionIdentifier.GetExecutionId().GetProject(), + Domain: input.NodeExecutionIdentifier.GetExecutionId().GetDomain(), + Name: input.NodeExecutionIdentifier.GetExecutionId().GetName(), }, }, }).Take(&nodeExecution) @@ -49,11 +49,11 @@ func (r *NodeExecutionRepo) Get(ctx context.Context, input interfaces.NodeExecut if tx.Error != nil && errors.Is(tx.Error, gorm.ErrRecordNotFound) { return models.NodeExecution{}, adminErrors.GetMissingEntityError("node execution", &core.NodeExecutionIdentifier{ - NodeId: input.NodeExecutionIdentifier.NodeId, + NodeId: input.NodeExecutionIdentifier.GetNodeId(), ExecutionId: &core.WorkflowExecutionIdentifier{ - Project: input.NodeExecutionIdentifier.ExecutionId.Project, - Domain: input.NodeExecutionIdentifier.ExecutionId.Domain, - Name: input.NodeExecutionIdentifier.ExecutionId.Name, + Project: input.NodeExecutionIdentifier.GetExecutionId().GetProject(), + Domain: input.NodeExecutionIdentifier.GetExecutionId().GetDomain(), + Name: input.NodeExecutionIdentifier.GetExecutionId().GetName(), }, }) } else if tx.Error != nil { @@ -68,11 +68,11 @@ func (r 
*NodeExecutionRepo) GetWithChildren(ctx context.Context, input interface timer := r.metrics.GetDuration.Start() tx := r.db.WithContext(ctx).Where(&models.NodeExecution{ NodeExecutionKey: models.NodeExecutionKey{ - NodeID: input.NodeExecutionIdentifier.NodeId, + NodeID: input.NodeExecutionIdentifier.GetNodeId(), ExecutionKey: models.ExecutionKey{ - Project: input.NodeExecutionIdentifier.ExecutionId.Project, - Domain: input.NodeExecutionIdentifier.ExecutionId.Domain, - Name: input.NodeExecutionIdentifier.ExecutionId.Name, + Project: input.NodeExecutionIdentifier.GetExecutionId().GetProject(), + Domain: input.NodeExecutionIdentifier.GetExecutionId().GetDomain(), + Name: input.NodeExecutionIdentifier.GetExecutionId().GetName(), }, }, }).Preload("ChildNodeExecutions").Take(&nodeExecution) @@ -81,11 +81,11 @@ func (r *NodeExecutionRepo) GetWithChildren(ctx context.Context, input interface if tx.Error != nil && errors.Is(tx.Error, gorm.ErrRecordNotFound) { return models.NodeExecution{}, adminErrors.GetMissingEntityError("node execution", &core.NodeExecutionIdentifier{ - NodeId: input.NodeExecutionIdentifier.NodeId, + NodeId: input.NodeExecutionIdentifier.GetNodeId(), ExecutionId: &core.WorkflowExecutionIdentifier{ - Project: input.NodeExecutionIdentifier.ExecutionId.Project, - Domain: input.NodeExecutionIdentifier.ExecutionId.Domain, - Name: input.NodeExecutionIdentifier.ExecutionId.Name, + Project: input.NodeExecutionIdentifier.GetExecutionId().GetProject(), + Domain: input.NodeExecutionIdentifier.GetExecutionId().GetDomain(), + Name: input.NodeExecutionIdentifier.GetExecutionId().GetName(), }, }) } else if tx.Error != nil { @@ -144,11 +144,11 @@ func (r *NodeExecutionRepo) Exists(ctx context.Context, input interfaces.NodeExe timer := r.metrics.ExistsDuration.Start() tx := r.db.WithContext(ctx).Select(ID).Where(&models.NodeExecution{ NodeExecutionKey: models.NodeExecutionKey{ - NodeID: input.NodeExecutionIdentifier.NodeId, + NodeID: 
input.NodeExecutionIdentifier.GetNodeId(), ExecutionKey: models.ExecutionKey{ - Project: input.NodeExecutionIdentifier.ExecutionId.Project, - Domain: input.NodeExecutionIdentifier.ExecutionId.Domain, - Name: input.NodeExecutionIdentifier.ExecutionId.Name, + Project: input.NodeExecutionIdentifier.GetExecutionId().GetProject(), + Domain: input.NodeExecutionIdentifier.GetExecutionId().GetDomain(), + Name: input.NodeExecutionIdentifier.GetExecutionId().GetName(), }, }, }).Take(&nodeExecution) diff --git a/flyteadmin/pkg/repositories/gormimpl/task_execution_repo.go b/flyteadmin/pkg/repositories/gormimpl/task_execution_repo.go index d4d30bef85..ba473c2968 100644 --- a/flyteadmin/pkg/repositories/gormimpl/task_execution_repo.go +++ b/flyteadmin/pkg/repositories/gormimpl/task_execution_repo.go @@ -37,17 +37,17 @@ func (r *TaskExecutionRepo) Get(ctx context.Context, input interfaces.GetTaskExe tx := r.db.WithContext(ctx).Where(&models.TaskExecution{ TaskExecutionKey: models.TaskExecutionKey{ TaskKey: models.TaskKey{ - Project: input.TaskExecutionID.TaskId.Project, - Domain: input.TaskExecutionID.TaskId.Domain, - Name: input.TaskExecutionID.TaskId.Name, - Version: input.TaskExecutionID.TaskId.Version, + Project: input.TaskExecutionID.GetTaskId().GetProject(), + Domain: input.TaskExecutionID.GetTaskId().GetDomain(), + Name: input.TaskExecutionID.GetTaskId().GetName(), + Version: input.TaskExecutionID.GetTaskId().GetVersion(), }, NodeExecutionKey: models.NodeExecutionKey{ - NodeID: input.TaskExecutionID.NodeExecutionId.NodeId, + NodeID: input.TaskExecutionID.GetNodeExecutionId().GetNodeId(), ExecutionKey: models.ExecutionKey{ - Project: input.TaskExecutionID.NodeExecutionId.ExecutionId.Project, - Domain: input.TaskExecutionID.NodeExecutionId.ExecutionId.Domain, - Name: input.TaskExecutionID.NodeExecutionId.ExecutionId.Name, + Project: input.TaskExecutionID.GetNodeExecutionId().GetExecutionId().GetProject(), + Domain: 
input.TaskExecutionID.GetNodeExecutionId().GetExecutionId().GetDomain(), + Name: input.TaskExecutionID.GetNodeExecutionId().GetExecutionId().GetName(), }, }, RetryAttempt: &input.TaskExecutionID.RetryAttempt, @@ -59,17 +59,17 @@ func (r *TaskExecutionRepo) Get(ctx context.Context, input interfaces.GetTaskExe return models.TaskExecution{}, flyteAdminDbErrors.GetMissingEntityError("task execution", &core.TaskExecutionIdentifier{ TaskId: &core.Identifier{ - Project: input.TaskExecutionID.TaskId.Project, - Domain: input.TaskExecutionID.TaskId.Domain, - Name: input.TaskExecutionID.TaskId.Name, - Version: input.TaskExecutionID.TaskId.Version, + Project: input.TaskExecutionID.GetTaskId().GetProject(), + Domain: input.TaskExecutionID.GetTaskId().GetDomain(), + Name: input.TaskExecutionID.GetTaskId().GetName(), + Version: input.TaskExecutionID.GetTaskId().GetVersion(), }, NodeExecutionId: &core.NodeExecutionIdentifier{ - NodeId: input.TaskExecutionID.NodeExecutionId.NodeId, + NodeId: input.TaskExecutionID.GetNodeExecutionId().GetNodeId(), ExecutionId: &core.WorkflowExecutionIdentifier{ - Project: input.TaskExecutionID.NodeExecutionId.ExecutionId.Project, - Domain: input.TaskExecutionID.NodeExecutionId.ExecutionId.Domain, - Name: input.TaskExecutionID.NodeExecutionId.ExecutionId.Name, + Project: input.TaskExecutionID.GetNodeExecutionId().GetExecutionId().GetProject(), + Domain: input.TaskExecutionID.GetNodeExecutionId().GetExecutionId().GetDomain(), + Name: input.TaskExecutionID.GetNodeExecutionId().GetExecutionId().GetName(), }, }, }) diff --git a/flyteadmin/pkg/repositories/transformers/description_entity.go b/flyteadmin/pkg/repositories/transformers/description_entity.go index d3a816179b..8e16ef0c6d 100644 --- a/flyteadmin/pkg/repositories/transformers/description_entity.go +++ b/flyteadmin/pkg/repositories/transformers/description_entity.go @@ -26,34 +26,34 @@ func CreateDescriptionEntityModel( var sourceCode models.SourceCode var err error - if 
descriptionEntity.LongDescription != nil { - longDescriptionBytes, err = proto.Marshal(descriptionEntity.LongDescription) + if descriptionEntity.GetLongDescription() != nil { + longDescriptionBytes, err = proto.Marshal(descriptionEntity.GetLongDescription()) if err != nil { logger.Errorf(ctx, "Failed to marshal LongDescription with error: %v", err) return nil, err } } - if descriptionEntity.LongDescription != nil { - longDescriptionBytes, err = proto.Marshal(descriptionEntity.LongDescription) + if descriptionEntity.GetLongDescription() != nil { + longDescriptionBytes, err = proto.Marshal(descriptionEntity.GetLongDescription()) if err != nil { logger.Errorf(ctx, "Failed to marshal LongDescription with error: %v", err) return nil, err } } - if descriptionEntity.SourceCode != nil { - sourceCode = models.SourceCode{Link: descriptionEntity.SourceCode.Link} + if descriptionEntity.GetSourceCode() != nil { + sourceCode = models.SourceCode{Link: descriptionEntity.GetSourceCode().GetLink()} } return &models.DescriptionEntity{ DescriptionEntityKey: models.DescriptionEntityKey{ - ResourceType: id.ResourceType, - Project: id.Project, - Domain: id.Domain, - Name: id.Name, - Version: id.Version, + ResourceType: id.GetResourceType(), + Project: id.GetProject(), + Domain: id.GetDomain(), + Name: id.GetName(), + Version: id.GetVersion(), }, - ShortDescription: descriptionEntity.ShortDescription, + ShortDescription: descriptionEntity.GetShortDescription(), LongDescription: longDescriptionBytes, SourceCode: sourceCode, }, nil diff --git a/flyteadmin/pkg/repositories/transformers/description_entity_test.go b/flyteadmin/pkg/repositories/transformers/description_entity_test.go index 9279ff0f65..b8feeb91b6 100644 --- a/flyteadmin/pkg/repositories/transformers/description_entity_test.go +++ b/flyteadmin/pkg/repositories/transformers/description_entity_test.go @@ -37,7 +37,7 @@ func TestToDescriptionEntityExecutionModel(t *testing.T) { assert.Nil(t, err) assert.Equal(t, shortDescription, 
model.ShortDescription) assert.Equal(t, longDescriptionBytes, model.LongDescription) - assert.Equal(t, sourceCode.Link, model.Link) + assert.Equal(t, sourceCode.GetLink(), model.Link) } func TestFromDescriptionEntityExecutionModel(t *testing.T) { @@ -59,9 +59,9 @@ func TestFromDescriptionEntityExecutionModel(t *testing.T) { SourceCode: models.SourceCode{Link: "https://github/flyte"}, }) assert.Nil(t, err) - assert.Equal(t, descriptionEntity.ShortDescription, shortDescription) - assert.Equal(t, descriptionEntity.LongDescription.IconLink, longDescription.IconLink) - assert.Equal(t, descriptionEntity.SourceCode, sourceCode) + assert.Equal(t, descriptionEntity.GetShortDescription(), shortDescription) + assert.Equal(t, descriptionEntity.GetLongDescription().GetIconLink(), longDescription.GetIconLink()) + assert.Equal(t, descriptionEntity.GetSourceCode(), sourceCode) } func TestFromDescriptionEntityExecutionModels(t *testing.T) { @@ -85,7 +85,7 @@ func TestFromDescriptionEntityExecutionModels(t *testing.T) { }, }) assert.Nil(t, err) - assert.Equal(t, descriptionEntity[0].ShortDescription, shortDescription) - assert.Equal(t, descriptionEntity[0].LongDescription.IconLink, longDescription.IconLink) - assert.Equal(t, descriptionEntity[0].SourceCode, sourceCode) + assert.Equal(t, descriptionEntity[0].GetShortDescription(), shortDescription) + assert.Equal(t, descriptionEntity[0].GetLongDescription().GetIconLink(), longDescription.GetIconLink()) + assert.Equal(t, descriptionEntity[0].GetSourceCode(), sourceCode) } diff --git a/flyteadmin/pkg/repositories/transformers/execution.go b/flyteadmin/pkg/repositories/transformers/execution.go index 711f6bdddb..8943d2303b 100644 --- a/flyteadmin/pkg/repositories/transformers/execution.go +++ b/flyteadmin/pkg/repositories/transformers/execution.go @@ -61,7 +61,7 @@ var ListExecutionTransformerOptions = &ExecutionTransformerOptions{ // CreateExecutionModel transforms a ExecutionCreateRequest to a Execution model func 
CreateExecutionModel(input CreateExecutionModelInput) (*models.Execution, error) { requestSpec := input.RequestSpec - if requestSpec.Metadata == nil { + if requestSpec.GetMetadata() == nil { requestSpec.Metadata = &admin.ExecutionMetadata{} } requestSpec.Metadata.SystemMetadata = &admin.SystemMetadata{ @@ -81,7 +81,7 @@ func CreateExecutionModel(input CreateExecutionModelInput) (*models.Execution, e WorkflowId: input.WorkflowIdentifier, StateChangeDetails: &admin.ExecutionStateChangeDetails{ State: admin.ExecutionState_EXECUTION_ACTIVE, - Principal: requestSpec.Metadata.Principal, + Principal: requestSpec.GetMetadata().GetPrincipal(), OccurredAt: createdAt, }, } @@ -114,12 +114,12 @@ func CreateExecutionModel(input CreateExecutionModelInput) (*models.Execution, e executionModel := &models.Execution{ ExecutionKey: models.ExecutionKey{ - Project: input.WorkflowExecutionID.Project, - Domain: input.WorkflowExecutionID.Domain, - Name: input.WorkflowExecutionID.Name, + Project: input.WorkflowExecutionID.GetProject(), + Domain: input.WorkflowExecutionID.GetDomain(), + Name: input.WorkflowExecutionID.GetName(), }, Spec: spec, - Phase: closure.Phase.String(), + Phase: closure.GetPhase().String(), Closure: closureBytes, WorkflowID: input.WorkflowID, ExecutionCreatedAt: &input.CreatedAt, @@ -129,7 +129,7 @@ func CreateExecutionModel(input CreateExecutionModelInput) (*models.Execution, e Cluster: input.Cluster, InputsURI: input.InputsURI, UserInputsURI: input.UserInputsURI, - User: requestSpec.Metadata.Principal, + User: requestSpec.GetMetadata().GetPrincipal(), State: &activeExecution, LaunchEntity: strings.ToLower(input.LaunchEntity.String()), } @@ -140,8 +140,8 @@ func CreateExecutionModel(input CreateExecutionModelInput) (*models.Execution, e } else { executionModel.LaunchPlanID = input.LaunchPlanID } - if input.RequestSpec.Metadata != nil { - executionModel.Mode = int32(input.RequestSpec.Metadata.Mode) + if input.RequestSpec.GetMetadata() != nil { + executionModel.Mode = 
int32(input.RequestSpec.GetMetadata().GetMode()) } return executionModel, nil @@ -151,13 +151,13 @@ func CreateExecutionModel(input CreateExecutionModelInput) (*models.Execution, e func CreateExecutionTagModel(input CreateExecutionModelInput) ([]*models.ExecutionTag, error) { tags := make([]*models.ExecutionTag, 0) - if input.RequestSpec.Labels != nil { - for k, v := range input.RequestSpec.Labels.Values { + if input.RequestSpec.GetLabels() != nil { + for k, v := range input.RequestSpec.GetLabels().GetValues() { tags = append(tags, &models.ExecutionTag{ ExecutionKey: models.ExecutionKey{ - Project: input.WorkflowExecutionID.Project, - Domain: input.WorkflowExecutionID.Domain, - Name: input.WorkflowExecutionID.Name, + Project: input.WorkflowExecutionID.GetProject(), + Domain: input.WorkflowExecutionID.GetDomain(), + Name: input.WorkflowExecutionID.GetName(), }, Key: k, Value: v, @@ -165,12 +165,12 @@ func CreateExecutionTagModel(input CreateExecutionModelInput) ([]*models.Executi } } - for _, v := range input.RequestSpec.Tags { + for _, v := range input.RequestSpec.GetTags() { tags = append(tags, &models.ExecutionTag{ ExecutionKey: models.ExecutionKey{ - Project: input.WorkflowExecutionID.Project, - Domain: input.WorkflowExecutionID.Domain, - Name: input.WorkflowExecutionID.Name, + Project: input.WorkflowExecutionID.GetProject(), + Domain: input.WorkflowExecutionID.GetDomain(), + Name: input.WorkflowExecutionID.GetName(), }, Key: v, Value: "", @@ -189,10 +189,10 @@ func reassignCluster(ctx context.Context, cluster string, executionID *core.Work if err != nil { return flyteErrs.NewFlyteAdminErrorf(codes.Internal, "Failed to unmarshal execution spec: %v", err) } - if executionSpec.Metadata == nil { + if executionSpec.GetMetadata() == nil { executionSpec.Metadata = &admin.ExecutionMetadata{} } - if executionSpec.Metadata.SystemMetadata == nil { + if executionSpec.GetMetadata().GetSystemMetadata() == nil { executionSpec.Metadata.SystemMetadata = &admin.SystemMetadata{} 
} executionSpec.Metadata.SystemMetadata.ExecutionCluster = cluster @@ -214,64 +214,64 @@ func UpdateExecutionModelState( if err != nil { return flyteErrs.NewFlyteAdminErrorf(codes.Internal, "Failed to unmarshal execution closure: %v", err) } - executionClosure.Phase = request.Event.Phase - executionClosure.UpdatedAt = request.Event.OccurredAt - execution.Phase = request.Event.Phase.String() + executionClosure.Phase = request.GetEvent().GetPhase() + executionClosure.UpdatedAt = request.GetEvent().GetOccurredAt() + execution.Phase = request.GetEvent().GetPhase().String() - occurredAtTimestamp, err := ptypes.Timestamp(request.Event.OccurredAt) + occurredAtTimestamp, err := ptypes.Timestamp(request.GetEvent().GetOccurredAt()) if err != nil { return flyteErrs.NewFlyteAdminErrorf(codes.Internal, "Failed to parse OccurredAt: %v", err) } execution.ExecutionUpdatedAt = &occurredAtTimestamp // only mark the execution started when we get the initial running event - if request.Event.Phase == core.WorkflowExecution_RUNNING { + if request.GetEvent().GetPhase() == core.WorkflowExecution_RUNNING { execution.StartedAt = &occurredAtTimestamp - executionClosure.StartedAt = request.Event.OccurredAt - } else if common.IsExecutionTerminal(request.Event.Phase) { + executionClosure.StartedAt = request.GetEvent().GetOccurredAt() + } else if common.IsExecutionTerminal(request.GetEvent().GetPhase()) { if execution.StartedAt != nil { execution.Duration = occurredAtTimestamp.Sub(*execution.StartedAt) executionClosure.Duration = ptypes.DurationProto(execution.Duration) } else { logger.Infof(context.Background(), - "Cannot compute duration because startedAt was never set, requestId: %v", request.RequestId) + "Cannot compute duration because startedAt was never set, requestId: %v", request.GetRequestId()) } } // Default or empty cluster values do not require updating the execution model. 
- ignoreClusterFromEvent := len(request.Event.ProducerId) == 0 || request.Event.ProducerId == common.DefaultProducerID - logger.Debugf(ctx, "Producer Id [%v]. IgnoreClusterFromEvent [%v]", request.Event.ProducerId, ignoreClusterFromEvent) + ignoreClusterFromEvent := len(request.GetEvent().GetProducerId()) == 0 || request.GetEvent().GetProducerId() == common.DefaultProducerID + logger.Debugf(ctx, "Producer Id [%v]. IgnoreClusterFromEvent [%v]", request.GetEvent().GetProducerId(), ignoreClusterFromEvent) if !ignoreClusterFromEvent { if clusterReassignablePhases.Has(execution.Phase) { - if err := reassignCluster(ctx, request.Event.ProducerId, request.Event.ExecutionId, execution); err != nil { + if err := reassignCluster(ctx, request.GetEvent().GetProducerId(), request.GetEvent().GetExecutionId(), execution); err != nil { return err } - } else if execution.Cluster != request.Event.ProducerId { + } else if execution.Cluster != request.GetEvent().GetProducerId() { errorMsg := fmt.Sprintf("Cannot accept events for running/terminated execution [%v] from cluster [%s],"+ "expected events to originate from [%s]", - request.Event.ExecutionId, request.Event.ProducerId, execution.Cluster) + request.GetEvent().GetExecutionId(), request.GetEvent().GetProducerId(), execution.Cluster) return flyteErrs.NewIncompatibleClusterError(ctx, errorMsg, execution.Cluster) } } - if request.Event.GetOutputUri() != "" { + if request.GetEvent().GetOutputUri() != "" { executionClosure.OutputResult = &admin.ExecutionClosure_Outputs{ Outputs: &admin.LiteralMapBlob{ Data: &admin.LiteralMapBlob_Uri{ - Uri: request.Event.GetOutputUri(), + Uri: request.GetEvent().GetOutputUri(), }, }, } - } else if request.Event.GetOutputData() != nil { + } else if request.GetEvent().GetOutputData() != nil { switch inlineEventDataPolicy { case interfaces.InlineEventDataPolicyStoreInline: executionClosure.OutputResult = &admin.ExecutionClosure_OutputData{ - OutputData: request.Event.GetOutputData(), + OutputData: 
request.GetEvent().GetOutputData(), } default: logger.Debugf(ctx, "Offloading outputs per InlineEventDataPolicy") - uri, err := common.OffloadLiteralMap(ctx, storageClient, request.Event.GetOutputData(), - request.Event.ExecutionId.Project, request.Event.ExecutionId.Domain, request.Event.ExecutionId.Name, OutputsObjectSuffix) + uri, err := common.OffloadLiteralMap(ctx, storageClient, request.GetEvent().GetOutputData(), + request.GetEvent().GetExecutionId().GetProject(), request.GetEvent().GetExecutionId().GetDomain(), request.GetEvent().GetExecutionId().GetName(), OutputsObjectSuffix) if err != nil { return err } @@ -283,11 +283,11 @@ func UpdateExecutionModelState( }, } } - } else if request.Event.GetError() != nil { + } else if request.GetEvent().GetError() != nil { executionClosure.OutputResult = &admin.ExecutionClosure_Error{ - Error: request.Event.GetError(), + Error: request.GetEvent().GetError(), } - k := request.Event.GetError().Kind.String() + k := request.GetEvent().GetError().GetKind().String() execution.ErrorKind = &k execution.ErrorCode = &request.Event.GetError().Code } @@ -372,13 +372,13 @@ func FromExecutionModel(ctx context.Context, executionModel models.Execution, op return nil, flyteErrs.NewFlyteAdminErrorf(codes.Internal, "failed to unmarshal spec") } if len(opts.DefaultNamespace) > 0 { - if spec.Metadata == nil { + if spec.GetMetadata() == nil { spec.Metadata = &admin.ExecutionMetadata{} } - if spec.Metadata.SystemMetadata == nil { + if spec.GetMetadata().GetSystemMetadata() == nil { spec.Metadata.SystemMetadata = &admin.SystemMetadata{} } - if len(spec.GetMetadata().GetSystemMetadata().Namespace) == 0 { + if len(spec.GetMetadata().GetSystemMetadata().GetNamespace()) == 0 { logger.Infof(ctx, "setting execution system metadata namespace to [%s]", opts.DefaultNamespace) spec.Metadata.SystemMetadata.Namespace = opts.DefaultNamespace } @@ -388,7 +388,7 @@ func FromExecutionModel(ctx context.Context, executionModel models.Execution, op if err = 
proto.Unmarshal(executionModel.Closure, &closure); err != nil { return nil, flyteErrs.NewFlyteAdminErrorf(codes.Internal, "failed to unmarshal closure") } - if closure.GetError() != nil && opts != nil && opts.TrimErrorMessage && len(closure.GetError().Message) > 0 { + if closure.GetError() != nil && opts != nil && opts.TrimErrorMessage && len(closure.GetError().GetMessage()) > 0 { trimmedErrOutputResult := closure.GetError() trimmedErrMessage := TrimErrorMessage(trimmedErrOutputResult.GetMessage()) trimmedErrOutputResult.Message = trimmedErrMessage @@ -397,7 +397,7 @@ func FromExecutionModel(ctx context.Context, executionModel models.Execution, op } } - if closure.StateChangeDetails == nil { + if closure.GetStateChangeDetails() == nil { // Update execution state details from model for older executions if closure.StateChangeDetails, err = PopulateDefaultStateChangeDetails(executionModel); err != nil { return nil, err diff --git a/flyteadmin/pkg/repositories/transformers/execution_event.go b/flyteadmin/pkg/repositories/transformers/execution_event.go index 34f3c4d84a..f603af44e6 100644 --- a/flyteadmin/pkg/repositories/transformers/execution_event.go +++ b/flyteadmin/pkg/repositories/transformers/execution_event.go @@ -11,18 +11,18 @@ import ( // Transforms a ExecutionEventCreateRequest to a ExecutionEvent model func CreateExecutionEventModel(request *admin.WorkflowExecutionEventRequest) (*models.ExecutionEvent, error) { - occurredAt, err := ptypes.Timestamp(request.Event.OccurredAt) + occurredAt, err := ptypes.Timestamp(request.GetEvent().GetOccurredAt()) if err != nil { return nil, errors.NewFlyteAdminErrorf(codes.Internal, "failed to marshal occurred at timestamp") } return &models.ExecutionEvent{ ExecutionKey: models.ExecutionKey{ - Project: request.Event.ExecutionId.Project, - Domain: request.Event.ExecutionId.Domain, - Name: request.Event.ExecutionId.Name, + Project: request.GetEvent().GetExecutionId().GetProject(), + Domain: 
request.GetEvent().GetExecutionId().GetDomain(), + Name: request.GetEvent().GetExecutionId().GetName(), }, - RequestID: request.RequestId, + RequestID: request.GetRequestId(), OccurredAt: occurredAt, - Phase: request.Event.Phase.String(), + Phase: request.GetEvent().GetPhase().String(), }, nil } diff --git a/flyteadmin/pkg/repositories/transformers/execution_test.go b/flyteadmin/pkg/repositories/transformers/execution_test.go index c7b9f33e95..5ea50cefe4 100644 --- a/flyteadmin/pkg/repositories/transformers/execution_test.go +++ b/flyteadmin/pkg/repositories/transformers/execution_test.go @@ -78,7 +78,7 @@ func TestCreateExecutionModel(t *testing.T) { Domain: "domain", Name: "name", }, - RequestSpec: execRequest.Spec, + RequestSpec: execRequest.GetSpec(), LaunchPlanID: lpID, WorkflowID: wfID, CreatedAt: createdAt, @@ -103,7 +103,7 @@ func TestCreateExecutionModel(t *testing.T) { assert.Equal(t, sourceID, execution.SourceExecutionID) assert.Equal(t, "launch_plan", execution.LaunchEntity) assert.Equal(t, execution.Phase, core.WorkflowExecution_UNDEFINED.String()) - expectedSpec := execRequest.Spec + expectedSpec := execRequest.GetSpec() expectedSpec.Metadata.Principal = principal expectedSpec.Metadata.SystemMetadata = &admin.SystemMetadata{ ExecutionCluster: cluster, @@ -136,7 +136,7 @@ func TestCreateExecutionModel(t *testing.T) { Domain: "domain", Name: "name", }, - RequestSpec: execRequest.Spec, + RequestSpec: execRequest.GetSpec(), LaunchPlanID: lpID, WorkflowID: wfID, CreatedAt: createdAt, @@ -162,7 +162,7 @@ func TestCreateExecutionModel(t *testing.T) { assert.Equal(t, sourceID, execution.SourceExecutionID) assert.Equal(t, "launch_plan", execution.LaunchEntity) assert.Equal(t, core.WorkflowExecution_FAILED.String(), execution.Phase) - expectedSpec := execRequest.Spec + expectedSpec := execRequest.GetSpec() expectedSpec.Metadata.Principal = principal expectedSpec.Metadata.SystemMetadata = &admin.SystemMetadata{ ExecutionCluster: cluster, @@ -202,7 +202,7 @@ func 
TestCreateExecutionModel(t *testing.T) { Domain: "domain", Name: "name", }, - RequestSpec: execRequest.Spec, + RequestSpec: execRequest.GetSpec(), LaunchPlanID: lpID, WorkflowID: wfID, CreatedAt: createdAt, @@ -228,7 +228,7 @@ func TestCreateExecutionModel(t *testing.T) { assert.Equal(t, sourceID, execution.SourceExecutionID) assert.Equal(t, "launch_plan", execution.LaunchEntity) assert.Equal(t, core.WorkflowExecution_FAILED.String(), execution.Phase) - expectedSpec := execRequest.Spec + expectedSpec := execRequest.GetSpec() expectedSpec.Metadata.Principal = principal expectedSpec.Metadata.SystemMetadata = &admin.SystemMetadata{ ExecutionCluster: cluster, @@ -268,7 +268,7 @@ func TestCreateExecutionModel(t *testing.T) { Domain: "domain", Name: "name", }, - RequestSpec: execRequest.Spec, + RequestSpec: execRequest.GetSpec(), LaunchPlanID: lpID, WorkflowID: wfID, CreatedAt: createdAt, @@ -294,7 +294,7 @@ func TestCreateExecutionModel(t *testing.T) { assert.Equal(t, sourceID, execution.SourceExecutionID) assert.Equal(t, "launch_plan", execution.LaunchEntity) assert.Equal(t, core.WorkflowExecution_FAILED.String(), execution.Phase) - expectedSpec := execRequest.Spec + expectedSpec := execRequest.GetSpec() expectedSpec.Metadata.Principal = principal expectedSpec.Metadata.SystemMetadata = &admin.SystemMetadata{ ExecutionCluster: cluster, @@ -341,7 +341,7 @@ func TestUpdateModelState_UnknownToRunning(t *testing.T) { Phase: core.WorkflowExecution_UNDEFINED, CreatedAt: createdAtProto, } - spec := testutils.GetExecutionRequest().Spec + spec := testutils.GetExecutionRequest().GetSpec() specBytes, _ := proto.Marshal(spec) existingClosureBytes, _ := proto.Marshal(&existingClosure) startedAt := time.Now() @@ -401,7 +401,7 @@ func TestUpdateModelState_RunningToFailed(t *testing.T) { } ec := "foo" ek := core.ExecutionError_SYSTEM - spec := testutils.GetExecutionRequest().Spec + spec := testutils.GetExecutionRequest().GetSpec() specBytes, _ := proto.Marshal(spec) 
existingClosureBytes, _ := proto.Marshal(&existingClosure) executionModel := getRunningExecutionModel(specBytes, existingClosureBytes, startedAt) @@ -474,7 +474,7 @@ func TestUpdateModelState_RunningToSuccess(t *testing.T) { Phase: core.WorkflowExecution_RUNNING, StartedAt: startedAtProto, } - spec := testutils.GetExecutionRequest().Spec + spec := testutils.GetExecutionRequest().GetSpec() specBytes, _ := proto.Marshal(spec) existingClosureBytes, _ := proto.Marshal(&existingClosure) executionModel := getRunningExecutionModel(specBytes, existingClosureBytes, startedAt) @@ -692,7 +692,7 @@ func TestGetExecutionIdentifier(t *testing.T) { } func TestFromExecutionModel(t *testing.T) { - spec := testutils.GetExecutionRequest().Spec + spec := testutils.GetExecutionRequest().GetSpec() specBytes, _ := proto.Marshal(spec) phase := core.WorkflowExecution_RUNNING.String() startedAt := time.Date(2018, 8, 30, 0, 0, 0, 0, time.UTC) @@ -700,7 +700,7 @@ func TestFromExecutionModel(t *testing.T) { startedAtProto, _ := ptypes.TimestampProto(startedAt) createdAtProto, _ := ptypes.TimestampProto(createdAt) closure := admin.ExecutionClosure{ - ComputedInputs: spec.Inputs, + ComputedInputs: spec.GetInputs(), Phase: core.WorkflowExecution_RUNNING, StartedAt: startedAtProto, StateChangeDetails: &admin.ExecutionStateChangeDetails{ @@ -758,15 +758,15 @@ func TestFromExecutionModel_Aborted(t *testing.T) { } execution, err := FromExecutionModel(context.TODO(), executionModel, DefaultExecutionTransformerOptions) assert.Nil(t, err) - assert.Equal(t, core.WorkflowExecution_ABORTED, execution.Closure.Phase) + assert.Equal(t, core.WorkflowExecution_ABORTED, execution.GetClosure().GetPhase()) assert.True(t, proto.Equal(&admin.AbortMetadata{ Cause: abortCause, - }, execution.Closure.GetAbortMetadata())) + }, execution.GetClosure().GetAbortMetadata())) executionModel.Phase = core.WorkflowExecution_RUNNING.String() execution, err = FromExecutionModel(context.TODO(), executionModel, 
DefaultExecutionTransformerOptions) assert.Nil(t, err) - assert.Empty(t, execution.Closure.GetAbortCause()) + assert.Empty(t, execution.GetClosure().GetAbortCause()) } func TestFromExecutionModel_Error(t *testing.T) { @@ -795,8 +795,8 @@ func TestFromExecutionModel_Error(t *testing.T) { expectedExecErr := execErr expectedExecErr.Message = string(make([]byte, trimmedErrMessageLen)) assert.Nil(t, err) - assert.Equal(t, core.WorkflowExecution_FAILED, execution.Closure.Phase) - assert.True(t, proto.Equal(expectedExecErr, execution.Closure.GetError())) + assert.Equal(t, core.WorkflowExecution_FAILED, execution.GetClosure().GetPhase()) + assert.True(t, proto.Equal(expectedExecErr, execution.GetClosure().GetError())) } func TestFromExecutionModel_ValidUTF8TrimmedErrorMsg(t *testing.T) { @@ -843,11 +843,11 @@ func TestFromExecutionModel_OverwriteNamespace(t *testing.T) { DefaultNamespace: overwrittenNamespace, }) assert.NoError(t, err) - assert.Equal(t, execution.GetSpec().GetMetadata().GetSystemMetadata().Namespace, overwrittenNamespace) + assert.Equal(t, execution.GetSpec().GetMetadata().GetSystemMetadata().GetNamespace(), overwrittenNamespace) } func TestFromExecutionModels(t *testing.T) { - spec := testutils.GetExecutionRequest().Spec + spec := testutils.GetExecutionRequest().GetSpec() specBytes, _ := proto.Marshal(spec) phase := core.WorkflowExecution_SUCCEEDED.String() startedAt := time.Date(2018, 8, 30, 0, 0, 0, 0, time.UTC) @@ -857,7 +857,7 @@ func TestFromExecutionModels(t *testing.T) { duration := 2 * time.Minute durationProto := ptypes.DurationProto(duration) closure := admin.ExecutionClosure{ - ComputedInputs: spec.Inputs, + ComputedInputs: spec.GetInputs(), Phase: core.WorkflowExecution_RUNNING, StartedAt: startedAtProto, Duration: durationProto, @@ -914,7 +914,7 @@ func TestUpdateModelState_WithClusterInformation(t *testing.T) { Phase: core.WorkflowExecution_UNDEFINED, CreatedAt: createdAtProto, } - spec := testutils.GetExecutionRequest().Spec + spec := 
testutils.GetExecutionRequest().GetSpec() specBytes, _ := proto.Marshal(spec) existingClosureBytes, _ := proto.Marshal(&existingClosure) startedAt := time.Now() @@ -982,7 +982,7 @@ func TestReassignCluster(t *testing.T) { } t.Run("happy case", func(t *testing.T) { - spec := testutils.GetExecutionRequest().Spec + spec := testutils.GetExecutionRequest().GetSpec() spec.Metadata = &admin.ExecutionMetadata{ SystemMetadata: &admin.SystemMetadata{ ExecutionCluster: oldCluster, @@ -1000,10 +1000,10 @@ func TestReassignCluster(t *testing.T) { var updatedSpec admin.ExecutionSpec err = proto.Unmarshal(executionModel.Spec, &updatedSpec) assert.NoError(t, err) - assert.Equal(t, newCluster, updatedSpec.Metadata.SystemMetadata.ExecutionCluster) + assert.Equal(t, newCluster, updatedSpec.GetMetadata().GetSystemMetadata().GetExecutionCluster()) }) t.Run("happy case - initialize cluster", func(t *testing.T) { - spec := testutils.GetExecutionRequest().Spec + spec := testutils.GetExecutionRequest().GetSpec() specBytes, _ := proto.Marshal(spec) executionModel := models.Execution{ Spec: specBytes, @@ -1015,7 +1015,7 @@ func TestReassignCluster(t *testing.T) { var updatedSpec admin.ExecutionSpec err = proto.Unmarshal(executionModel.Spec, &updatedSpec) assert.NoError(t, err) - assert.Equal(t, newCluster, updatedSpec.Metadata.SystemMetadata.ExecutionCluster) + assert.Equal(t, newCluster, updatedSpec.GetMetadata().GetSystemMetadata().GetExecutionCluster()) }) t.Run("invalid existing spec", func(t *testing.T) { executionModel := models.Execution{ @@ -1040,9 +1040,9 @@ func TestGetExecutionStateFromModel(t *testing.T) { executionStatus, err := PopulateDefaultStateChangeDetails(executionModel) assert.Nil(t, err) assert.NotNil(t, executionStatus) - assert.Equal(t, admin.ExecutionState_EXECUTION_ACTIVE, executionStatus.State) - assert.NotNil(t, executionStatus.OccurredAt) - assert.Equal(t, createdAtProto, executionStatus.OccurredAt) + assert.Equal(t, admin.ExecutionState_EXECUTION_ACTIVE, 
executionStatus.GetState()) + assert.NotNil(t, executionStatus.GetOccurredAt()) + assert.Equal(t, createdAtProto, executionStatus.GetOccurredAt()) }) t.Run("incorrect created at", func(t *testing.T) { createdAt := time.Unix(math.MinInt64, math.MinInt32).UTC() @@ -1072,10 +1072,10 @@ func TestUpdateExecutionModelStateChangeDetails(t *testing.T) { err = proto.Unmarshal(execModel.Closure, closure) assert.Nil(t, err) assert.NotNil(t, closure) - assert.NotNil(t, closure.StateChangeDetails) - assert.Equal(t, admin.ExecutionState_EXECUTION_ARCHIVED, closure.StateChangeDetails.State) - assert.Equal(t, "dummyUser", closure.StateChangeDetails.Principal) - assert.Equal(t, statetUpdateAtProto, closure.StateChangeDetails.OccurredAt) + assert.NotNil(t, closure.GetStateChangeDetails()) + assert.Equal(t, admin.ExecutionState_EXECUTION_ARCHIVED, closure.GetStateChangeDetails().GetState()) + assert.Equal(t, "dummyUser", closure.GetStateChangeDetails().GetPrincipal()) + assert.Equal(t, statetUpdateAtProto, closure.GetStateChangeDetails().GetOccurredAt()) }) t.Run("bad closure", func(t *testing.T) { diff --git a/flyteadmin/pkg/repositories/transformers/launch_plan.go b/flyteadmin/pkg/repositories/transformers/launch_plan.go index a7b33736d1..acfa14282a 100644 --- a/flyteadmin/pkg/repositories/transformers/launch_plan.go +++ b/flyteadmin/pkg/repositories/transformers/launch_plan.go @@ -16,10 +16,10 @@ func CreateLaunchPlan( expectedOutputs *core.VariableMap) *admin.LaunchPlan { return &admin.LaunchPlan{ - Id: request.Id, - Spec: request.Spec, + Id: request.GetId(), + Spec: request.GetSpec(), Closure: &admin.LaunchPlanClosure{ - ExpectedInputs: request.Spec.DefaultInputs, + ExpectedInputs: request.GetSpec().GetDefaultInputs(), ExpectedOutputs: expectedOutputs, }, } @@ -31,22 +31,22 @@ func CreateLaunchPlanModel( workflowRepoID uint, digest []byte, initState admin.LaunchPlanState) (models.LaunchPlan, error) { - spec, err := proto.Marshal(launchPlan.Spec) + spec, err := 
proto.Marshal(launchPlan.GetSpec()) if err != nil { return models.LaunchPlan{}, errors.NewFlyteAdminError(codes.Internal, "Failed to serialize launch plan spec") } - closure, err := proto.Marshal(launchPlan.Closure) + closure, err := proto.Marshal(launchPlan.GetClosure()) if err != nil { return models.LaunchPlan{}, errors.NewFlyteAdminError(codes.Internal, "Failed to serialize launch plan closure") } var launchConditionType models.LaunchConditionType scheduleType := models.LaunchPlanScheduleTypeNONE - if launchPlan.Spec.EntityMetadata != nil && launchPlan.Spec.EntityMetadata.Schedule != nil { - if launchPlan.Spec.EntityMetadata.Schedule.GetCronExpression() != "" || launchPlan.Spec.EntityMetadata.Schedule.GetCronSchedule() != nil { + if launchPlan.GetSpec().GetEntityMetadata() != nil && launchPlan.GetSpec().GetEntityMetadata().GetSchedule() != nil { + if launchPlan.GetSpec().GetEntityMetadata().GetSchedule().GetCronExpression() != "" || launchPlan.GetSpec().GetEntityMetadata().GetSchedule().GetCronSchedule() != nil { scheduleType = models.LaunchPlanScheduleTypeCRON launchConditionType = models.LaunchConditionTypeSCHED - } else if launchPlan.Spec.EntityMetadata.Schedule.GetRate() != nil { + } else if launchPlan.GetSpec().GetEntityMetadata().GetSchedule().GetRate() != nil { scheduleType = models.LaunchPlanScheduleTypeRATE launchConditionType = models.LaunchConditionTypeSCHED } @@ -56,10 +56,10 @@ func CreateLaunchPlanModel( lpModel := models.LaunchPlan{ LaunchPlanKey: models.LaunchPlanKey{ - Project: launchPlan.Id.Project, - Domain: launchPlan.Id.Domain, - Name: launchPlan.Id.Name, - Version: launchPlan.Id.Version, + Project: launchPlan.GetId().GetProject(), + Domain: launchPlan.GetId().GetDomain(), + Name: launchPlan.GetId().GetName(), + Version: launchPlan.GetId().GetVersion(), }, Spec: spec, State: &state, diff --git a/flyteadmin/pkg/repositories/transformers/launch_plan_test.go b/flyteadmin/pkg/repositories/transformers/launch_plan_test.go index 
65846de42b..c4551f89e5 100644 --- a/flyteadmin/pkg/repositories/transformers/launch_plan_test.go +++ b/flyteadmin/pkg/repositories/transformers/launch_plan_test.go @@ -39,8 +39,8 @@ func TestCreateLaunchPlan(t *testing.T) { launchPlan := CreateLaunchPlan(request, expectedOutputs) assert.True(t, proto.Equal( &admin.LaunchPlan{ - Id: request.Id, - Spec: request.Spec, + Id: request.GetId(), + Spec: request.GetSpec(), Closure: &admin.LaunchPlanClosure{ ExpectedInputs: expectedInputs, ExpectedOutputs: expectedOutputs, @@ -54,8 +54,8 @@ func TestToLaunchPlanModel(t *testing.T) { launchPlanDigest := []byte("launch plan") launchPlan := &admin.LaunchPlan{ - Id: lpRequest.Id, - Spec: lpRequest.Spec, + Id: lpRequest.GetId(), + Spec: lpRequest.GetSpec(), Closure: &admin.LaunchPlanClosure{ ExpectedInputs: expectedInputs, ExpectedOutputs: expectedOutputs, @@ -70,11 +70,11 @@ func TestToLaunchPlanModel(t *testing.T) { assert.Equal(t, "version", launchPlanModel.Version) assert.Equal(t, workflowID, launchPlanModel.WorkflowID) - expectedSpec, _ := proto.Marshal(lpRequest.Spec) + expectedSpec, _ := proto.Marshal(lpRequest.GetSpec()) assert.Equal(t, expectedSpec, launchPlanModel.Spec) assert.Equal(t, models.LaunchPlanScheduleTypeNONE, launchPlanModel.ScheduleType) - expectedClosure := launchPlan.Closure + expectedClosure := launchPlan.GetClosure() var actualClosure admin.LaunchPlanClosure err = proto.Unmarshal(launchPlanModel.Closure, &actualClosure) @@ -101,8 +101,8 @@ func testLaunchPlanWithCronInternal(t *testing.T, lpRequest *admin.LaunchPlanCre launchPlanDigest := []byte("launch plan") launchPlan := &admin.LaunchPlan{ - Id: lpRequest.Id, - Spec: lpRequest.Spec, + Id: lpRequest.GetId(), + Spec: lpRequest.GetSpec(), Closure: &admin.LaunchPlanClosure{ ExpectedInputs: expectedInputs, ExpectedOutputs: expectedOutputs, @@ -117,11 +117,11 @@ func testLaunchPlanWithCronInternal(t *testing.T, lpRequest *admin.LaunchPlanCre assert.Equal(t, "version", launchPlanModel.Version) 
assert.Equal(t, workflowID, launchPlanModel.WorkflowID) - expectedSpec, _ := proto.Marshal(lpRequest.Spec) + expectedSpec, _ := proto.Marshal(lpRequest.GetSpec()) assert.Equal(t, expectedSpec, launchPlanModel.Spec) assert.Equal(t, models.LaunchPlanScheduleTypeCRON, launchPlanModel.ScheduleType) - expectedClosure := launchPlan.Closure + expectedClosure := launchPlan.GetClosure() var actualClosure admin.LaunchPlanClosure err = proto.Unmarshal(launchPlanModel.Closure, &actualClosure) @@ -137,8 +137,8 @@ func TestToLaunchPlanModelWithFixedRateSchedule(t *testing.T) { launchPlanDigest := []byte("launch plan") launchPlan := &admin.LaunchPlan{ - Id: lpRequest.Id, - Spec: lpRequest.Spec, + Id: lpRequest.GetId(), + Spec: lpRequest.GetSpec(), Closure: &admin.LaunchPlanClosure{ ExpectedInputs: expectedInputs, ExpectedOutputs: expectedOutputs, @@ -153,11 +153,11 @@ func TestToLaunchPlanModelWithFixedRateSchedule(t *testing.T) { assert.Equal(t, "version", launchPlanModel.Version) assert.Equal(t, workflowID, launchPlanModel.WorkflowID) - expectedSpec, _ := proto.Marshal(lpRequest.Spec) + expectedSpec, _ := proto.Marshal(lpRequest.GetSpec()) assert.Equal(t, expectedSpec, launchPlanModel.Spec) assert.Equal(t, models.LaunchPlanScheduleTypeRATE, launchPlanModel.ScheduleType) - expectedClosure := launchPlan.Closure + expectedClosure := launchPlan.GetClosure() var actualClosure admin.LaunchPlanClosure err = proto.Unmarshal(launchPlanModel.Closure, &actualClosure) @@ -174,13 +174,13 @@ func TestFromLaunchPlanModel(t *testing.T) { updatedAt := createdAt.Add(time.Minute) updatedAtProto, _ := ptypes.TimestampProto(updatedAt) closure := admin.LaunchPlanClosure{ - ExpectedInputs: lpRequest.Spec.DefaultInputs, - ExpectedOutputs: workflowRequest.Spec.Template.Interface.Outputs, + ExpectedInputs: lpRequest.GetSpec().GetDefaultInputs(), + ExpectedOutputs: workflowRequest.GetSpec().GetTemplate().GetInterface().GetOutputs(), CreatedAt: createdAtProto, UpdatedAt: updatedAtProto, State: 
admin.LaunchPlanState_ACTIVE, } - specBytes, _ := proto.Marshal(lpRequest.Spec) + specBytes, _ := proto.Marshal(lpRequest.GetSpec()) closureBytes, _ := proto.Marshal(&closure) model := models.LaunchPlan{ @@ -206,9 +206,9 @@ func TestFromLaunchPlanModel(t *testing.T) { Domain: "domain", Name: "name", Version: "version", - }, lp.Id)) - assert.True(t, proto.Equal(&closure, lp.Closure)) - assert.True(t, proto.Equal(lpRequest.Spec, lp.Spec)) + }, lp.GetId())) + assert.True(t, proto.Equal(&closure, lp.GetClosure())) + assert.True(t, proto.Equal(lpRequest.GetSpec(), lp.GetSpec())) } func TestFromLaunchPlanModels(t *testing.T) { @@ -220,13 +220,13 @@ func TestFromLaunchPlanModels(t *testing.T) { updatedAt := createdAt.Add(time.Minute) updatedAtProto, _ := ptypes.TimestampProto(updatedAt) closure := admin.LaunchPlanClosure{ - ExpectedInputs: lpRequest.Spec.DefaultInputs, - ExpectedOutputs: workflowRequest.Spec.Template.Interface.Outputs, + ExpectedInputs: lpRequest.GetSpec().GetDefaultInputs(), + ExpectedOutputs: workflowRequest.GetSpec().GetTemplate().GetInterface().GetOutputs(), CreatedAt: createdAtProto, UpdatedAt: updatedAtProto, State: admin.LaunchPlanState_ACTIVE, } - specBytes, _ := proto.Marshal(lpRequest.Spec) + specBytes, _ := proto.Marshal(lpRequest.GetSpec()) closureBytes, _ := proto.Marshal(&closure) m1 := models.LaunchPlan{ @@ -272,7 +272,7 @@ func TestFromLaunchPlanModels(t *testing.T) { Domain: "staging", Name: "othername", Version: "versionsecond", - }, lp[1].Id)) - assert.True(t, proto.Equal(&closure, lp[1].Closure)) - assert.True(t, proto.Equal(lpRequest.Spec, lp[1].Spec)) + }, lp[1].GetId())) + assert.True(t, proto.Equal(&closure, lp[1].GetClosure())) + assert.True(t, proto.Equal(lpRequest.GetSpec(), lp[1].GetSpec())) } diff --git a/flyteadmin/pkg/repositories/transformers/named_entity.go b/flyteadmin/pkg/repositories/transformers/named_entity.go index 14c5818786..646d673923 100644 --- a/flyteadmin/pkg/repositories/transformers/named_entity.go +++ 
b/flyteadmin/pkg/repositories/transformers/named_entity.go @@ -6,16 +6,16 @@ import ( ) func CreateNamedEntityModel(request *admin.NamedEntityUpdateRequest) models.NamedEntity { - stateInt := int32(request.Metadata.State) + stateInt := int32(request.GetMetadata().GetState()) return models.NamedEntity{ NamedEntityKey: models.NamedEntityKey{ - ResourceType: request.ResourceType, - Project: request.Id.Project, - Domain: request.Id.Domain, - Name: request.Id.Name, + ResourceType: request.GetResourceType(), + Project: request.GetId().GetProject(), + Domain: request.GetId().GetDomain(), + Name: request.GetId().GetName(), }, NamedEntityMetadataFields: models.NamedEntityMetadataFields{ - Description: request.Metadata.Description, + Description: request.GetMetadata().GetDescription(), State: &stateInt, }, } diff --git a/flyteadmin/pkg/repositories/transformers/node_execution.go b/flyteadmin/pkg/repositories/transformers/node_execution.go index 817f53290a..107e9efb70 100644 --- a/flyteadmin/pkg/repositories/transformers/node_execution.go +++ b/flyteadmin/pkg/repositories/transformers/node_execution.go @@ -30,7 +30,7 @@ type ToNodeExecutionModelInput struct { func addNodeRunningState(request *admin.NodeExecutionEventRequest, nodeExecutionModel *models.NodeExecution, closure *admin.NodeExecutionClosure) error { - occurredAt, err := ptypes.Timestamp(request.Event.OccurredAt) + occurredAt, err := ptypes.Timestamp(request.GetEvent().GetOccurredAt()) if err != nil { return errors.NewFlyteAdminErrorf(codes.Internal, "failed to unmarshal occurredAt with error: %v", err) } @@ -49,10 +49,10 @@ func addTerminalState( ctx context.Context, request *admin.NodeExecutionEventRequest, nodeExecutionModel *models.NodeExecution, closure *admin.NodeExecutionClosure, inlineEventDataPolicy interfaces.InlineEventDataPolicy, storageClient *storage.DataStore) error { - if closure.StartedAt == nil { + if closure.GetStartedAt() == nil { logger.Warning(context.Background(), "node execution is missing 
StartedAt") } else { - endTime, err := ptypes.Timestamp(request.Event.OccurredAt) + endTime, err := ptypes.Timestamp(request.GetEvent().GetOccurredAt()) if err != nil { return errors.NewFlyteAdminErrorf( codes.Internal, "Failed to parse node execution occurred at timestamp: %v", err) @@ -62,21 +62,21 @@ func addTerminalState( } // Serialize output results (if they exist) - if request.Event.GetOutputUri() != "" { + if request.GetEvent().GetOutputUri() != "" { closure.OutputResult = &admin.NodeExecutionClosure_OutputUri{ - OutputUri: request.Event.GetOutputUri(), + OutputUri: request.GetEvent().GetOutputUri(), } - } else if request.Event.GetOutputData() != nil { + } else if request.GetEvent().GetOutputData() != nil { switch inlineEventDataPolicy { case interfaces.InlineEventDataPolicyStoreInline: closure.OutputResult = &admin.NodeExecutionClosure_OutputData{ - OutputData: request.Event.GetOutputData(), + OutputData: request.GetEvent().GetOutputData(), } default: logger.Debugf(ctx, "Offloading outputs per InlineEventDataPolicy") - uri, err := common.OffloadLiteralMap(ctx, storageClient, request.Event.GetOutputData(), - request.Event.Id.ExecutionId.Project, request.Event.Id.ExecutionId.Domain, request.Event.Id.ExecutionId.Name, - request.Event.Id.NodeId, OutputsObjectSuffix) + uri, err := common.OffloadLiteralMap(ctx, storageClient, request.GetEvent().GetOutputData(), + request.GetEvent().GetId().GetExecutionId().GetProject(), request.GetEvent().GetId().GetExecutionId().GetDomain(), request.GetEvent().GetId().GetExecutionId().GetName(), + request.GetEvent().GetId().GetNodeId(), OutputsObjectSuffix) if err != nil { return err } @@ -84,15 +84,15 @@ func addTerminalState( OutputUri: uri.String(), } } - } else if request.Event.GetError() != nil { + } else if request.GetEvent().GetError() != nil { closure.OutputResult = &admin.NodeExecutionClosure_Error{ - Error: request.Event.GetError(), + Error: request.GetEvent().GetError(), } - k := 
request.Event.GetError().Kind.String() + k := request.GetEvent().GetError().GetKind().String() nodeExecutionModel.ErrorKind = &k nodeExecutionModel.ErrorCode = &request.Event.GetError().Code } - closure.DeckUri = request.Event.DeckUri + closure.DeckUri = request.GetEvent().GetDeckUri() return nil } @@ -100,47 +100,47 @@ func addTerminalState( func CreateNodeExecutionModel(ctx context.Context, input ToNodeExecutionModelInput) (*models.NodeExecution, error) { nodeExecution := &models.NodeExecution{ NodeExecutionKey: models.NodeExecutionKey{ - NodeID: input.Request.Event.Id.NodeId, + NodeID: input.Request.GetEvent().GetId().GetNodeId(), ExecutionKey: models.ExecutionKey{ - Project: input.Request.Event.Id.ExecutionId.Project, - Domain: input.Request.Event.Id.ExecutionId.Domain, - Name: input.Request.Event.Id.ExecutionId.Name, + Project: input.Request.GetEvent().GetId().GetExecutionId().GetProject(), + Domain: input.Request.GetEvent().GetId().GetExecutionId().GetDomain(), + Name: input.Request.GetEvent().GetId().GetExecutionId().GetName(), }, }, - Phase: input.Request.Event.Phase.String(), + Phase: input.Request.GetEvent().GetPhase().String(), } - reportedAt := input.Request.Event.ReportedAt - if reportedAt == nil || (reportedAt.Seconds == 0 && reportedAt.Nanos == 0) { - reportedAt = input.Request.Event.OccurredAt + reportedAt := input.Request.GetEvent().GetReportedAt() + if reportedAt == nil || (reportedAt.GetSeconds() == 0 && reportedAt.GetNanos() == 0) { + reportedAt = input.Request.GetEvent().GetOccurredAt() } closure := admin.NodeExecutionClosure{ - Phase: input.Request.Event.Phase, - CreatedAt: input.Request.Event.OccurredAt, + Phase: input.Request.GetEvent().GetPhase(), + CreatedAt: input.Request.GetEvent().GetOccurredAt(), UpdatedAt: reportedAt, } nodeExecutionMetadata := admin.NodeExecutionMetaData{ - RetryGroup: input.Request.Event.RetryGroup, - SpecNodeId: input.Request.Event.SpecNodeId, - IsParentNode: input.Request.Event.IsParent, - IsDynamic: 
input.Request.Event.IsDynamic, - IsArray: input.Request.Event.IsArray, + RetryGroup: input.Request.GetEvent().GetRetryGroup(), + SpecNodeId: input.Request.GetEvent().GetSpecNodeId(), + IsParentNode: input.Request.GetEvent().GetIsParent(), + IsDynamic: input.Request.GetEvent().GetIsDynamic(), + IsArray: input.Request.GetEvent().GetIsArray(), } err := handleNodeExecutionInputs(ctx, nodeExecution, input.Request, input.StorageClient) if err != nil { return nil, err } - if input.Request.Event.Phase == core.NodeExecution_RUNNING { + if input.Request.GetEvent().GetPhase() == core.NodeExecution_RUNNING { err := addNodeRunningState(input.Request, nodeExecution, &closure) if err != nil { return nil, err } } - if common.IsNodeExecutionTerminal(input.Request.Event.Phase) { + if common.IsNodeExecutionTerminal(input.Request.GetEvent().GetPhase()) { err := addTerminalState(ctx, input.Request, nodeExecution, &closure, input.InlineEventDataPolicy, input.StorageClient) if err != nil { return nil, err @@ -148,16 +148,16 @@ func CreateNodeExecutionModel(ctx context.Context, input ToNodeExecutionModelInp } // Update TaskNodeMetadata, which includes caching information today. 
- if input.Request.Event.GetTaskNodeMetadata() != nil { + if input.Request.GetEvent().GetTaskNodeMetadata() != nil { targetMetadata := &admin.NodeExecutionClosure_TaskNodeMetadata{ TaskNodeMetadata: &admin.TaskNodeMetadata{ - CheckpointUri: input.Request.Event.GetTaskNodeMetadata().CheckpointUri, + CheckpointUri: input.Request.GetEvent().GetTaskNodeMetadata().GetCheckpointUri(), }, } - if input.Request.Event.GetTaskNodeMetadata().CatalogKey != nil { - st := input.Request.Event.GetTaskNodeMetadata().GetCacheStatus().String() - targetMetadata.TaskNodeMetadata.CacheStatus = input.Request.Event.GetTaskNodeMetadata().GetCacheStatus() - targetMetadata.TaskNodeMetadata.CatalogKey = input.Request.Event.GetTaskNodeMetadata().GetCatalogKey() + if input.Request.GetEvent().GetTaskNodeMetadata().GetCatalogKey() != nil { + st := input.Request.GetEvent().GetTaskNodeMetadata().GetCacheStatus().String() + targetMetadata.TaskNodeMetadata.CacheStatus = input.Request.GetEvent().GetTaskNodeMetadata().GetCacheStatus() + targetMetadata.TaskNodeMetadata.CatalogKey = input.Request.GetEvent().GetTaskNodeMetadata().GetCatalogKey() nodeExecution.CacheStatus = &st } closure.TargetMetadata = targetMetadata @@ -175,7 +175,7 @@ func CreateNodeExecutionModel(ctx context.Context, input ToNodeExecutionModelInp } nodeExecution.Closure = marshaledClosure nodeExecution.NodeExecutionMetadata = marshaledNodeExecutionMetadata - nodeExecutionCreatedAt, err := ptypes.Timestamp(input.Request.Event.OccurredAt) + nodeExecutionCreatedAt, err := ptypes.Timestamp(input.Request.GetEvent().GetOccurredAt()) if err != nil { return nil, errors.NewFlyteAdminErrorf(codes.Internal, "failed to read event timestamp") } @@ -185,14 +185,14 @@ func CreateNodeExecutionModel(ctx context.Context, input ToNodeExecutionModelInp return nil, errors.NewFlyteAdminErrorf(codes.Internal, "failed to read event reported_at timestamp") } nodeExecution.NodeExecutionUpdatedAt = &nodeExecutionUpdatedAt - if 
input.Request.Event.ParentTaskMetadata != nil { + if input.Request.GetEvent().GetParentTaskMetadata() != nil { nodeExecution.ParentTaskExecutionID = input.ParentTaskExecutionID } nodeExecution.ParentID = input.ParentID nodeExecution.DynamicWorkflowRemoteClosureReference = input.DynamicWorkflowRemoteClosure internalData := &genModel.NodeExecutionInternalData{ - EventVersion: input.Request.Event.EventVersion, + EventVersion: input.Request.GetEvent().GetEventVersion(), } internalDataBytes, err := proto.Marshal(internalData) if err != nil { @@ -216,21 +216,21 @@ func UpdateNodeExecutionModel( return errors.NewFlyteAdminErrorf(codes.Internal, "failed to unmarshal node execution closure with error: %+v", err) } - nodeExecutionModel.Phase = request.Event.Phase.String() - nodeExecutionClosure.Phase = request.Event.Phase - reportedAt := request.Event.ReportedAt - if reportedAt == nil || (reportedAt.Seconds == 0 && reportedAt.Nanos == 0) { - reportedAt = request.Event.OccurredAt + nodeExecutionModel.Phase = request.GetEvent().GetPhase().String() + nodeExecutionClosure.Phase = request.GetEvent().GetPhase() + reportedAt := request.GetEvent().GetReportedAt() + if reportedAt == nil || (reportedAt.GetSeconds() == 0 && reportedAt.GetNanos() == 0) { + reportedAt = request.GetEvent().GetOccurredAt() } nodeExecutionClosure.UpdatedAt = reportedAt - if request.Event.Phase == core.NodeExecution_RUNNING { + if request.GetEvent().GetPhase() == core.NodeExecution_RUNNING { err := addNodeRunningState(request, nodeExecutionModel, &nodeExecutionClosure) if err != nil { return err } } - if common.IsNodeExecutionTerminal(request.Event.Phase) { + if common.IsNodeExecutionTerminal(request.GetEvent().GetPhase()) { err := addTerminalState(ctx, request, nodeExecutionModel, &nodeExecutionClosure, inlineEventDataPolicy, storageClient) if err != nil { return err @@ -247,24 +247,24 @@ func UpdateNodeExecutionModel( } // Update TaskNodeMetadata, which includes caching information today. 
- if request.Event.GetTaskNodeMetadata() != nil { + if request.GetEvent().GetTaskNodeMetadata() != nil { targetMetadata := &admin.NodeExecutionClosure_TaskNodeMetadata{ TaskNodeMetadata: &admin.TaskNodeMetadata{ - CheckpointUri: request.Event.GetTaskNodeMetadata().CheckpointUri, + CheckpointUri: request.GetEvent().GetTaskNodeMetadata().GetCheckpointUri(), }, } - if request.Event.GetTaskNodeMetadata().CatalogKey != nil { - st := request.Event.GetTaskNodeMetadata().GetCacheStatus().String() - targetMetadata.TaskNodeMetadata.CacheStatus = request.Event.GetTaskNodeMetadata().GetCacheStatus() - targetMetadata.TaskNodeMetadata.CatalogKey = request.Event.GetTaskNodeMetadata().GetCatalogKey() + if request.GetEvent().GetTaskNodeMetadata().GetCatalogKey() != nil { + st := request.GetEvent().GetTaskNodeMetadata().GetCacheStatus().String() + targetMetadata.TaskNodeMetadata.CacheStatus = request.GetEvent().GetTaskNodeMetadata().GetCacheStatus() + targetMetadata.TaskNodeMetadata.CatalogKey = request.GetEvent().GetTaskNodeMetadata().GetCatalogKey() nodeExecutionModel.CacheStatus = &st } nodeExecutionClosure.TargetMetadata = targetMetadata // if this is a dynamic task then maintain the DynamicJobSpecUri - dynamicWorkflowMetadata := request.Event.GetTaskNodeMetadata().DynamicWorkflow - if dynamicWorkflowMetadata != nil && len(dynamicWorkflowMetadata.DynamicJobSpecUri) > 0 { - nodeExecutionClosure.DynamicJobSpecUri = dynamicWorkflowMetadata.DynamicJobSpecUri + dynamicWorkflowMetadata := request.GetEvent().GetTaskNodeMetadata().GetDynamicWorkflow() + if dynamicWorkflowMetadata != nil && len(dynamicWorkflowMetadata.GetDynamicJobSpecUri()) > 0 { + nodeExecutionClosure.DynamicJobSpecUri = dynamicWorkflowMetadata.GetDynamicJobSpecUri() } } @@ -284,7 +284,7 @@ func UpdateNodeExecutionModel( // In the case of dynamic nodes reporting DYNAMIC_RUNNING, the IsParent and IsDynamic bits will be set for this event. // Update the node execution metadata accordingly. 
- if request.Event.IsParent || request.Event.IsDynamic || request.Event.IsArray { + if request.GetEvent().GetIsParent() || request.GetEvent().GetIsDynamic() || request.GetEvent().GetIsArray() { var nodeExecutionMetadata admin.NodeExecutionMetaData if len(nodeExecutionModel.NodeExecutionMetadata) > 0 { if err := proto.Unmarshal(nodeExecutionModel.NodeExecutionMetadata, &nodeExecutionMetadata); err != nil { @@ -294,13 +294,13 @@ func UpdateNodeExecutionModel( } // Not every event sends IsParent and IsDynamic as an artifact of how propeller handles dynamic nodes. // Only explicitly set the fields, when they're set in the event itself. - if request.Event.IsParent { + if request.GetEvent().GetIsParent() { nodeExecutionMetadata.IsParentNode = true } - if request.Event.IsDynamic { + if request.GetEvent().GetIsDynamic() { nodeExecutionMetadata.IsDynamic = true } - if request.Event.IsArray { + if request.GetEvent().GetIsArray() { nodeExecutionMetadata.IsArray = true } nodeExecMetadataBytes, err := proto.Marshal(&nodeExecutionMetadata) @@ -321,7 +321,7 @@ func FromNodeExecutionModel(nodeExecutionModel models.NodeExecution, opts *Execu return nil, errors.NewFlyteAdminErrorf(codes.Internal, "failed to unmarshal closure") } - if closure.GetError() != nil && opts != nil && opts.TrimErrorMessage && len(closure.GetError().Message) > 0 { + if closure.GetError() != nil && opts != nil && opts.TrimErrorMessage && len(closure.GetError().GetMessage()) > 0 { trimmedErrOutputResult := closure.GetError() trimmedErrMessage := TrimErrorMessage(trimmedErrOutputResult.GetMessage()) trimmedErrOutputResult.Message = trimmedErrMessage @@ -337,7 +337,7 @@ func FromNodeExecutionModel(nodeExecutionModel models.NodeExecution, opts *Execu } // TODO: delete this block and references to preloading child node executions no earlier than Q3 2022 // This is required for historical reasons because propeller did not always send IsParent or IsDynamic in events. 
- if !(nodeExecutionMetadata.IsParentNode || nodeExecutionMetadata.IsDynamic) { + if !(nodeExecutionMetadata.GetIsParentNode() || nodeExecutionMetadata.GetIsDynamic()) { if len(nodeExecutionModel.ChildNodeExecutions) > 0 { nodeExecutionMetadata.IsParentNode = true if len(nodeExecutionModel.DynamicWorkflowRemoteClosureReference) > 0 { @@ -380,14 +380,14 @@ func handleNodeExecutionInputs(ctx context.Context, // Inputs are static over the duration of the node execution, no need to update them when they're already set return nil } - switch request.Event.GetInputValue().(type) { + switch request.GetEvent().GetInputValue().(type) { case *event.NodeExecutionEvent_InputUri: - logger.Debugf(ctx, "saving node execution input URI [%s]", request.Event.GetInputUri()) - nodeExecutionModel.InputURI = request.Event.GetInputUri() + logger.Debugf(ctx, "saving node execution input URI [%s]", request.GetEvent().GetInputUri()) + nodeExecutionModel.InputURI = request.GetEvent().GetInputUri() case *event.NodeExecutionEvent_InputData: - uri, err := common.OffloadLiteralMap(ctx, storageClient, request.Event.GetInputData(), - request.Event.Id.ExecutionId.Project, request.Event.Id.ExecutionId.Domain, request.Event.Id.ExecutionId.Name, - request.Event.Id.NodeId, InputsObjectSuffix) + uri, err := common.OffloadLiteralMap(ctx, storageClient, request.GetEvent().GetInputData(), + request.GetEvent().GetId().GetExecutionId().GetProject(), request.GetEvent().GetId().GetExecutionId().GetDomain(), request.GetEvent().GetId().GetExecutionId().GetName(), + request.GetEvent().GetId().GetNodeId(), InputsObjectSuffix) if err != nil { return err } diff --git a/flyteadmin/pkg/repositories/transformers/node_execution_event.go b/flyteadmin/pkg/repositories/transformers/node_execution_event.go index c817054499..88c4d8be0e 100644 --- a/flyteadmin/pkg/repositories/transformers/node_execution_event.go +++ b/flyteadmin/pkg/repositories/transformers/node_execution_event.go @@ -11,21 +11,21 @@ import ( // Transforms a 
NodeExecutionEventRequest to a NodeExecutionEvent model func CreateNodeExecutionEventModel(request *admin.NodeExecutionEventRequest) (*models.NodeExecutionEvent, error) { - occurredAt, err := ptypes.Timestamp(request.Event.OccurredAt) + occurredAt, err := ptypes.Timestamp(request.GetEvent().GetOccurredAt()) if err != nil { return nil, errors.NewFlyteAdminErrorf(codes.Internal, "failed to marshal occurred at timestamp") } return &models.NodeExecutionEvent{ NodeExecutionKey: models.NodeExecutionKey{ - NodeID: request.Event.Id.NodeId, + NodeID: request.GetEvent().GetId().GetNodeId(), ExecutionKey: models.ExecutionKey{ - Project: request.Event.Id.ExecutionId.Project, - Domain: request.Event.Id.ExecutionId.Domain, - Name: request.Event.Id.ExecutionId.Name, + Project: request.GetEvent().GetId().GetExecutionId().GetProject(), + Domain: request.GetEvent().GetId().GetExecutionId().GetDomain(), + Name: request.GetEvent().GetId().GetExecutionId().GetName(), }, }, - RequestID: request.RequestId, + RequestID: request.GetRequestId(), OccurredAt: occurredAt, - Phase: request.Event.Phase.String(), + Phase: request.GetEvent().GetPhase().String(), }, nil } diff --git a/flyteadmin/pkg/repositories/transformers/node_execution_test.go b/flyteadmin/pkg/repositories/transformers/node_execution_test.go index a52c8e76a3..e37d312612 100644 --- a/flyteadmin/pkg/repositories/transformers/node_execution_test.go +++ b/flyteadmin/pkg/repositories/transformers/node_execution_test.go @@ -72,7 +72,7 @@ func TestAddRunningState(t *testing.T) { err := addNodeRunningState(&request, &nodeExecutionModel, &closure) assert.Nil(t, err) assert.Equal(t, startedAt, *nodeExecutionModel.StartedAt) - assert.True(t, proto.Equal(startedAtProto, closure.StartedAt)) + assert.True(t, proto.Equal(startedAtProto, closure.GetStartedAt())) } func TestAddTerminalState_OutputURI(t *testing.T) { @@ -251,9 +251,9 @@ func TestCreateNodeExecutionModel(t *testing.T) { UpdatedAt: occurredAtProto, TargetMetadata: 
&admin.NodeExecutionClosure_TaskNodeMetadata{ TaskNodeMetadata: &admin.TaskNodeMetadata{ - CacheStatus: request.Event.GetTaskNodeMetadata().CacheStatus, - CatalogKey: request.Event.GetTaskNodeMetadata().CatalogKey, - CheckpointUri: request.Event.GetTaskNodeMetadata().CheckpointUri, + CacheStatus: request.GetEvent().GetTaskNodeMetadata().GetCacheStatus(), + CatalogKey: request.GetEvent().GetTaskNodeMetadata().GetCatalogKey(), + CheckpointUri: request.GetEvent().GetTaskNodeMetadata().GetCheckpointUri(), }, }, } @@ -266,7 +266,7 @@ func TestCreateNodeExecutionModel(t *testing.T) { EventVersion: 2, } internalDataBytes, _ := proto.Marshal(internalData) - cacheStatus := request.Event.GetTaskNodeMetadata().CacheStatus.String() + cacheStatus := request.GetEvent().GetTaskNodeMetadata().GetCacheStatus().String() assert.Equal(t, &models.NodeExecution{ NodeExecutionKey: models.NodeExecutionKey{ NodeID: "node id", @@ -383,7 +383,7 @@ func TestUpdateNodeExecutionModel(t *testing.T) { assert.Equal(t, occurredAt, *nodeExecutionModel.StartedAt) assert.EqualValues(t, occurredAt, *nodeExecutionModel.NodeExecutionUpdatedAt) assert.NotNil(t, nodeExecutionModel.CacheStatus) - assert.Equal(t, *nodeExecutionModel.CacheStatus, request.Event.GetTaskNodeMetadata().CacheStatus.String()) + assert.Equal(t, *nodeExecutionModel.CacheStatus, request.GetEvent().GetTaskNodeMetadata().GetCacheStatus().String()) assert.Equal(t, nodeExecutionModel.DynamicWorkflowRemoteClosureReference, dynamicWorkflowClosureRef) var closure = &admin.NodeExecutionClosure{ @@ -392,12 +392,12 @@ func TestUpdateNodeExecutionModel(t *testing.T) { UpdatedAt: occurredAtProto, TargetMetadata: &admin.NodeExecutionClosure_TaskNodeMetadata{ TaskNodeMetadata: &admin.TaskNodeMetadata{ - CacheStatus: request.Event.GetTaskNodeMetadata().CacheStatus, - CatalogKey: request.Event.GetTaskNodeMetadata().CatalogKey, - CheckpointUri: request.Event.GetTaskNodeMetadata().CheckpointUri, + CacheStatus: 
request.GetEvent().GetTaskNodeMetadata().GetCacheStatus(), + CatalogKey: request.GetEvent().GetTaskNodeMetadata().GetCatalogKey(), + CheckpointUri: request.GetEvent().GetTaskNodeMetadata().GetCheckpointUri(), }, }, - DynamicJobSpecUri: request.Event.GetTaskNodeMetadata().DynamicWorkflow.DynamicJobSpecUri, + DynamicJobSpecUri: request.GetEvent().GetTaskNodeMetadata().GetDynamicWorkflow().GetDynamicJobSpecUri(), } var closureBytes, _ = proto.Marshal(closure) assert.Equal(t, nodeExecutionModel.Closure, closureBytes) @@ -553,7 +553,7 @@ func TestFromNodeExecutionModel_Error(t *testing.T) { expectedExecErr := execErr expectedExecErr.Message = string(make([]byte, trimmedErrMessageLen)) assert.Nil(t, err) - assert.True(t, proto.Equal(expectedExecErr, nodeExecution.Closure.GetError())) + assert.True(t, proto.Equal(expectedExecErr, nodeExecution.GetClosure().GetError())) } func TestFromNodeExecutionModelWithChildren(t *testing.T) { diff --git a/flyteadmin/pkg/repositories/transformers/project.go b/flyteadmin/pkg/repositories/transformers/project.go index e1e6e90f84..b9690cecc5 100644 --- a/flyteadmin/pkg/repositories/transformers/project.go +++ b/flyteadmin/pkg/repositories/transformers/project.go @@ -14,12 +14,12 @@ type CreateProjectModelInput struct { } func CreateProjectModel(project *admin.Project) models.Project { - stateInt := int32(project.State) - if project.Labels == nil { + stateInt := int32(project.GetState()) + if project.GetLabels() == nil { return models.Project{ - Identifier: project.Id, - Name: project.Name, - Description: project.Description, + Identifier: project.GetId(), + Name: project.GetName(), + Description: project.GetDescription(), State: &stateInt, } } @@ -28,9 +28,9 @@ func CreateProjectModel(project *admin.Project) models.Project { return models.Project{} } return models.Project{ - Identifier: project.Id, - Name: project.Name, - Description: project.Description, + Identifier: project.GetId(), + Name: project.GetName(), + Description: 
project.GetDescription(), Labels: projectBytes, State: &stateInt, } @@ -46,7 +46,7 @@ func FromProjectModel(projectModel models.Project, domains []*admin.Domain) *adm Id: projectModel.Identifier, Name: projectModel.Name, Description: projectModel.Description, - Labels: projectDeserialized.Labels, + Labels: projectDeserialized.GetLabels(), State: admin.Project_ProjectState(*projectModel.State), } project.Domains = domains diff --git a/flyteadmin/pkg/repositories/transformers/project_test.go b/flyteadmin/pkg/repositories/transformers/project_test.go index 914194f1dc..d9bedb2038 100644 --- a/flyteadmin/pkg/repositories/transformers/project_test.go +++ b/flyteadmin/pkg/repositories/transformers/project_test.go @@ -95,10 +95,10 @@ func TestFromProjectModels(t *testing.T) { projects := FromProjectModels(projectModels, domains) assert.Len(t, projects, 2) for index, project := range projects { - assert.Equal(t, fmt.Sprintf("proj%v_id", index+1), project.Id) - assert.Equal(t, fmt.Sprintf("proj%v_name", index+1), project.Name) - assert.Equal(t, fmt.Sprintf("proj%v_description", index+1), project.Description) - assert.Equal(t, admin.Project_ACTIVE, project.State) - assert.EqualValues(t, domains, project.Domains) + assert.Equal(t, fmt.Sprintf("proj%v_id", index+1), project.GetId()) + assert.Equal(t, fmt.Sprintf("proj%v_name", index+1), project.GetName()) + assert.Equal(t, fmt.Sprintf("proj%v_description", index+1), project.GetDescription()) + assert.Equal(t, admin.Project_ACTIVE, project.GetState()) + assert.EqualValues(t, domains, project.GetDomains()) } } diff --git a/flyteadmin/pkg/repositories/transformers/resource.go b/flyteadmin/pkg/repositories/transformers/resource.go index 36b5ddbd6a..4b4a226c92 100644 --- a/flyteadmin/pkg/repositories/transformers/resource.go +++ b/flyteadmin/pkg/repositories/transformers/resource.go @@ -14,14 +14,14 @@ import ( ) func WorkflowAttributesToResourceModel(attributes *admin.WorkflowAttributes, resource admin.MatchableResource) 
(models.Resource, error) { - attributeBytes, err := proto.Marshal(attributes.MatchingAttributes) + attributeBytes, err := proto.Marshal(attributes.GetMatchingAttributes()) if err != nil { return models.Resource{}, err } return models.Resource{ - Project: attributes.Project, - Domain: attributes.Domain, - Workflow: attributes.Workflow, + Project: attributes.GetProject(), + Domain: attributes.GetDomain(), + Workflow: attributes.GetWorkflow(), ResourceType: resource.String(), Priority: models.ResourcePriorityWorkflowLevel, Attributes: attributeBytes, @@ -31,15 +31,15 @@ func WorkflowAttributesToResourceModel(attributes *admin.WorkflowAttributes, res func mergeUpdatePluginOverrides(existingAttributes *admin.MatchingAttributes, newMatchingAttributes *admin.MatchingAttributes) *admin.MatchingAttributes { taskPluginOverrides := make(map[string]*admin.PluginOverride) - if existingAttributes.GetPluginOverrides() != nil && len(existingAttributes.GetPluginOverrides().Overrides) > 0 { - for _, pluginOverride := range existingAttributes.GetPluginOverrides().Overrides { - taskPluginOverrides[pluginOverride.TaskType] = pluginOverride + if existingAttributes.GetPluginOverrides() != nil && len(existingAttributes.GetPluginOverrides().GetOverrides()) > 0 { + for _, pluginOverride := range existingAttributes.GetPluginOverrides().GetOverrides() { + taskPluginOverrides[pluginOverride.GetTaskType()] = pluginOverride } } if newMatchingAttributes.GetPluginOverrides() != nil && - len(newMatchingAttributes.GetPluginOverrides().Overrides) > 0 { - for _, pluginOverride := range newMatchingAttributes.GetPluginOverrides().Overrides { - taskPluginOverrides[pluginOverride.TaskType] = pluginOverride + len(newMatchingAttributes.GetPluginOverrides().GetOverrides()) > 0 { + for _, pluginOverride := range newMatchingAttributes.GetPluginOverrides().GetOverrides() { + taskPluginOverrides[pluginOverride.GetTaskType()] = pluginOverride } } @@ -99,13 +99,13 @@ func 
FromResourceModelToWorkflowAttributes(model models.Resource) (admin.Workflo } func ProjectDomainAttributesToResourceModel(attributes *admin.ProjectDomainAttributes, resource admin.MatchableResource) (models.Resource, error) { - attributeBytes, err := proto.Marshal(attributes.MatchingAttributes) + attributeBytes, err := proto.Marshal(attributes.GetMatchingAttributes()) if err != nil { return models.Resource{}, err } return models.Resource{ - Project: attributes.Project, - Domain: attributes.Domain, + Project: attributes.GetProject(), + Domain: attributes.GetDomain(), ResourceType: resource.String(), Priority: models.ResourcePriorityProjectDomainLevel, Attributes: attributeBytes, @@ -113,12 +113,12 @@ func ProjectDomainAttributesToResourceModel(attributes *admin.ProjectDomainAttri } func ProjectAttributesToResourceModel(attributes *admin.ProjectAttributes, resource admin.MatchableResource) (models.Resource, error) { - attributeBytes, err := proto.Marshal(attributes.MatchingAttributes) + attributeBytes, err := proto.Marshal(attributes.GetMatchingAttributes()) if err != nil { return models.Resource{}, err } return models.Resource{ - Project: attributes.Project, + Project: attributes.GetProject(), ResourceType: resource.String(), Priority: models.ResourcePriorityProjectLevel, Attributes: attributeBytes, diff --git a/flyteadmin/pkg/repositories/transformers/resource_test.go b/flyteadmin/pkg/repositories/transformers/resource_test.go index 6efcc89fc0..a1ef2cacef 100644 --- a/flyteadmin/pkg/repositories/transformers/resource_test.go +++ b/flyteadmin/pkg/repositories/transformers/resource_test.go @@ -97,15 +97,15 @@ func TestMergeUpdateProjectDomainAttributes(t *testing.T) { assert.NoError(t, err) var sawPythonTask, sawSidecarTask, sawHiveTask bool for _, override := range updatedAttributes.GetPluginOverrides().GetOverrides() { - if override.TaskType == "python" { + if override.GetTaskType() == "python" { sawPythonTask = true - assert.EqualValues(t, []string{"plugin_a"}, 
override.PluginId) - } else if override.TaskType == "sidecar" { + assert.EqualValues(t, []string{"plugin_a"}, override.GetPluginId()) + } else if override.GetTaskType() == "sidecar" { sawSidecarTask = true - assert.EqualValues(t, []string{"plugin_c"}, override.PluginId) - } else if override.TaskType == "hive" { + assert.EqualValues(t, []string{"plugin_c"}, override.GetPluginId()) + } else if override.GetTaskType() == "hive" { sawHiveTask = true - assert.EqualValues(t, []string{"plugin_d"}, override.PluginId) + assert.EqualValues(t, []string{"plugin_d"}, override.GetPluginId()) } } assert.True(t, sawPythonTask, "Missing python task from finalized attributes") @@ -194,15 +194,15 @@ func TestMergeUpdateWorkflowAttributes(t *testing.T) { assert.NoError(t, err) var sawPythonTask, sawSidecarTask, sawHiveTask bool for _, override := range updatedAttributes.GetPluginOverrides().GetOverrides() { - if override.TaskType == "python" { + if override.GetTaskType() == "python" { sawPythonTask = true - assert.EqualValues(t, []string{"plugin_a"}, override.PluginId) - } else if override.TaskType == "sidecar" { + assert.EqualValues(t, []string{"plugin_a"}, override.GetPluginId()) + } else if override.GetTaskType() == "sidecar" { sawSidecarTask = true - assert.EqualValues(t, []string{"plugin_c"}, override.PluginId) - } else if override.TaskType == "hive" { + assert.EqualValues(t, []string{"plugin_c"}, override.GetPluginId()) + } else if override.GetTaskType() == "hive" { sawHiveTask = true - assert.EqualValues(t, []string{"plugin_d"}, override.PluginId) + assert.EqualValues(t, []string{"plugin_d"}, override.GetPluginId()) } } assert.True(t, sawPythonTask, "Missing python task from finalized attributes") diff --git a/flyteadmin/pkg/repositories/transformers/signal.go b/flyteadmin/pkg/repositories/transformers/signal.go index bbef0a00eb..5cb1b37ef4 100644 --- a/flyteadmin/pkg/repositories/transformers/signal.go +++ b/flyteadmin/pkg/repositories/transformers/signal.go @@ -14,21 +14,21 @@ 
func CreateSignalModel(signalID *core.SignalIdentifier, signalType *core.Literal signalModel := models.Signal{} if signalID != nil { signalKey := &signalModel.SignalKey - if signalID.ExecutionId != nil { + if signalID.GetExecutionId() != nil { executionKey := &signalKey.ExecutionKey - if len(signalID.ExecutionId.Project) > 0 { - executionKey.Project = signalID.ExecutionId.Project + if len(signalID.GetExecutionId().GetProject()) > 0 { + executionKey.Project = signalID.GetExecutionId().GetProject() } - if len(signalID.ExecutionId.Domain) > 0 { - executionKey.Domain = signalID.ExecutionId.Domain + if len(signalID.GetExecutionId().GetDomain()) > 0 { + executionKey.Domain = signalID.GetExecutionId().GetDomain() } - if len(signalID.ExecutionId.Name) > 0 { - executionKey.Name = signalID.ExecutionId.Name + if len(signalID.GetExecutionId().GetName()) > 0 { + executionKey.Name = signalID.GetExecutionId().GetName() } } - if len(signalID.SignalId) > 0 { - signalKey.SignalID = signalID.SignalId + if len(signalID.GetSignalId()) > 0 { + signalKey.SignalID = signalID.GetSignalId() } } diff --git a/flyteadmin/pkg/repositories/transformers/signal_test.go b/flyteadmin/pkg/repositories/transformers/signal_test.go index a54d5f1437..82637e06f3 100644 --- a/flyteadmin/pkg/repositories/transformers/signal_test.go +++ b/flyteadmin/pkg/repositories/transformers/signal_test.go @@ -82,7 +82,7 @@ func TestCreateSignalModel(t *testing.T) { for _, test := range tests { t.Run(test.name, func(t *testing.T) { - signalModel, err := CreateSignalModel(test.proto.Id, test.proto.Type, test.proto.Value) + signalModel, err := CreateSignalModel(test.proto.GetId(), test.proto.GetType(), test.proto.GetValue()) assert.NoError(t, err) assert.Equal(t, test.model, signalModel) diff --git a/flyteadmin/pkg/repositories/transformers/task.go b/flyteadmin/pkg/repositories/transformers/task.go index a8baf355e7..6c64ee2ec0 100644 --- a/flyteadmin/pkg/repositories/transformers/task.go +++ 
b/flyteadmin/pkg/repositories/transformers/task.go @@ -22,15 +22,15 @@ func CreateTaskModel( return models.Task{}, errors.NewFlyteAdminError(codes.Internal, "Failed to serialize task closure") } var taskType string - if taskClosure.CompiledTask != nil && taskClosure.CompiledTask.Template != nil { - taskType = taskClosure.CompiledTask.Template.Type + if taskClosure.GetCompiledTask() != nil && taskClosure.GetCompiledTask().GetTemplate() != nil { + taskType = taskClosure.GetCompiledTask().GetTemplate().GetType() } return models.Task{ TaskKey: models.TaskKey{ - Project: request.Id.Project, - Domain: request.Id.Domain, - Name: request.Id.Name, - Version: request.Id.Version, + Project: request.GetId().GetProject(), + Domain: request.GetId().GetDomain(), + Name: request.GetId().GetName(), + Version: request.GetId().GetVersion(), }, Closure: closureBytes, Digest: digest, diff --git a/flyteadmin/pkg/repositories/transformers/task_execution.go b/flyteadmin/pkg/repositories/transformers/task_execution.go index 9f24ed2aa4..2d32160585 100644 --- a/flyteadmin/pkg/repositories/transformers/task_execution.go +++ b/flyteadmin/pkg/repositories/transformers/task_execution.go @@ -11,6 +11,7 @@ import ( _struct "github.com/golang/protobuf/ptypes/struct" "google.golang.org/grpc/codes" "google.golang.org/protobuf/encoding/protojson" + "google.golang.org/protobuf/types/known/structpb" "github.com/flyteorg/flyte/flyteadmin/pkg/common" "github.com/flyteorg/flyte/flyteadmin/pkg/errors" @@ -34,7 +35,7 @@ type CreateTaskExecutionModelInput struct { func addTaskStartedState(request *admin.TaskExecutionEventRequest, taskExecutionModel *models.TaskExecution, closure *admin.TaskExecutionClosure) error { - occurredAt, err := ptypes.Timestamp(request.Event.OccurredAt) + occurredAt, err := ptypes.Timestamp(request.GetEvent().GetOccurredAt()) if err != nil { return errors.NewFlyteAdminErrorf(codes.Internal, "failed to unmarshal occurredAt with error: %v", err) } @@ -43,7 +44,7 @@ func 
addTaskStartedState(request *admin.TaskExecutionEventRequest, taskExecution // This check makes sure any out of order if taskExecutionModel.StartedAt == nil { taskExecutionModel.StartedAt = &occurredAt - closure.StartedAt = request.Event.OccurredAt + closure.StartedAt = request.GetEvent().GetOccurredAt() } return nil } @@ -56,7 +57,7 @@ func addTaskTerminalState( if taskExecutionModel.StartedAt == nil { logger.Warning(context.Background(), "task execution is missing StartedAt") } else { - endTime, err := ptypes.Timestamp(request.Event.OccurredAt) + endTime, err := ptypes.Timestamp(request.GetEvent().GetOccurredAt()) if err != nil { return errors.NewFlyteAdminErrorf( codes.Internal, "Failed to parse task execution occurredAt timestamp: %v", err) @@ -70,23 +71,23 @@ func addTaskTerminalState( closure.Duration = ptypes.DurationProto(taskExecutionModel.Duration) } - if request.Event.GetOutputUri() != "" { + if request.GetEvent().GetOutputUri() != "" { closure.OutputResult = &admin.TaskExecutionClosure_OutputUri{ - OutputUri: request.Event.GetOutputUri(), + OutputUri: request.GetEvent().GetOutputUri(), } - } else if request.Event.GetOutputData() != nil { + } else if request.GetEvent().GetOutputData() != nil { switch inlineEventDataPolicy { case interfaces.InlineEventDataPolicyStoreInline: closure.OutputResult = &admin.TaskExecutionClosure_OutputData{ - OutputData: request.Event.GetOutputData(), + OutputData: request.GetEvent().GetOutputData(), } default: logger.Debugf(ctx, "Offloading outputs per InlineEventDataPolicy") - uri, err := common.OffloadLiteralMap(ctx, storageClient, request.Event.GetOutputData(), - request.Event.ParentNodeExecutionId.ExecutionId.Project, request.Event.ParentNodeExecutionId.ExecutionId.Domain, - request.Event.ParentNodeExecutionId.ExecutionId.Name, request.Event.ParentNodeExecutionId.NodeId, - request.Event.TaskId.Project, request.Event.TaskId.Domain, request.Event.TaskId.Name, request.Event.TaskId.Version, - 
strconv.FormatUint(uint64(request.Event.RetryAttempt), 10), OutputsObjectSuffix) + uri, err := common.OffloadLiteralMap(ctx, storageClient, request.GetEvent().GetOutputData(), + request.GetEvent().GetParentNodeExecutionId().GetExecutionId().GetProject(), request.GetEvent().GetParentNodeExecutionId().GetExecutionId().GetDomain(), + request.GetEvent().GetParentNodeExecutionId().GetExecutionId().GetName(), request.GetEvent().GetParentNodeExecutionId().GetNodeId(), + request.GetEvent().GetTaskId().GetProject(), request.GetEvent().GetTaskId().GetDomain(), request.GetEvent().GetTaskId().GetName(), request.GetEvent().GetTaskId().GetVersion(), + strconv.FormatUint(uint64(request.GetEvent().GetRetryAttempt()), 10), OutputsObjectSuffix) if err != nil { return err } @@ -94,9 +95,9 @@ func addTaskTerminalState( OutputUri: uri.String(), } } - } else if request.Event.GetError() != nil { + } else if request.GetEvent().GetError() != nil { closure.OutputResult = &admin.TaskExecutionClosure_Error{ - Error: request.Event.GetError(), + Error: request.GetEvent().GetError(), } } return nil @@ -106,35 +107,35 @@ func CreateTaskExecutionModel(ctx context.Context, input CreateTaskExecutionMode taskExecution := &models.TaskExecution{ TaskExecutionKey: models.TaskExecutionKey{ TaskKey: models.TaskKey{ - Project: input.Request.Event.TaskId.Project, - Domain: input.Request.Event.TaskId.Domain, - Name: input.Request.Event.TaskId.Name, - Version: input.Request.Event.TaskId.Version, + Project: input.Request.GetEvent().GetTaskId().GetProject(), + Domain: input.Request.GetEvent().GetTaskId().GetDomain(), + Name: input.Request.GetEvent().GetTaskId().GetName(), + Version: input.Request.GetEvent().GetTaskId().GetVersion(), }, NodeExecutionKey: models.NodeExecutionKey{ - NodeID: input.Request.Event.ParentNodeExecutionId.NodeId, + NodeID: input.Request.GetEvent().GetParentNodeExecutionId().GetNodeId(), ExecutionKey: models.ExecutionKey{ - Project: 
input.Request.Event.ParentNodeExecutionId.ExecutionId.Project, - Domain: input.Request.Event.ParentNodeExecutionId.ExecutionId.Domain, - Name: input.Request.Event.ParentNodeExecutionId.ExecutionId.Name, + Project: input.Request.GetEvent().GetParentNodeExecutionId().GetExecutionId().GetProject(), + Domain: input.Request.GetEvent().GetParentNodeExecutionId().GetExecutionId().GetDomain(), + Name: input.Request.GetEvent().GetParentNodeExecutionId().GetExecutionId().GetName(), }, }, RetryAttempt: &input.Request.Event.RetryAttempt, }, - Phase: input.Request.Event.Phase.String(), - PhaseVersion: input.Request.Event.PhaseVersion, + Phase: input.Request.GetEvent().GetPhase().String(), + PhaseVersion: input.Request.GetEvent().GetPhaseVersion(), } err := handleTaskExecutionInputs(ctx, taskExecution, input.Request, input.StorageClient) if err != nil { return nil, err } - metadata := input.Request.Event.Metadata - if metadata != nil && len(metadata.ExternalResources) > 1 { - sort.Slice(metadata.ExternalResources, func(i, j int) bool { - a := metadata.ExternalResources[i] - b := metadata.ExternalResources[j] + metadata := input.Request.GetEvent().GetMetadata() + if metadata != nil && len(metadata.GetExternalResources()) > 1 { + sort.Slice(metadata.GetExternalResources(), func(i, j int) bool { + a := metadata.GetExternalResources()[i] + b := metadata.GetExternalResources()[j] if a.GetIndex() == b.GetIndex() { return a.GetRetryAttempt() < b.GetRetryAttempt() } @@ -142,41 +143,41 @@ func CreateTaskExecutionModel(ctx context.Context, input CreateTaskExecutionMode }) } - reportedAt := input.Request.Event.ReportedAt - if reportedAt == nil || (reportedAt.Seconds == 0 && reportedAt.Nanos == 0) { - reportedAt = input.Request.Event.OccurredAt + reportedAt := input.Request.GetEvent().GetReportedAt() + if reportedAt == nil || (reportedAt.GetSeconds() == 0 && reportedAt.GetNanos() == 0) { + reportedAt = input.Request.GetEvent().GetOccurredAt() } closure := &admin.TaskExecutionClosure{ - 
Phase: input.Request.Event.Phase, + Phase: input.Request.GetEvent().GetPhase(), UpdatedAt: reportedAt, - CreatedAt: input.Request.Event.OccurredAt, - Logs: input.Request.Event.Logs, - CustomInfo: input.Request.Event.CustomInfo, - TaskType: input.Request.Event.TaskType, + CreatedAt: input.Request.GetEvent().GetOccurredAt(), + Logs: input.Request.GetEvent().GetLogs(), + CustomInfo: input.Request.GetEvent().GetCustomInfo(), + TaskType: input.Request.GetEvent().GetTaskType(), Metadata: metadata, - EventVersion: input.Request.Event.EventVersion, + EventVersion: input.Request.GetEvent().GetEventVersion(), } - if len(input.Request.Event.Reasons) > 0 { - for _, reason := range input.Request.Event.Reasons { - closure.Reasons = append(closure.Reasons, &admin.Reason{ - OccurredAt: reason.OccurredAt, - Message: reason.Reason, + if len(input.Request.GetEvent().GetReasons()) > 0 { + for _, reason := range input.Request.GetEvent().GetReasons() { + closure.Reasons = append(closure.GetReasons(), &admin.Reason{ + OccurredAt: reason.GetOccurredAt(), + Message: reason.GetReason(), }) } - closure.Reason = input.Request.Event.Reasons[len(input.Request.Event.Reasons)-1].Reason - } else if len(input.Request.Event.Reason) > 0 { + closure.Reason = input.Request.GetEvent().GetReasons()[len(input.Request.GetEvent().GetReasons())-1].GetReason() + } else if len(input.Request.GetEvent().GetReason()) > 0 { closure.Reasons = []*admin.Reason{ { - OccurredAt: input.Request.Event.OccurredAt, - Message: input.Request.Event.Reason, + OccurredAt: input.Request.GetEvent().GetOccurredAt(), + Message: input.Request.GetEvent().GetReason(), }, } - closure.Reason = input.Request.Event.Reason + closure.Reason = input.Request.GetEvent().GetReason() } - eventPhase := input.Request.Event.Phase + eventPhase := input.Request.GetEvent().GetPhase() // Different tasks may report different phases as their first event. 
// If the first event we receive for this execution is a valid @@ -188,7 +189,7 @@ func CreateTaskExecutionModel(ctx context.Context, input CreateTaskExecutionMode } } - if common.IsTaskExecutionTerminal(input.Request.Event.Phase) { + if common.IsTaskExecutionTerminal(input.Request.GetEvent().GetPhase()) { err := addTaskTerminalState(ctx, input.Request, taskExecution, closure, input.InlineEventDataPolicy, input.StorageClient) if err != nil { return nil, err @@ -201,7 +202,7 @@ func CreateTaskExecutionModel(ctx context.Context, input CreateTaskExecutionMode } taskExecution.Closure = marshaledClosure - taskExecutionCreatedAt, err := ptypes.Timestamp(input.Request.Event.OccurredAt) + taskExecutionCreatedAt, err := ptypes.Timestamp(input.Request.GetEvent().GetOccurredAt()) if err != nil { return nil, errors.NewFlyteAdminErrorf(codes.Internal, "failed to read event timestamp") } @@ -232,17 +233,17 @@ func mergeLogs(existing, latest []*core.TaskLog) []*core.TaskLog { latestSetByURI := make(map[string]*core.TaskLog, len(latest)) latestSetByName := make(map[string]*core.TaskLog, len(latest)) for _, latestLog := range latest { - latestSetByURI[latestLog.Uri] = latestLog - if len(latestLog.Name) > 0 { - latestSetByName[latestLog.Name] = latestLog + latestSetByURI[latestLog.GetUri()] = latestLog + if len(latestLog.GetName()) > 0 { + latestSetByName[latestLog.GetName()] = latestLog } } // Copy over the latest logs since names will change for existing logs as a task transitions across phases. logs := latest for _, existingLog := range existing { - if _, ok := latestSetByURI[existingLog.Uri]; !ok { - if _, ok = latestSetByName[existingLog.Name]; !ok { + if _, ok := latestSetByURI[existingLog.GetUri()]; !ok { + if _, ok = latestSetByName[existingLog.GetName()]; !ok { // We haven't seen this log before: add it to the output result list. logs = append(logs, existingLog) } @@ -299,16 +300,21 @@ func mergeExternalResource(existing, latest *event.ExternalResourceInfo) *event. 
return existing } - if latest.ExternalId != "" && existing.ExternalId != latest.ExternalId { - existing.ExternalId = latest.ExternalId + if latest.GetExternalId() != "" && existing.GetExternalId() != latest.GetExternalId() { + existing.ExternalId = latest.GetExternalId() } // note we are not updating existing.Index and existing.RetryAttempt because they are the // search key for our ExternalResource pool. - existing.Phase = latest.Phase - if latest.CacheStatus != core.CatalogCacheStatus_CACHE_DISABLED && existing.CacheStatus != latest.CacheStatus { - existing.CacheStatus = latest.CacheStatus + existing.Phase = latest.GetPhase() + if latest.GetCacheStatus() != core.CatalogCacheStatus_CACHE_DISABLED && existing.GetCacheStatus() != latest.GetCacheStatus() { + existing.CacheStatus = latest.GetCacheStatus() + } + existing.Logs = mergeLogs(existing.GetLogs(), latest.GetLogs()) + + // Overwrite custom info if provided + if latest.GetCustomInfo() != nil { + existing.CustomInfo = proto.Clone(latest.GetCustomInfo()).(*structpb.Struct) } - existing.Logs = mergeLogs(existing.Logs, latest.Logs) return existing } @@ -357,16 +363,16 @@ func mergeMetadata(existing, latest *event.TaskExecutionMetadata) *event.TaskExe return existing } - if latest.GeneratedName != "" && existing.GeneratedName != latest.GeneratedName { - existing.GeneratedName = latest.GeneratedName + if latest.GetGeneratedName() != "" && existing.GetGeneratedName() != latest.GetGeneratedName() { + existing.GeneratedName = latest.GetGeneratedName() } - existing.ExternalResources = mergeExternalResources(existing.ExternalResources, latest.ExternalResources) - existing.ResourcePoolInfo = latest.ResourcePoolInfo - if latest.PluginIdentifier != "" && existing.PluginIdentifier != latest.PluginIdentifier { - existing.PluginIdentifier = latest.PluginIdentifier + existing.ExternalResources = mergeExternalResources(existing.GetExternalResources(), latest.GetExternalResources()) + existing.ResourcePoolInfo = 
latest.GetResourcePoolInfo() + if latest.GetPluginIdentifier() != "" && existing.GetPluginIdentifier() != latest.GetPluginIdentifier() { + existing.PluginIdentifier = latest.GetPluginIdentifier() } - if latest.InstanceClass != event.TaskExecutionMetadata_DEFAULT && existing.InstanceClass != latest.InstanceClass { - existing.InstanceClass = latest.InstanceClass + if latest.GetInstanceClass() != event.TaskExecutionMetadata_DEFAULT && existing.GetInstanceClass() != latest.GetInstanceClass() { + existing.InstanceClass = latest.GetInstanceClass() } return existing @@ -374,7 +380,7 @@ func mergeMetadata(existing, latest *event.TaskExecutionMetadata) *event.TaskExe func filterExternalResourceLogsByPhase(externalResources []*event.ExternalResourceInfo, phase core.TaskExecution_Phase) { for _, externalResource := range externalResources { - externalResource.Logs = filterLogsByPhase(externalResource.Logs, phase) + externalResource.Logs = filterLogsByPhase(externalResource.GetLogs(), phase) } } @@ -382,13 +388,13 @@ func filterLogsByPhase(logs []*core.TaskLog, phase core.TaskExecution_Phase) []* filteredLogs := make([]*core.TaskLog, 0, len(logs)) for _, l := range logs { - if common.IsTaskExecutionTerminal(phase) && l.HideOnceFinished { + if common.IsTaskExecutionTerminal(phase) && l.GetHideOnceFinished() { continue } // Some plugins like e.g. Dask, Ray start with or very quickly transition to core.TaskExecution_INITIALIZING // once the CR has been created even though the underlying pods are still pending. We thus treat queued and // initializing the same here. 
- if (phase == core.TaskExecution_QUEUED || phase == core.TaskExecution_INITIALIZING) && !l.ShowWhilePending { + if (phase == core.TaskExecution_QUEUED || phase == core.TaskExecution_INITIALIZING) && !l.GetShowWhilePending() { continue } filteredLogs = append(filteredLogs, l) @@ -409,45 +415,45 @@ func UpdateTaskExecutionModel(ctx context.Context, request *admin.TaskExecutionE return errors.NewFlyteAdminErrorf(codes.Internal, "failed to unmarshal task execution closure with error: %+v", err) } - isPhaseChange := taskExecutionModel.Phase != request.Event.Phase.String() + isPhaseChange := taskExecutionModel.Phase != request.GetEvent().GetPhase().String() existingTaskPhase := taskExecutionModel.Phase - taskExecutionModel.Phase = request.Event.Phase.String() - taskExecutionModel.PhaseVersion = request.Event.PhaseVersion - taskExecutionClosure.Phase = request.Event.Phase - reportedAt := request.Event.ReportedAt - if reportedAt == nil || (reportedAt.Seconds == 0 && reportedAt.Nanos == 0) { - reportedAt = request.Event.OccurredAt + taskExecutionModel.Phase = request.GetEvent().GetPhase().String() + taskExecutionModel.PhaseVersion = request.GetEvent().GetPhaseVersion() + taskExecutionClosure.Phase = request.GetEvent().GetPhase() + reportedAt := request.GetEvent().GetReportedAt() + if reportedAt == nil || (reportedAt.GetSeconds() == 0 && reportedAt.GetNanos() == 0) { + reportedAt = request.GetEvent().GetOccurredAt() } taskExecutionClosure.UpdatedAt = reportedAt - mergedLogs := mergeLogs(taskExecutionClosure.Logs, request.Event.Logs) - filteredLogs := filterLogsByPhase(mergedLogs, request.Event.Phase) + mergedLogs := mergeLogs(taskExecutionClosure.GetLogs(), request.GetEvent().GetLogs()) + filteredLogs := filterLogsByPhase(mergedLogs, request.GetEvent().GetPhase()) taskExecutionClosure.Logs = filteredLogs - if len(request.Event.Reasons) > 0 { - for _, reason := range request.Event.Reasons { + if len(request.GetEvent().GetReasons()) > 0 { + for _, reason := range 
request.GetEvent().GetReasons() { taskExecutionClosure.Reasons = append( - taskExecutionClosure.Reasons, + taskExecutionClosure.GetReasons(), &admin.Reason{ - OccurredAt: reason.OccurredAt, - Message: reason.Reason, + OccurredAt: reason.GetOccurredAt(), + Message: reason.GetReason(), }) } - taskExecutionClosure.Reason = request.Event.Reasons[len(request.Event.Reasons)-1].Reason - } else if len(request.Event.Reason) > 0 { - if taskExecutionClosure.Reason != request.Event.Reason { + taskExecutionClosure.Reason = request.GetEvent().GetReasons()[len(request.GetEvent().GetReasons())-1].GetReason() + } else if len(request.GetEvent().GetReason()) > 0 { + if taskExecutionClosure.GetReason() != request.GetEvent().GetReason() { // by tracking a time-series of reasons we increase the size of the TaskExecutionClosure in scenarios where // a task reports a large number of unique reasons. if this size increase becomes problematic we this logic // will need to be revisited. taskExecutionClosure.Reasons = append( - taskExecutionClosure.Reasons, + taskExecutionClosure.GetReasons(), &admin.Reason{ - OccurredAt: request.Event.OccurredAt, - Message: request.Event.Reason, + OccurredAt: request.GetEvent().GetOccurredAt(), + Message: request.GetEvent().GetReason(), }) } - taskExecutionClosure.Reason = request.Event.Reason + taskExecutionClosure.Reason = request.GetEvent().GetReason() } if existingTaskPhase != core.TaskExecution_RUNNING.String() && taskExecutionModel.Phase == core.TaskExecution_RUNNING.String() { err = addTaskStartedState(request, taskExecutionModel, &taskExecutionClosure) @@ -456,24 +462,24 @@ func UpdateTaskExecutionModel(ctx context.Context, request *admin.TaskExecutionE } } - if common.IsTaskExecutionTerminal(request.Event.Phase) { + if common.IsTaskExecutionTerminal(request.GetEvent().GetPhase()) { err := addTaskTerminalState(ctx, request, taskExecutionModel, &taskExecutionClosure, inlineEventDataPolicy, storageClient) if err != nil { return err } } - 
taskExecutionClosure.CustomInfo, err = mergeCustom(taskExecutionClosure.CustomInfo, request.Event.CustomInfo) + taskExecutionClosure.CustomInfo, err = mergeCustom(taskExecutionClosure.GetCustomInfo(), request.GetEvent().GetCustomInfo()) if err != nil { return errors.NewFlyteAdminErrorf(codes.Internal, "failed to merge task event custom_info with error: %v", err) } - taskExecutionClosure.Metadata = mergeMetadata(taskExecutionClosure.Metadata, request.Event.Metadata) + taskExecutionClosure.Metadata = mergeMetadata(taskExecutionClosure.GetMetadata(), request.GetEvent().GetMetadata()) - if isPhaseChange && taskExecutionClosure.Metadata != nil && len(taskExecutionClosure.Metadata.ExternalResources) > 0 { - filterExternalResourceLogsByPhase(taskExecutionClosure.Metadata.ExternalResources, request.Event.Phase) + if isPhaseChange && taskExecutionClosure.GetMetadata() != nil && len(taskExecutionClosure.GetMetadata().GetExternalResources()) > 0 { + filterExternalResourceLogsByPhase(taskExecutionClosure.GetMetadata().GetExternalResources(), request.GetEvent().GetPhase()) } - if request.Event.EventVersion > taskExecutionClosure.EventVersion { - taskExecutionClosure.EventVersion = request.Event.EventVersion + if request.GetEvent().GetEventVersion() > taskExecutionClosure.GetEventVersion() { + taskExecutionClosure.EventVersion = request.GetEvent().GetEventVersion() } marshaledClosure, err := proto.Marshal(&taskExecutionClosure) if err != nil { @@ -495,7 +501,7 @@ func FromTaskExecutionModel(taskExecutionModel models.TaskExecution, opts *Execu if err != nil { return nil, errors.NewFlyteAdminErrorf(codes.Internal, "failed to unmarshal closure") } - if closure.GetError() != nil && opts != nil && opts.TrimErrorMessage && len(closure.GetError().Message) > 0 { + if closure.GetError() != nil && opts != nil && opts.TrimErrorMessage && len(closure.GetError().GetMessage()) > 0 { trimmedErrOutputResult := closure.GetError() trimmedErrMessage := 
TrimErrorMessage(trimmedErrOutputResult.GetMessage()) trimmedErrOutputResult.Message = trimmedErrMessage @@ -551,15 +557,15 @@ func handleTaskExecutionInputs(ctx context.Context, taskExecutionModel *models.T // Inputs are static over the duration of the task execution, no need to update them when they're already set return nil } - switch request.Event.GetInputValue().(type) { + switch request.GetEvent().GetInputValue().(type) { case *event.TaskExecutionEvent_InputUri: taskExecutionModel.InputURI = request.GetEvent().GetInputUri() case *event.TaskExecutionEvent_InputData: uri, err := common.OffloadLiteralMap(ctx, storageClient, request.GetEvent().GetInputData(), - request.Event.ParentNodeExecutionId.ExecutionId.Project, request.Event.ParentNodeExecutionId.ExecutionId.Domain, - request.Event.ParentNodeExecutionId.ExecutionId.Name, request.Event.ParentNodeExecutionId.NodeId, - request.Event.TaskId.Project, request.Event.TaskId.Domain, request.Event.TaskId.Name, request.Event.TaskId.Version, - strconv.FormatUint(uint64(request.Event.RetryAttempt), 10), InputsObjectSuffix) + request.GetEvent().GetParentNodeExecutionId().GetExecutionId().GetProject(), request.GetEvent().GetParentNodeExecutionId().GetExecutionId().GetDomain(), + request.GetEvent().GetParentNodeExecutionId().GetExecutionId().GetName(), request.GetEvent().GetParentNodeExecutionId().GetNodeId(), + request.GetEvent().GetTaskId().GetProject(), request.GetEvent().GetTaskId().GetDomain(), request.GetEvent().GetTaskId().GetName(), request.GetEvent().GetTaskId().GetVersion(), + strconv.FormatUint(uint64(request.GetEvent().GetRetryAttempt()), 10), InputsObjectSuffix) if err != nil { return err } diff --git a/flyteadmin/pkg/repositories/transformers/task_execution_test.go b/flyteadmin/pkg/repositories/transformers/task_execution_test.go index 5fc5430192..461ab39c8c 100644 --- a/flyteadmin/pkg/repositories/transformers/task_execution_test.go +++ b/flyteadmin/pkg/repositories/transformers/task_execution_test.go @@ 
-85,7 +85,7 @@ func TestAddTaskStartedState(t *testing.T) { err := addTaskStartedState(&request, &taskExecutionModel, closure) assert.Nil(t, err) - timestamp, err := ptypes.Timestamp(closure.StartedAt) + timestamp, err := ptypes.Timestamp(closure.GetStartedAt()) assert.Nil(t, err) assert.Equal(t, startedAt, timestamp) assert.Equal(t, &startedAt, taskExecutionModel.StartedAt) @@ -109,7 +109,7 @@ func TestAddTaskStartedState(t *testing.T) { err := addTaskStartedState(&request, &taskExecutionModel, closure) assert.Nil(t, err) - timestamp, err := ptypes.Timestamp(closure.StartedAt) + timestamp, err := ptypes.Timestamp(closure.GetStartedAt()) assert.Nil(t, err) assert.NotEqual(t, oldStartedAt, timestamp) assert.Equal(t, &oldStartedAt, taskExecutionModel.StartedAt) @@ -169,7 +169,7 @@ func TestAddTaskTerminalState_OutputURI(t *testing.T) { duration, err := ptypes.Duration(closure.GetDuration()) assert.Nil(t, err) - assert.EqualValues(t, request.Event.OutputResult, closure.OutputResult) + assert.EqualValues(t, request.GetEvent().GetOutputResult(), closure.GetOutputResult()) assert.EqualValues(t, outputURI, closure.GetOutputUri()) assert.EqualValues(t, time.Minute, duration) @@ -232,7 +232,7 @@ func TestAddTaskTerminalState_OutputData(t *testing.T) { duration, err := ptypes.Duration(closure.GetDuration()) assert.Nil(t, err) - assert.EqualValues(t, request.Event.OutputResult, closure.OutputResult) + assert.EqualValues(t, request.GetEvent().GetOutputResult(), closure.GetOutputResult()) assert.True(t, proto.Equal(outputData, closure.GetOutputData())) assert.EqualValues(t, time.Minute, duration) }) @@ -296,17 +296,17 @@ func TestCreateTaskExecutionModelQueued(t *testing.T) { assert.Equal(t, &models.TaskExecution{ TaskExecutionKey: models.TaskExecutionKey{ TaskKey: models.TaskKey{ - Project: sampleTaskID.Project, - Domain: sampleTaskID.Domain, - Name: sampleTaskID.Name, - Version: sampleTaskID.Version, + Project: sampleTaskID.GetProject(), + Domain: sampleTaskID.GetDomain(), + 
Name: sampleTaskID.GetName(), + Version: sampleTaskID.GetVersion(), }, NodeExecutionKey: models.NodeExecutionKey{ - NodeID: sampleNodeExecID.NodeId, + NodeID: sampleNodeExecID.GetNodeId(), ExecutionKey: models.ExecutionKey{ - Project: sampleNodeExecID.ExecutionId.Project, - Domain: sampleNodeExecID.ExecutionId.Domain, - Name: sampleNodeExecID.ExecutionId.Name, + Project: sampleNodeExecID.GetExecutionId().GetProject(), + Domain: sampleNodeExecID.GetExecutionId().GetDomain(), + Name: sampleNodeExecID.GetExecutionId().GetName(), }, }, RetryAttempt: &retryAttemptValue, @@ -370,7 +370,7 @@ func TestCreateTaskExecutionModelRunning(t *testing.T) { CustomInfo: &customInfo, } - t.Logf("expected %+v %+v\n", expectedClosure.Reason, expectedClosure.Reasons) + t.Logf("expected %+v %+v\n", expectedClosure.GetReason(), expectedClosure.GetReasons()) expectedClosureBytes, err := proto.Marshal(expectedClosure) assert.Nil(t, err) @@ -378,17 +378,17 @@ func TestCreateTaskExecutionModelRunning(t *testing.T) { assert.Equal(t, &models.TaskExecution{ TaskExecutionKey: models.TaskExecutionKey{ TaskKey: models.TaskKey{ - Project: sampleTaskID.Project, - Domain: sampleTaskID.Domain, - Name: sampleTaskID.Name, - Version: sampleTaskID.Version, + Project: sampleTaskID.GetProject(), + Domain: sampleTaskID.GetDomain(), + Name: sampleTaskID.GetName(), + Version: sampleTaskID.GetVersion(), }, NodeExecutionKey: models.NodeExecutionKey{ - NodeID: sampleNodeExecID.NodeId, + NodeID: sampleNodeExecID.GetNodeId(), ExecutionKey: models.ExecutionKey{ - Project: sampleNodeExecID.ExecutionId.Project, - Domain: sampleNodeExecID.ExecutionId.Domain, - Name: sampleNodeExecID.ExecutionId.Name, + Project: sampleNodeExecID.GetExecutionId().GetProject(), + Domain: sampleNodeExecID.GetExecutionId().GetDomain(), + Name: sampleNodeExecID.GetExecutionId().GetName(), }, }, RetryAttempt: &retryAttemptValue, @@ -522,17 +522,17 @@ func TestUpdateTaskExecutionModelRunningToFailed(t *testing.T) { existingTaskExecution := 
models.TaskExecution{ TaskExecutionKey: models.TaskExecutionKey{ TaskKey: models.TaskKey{ - Project: sampleTaskID.Project, - Domain: sampleTaskID.Domain, - Name: sampleTaskID.Name, - Version: sampleTaskID.Version, + Project: sampleTaskID.GetProject(), + Domain: sampleTaskID.GetDomain(), + Name: sampleTaskID.GetName(), + Version: sampleTaskID.GetVersion(), }, NodeExecutionKey: models.NodeExecutionKey{ - NodeID: sampleNodeExecID.NodeId, + NodeID: sampleNodeExecID.GetNodeId(), ExecutionKey: models.ExecutionKey{ - Project: sampleNodeExecID.ExecutionId.Project, - Domain: sampleNodeExecID.ExecutionId.Domain, - Name: sampleNodeExecID.ExecutionId.Name, + Project: sampleNodeExecID.GetExecutionId().GetProject(), + Domain: sampleNodeExecID.GetExecutionId().GetDomain(), + Name: sampleNodeExecID.GetExecutionId().GetName(), }, }, RetryAttempt: &retryAttemptValue, @@ -627,17 +627,17 @@ func TestUpdateTaskExecutionModelRunningToFailed(t *testing.T) { assert.EqualValues(t, models.TaskExecution{ TaskExecutionKey: models.TaskExecutionKey{ TaskKey: models.TaskKey{ - Project: sampleTaskID.Project, - Domain: sampleTaskID.Domain, - Name: sampleTaskID.Name, - Version: sampleTaskID.Version, + Project: sampleTaskID.GetProject(), + Domain: sampleTaskID.GetDomain(), + Name: sampleTaskID.GetName(), + Version: sampleTaskID.GetVersion(), }, NodeExecutionKey: models.NodeExecutionKey{ - NodeID: sampleNodeExecID.NodeId, + NodeID: sampleNodeExecID.GetNodeId(), ExecutionKey: models.ExecutionKey{ - Project: sampleNodeExecID.ExecutionId.Project, - Domain: sampleNodeExecID.ExecutionId.Domain, - Name: sampleNodeExecID.ExecutionId.Name, + Project: sampleNodeExecID.GetExecutionId().GetProject(), + Domain: sampleNodeExecID.GetExecutionId().GetDomain(), + Name: sampleNodeExecID.GetExecutionId().GetName(), }, }, RetryAttempt: &retryAttemptValue, @@ -675,17 +675,17 @@ func TestUpdateTaskExecutionModelFilterLogLinks(t *testing.T) { existingTaskExecution := models.TaskExecution{ TaskExecutionKey: 
models.TaskExecutionKey{ TaskKey: models.TaskKey{ - Project: sampleTaskID.Project, - Domain: sampleTaskID.Domain, - Name: sampleTaskID.Name, - Version: sampleTaskID.Version, + Project: sampleTaskID.GetProject(), + Domain: sampleTaskID.GetDomain(), + Name: sampleTaskID.GetName(), + Version: sampleTaskID.GetVersion(), }, NodeExecutionKey: models.NodeExecutionKey{ - NodeID: sampleNodeExecID.NodeId, + NodeID: sampleNodeExecID.GetNodeId(), ExecutionKey: models.ExecutionKey{ - Project: sampleNodeExecID.ExecutionId.Project, - Domain: sampleNodeExecID.ExecutionId.Domain, - Name: sampleNodeExecID.ExecutionId.Name, + Project: sampleNodeExecID.GetExecutionId().GetProject(), + Domain: sampleNodeExecID.GetExecutionId().GetDomain(), + Name: sampleNodeExecID.GetExecutionId().GetName(), }, }, RetryAttempt: &retryAttemptValue, @@ -729,7 +729,7 @@ func TestUpdateTaskExecutionModelFilterLogLinks(t *testing.T) { err = proto.Unmarshal(existingTaskExecution.Closure, updatedClosure) assert.Nil(t, err) - assert.Equal(t, updatedClosure.Logs, []*core.TaskLog{ + assert.Equal(t, updatedClosure.GetLogs(), []*core.TaskLog{ { Uri: "uri-show-pending", ShowWhilePending: true, @@ -776,17 +776,17 @@ func TestUpdateTaskExecutionModelFilterLogLinksArray(t *testing.T) { existingTaskExecution := models.TaskExecution{ TaskExecutionKey: models.TaskExecutionKey{ TaskKey: models.TaskKey{ - Project: sampleTaskID.Project, - Domain: sampleTaskID.Domain, - Name: sampleTaskID.Name, - Version: sampleTaskID.Version, + Project: sampleTaskID.GetProject(), + Domain: sampleTaskID.GetDomain(), + Name: sampleTaskID.GetName(), + Version: sampleTaskID.GetVersion(), }, NodeExecutionKey: models.NodeExecutionKey{ - NodeID: sampleNodeExecID.NodeId, + NodeID: sampleNodeExecID.GetNodeId(), ExecutionKey: models.ExecutionKey{ - Project: sampleNodeExecID.ExecutionId.Project, - Domain: sampleNodeExecID.ExecutionId.Domain, - Name: sampleNodeExecID.ExecutionId.Name, + Project: sampleNodeExecID.GetExecutionId().GetProject(), + Domain: 
sampleNodeExecID.GetExecutionId().GetDomain(), + Name: sampleNodeExecID.GetExecutionId().GetName(), }, }, RetryAttempt: &retryAttemptValue, @@ -821,7 +821,7 @@ func TestUpdateTaskExecutionModelFilterLogLinksArray(t *testing.T) { err = proto.Unmarshal(existingTaskExecution.Closure, updatedClosure) assert.Nil(t, err) - assert.Equal(t, updatedClosure.Metadata.ExternalResources[0].Logs, []*core.TaskLog{ + assert.Equal(t, updatedClosure.GetMetadata().GetExternalResources()[0].GetLogs(), []*core.TaskLog{ { Uri: "uri-default", }, @@ -851,17 +851,17 @@ func TestUpdateTaskExecutionModelSingleEvents(t *testing.T) { existingTaskExecution := models.TaskExecution{ TaskExecutionKey: models.TaskExecutionKey{ TaskKey: models.TaskKey{ - Project: sampleTaskID.Project, - Domain: sampleTaskID.Domain, - Name: sampleTaskID.Name, - Version: sampleTaskID.Version, + Project: sampleTaskID.GetProject(), + Domain: sampleTaskID.GetDomain(), + Name: sampleTaskID.GetName(), + Version: sampleTaskID.GetVersion(), }, NodeExecutionKey: models.NodeExecutionKey{ - NodeID: sampleNodeExecID.NodeId, + NodeID: sampleNodeExecID.GetNodeId(), ExecutionKey: models.ExecutionKey{ - Project: sampleNodeExecID.ExecutionId.Project, - Domain: sampleNodeExecID.ExecutionId.Domain, - Name: sampleNodeExecID.ExecutionId.Name, + Project: sampleNodeExecID.GetExecutionId().GetProject(), + Domain: sampleNodeExecID.GetExecutionId().GetDomain(), + Name: sampleNodeExecID.GetExecutionId().GetName(), }, }, RetryAttempt: &retryAttemptValue, @@ -943,17 +943,17 @@ func TestUpdateTaskExecutionModelBatchedEvents(t *testing.T) { existingTaskExecution := models.TaskExecution{ TaskExecutionKey: models.TaskExecutionKey{ TaskKey: models.TaskKey{ - Project: sampleTaskID.Project, - Domain: sampleTaskID.Domain, - Name: sampleTaskID.Name, - Version: sampleTaskID.Version, + Project: sampleTaskID.GetProject(), + Domain: sampleTaskID.GetDomain(), + Name: sampleTaskID.GetName(), + Version: sampleTaskID.GetVersion(), }, NodeExecutionKey: 
models.NodeExecutionKey{ - NodeID: sampleNodeExecID.NodeId, + NodeID: sampleNodeExecID.GetNodeId(), ExecutionKey: models.ExecutionKey{ - Project: sampleNodeExecID.ExecutionId.Project, - Domain: sampleNodeExecID.ExecutionId.Domain, - Name: sampleNodeExecID.ExecutionId.Name, + Project: sampleNodeExecID.GetExecutionId().GetProject(), + Domain: sampleNodeExecID.GetExecutionId().GetDomain(), + Name: sampleNodeExecID.GetExecutionId().GetName(), }, }, RetryAttempt: &retryAttemptValue, @@ -1130,7 +1130,7 @@ func TestFromTaskExecutionModel_Error(t *testing.T) { expectedExecErr := execErr expectedExecErr.Message = string(make([]byte, trimmedErrMessageLen)) assert.Nil(t, err) - assert.True(t, proto.Equal(expectedExecErr, taskExecution.Closure.GetError())) + assert.True(t, proto.Equal(expectedExecErr, taskExecution.GetClosure().GetError())) extraShortErrMsg := string(make([]byte, 10)) execErr = &core.ExecutionError{ @@ -1149,7 +1149,7 @@ func TestFromTaskExecutionModel_Error(t *testing.T) { expectedExecErr = execErr expectedExecErr.Message = string(make([]byte, 10)) assert.Nil(t, err) - assert.True(t, proto.Equal(expectedExecErr, taskExecution.Closure.GetError())) + assert.True(t, proto.Equal(expectedExecErr, taskExecution.GetClosure().GetError())) } func TestFromTaskExecutionModels(t *testing.T) { diff --git a/flyteadmin/pkg/repositories/transformers/task_test.go b/flyteadmin/pkg/repositories/transformers/task_test.go index 1f01f4b720..8fac3863d7 100644 --- a/flyteadmin/pkg/repositories/transformers/task_test.go +++ b/flyteadmin/pkg/repositories/transformers/task_test.go @@ -52,10 +52,10 @@ func TestFromTaskModel(t *testing.T) { Domain: "domain", Name: "name", Version: "version", - }, task.Id)) + }, task.GetId())) expectedClosure := testutils.GetTaskClosure() expectedClosure.CreatedAt = createdAtProto - assert.True(t, proto.Equal(expectedClosure, task.Closure)) + assert.True(t, proto.Equal(expectedClosure, task.GetClosure())) } func TestFromTaskModels(t *testing.T) { @@ 
-100,10 +100,10 @@ func TestFromTaskModels(t *testing.T) { Domain: "domain a", Name: "name a", Version: "version a", - }, taskList[0].Id)) + }, taskList[0].GetId())) expectedClosure := testutils.GetTaskClosure() expectedClosure.CreatedAt = createdAtAProto - assert.True(t, proto.Equal(expectedClosure, taskList[0].Closure)) + assert.True(t, proto.Equal(expectedClosure, taskList[0].GetClosure())) assert.True(t, proto.Equal(&core.Identifier{ ResourceType: core.ResourceType_TASK, @@ -111,11 +111,11 @@ func TestFromTaskModels(t *testing.T) { Domain: "domain b", Name: "name b", Version: "version b", - }, taskList[1].Id)) + }, taskList[1].GetId())) expectedClosure = &admin.TaskClosure{ CreatedAt: createdAtBProto, } - assert.True(t, proto.Equal(expectedClosure, taskList[1].Closure)) + assert.True(t, proto.Equal(expectedClosure, taskList[1].GetClosure())) } func TestFromTaskModelsToIdentifiers(t *testing.T) { @@ -139,10 +139,10 @@ func TestFromTaskModelsToIdentifiers(t *testing.T) { } taskIds := FromTaskModelsToIdentifiers(taskModels) - assert.Equal(t, "domain a", taskIds[0].Domain) - assert.Equal(t, "project a", taskIds[0].Project) - assert.Equal(t, "name a", taskIds[0].Name) - assert.Equal(t, "domain b", taskIds[1].Domain) - assert.Equal(t, "project b", taskIds[1].Project) - assert.Equal(t, "name b", taskIds[1].Name) + assert.Equal(t, "domain a", taskIds[0].GetDomain()) + assert.Equal(t, "project a", taskIds[0].GetProject()) + assert.Equal(t, "name a", taskIds[0].GetName()) + assert.Equal(t, "domain b", taskIds[1].GetDomain()) + assert.Equal(t, "project b", taskIds[1].GetProject()) + assert.Equal(t, "name b", taskIds[1].GetName()) } diff --git a/flyteadmin/pkg/repositories/transformers/workflow.go b/flyteadmin/pkg/repositories/transformers/workflow.go index a796987955..0dbf7196a8 100644 --- a/flyteadmin/pkg/repositories/transformers/workflow.go +++ b/flyteadmin/pkg/repositories/transformers/workflow.go @@ -17,8 +17,8 @@ import ( func CreateWorkflowModel(request 
*admin.WorkflowCreateRequest, remoteClosureIdentifier string, digest []byte) (models.Workflow, error) { var typedInterface []byte - if request.Spec != nil && request.Spec.Template != nil && request.Spec.Template.Interface != nil { - serializedTypedInterface, err := proto.Marshal(request.Spec.Template.Interface) + if request.GetSpec() != nil && request.GetSpec().GetTemplate() != nil && request.GetSpec().GetTemplate().GetInterface() != nil { + serializedTypedInterface, err := proto.Marshal(request.GetSpec().GetTemplate().GetInterface()) if err != nil { return models.Workflow{}, errors.NewFlyteAdminError(codes.Internal, "Failed to serialize workflow spec") } @@ -26,10 +26,10 @@ func CreateWorkflowModel(request *admin.WorkflowCreateRequest, remoteClosureIden } return models.Workflow{ WorkflowKey: models.WorkflowKey{ - Project: request.Id.Project, - Domain: request.Id.Domain, - Name: request.Id.Name, - Version: request.Id.Version, + Project: request.GetId().GetProject(), + Domain: request.GetId().GetDomain(), + Name: request.GetId().GetName(), + Version: request.GetId().GetVersion(), }, TypedInterface: typedInterface, RemoteClosureIdentifier: remoteClosureIdentifier, @@ -54,7 +54,7 @@ func FromWorkflowModel(workflowModel models.Workflow) (admin.Workflow, error) { if len(workflowModel.TypedInterface) > 0 { err = proto.Unmarshal(workflowModel.TypedInterface, &workflowInterface) if err != nil { - return admin.Workflow{}, errors.NewFlyteAdminErrorf(codes.Internal, fmt.Sprintf("failed to unmarshal workflow %v interface. Error message: %v", workflowModel.ID, err.Error())) + return admin.Workflow{}, errors.NewFlyteAdminErrorf(codes.Internal, fmt.Sprintf("failed to unmarshal workflow %v interface. 
Error message: %v", workflowModel.ID, err.Error())) //nolint } } diff --git a/flyteadmin/pkg/repositories/transformers/workflow_test.go b/flyteadmin/pkg/repositories/transformers/workflow_test.go index 0f29aaa64e..95a698075d 100644 --- a/flyteadmin/pkg/repositories/transformers/workflow_test.go +++ b/flyteadmin/pkg/repositories/transformers/workflow_test.go @@ -70,7 +70,7 @@ func TestFromWorkflowModel(t *testing.T) { Domain: "domain", Name: "name", Version: "version", - }, workflow.Id)) + }, workflow.GetId())) var workflowInterface core.TypedInterface err = proto.Unmarshal(workflowModel.TypedInterface, &workflowInterface) @@ -85,7 +85,7 @@ func TestFromWorkflowModel(t *testing.T) { }, }, }, - }, workflow.Closure)) + }, workflow.GetClosure())) } func TestFromWorkflowModels(t *testing.T) { @@ -131,7 +131,7 @@ func TestFromWorkflowModels(t *testing.T) { Domain: "domain a", Name: "name a", Version: "version a", - }, workflowList[0].Id)) + }, workflowList[0].GetId())) workflowInterface := testutils.GetWorkflowRequestInterface() assert.NoError(t, err) @@ -145,7 +145,7 @@ func TestFromWorkflowModels(t *testing.T) { }, }, }, - }, workflowList[0].Closure)) + }, workflowList[0].GetClosure())) assert.True(t, proto.Equal(&core.Identifier{ ResourceType: core.ResourceType_WORKFLOW, @@ -153,7 +153,7 @@ func TestFromWorkflowModels(t *testing.T) { Domain: "domain b", Name: "name b", Version: "version b", - }, workflowList[1].Id)) + }, workflowList[1].GetId())) assert.True(t, proto.Equal(&admin.WorkflowClosure{ CreatedAt: createdAtBProto, @@ -164,5 +164,5 @@ func TestFromWorkflowModels(t *testing.T) { }, }, }, - }, workflowList[1].Closure)) + }, workflowList[1].GetClosure())) } diff --git a/flyteadmin/pkg/rpc/adminservice/description_entity.go b/flyteadmin/pkg/rpc/adminservice/description_entity.go index 91e3f0f134..b8bb5f1d06 100644 --- a/flyteadmin/pkg/rpc/adminservice/description_entity.go +++ b/flyteadmin/pkg/rpc/adminservice/description_entity.go @@ -12,7 +12,7 @@ import ( func 
(m *AdminService) GetDescriptionEntity(ctx context.Context, request *admin.ObjectGetRequest) (*admin.DescriptionEntity, error) { // NOTE: When the Get HTTP endpoint is called the resource type is implicit (from the URL) so we must add it // to the request. - if request.Id != nil && request.Id.ResourceType == core.ResourceType_UNSPECIFIED { + if request.GetId() != nil && request.GetId().GetResourceType() == core.ResourceType_UNSPECIFIED { logger.Infof(ctx, "Adding resource type for unspecified value in request: [%+v]", request) request.Id.ResourceType = core.ResourceType_TASK } diff --git a/flyteadmin/pkg/rpc/adminservice/launch_plan.go b/flyteadmin/pkg/rpc/adminservice/launch_plan.go index f0dabdb76c..1cdf757362 100644 --- a/flyteadmin/pkg/rpc/adminservice/launch_plan.go +++ b/flyteadmin/pkg/rpc/adminservice/launch_plan.go @@ -26,7 +26,7 @@ func (m *AdminService) CreateLaunchPlan( func (m *AdminService) GetLaunchPlan(ctx context.Context, request *admin.ObjectGetRequest) (*admin.LaunchPlan, error) { // NOTE: When the Get HTTP endpoint is called the resource type is implicit (from the URL) so we must add it // to the request. - if request.Id != nil && request.Id.ResourceType == core.ResourceType_UNSPECIFIED { + if request.GetId() != nil && request.GetId().GetResourceType() == core.ResourceType_UNSPECIFIED { logger.Infof(ctx, "Adding resource type for unspecified value in request: [%+v]", request) request.Id.ResourceType = core.ResourceType_LAUNCH_PLAN } @@ -60,7 +60,7 @@ func (m *AdminService) UpdateLaunchPlan(ctx context.Context, request *admin.Laun *admin.LaunchPlanUpdateResponse, error) { // NOTE: When the Get HTTP endpoint is called the resource type is implicit (from the URL) so we must add it // to the request. 
- if request.Id != nil && request.Id.ResourceType == core.ResourceType_UNSPECIFIED { + if request.GetId() != nil && request.GetId().GetResourceType() == core.ResourceType_UNSPECIFIED { logger.Infof(ctx, "Adding resource type for unspecified value in request: [%+v]", request) request.Id.ResourceType = core.ResourceType_LAUNCH_PLAN } diff --git a/flyteadmin/pkg/rpc/adminservice/node_execution.go b/flyteadmin/pkg/rpc/adminservice/node_execution.go index e8965edd1d..3f6383a4e4 100644 --- a/flyteadmin/pkg/rpc/adminservice/node_execution.go +++ b/flyteadmin/pkg/rpc/adminservice/node_execution.go @@ -68,8 +68,8 @@ func (m *AdminService) ListNodeExecutionsForTask( ctx context.Context, request *admin.NodeExecutionForTaskListRequest) (*admin.NodeExecutionList, error) { // NOTE: When the Get HTTP endpoint is called the resource type is implicit (from the URL) so we must add it // to the request. - if request.TaskExecutionId != nil && request.TaskExecutionId.TaskId != nil && - request.TaskExecutionId.TaskId.ResourceType == core.ResourceType_UNSPECIFIED { + if request.GetTaskExecutionId() != nil && request.GetTaskExecutionId().GetTaskId() != nil && + request.GetTaskExecutionId().GetTaskId().GetResourceType() == core.ResourceType_UNSPECIFIED { logger.Infof(ctx, "Adding resource type for unspecified value in request: [%+v]", request) request.TaskExecutionId.TaskId.ResourceType = core.ResourceType_TASK } diff --git a/flyteadmin/pkg/rpc/adminservice/task.go b/flyteadmin/pkg/rpc/adminservice/task.go index 50ed9f8eba..9d4e2883b3 100644 --- a/flyteadmin/pkg/rpc/adminservice/task.go +++ b/flyteadmin/pkg/rpc/adminservice/task.go @@ -27,7 +27,7 @@ func (m *AdminService) CreateTask( func (m *AdminService) GetTask(ctx context.Context, request *admin.ObjectGetRequest) (*admin.Task, error) { // NOTE: When the Get HTTP endpoint is called the resource type is implicit (from the URL) so we must add it // to the request. 
- if request.Id != nil && request.Id.ResourceType == core.ResourceType_UNSPECIFIED { + if request.GetId() != nil && request.GetId().GetResourceType() == core.ResourceType_UNSPECIFIED { logger.Infof(ctx, "Adding resource type for unspecified value in request: [%+v]", request) request.Id.ResourceType = core.ResourceType_TASK } diff --git a/flyteadmin/pkg/rpc/adminservice/task_execution.go b/flyteadmin/pkg/rpc/adminservice/task_execution.go index d0d8a99d56..3b98fe5057 100644 --- a/flyteadmin/pkg/rpc/adminservice/task_execution.go +++ b/flyteadmin/pkg/rpc/adminservice/task_execution.go @@ -28,11 +28,11 @@ func (m *AdminService) GetTaskExecution( ctx context.Context, request *admin.TaskExecutionGetRequest) (*admin.TaskExecution, error) { // NOTE: When the Get HTTP endpoint is called the resource type is implicit (from the URL) so we must add it // to the request. - if request.Id != nil && request.Id.TaskId != nil && request.Id.TaskId.ResourceType == core.ResourceType_UNSPECIFIED { + if request.GetId() != nil && request.GetId().GetTaskId() != nil && request.GetId().GetTaskId().GetResourceType() == core.ResourceType_UNSPECIFIED { logger.Infof(ctx, "Adding resource type for unspecified value in request: [%+v]", request) request.Id.TaskId.ResourceType = core.ResourceType_TASK } - if err := validation.ValidateTaskExecutionIdentifier(request.Id); err != nil { + if err := validation.ValidateTaskExecutionIdentifier(request.GetId()); err != nil { return nil, err } @@ -70,7 +70,7 @@ func (m *AdminService) GetTaskExecutionData( ctx context.Context, request *admin.TaskExecutionGetDataRequest) (*admin.TaskExecutionGetDataResponse, error) { // NOTE: When the Get HTTP endpoint is called the resource type is implicit (from the URL) so we must add it // to the request. 
- if request.Id != nil && request.Id.TaskId != nil && request.Id.TaskId.ResourceType == core.ResourceType_UNSPECIFIED { + if request.GetId() != nil && request.GetId().GetTaskId() != nil && request.GetId().GetTaskId().GetResourceType() == core.ResourceType_UNSPECIFIED { logger.Infof(ctx, "Adding resource type for unspecified value in request: [%+v]", request) request.Id.TaskId.ResourceType = core.ResourceType_TASK } diff --git a/flyteadmin/pkg/rpc/adminservice/tests/execution_test.go b/flyteadmin/pkg/rpc/adminservice/tests/execution_test.go index f541eea806..ef73e60eaa 100644 --- a/flyteadmin/pkg/rpc/adminservice/tests/execution_test.go +++ b/flyteadmin/pkg/rpc/adminservice/tests/execution_test.go @@ -36,9 +36,9 @@ func TestCreateExecutionHappyCase(t *testing.T) { request *admin.ExecutionCreateRequest, requestedAt time.Time) (*admin.ExecutionCreateResponse, error) { return &admin.ExecutionCreateResponse{ Id: &core.WorkflowExecutionIdentifier{ - Project: request.Project, - Domain: request.Domain, - Name: request.Name, + Project: request.GetProject(), + Domain: request.GetDomain(), + Name: request.GetName(), }, }, nil }, @@ -52,7 +52,7 @@ func TestCreateExecutionHappyCase(t *testing.T) { Domain: "Domain", Project: "Project", }) - assert.True(t, proto.Equal(&workflowExecutionIdentifier, resp.Id)) + assert.True(t, proto.Equal(&workflowExecutionIdentifier, resp.GetId())) assert.NoError(t, err) } @@ -64,9 +64,9 @@ func TestCreateExecutionError(t *testing.T) { func(ctx context.Context, request *admin.ExecutionCreateRequest, requestedAt time.Time) (*admin.ExecutionCreateResponse, error) { return nil, repoErrors.GetMissingEntityError("execution", &core.Identifier{ - Project: request.Project, - Domain: request.Domain, - Name: request.Name, + Project: request.GetProject(), + Domain: request.GetDomain(), + Name: request.GetName(), }) }, ) @@ -93,9 +93,9 @@ func TestRelaunchExecutionHappyCase(t *testing.T) { request *admin.ExecutionRelaunchRequest, requestedAt time.Time) 
(*admin.ExecutionCreateResponse, error) { return &admin.ExecutionCreateResponse{ Id: &core.WorkflowExecutionIdentifier{ - Project: request.Id.Project, - Domain: request.Id.Domain, - Name: request.Name, + Project: request.GetId().GetProject(), + Domain: request.GetId().GetDomain(), + Name: request.GetName(), }, }, nil }, @@ -111,9 +111,9 @@ func TestRelaunchExecutionHappyCase(t *testing.T) { }, Name: "name", }) - assert.Equal(t, "project", resp.Id.Project) - assert.Equal(t, "domain", resp.Id.Domain) - assert.Equal(t, "name", resp.Id.Name) + assert.Equal(t, "project", resp.GetId().GetProject()) + assert.Equal(t, "domain", resp.GetId().GetDomain()) + assert.Equal(t, "name", resp.GetId().GetName()) assert.NoError(t, err) } @@ -124,7 +124,7 @@ func TestRelaunchExecutionError(t *testing.T) { mockExecutionManager.SetRelaunchCallback( func(ctx context.Context, request *admin.ExecutionRelaunchRequest, requestedAt time.Time) (*admin.ExecutionCreateResponse, error) { - return nil, repoErrors.GetMissingEntityError("execution", request.Id) + return nil, repoErrors.GetMissingEntityError("execution", request.GetId()) }, ) mockServer := NewMockAdminServer(NewMockAdminServerInput{ @@ -148,9 +148,9 @@ func TestRecoverExecutionHappyCase(t *testing.T) { request *admin.ExecutionRecoverRequest, requestedAt time.Time) (*admin.ExecutionCreateResponse, error) { return &admin.ExecutionCreateResponse{ Id: &core.WorkflowExecutionIdentifier{ - Project: request.Id.Project, - Domain: request.Id.Domain, - Name: request.Name, + Project: request.GetId().GetProject(), + Domain: request.GetId().GetDomain(), + Name: request.GetName(), }, }, nil } @@ -166,9 +166,9 @@ func TestRecoverExecutionHappyCase(t *testing.T) { }, Name: "name", }) - assert.Equal(t, "project", resp.Id.Project) - assert.Equal(t, "domain", resp.Id.Domain) - assert.Equal(t, "name", resp.Id.Name) + assert.Equal(t, "project", resp.GetId().GetProject()) + assert.Equal(t, "domain", resp.GetId().GetDomain()) + assert.Equal(t, "name", 
resp.GetId().GetName()) assert.NoError(t, err) } @@ -179,7 +179,7 @@ func TestRecoverExecutionError(t *testing.T) { mockExecutionManager.RecoverExecutionFunc = func(ctx context.Context, request *admin.ExecutionRecoverRequest, requestedAt time.Time) (*admin.ExecutionCreateResponse, error) { - return nil, repoErrors.GetMissingEntityError("execution", request.Id) + return nil, repoErrors.GetMissingEntityError("execution", request.GetId()) } mockServer := NewMockAdminServer(NewMockAdminServerInput{ executionManager: &mockExecutionManager, @@ -199,10 +199,10 @@ func TestCreateWorkflowEvent(t *testing.T) { mockExecutionManager.SetCreateEventCallback( func(ctx context.Context, request *admin.WorkflowExecutionEventRequest) ( *admin.WorkflowExecutionEventResponse, error) { - assert.Equal(t, requestID, request.RequestId) - assert.NotNil(t, request.Event) - assert.True(t, proto.Equal(&workflowExecutionIdentifier, request.Event.ExecutionId)) - assert.Equal(t, phase, request.Event.Phase) + assert.Equal(t, requestID, request.GetRequestId()) + assert.NotNil(t, request.GetEvent()) + assert.True(t, proto.Equal(&workflowExecutionIdentifier, request.GetEvent().GetExecutionId())) + assert.Equal(t, phase, request.GetEvent().GetPhase()) return &admin.WorkflowExecutionEventResponse{}, nil }) mockServer := NewMockAdminServer(NewMockAdminServerInput{ @@ -248,7 +248,7 @@ func TestGetExecution(t *testing.T) { mockExecutionManager.SetGetCallback( func(ctx context.Context, request *admin.WorkflowExecutionGetRequest) (*admin.Execution, error) { - assert.True(t, proto.Equal(&workflowExecutionIdentifier, request.Id)) + assert.True(t, proto.Equal(&workflowExecutionIdentifier, request.GetId())) return response, nil }, ) @@ -288,7 +288,7 @@ func TestUpdateExecution(t *testing.T) { mockExecutionManager.SetUpdateExecutionCallback( func(ctx context.Context, request *admin.ExecutionUpdateRequest, requestedAt time.Time) (*admin.ExecutionUpdateResponse, error) { - assert.True(t, 
proto.Equal(&workflowExecutionIdentifier, request.Id)) + assert.True(t, proto.Equal(&workflowExecutionIdentifier, request.GetId())) return response, nil }, ) @@ -326,9 +326,9 @@ func TestListExecutions(t *testing.T) { mockExecutionManager := mocks.MockExecutionManager{} mockExecutionManager.SetListCallback(func(ctx context.Context, request *admin.ResourceListRequest) ( *admin.ExecutionList, error) { - assert.Equal(t, "project", request.Id.Project) - assert.Equal(t, "domain", request.Id.Domain) - assert.Equal(t, uint32(1), request.Limit) + assert.Equal(t, "project", request.GetId().GetProject()) + assert.Equal(t, "domain", request.GetId().GetDomain()) + assert.Equal(t, uint32(1), request.GetLimit()) return &admin.ExecutionList{ Executions: []*admin.Execution{ { @@ -350,7 +350,7 @@ func TestListExecutions(t *testing.T) { Limit: 1, }) assert.NoError(t, err) - assert.Len(t, response.Executions, 1) + assert.Len(t, response.GetExecutions(), 1) } func TestListExecutionsError(t *testing.T) { @@ -386,8 +386,8 @@ func TestTerminateExecution(t *testing.T) { abortCause := "abort cause" mockExecutionManager.SetTerminateExecutionCallback(func( ctx context.Context, request *admin.ExecutionTerminateRequest) (*admin.ExecutionTerminateResponse, error) { - assert.True(t, proto.Equal(&identifier, request.Id)) - assert.Equal(t, abortCause, request.Cause) + assert.True(t, proto.Equal(&identifier, request.GetId())) + assert.Equal(t, abortCause, request.GetCause()) return &admin.ExecutionTerminateResponse{}, nil }) mockServer := NewMockAdminServer(NewMockAdminServerInput{ diff --git a/flyteadmin/pkg/rpc/adminservice/tests/launch_plan_test.go b/flyteadmin/pkg/rpc/adminservice/tests/launch_plan_test.go index 00cd10e04f..4fabdbb9c0 100644 --- a/flyteadmin/pkg/rpc/adminservice/tests/launch_plan_test.go +++ b/flyteadmin/pkg/rpc/adminservice/tests/launch_plan_test.go @@ -47,7 +47,7 @@ func TestCreateLaunchPlanError(t *testing.T) { mockLaunchPlanManager.SetCreateCallback( func(ctx 
context.Context, request *admin.LaunchPlanCreateRequest) (*admin.LaunchPlanCreateResponse, error) { - return nil, errors.GetMissingEntityError(core.ResourceType_LAUNCH_PLAN.String(), request.Id) + return nil, errors.GetMissingEntityError(core.ResourceType_LAUNCH_PLAN.String(), request.GetId()) }, ) mockServer := NewMockAdminServer(NewMockAdminServerInput{ diff --git a/flyteadmin/pkg/rpc/adminservice/tests/node_execution_test.go b/flyteadmin/pkg/rpc/adminservice/tests/node_execution_test.go index 575140fef0..72cdc57ea5 100644 --- a/flyteadmin/pkg/rpc/adminservice/tests/node_execution_test.go +++ b/flyteadmin/pkg/rpc/adminservice/tests/node_execution_test.go @@ -32,10 +32,10 @@ func TestCreateNodeEvent(t *testing.T) { mockNodeExecutionManager.SetCreateNodeEventCallback( func(ctx context.Context, request *admin.NodeExecutionEventRequest) ( *admin.NodeExecutionEventResponse, error) { - assert.Equal(t, requestID, request.RequestId) - assert.NotNil(t, request.Event) - assert.True(t, proto.Equal(&nodeExecutionID, request.Event.Id)) - assert.Equal(t, phase, request.Event.Phase) + assert.Equal(t, requestID, request.GetRequestId()) + assert.NotNil(t, request.GetEvent()) + assert.True(t, proto.Equal(&nodeExecutionID, request.GetEvent().GetId())) + assert.Equal(t, phase, request.GetEvent().GetPhase()) return &admin.NodeExecutionEventResponse{}, nil }) mockServer := NewMockAdminServer(NewMockAdminServerInput{ @@ -82,7 +82,7 @@ func TestGetNodeExecution(t *testing.T) { mockNodeExecutionManager.SetGetNodeExecutionFunc( func(ctx context.Context, request *admin.NodeExecutionGetRequest) (*admin.NodeExecution, error) { - assert.True(t, proto.Equal(&nodeExecutionID, request.Id)) + assert.True(t, proto.Equal(&nodeExecutionID, request.GetId())) return response, nil }, ) @@ -102,7 +102,7 @@ func TestGetNodeExecutionError(t *testing.T) { mockNodeExecutionManager.SetGetNodeExecutionFunc( func(ctx context.Context, request *admin.NodeExecutionGetRequest) (*admin.NodeExecution, error) { - 
assert.True(t, proto.Equal(&nodeExecutionID, request.Id)) + assert.True(t, proto.Equal(&nodeExecutionID, request.GetId())) return nil, errors.New("expected error") }, ) @@ -123,9 +123,9 @@ func TestListNodeExecutions(t *testing.T) { filters := "encoded filters probably" mockNodeExecutionManager.SetListNodeExecutionsFunc(func(ctx context.Context, request *admin.NodeExecutionListRequest) ( *admin.NodeExecutionList, error) { - assert.Equal(t, filters, request.Filters) - assert.Equal(t, uint32(1), request.Limit) - assert.Equal(t, "20", request.Token) + assert.Equal(t, filters, request.GetFilters()) + assert.Equal(t, uint32(1), request.GetLimit()) + assert.Equal(t, "20", request.GetToken()) return &admin.NodeExecutionList{ NodeExecutions: []*admin.NodeExecution{ { @@ -145,7 +145,7 @@ func TestListNodeExecutions(t *testing.T) { Token: "20", }) assert.NoError(t, err) - assert.Len(t, response.NodeExecutions, 1) + assert.Len(t, response.GetNodeExecutions(), 1) } func TestListNodeExecutionsError(t *testing.T) { @@ -174,9 +174,9 @@ func TestListNodeExecutionsForTask(t *testing.T) { mockNodeExecutionManager.SetListNodeExecutionsForTaskFunc( func(ctx context.Context, request *admin.NodeExecutionForTaskListRequest) ( *admin.NodeExecutionList, error) { - assert.Equal(t, filters, request.Filters) - assert.Equal(t, uint32(1), request.Limit) - assert.Equal(t, "20", request.Token) + assert.Equal(t, filters, request.GetFilters()) + assert.Equal(t, uint32(1), request.GetLimit()) + assert.Equal(t, "20", request.GetToken()) return &admin.NodeExecutionList{ NodeExecutions: []*admin.NodeExecution{ { @@ -196,7 +196,7 @@ func TestListNodeExecutionsForTask(t *testing.T) { Token: "20", }) assert.NoError(t, err) - assert.Len(t, response.NodeExecutions, 1) + assert.Len(t, response.GetNodeExecutions(), 1) } func TestListNodeExecutionsForTaskError(t *testing.T) { @@ -225,7 +225,7 @@ func TestGetNodeExecutionData(t *testing.T) { mockNodeExecutionManager.SetGetNodeExecutionDataFunc( func(ctx 
context.Context, request *admin.NodeExecutionGetDataRequest) (*admin.NodeExecutionGetDataResponse, error) { - assert.True(t, proto.Equal(&nodeExecutionID, request.Id)) + assert.True(t, proto.Equal(&nodeExecutionID, request.GetId())) return &admin.NodeExecutionGetDataResponse{ Inputs: &admin.UrlBlob{ Url: "inputs", @@ -249,9 +249,9 @@ func TestGetNodeExecutionData(t *testing.T) { assert.True(t, proto.Equal(&admin.UrlBlob{ Url: "inputs", Bytes: 100, - }, resp.Inputs)) + }, resp.GetInputs())) assert.True(t, proto.Equal(&admin.UrlBlob{ Url: "outputs", Bytes: 200, - }, resp.Outputs)) + }, resp.GetOutputs())) } diff --git a/flyteadmin/pkg/rpc/adminservice/tests/task_execution_test.go b/flyteadmin/pkg/rpc/adminservice/tests/task_execution_test.go index b261401905..637426c455 100644 --- a/flyteadmin/pkg/rpc/adminservice/tests/task_execution_test.go +++ b/flyteadmin/pkg/rpc/adminservice/tests/task_execution_test.go @@ -47,11 +47,11 @@ func TestTaskExecution(t *testing.T) { mockTaskExecutionManager.SetCreateTaskEventCallback( func(ctx context.Context, request *admin.TaskExecutionEventRequest) ( *admin.TaskExecutionEventResponse, error) { - assert.Equal(t, requestID, request.RequestId) - assert.NotNil(t, request.Event) - assert.True(t, proto.Equal(taskID, request.Event.TaskId)) - assert.Equal(t, phase, request.Event.Phase) - assert.Equal(t, retryAttempt, request.Event.RetryAttempt) + assert.Equal(t, requestID, request.GetRequestId()) + assert.NotNil(t, request.GetEvent()) + assert.True(t, proto.Equal(taskID, request.GetEvent().GetTaskId())) + assert.Equal(t, phase, request.GetEvent().GetPhase()) + assert.Equal(t, retryAttempt, request.GetEvent().GetRetryAttempt()) return &admin.TaskExecutionEventResponse{}, nil }) mockServer := NewMockAdminServer(NewMockAdminServerInput{ @@ -143,9 +143,9 @@ func TestTaskExecution(t *testing.T) { mockTaskExecutionManager.SetGetTaskExecutionCallback( func(ctx context.Context, request *admin.TaskExecutionGetRequest) ( *admin.TaskExecution, 
error) { - assert.Equal(t, taskID, request.Id.TaskId) - assert.Equal(t, nodeExecutionID, request.Id.NodeExecutionId) - assert.Equal(t, retryAttempt, request.Id.RetryAttempt) + assert.Equal(t, taskID, request.GetId().GetTaskId()) + assert.Equal(t, nodeExecutionID, request.GetId().GetNodeExecutionId()) + assert.Equal(t, retryAttempt, request.GetId().GetRetryAttempt()) return &admin.TaskExecution{}, nil }) mockServer := NewMockAdminServer(NewMockAdminServerInput{ @@ -232,8 +232,8 @@ func TestTaskExecution(t *testing.T) { mockTaskExecutionManager.SetListTaskExecutionsCallback( func(ctx context.Context, request *admin.TaskExecutionListRequest) ( *admin.TaskExecutionList, error) { - assert.Equal(t, "1", request.Token) - assert.Equal(t, uint32(99), request.Limit) + assert.Equal(t, "1", request.GetToken()) + assert.Equal(t, uint32(99), request.GetLimit()) assert.True(t, proto.Equal(&core.NodeExecutionIdentifier{ NodeId: "nodey", ExecutionId: &core.WorkflowExecutionIdentifier{ @@ -241,7 +241,7 @@ func TestTaskExecution(t *testing.T) { Domain: "domain", Name: "name", }, - }, request.NodeExecutionId)) + }, request.GetNodeExecutionId())) return &admin.TaskExecutionList{}, nil }) mockServer := NewMockAdminServer(NewMockAdminServerInput{ @@ -344,9 +344,9 @@ func TestGetTaskExecutionData(t *testing.T) { assert.True(t, proto.Equal(&admin.UrlBlob{ Url: "inputs", Bytes: 100, - }, resp.Inputs)) + }, resp.GetInputs())) assert.True(t, proto.Equal(&admin.UrlBlob{ Url: "outputs", Bytes: 200, - }, resp.Outputs)) + }, resp.GetOutputs())) } diff --git a/flyteadmin/pkg/rpc/adminservice/tests/task_test.go b/flyteadmin/pkg/rpc/adminservice/tests/task_test.go index 2e4d5a8287..bd17b1baa6 100644 --- a/flyteadmin/pkg/rpc/adminservice/tests/task_test.go +++ b/flyteadmin/pkg/rpc/adminservice/tests/task_test.go @@ -49,7 +49,7 @@ func TestTaskError(t *testing.T) { mockTaskManager.SetCreateCallback( func(ctx context.Context, request *admin.TaskCreateRequest) (*admin.TaskCreateResponse, error) { - 
return nil, errors.GetMissingEntityError(core.ResourceType_TASK.String(), request.Id) + return nil, errors.GetMissingEntityError(core.ResourceType_TASK.String(), request.GetId()) }, ) mockServer := NewMockAdminServer(NewMockAdminServerInput{ @@ -77,7 +77,7 @@ func TestListUniqueTaskIds(t *testing.T) { mockTaskManager.SetListUniqueIdsFunc(func(ctx context.Context, request *admin.NamedEntityIdentifierListRequest) ( *admin.NamedEntityIdentifierList, error) { - assert.Equal(t, "staging", request.Domain) + assert.Equal(t, "staging", request.GetDomain()) return nil, nil }) mockServer := NewMockAdminServer(NewMockAdminServerInput{ diff --git a/flyteadmin/pkg/rpc/adminservice/tests/workflow_test.go b/flyteadmin/pkg/rpc/adminservice/tests/workflow_test.go index 915c127ac2..5799b32519 100644 --- a/flyteadmin/pkg/rpc/adminservice/tests/workflow_test.go +++ b/flyteadmin/pkg/rpc/adminservice/tests/workflow_test.go @@ -49,7 +49,7 @@ func TestCreateWorkflowError(t *testing.T) { mockWorkflowManager.SetCreateCallback( func(ctx context.Context, request *admin.WorkflowCreateRequest) (*admin.WorkflowCreateResponse, error) { - return nil, errors.GetMissingEntityError(core.ResourceType_WORKFLOW.String(), request.Id) + return nil, errors.GetMissingEntityError(core.ResourceType_WORKFLOW.String(), request.GetId()) }, ) mockServer := NewMockAdminServer(NewMockAdminServerInput{ diff --git a/flyteadmin/pkg/rpc/adminservice/util/metrics.go b/flyteadmin/pkg/rpc/adminservice/util/metrics.go index bcab066a41..1274541a9b 100644 --- a/flyteadmin/pkg/rpc/adminservice/util/metrics.go +++ b/flyteadmin/pkg/rpc/adminservice/util/metrics.go @@ -54,7 +54,7 @@ func (m *RequestMetrics) Success() { func newResponseCodeMetrics(scope promutils.Scope) responseCodeMetrics { responseCodeCounters := make(map[codes.Code]prometheus.Counter) for i := 0; i < maxGRPCStatusCode; i++ { - code := codes.Code(i) + code := codes.Code(i) // #nosec G115 responseCodeCounters[code] = scope.MustNewCounter(code.String(), 
fmt.Sprintf("count of responses returning: %s", code.String())) } diff --git a/flyteadmin/pkg/rpc/adminservice/workflow.go b/flyteadmin/pkg/rpc/adminservice/workflow.go index ee9a6b4eff..0e0b425f7c 100644 --- a/flyteadmin/pkg/rpc/adminservice/workflow.go +++ b/flyteadmin/pkg/rpc/adminservice/workflow.go @@ -27,7 +27,7 @@ func (m *AdminService) CreateWorkflow( func (m *AdminService) GetWorkflow(ctx context.Context, request *admin.ObjectGetRequest) (*admin.Workflow, error) { // NOTE: When the Get HTTP endpoint is called the resource type is implicit (from the URL) so we must add it // to the request. - if request.Id != nil && request.Id.ResourceType == core.ResourceType_UNSPECIFIED { + if request.GetId() != nil && request.GetId().GetResourceType() == core.ResourceType_UNSPECIFIED { logger.Infof(ctx, "Adding resource type for unspecified value in request: [%+v]", request) request.Id.ResourceType = core.ResourceType_WORKFLOW } diff --git a/flyteadmin/pkg/runtime/interfaces/application_configuration.go b/flyteadmin/pkg/runtime/interfaces/application_configuration.go index 55791a1538..2681bcc95e 100644 --- a/flyteadmin/pkg/runtime/interfaces/application_configuration.go +++ b/flyteadmin/pkg/runtime/interfaces/application_configuration.go @@ -212,16 +212,16 @@ func (a *ApplicationConfig) GetAsWorkflowExecutionConfig() *admin.WorkflowExecut } // For the others, we only add the field when the field is set in the config. 
- if a.GetSecurityContext().RunAs.GetK8SServiceAccount() != "" || a.GetSecurityContext().RunAs.GetIamRole() != "" { + if a.GetSecurityContext().GetRunAs().GetK8SServiceAccount() != "" || a.GetSecurityContext().GetRunAs().GetIamRole() != "" { wec.SecurityContext = a.GetSecurityContext() } - if a.GetRawOutputDataConfig().OutputLocationPrefix != "" { + if a.GetRawOutputDataConfig().GetOutputLocationPrefix() != "" { wec.RawOutputDataConfig = a.GetRawOutputDataConfig() } - if len(a.GetLabels().Values) > 0 { + if len(a.GetLabels().GetValues()) > 0 { wec.Labels = a.GetLabels() } - if len(a.GetAnnotations().Values) > 0 { + if len(a.GetAnnotations().GetValues()) > 0 { wec.Annotations = a.GetAnnotations() } @@ -603,6 +603,8 @@ type EventsPublisherConfig struct { TopicName string `json:"topicName"` // Event types: task, node, workflow executions EventTypes []string `json:"eventTypes"` + // Whether to publish enriched events for all workflow execution events + EnrichAllWorkflowEventTypes bool `json:"enrichAllWorkflowEventTypes"` } type ExternalEventsConfig struct { diff --git a/flyteadmin/pkg/workflowengine/impl/interface_provider.go b/flyteadmin/pkg/workflowengine/impl/interface_provider.go index 566613f635..6bae0c9a05 100644 --- a/flyteadmin/pkg/workflowengine/impl/interface_provider.go +++ b/flyteadmin/pkg/workflowengine/impl/interface_provider.go @@ -42,8 +42,8 @@ func NewLaunchPlanInterfaceProvider(launchPlan models.LaunchPlan, identifier *co return &LaunchPlanInterfaceProvider{}, err } return &LaunchPlanInterfaceProvider{ - expectedInputs: closure.ExpectedInputs, - expectedOutputs: closure.ExpectedOutputs, + expectedInputs: closure.GetExpectedInputs(), + expectedOutputs: closure.GetExpectedOutputs(), identifier: identifier, }, nil } diff --git a/flyteadmin/pkg/workflowengine/impl/interface_provider_test.go b/flyteadmin/pkg/workflowengine/impl/interface_provider_test.go index 4d96050f7d..5924dab20c 100644 --- a/flyteadmin/pkg/workflowengine/impl/interface_provider_test.go 
+++ b/flyteadmin/pkg/workflowengine/impl/interface_provider_test.go @@ -64,14 +64,14 @@ func TestGetId(t *testing.T) { func TestGetExpectedInputs(t *testing.T) { provider := getProviderForTest(t) - assert.Contains(t, (*provider.GetExpectedInputs()).Parameters, "foo") - assert.NotNil(t, (*provider.GetExpectedInputs()).Parameters["foo"].Var.Type.GetSimple()) - assert.EqualValues(t, "STRING", (*provider.GetExpectedInputs()).Parameters["foo"].Var.Type.GetSimple().String()) - assert.NotNil(t, (*provider.GetExpectedInputs()).Parameters["foo"].GetDefault()) + assert.Contains(t, (*provider.GetExpectedInputs()).GetParameters(), "foo") + assert.NotNil(t, (*provider.GetExpectedInputs()).GetParameters()["foo"].GetVar().GetType().GetSimple()) + assert.EqualValues(t, "STRING", (*provider.GetExpectedInputs()).GetParameters()["foo"].GetVar().GetType().GetSimple().String()) + assert.NotNil(t, (*provider.GetExpectedInputs()).GetParameters()["foo"].GetDefault()) } func TestGetExpectedOutputs(t *testing.T) { provider := getProviderForTest(t) - assert.EqualValues(t, outputs.Variables["foo"].GetType().GetType(), - provider.GetExpectedOutputs().Variables["foo"].GetType().GetType()) + assert.EqualValues(t, outputs.GetVariables()["foo"].GetType().GetType(), + provider.GetExpectedOutputs().GetVariables()["foo"].GetType().GetType()) } diff --git a/flyteadmin/pkg/workflowengine/impl/k8s_executor.go b/flyteadmin/pkg/workflowengine/impl/k8s_executor.go index d941cc8309..03de2bbba9 100644 --- a/flyteadmin/pkg/workflowengine/impl/k8s_executor.go +++ b/flyteadmin/pkg/workflowengine/impl/k8s_executor.go @@ -37,7 +37,7 @@ func (e K8sWorkflowExecutor) Execute(ctx context.Context, data interfaces.Execut flyteWf, err := e.workflowBuilder.Build(data.WorkflowClosure, data.ExecutionParameters.Inputs, data.ExecutionID, data.Namespace) if err != nil { logger.Infof(ctx, "failed to build the workflow [%+v] %v", - data.WorkflowClosure.Primary.Template.Id, err) + 
data.WorkflowClosure.GetPrimary().GetTemplate().GetId(), err) return interfaces.ExecutionResponse{}, err } err = PrepareFlyteWorkflow(data, flyteWf) @@ -64,11 +64,11 @@ func (e K8sWorkflowExecutor) Execute(ctx context.Context, data interfaces.Execut } executionTargetSpec := executioncluster.ExecutionTargetSpec{ - Project: data.ExecutionID.Project, - Domain: data.ExecutionID.Domain, + Project: data.ExecutionID.GetProject(), + Domain: data.ExecutionID.GetDomain(), Workflow: data.ReferenceWorkflowName, LaunchPlan: data.ReferenceWorkflowName, - ExecutionID: data.ExecutionID.Name, + ExecutionID: data.ExecutionID.GetName(), ExecutionClusterLabel: data.ExecutionParameters.ExecutionClusterLabel, } targetCluster, err := e.executionCluster.GetTarget(ctx, &executionTargetSpec) @@ -92,7 +92,7 @@ func (e K8sWorkflowExecutor) Abort(ctx context.Context, data interfaces.AbortDat TargetID: data.Cluster, }) if err != nil { - return errors.NewFlyteAdminErrorf(codes.Internal, err.Error()) + return errors.NewFlyteAdminErrorf(codes.Internal, err.Error()) //nolint } err = target.FlyteClient.FlyteworkflowV1alpha1().FlyteWorkflows(data.Namespace).Delete(ctx, data.ExecutionID.GetName(), v1.DeleteOptions{ PropagationPolicy: &deletePropagationBackground, diff --git a/flyteadmin/pkg/workflowengine/impl/k8s_executor_test.go b/flyteadmin/pkg/workflowengine/impl/k8s_executor_test.go index a2ecb51364..5b9db6dfe9 100644 --- a/flyteadmin/pkg/workflowengine/impl/k8s_executor_test.go +++ b/flyteadmin/pkg/workflowengine/impl/k8s_executor_test.go @@ -281,7 +281,7 @@ func TestExecute_MiscError(t *testing.T) { func TestAbort(t *testing.T) { fakeFlyteWorkflow := FakeFlyteWorkflow{} fakeFlyteWorkflow.deleteCallback = func(name string, options *v1.DeleteOptions) error { - assert.Equal(t, execID.Name, name) + assert.Equal(t, execID.GetName(), name) assert.Equal(t, options.PropagationPolicy, &deletePropagationBackground) return nil } @@ -306,7 +306,7 @@ func TestAbort_Notfound(t *testing.T) { return 
k8_api_err.NewNotFound(schema.GroupResource{ Group: "foo", Resource: "bar", - }, execID.Name) + }, execID.GetName()) } fakeFlyteWF.flyteWorkflowsCallback = func(ns string) v1alpha12.FlyteWorkflowInterface { assert.Equal(t, namespace, ns) diff --git a/flyteadmin/pkg/workflowengine/impl/prepare_execution.go b/flyteadmin/pkg/workflowengine/impl/prepare_execution.go index 169cb15616..70afadbd7b 100644 --- a/flyteadmin/pkg/workflowengine/impl/prepare_execution.go +++ b/flyteadmin/pkg/workflowengine/impl/prepare_execution.go @@ -26,20 +26,20 @@ func addMapValues(overrides map[string]string, defaultValues map[string]string) } func addPermissions(securityCtx *core.SecurityContext, roleNameKey string, flyteWf *v1alpha1.FlyteWorkflow) { - if securityCtx == nil || securityCtx.RunAs == nil { + if securityCtx == nil || securityCtx.GetRunAs() == nil { return } securityCtxCopy, _ := proto.Clone(securityCtx).(*core.SecurityContext) flyteWf.SecurityContext = *securityCtxCopy - if len(securityCtx.RunAs.IamRole) > 0 { + if len(securityCtx.GetRunAs().GetIamRole()) > 0 { if flyteWf.Annotations == nil { flyteWf.Annotations = map[string]string{} } - flyteWf.Annotations[roleNameKey] = securityCtx.RunAs.IamRole + flyteWf.Annotations[roleNameKey] = securityCtx.GetRunAs().GetIamRole() } - if len(securityCtx.RunAs.K8SServiceAccount) > 0 { - flyteWf.ServiceAccountName = securityCtx.RunAs.K8SServiceAccount + if len(securityCtx.GetRunAs().GetK8SServiceAccount()) > 0 { + flyteWf.ServiceAccountName = securityCtx.GetRunAs().GetK8SServiceAccount() } } @@ -53,14 +53,14 @@ func addExecutionOverrides(taskPluginOverrides []*admin.PluginOverride, }, } for _, override := range taskPluginOverrides { - executionConfig.TaskPluginImpls[override.TaskType] = v1alpha1.TaskPluginOverride{ - PluginIDs: override.PluginId, - MissingPluginBehavior: override.MissingPluginBehavior, + executionConfig.TaskPluginImpls[override.GetTaskType()] = v1alpha1.TaskPluginOverride{ + PluginIDs: override.GetPluginId(), + 
MissingPluginBehavior: override.GetMissingPluginBehavior(), } } if workflowExecutionConfig != nil { - executionConfig.MaxParallelism = uint32(workflowExecutionConfig.MaxParallelism) + executionConfig.MaxParallelism = uint32(workflowExecutionConfig.GetMaxParallelism()) // #nosec G115 if workflowExecutionConfig.GetInterruptible() != nil { interruptible := workflowExecutionConfig.GetInterruptible().GetValue() @@ -71,8 +71,8 @@ func addExecutionOverrides(taskPluginOverrides []*admin.PluginOverride, envs := make(map[string]string) if workflowExecutionConfig.GetEnvs() != nil { - for _, v := range workflowExecutionConfig.GetEnvs().Values { - envs[v.Key] = v.Value + for _, v := range workflowExecutionConfig.GetEnvs().GetValues() { + envs[v.GetKey()] = v.GetValue() } executionConfig.EnvironmentVariables = envs } @@ -134,7 +134,7 @@ func PrepareFlyteWorkflow(data interfaces.ExecutionData, flyteWorkflow *v1alpha1 // add permissions from auth and security context. Adding permissions from auth would be removed once all clients // have migrated over to security context - addPermissions(data.ExecutionParameters.ExecutionConfig.SecurityContext, + addPermissions(data.ExecutionParameters.ExecutionConfig.GetSecurityContext(), data.ExecutionParameters.RoleNameKey, flyteWorkflow) labels := addMapValues(data.ExecutionParameters.Labels, flyteWorkflow.Labels) diff --git a/flyteadmin/scheduler/dbapi/event_scheduler_impl.go b/flyteadmin/scheduler/dbapi/event_scheduler_impl.go index 972a04480f..bffcace058 100644 --- a/flyteadmin/scheduler/dbapi/event_scheduler_impl.go +++ b/flyteadmin/scheduler/dbapi/event_scheduler_impl.go @@ -36,10 +36,10 @@ func (s *eventScheduler) AddSchedule(ctx context.Context, input interfaces.AddSc var fixedRateUnit admin.FixedRateUnit switch v := input.ScheduleExpression.GetScheduleExpression().(type) { case *admin.Schedule_Rate: - fixedRateValue = v.Rate.Value - fixedRateUnit = v.Rate.Unit + fixedRateValue = v.Rate.GetValue() + fixedRateUnit = v.Rate.GetUnit() case 
*admin.Schedule_CronSchedule: - cronString = v.CronSchedule.Schedule + cronString = v.CronSchedule.GetSchedule() default: return fmt.Errorf("failed adding schedule for unknown schedule expression type %v", v) } @@ -48,13 +48,13 @@ func (s *eventScheduler) AddSchedule(ctx context.Context, input interfaces.AddSc CronExpression: cronString, FixedRateValue: fixedRateValue, Unit: fixedRateUnit, - KickoffTimeInputArg: input.ScheduleExpression.KickoffTimeInputArg, + KickoffTimeInputArg: input.ScheduleExpression.GetKickoffTimeInputArg(), Active: &active, SchedulableEntityKey: models.SchedulableEntityKey{ - Project: input.Identifier.Project, - Domain: input.Identifier.Domain, - Name: input.Identifier.Name, - Version: input.Identifier.Version, + Project: input.Identifier.GetProject(), + Domain: input.Identifier.GetDomain(), + Name: input.Identifier.GetName(), + Version: input.Identifier.GetVersion(), }, } err := s.db.SchedulableEntityRepo().Activate(ctx, modelInput) @@ -69,10 +69,10 @@ func (s *eventScheduler) RemoveSchedule(ctx context.Context, input interfaces.Re logger.Infof(ctx, "Received call to remove schedule [%+v]. 
Will deactivate it in the scheduler", input.Identifier) err := s.db.SchedulableEntityRepo().Deactivate(ctx, models.SchedulableEntityKey{ - Project: input.Identifier.Project, - Domain: input.Identifier.Domain, - Name: input.Identifier.Name, - Version: input.Identifier.Version, + Project: input.Identifier.GetProject(), + Domain: input.Identifier.GetDomain(), + Name: input.Identifier.GetName(), + Version: input.Identifier.GetVersion(), }) if err != nil { diff --git a/flyteadmin/scheduler/identifier/identifier.go b/flyteadmin/scheduler/identifier/identifier.go index 5d386e8652..8db71863b7 100644 --- a/flyteadmin/scheduler/identifier/identifier.go +++ b/flyteadmin/scheduler/identifier/identifier.go @@ -44,7 +44,7 @@ func GetExecutionIdentifier(ctx context.Context, identifier *core.Identifier, sc func hashIdentifier(ctx context.Context, identifier *core.Identifier) uint64 { h := fnv.New64() _, err := h.Write([]byte(fmt.Sprintf(scheduleNameInputsFormat, - identifier.Project, identifier.Domain, identifier.Name, identifier.Version))) + identifier.GetProject(), identifier.GetDomain(), identifier.GetName(), identifier.GetVersion()))) if err != nil { // This shouldn't occur. logger.Errorf(ctx, @@ -59,7 +59,7 @@ func hashIdentifier(ctx context.Context, identifier *core.Identifier) uint64 { func hashScheduledTimeStamp(ctx context.Context, identifier *core.Identifier, scheduledTime time.Time) uint64 { h := fnv.New64() _, err := h.Write([]byte(fmt.Sprintf(executionIDInputsFormat, - identifier.Project, identifier.Domain, identifier.Name, identifier.Version, scheduledTime.Unix()))) + identifier.GetProject(), identifier.GetDomain(), identifier.GetName(), identifier.GetVersion(), scheduledTime.Unix()))) if err != nil { // This shouldn't occur. 
logger.Errorf(ctx, diff --git a/flytecopilot/.golangci.yml b/flytecopilot/.golangci.yml index 7f4dbc80e8..71a85ec5c3 100644 --- a/flytecopilot/.golangci.yml +++ b/flytecopilot/.golangci.yml @@ -1,35 +1,25 @@ -# WARNING: THIS FILE IS MANAGED IN THE 'BOILERPLATE' REPO AND COPIED TO OTHER REPOSITORIES. -# ONLY EDIT THIS FILE FROM WITHIN THE 'FLYTEORG/BOILERPLATE' REPOSITORY: -# -# TO OPT OUT OF UPDATES, SEE https://github.com/flyteorg/boilerplate/blob/master/Readme.rst - run: skip-dirs: - pkg/client - linters: disable-all: true enable: - - deadcode - errcheck - - gas + - gosec - gci - goconst - goimports - - golint - gosimple - govet - ineffassign - misspell - nakedret - staticcheck - - structcheck - typecheck - unconvert - unparam - unused - - varcheck - + - protogetter linters-settings: gci: custom-order: true @@ -38,3 +28,5 @@ linters-settings: - default - prefix(github.com/flyteorg) skip-generated: true + goconst: + ignore-tests: true diff --git a/flytecopilot/cmd/download_test.go b/flytecopilot/cmd/download_test.go index 16cda7c67d..b96ffd46a6 100644 --- a/flytecopilot/cmd/download_test.go +++ b/flytecopilot/cmd/download_test.go @@ -182,8 +182,8 @@ func TestDownloadOptions_Download(t *testing.T) { errProto := &core.ErrorDocument{} err = store.ReadProtobuf(ctx, errFile, errProto) assert.NoError(t, err) - if assert.NotNil(t, errProto.Error) { - assert.Equal(t, core.ContainerError_RECOVERABLE, errProto.Error.Kind) + if assert.NotNil(t, errProto.GetError()) { + assert.Equal(t, core.ContainerError_RECOVERABLE, errProto.GetError().GetKind()) } }) } diff --git a/flytecopilot/cmd/sidecar.go b/flytecopilot/cmd/sidecar.go index 09abdb31e5..179d6362f8 100644 --- a/flytecopilot/cmd/sidecar.go +++ b/flytecopilot/cmd/sidecar.go @@ -70,9 +70,9 @@ func (u *UploadOptions) uploader(ctx context.Context) error { logger.Errorf(ctx, "Bad interface passed, failed to unmarshal err: %s", err) return errors.Wrap(err, "Bad interface passed, failed to unmarshal, expected 
core.TypedInterface") } - outputInterface := iface.Outputs + outputInterface := iface.GetOutputs() - if iface.Outputs == nil || iface.Outputs.Variables == nil || len(iface.Outputs.Variables) == 0 { + if iface.GetOutputs() == nil || iface.Outputs.Variables == nil || len(iface.GetOutputs().GetVariables()) == 0 { logger.Infof(ctx, "Empty output interface received. Assuming void outputs. Sidecar will exit immediately.") return nil } diff --git a/flytecopilot/cmd/sidecar_test.go b/flytecopilot/cmd/sidecar_test.go index a7cc1c964a..2932e6fa9c 100644 --- a/flytecopilot/cmd/sidecar_test.go +++ b/flytecopilot/cmd/sidecar_test.go @@ -87,7 +87,7 @@ func TestUploadOptions_Upload(t *testing.T) { } success := path.Join(tmpDir, SuccessFile) - assert.NoError(t, ioutil.WriteFile(success, []byte("done"), os.ModePerm)) + assert.NoError(t, os.WriteFile(success, []byte("done"), os.ModePerm)) // #nosec G306 ok, err := containerwatcher.FileExists(success) assert.NoError(t, err) assert.True(t, ok, "successfile not created") diff --git a/flytecopilot/data/download.go b/flytecopilot/data/download.go index e4efa22222..24450697e7 100644 --- a/flytecopilot/data/download.go +++ b/flytecopilot/data/download.go @@ -5,7 +5,6 @@ import ( "encoding/json" "fmt" "io" - "io/ioutil" "os" "path" "path/filepath" @@ -52,13 +51,13 @@ func (d Downloader) handleBlob(ctx context.Context, blob *core.Blob, toPath stri (download each part) (error on write or directory) (close streams safely, track success) (completion or report missing closures) */ - blobRef := storage.DataReference(blob.Uri) + blobRef := storage.DataReference(blob.GetUri()) scheme, _, _, err := blobRef.Split() if err != nil { return nil, errors.Wrapf(err, "Blob uri incorrectly formatted") } - if blob.GetMetadata().GetType().Dimensionality == core.BlobType_MULTIPART { + if blob.GetMetadata().GetType().GetDimensionality() == core.BlobType_MULTIPART { // Collect all parts of the multipart blob recursively (List API handles nested directories) // 
Set maxItems to 100 as a parameter for the List API, enabling batch retrieval of items until all are downloaded maxItems := 100 @@ -173,7 +172,7 @@ func (d Downloader) handleBlob(ctx context.Context, blob *core.Blob, toPath stri } logger.Infof(ctx, "successfully copied %d remote files from [%s] to local [%s]", downloadSuccess, blobRef, toPath) return toPath, nil - } else if blob.GetMetadata().GetType().Dimensionality == core.BlobType_SINGLE { + } else if blob.GetMetadata().GetType().GetDimensionality() == core.BlobType_SINGLE { // reader should be declared here (avoid being shared across all goroutines) var reader io.ReadCloser if scheme == "http" || scheme == "https" { @@ -214,14 +213,14 @@ func (d Downloader) handleBlob(ctx context.Context, blob *core.Blob, toPath stri } func (d Downloader) handleSchema(ctx context.Context, schema *core.Schema, toFilePath string) (interface{}, error) { - return d.handleBlob(ctx, &core.Blob{Uri: schema.Uri, Metadata: &core.BlobMetadata{Type: &core.BlobType{Dimensionality: core.BlobType_MULTIPART}}}, toFilePath) + return d.handleBlob(ctx, &core.Blob{Uri: schema.GetUri(), Metadata: &core.BlobMetadata{Type: &core.BlobType{Dimensionality: core.BlobType_MULTIPART}}}, toFilePath) } func (d Downloader) handleBinary(_ context.Context, b *core.Binary, toFilePath string, writeToFile bool) (interface{}, error) { // maybe we should return a map v := b.GetValue() if writeToFile { - return v, ioutil.WriteFile(toFilePath, v, os.ModePerm) + return v, os.WriteFile(toFilePath, v, os.ModePerm) // #nosec G306 } return v, nil } @@ -229,9 +228,9 @@ func (d Downloader) handleBinary(_ context.Context, b *core.Binary, toFilePath s func (d Downloader) handleError(_ context.Context, b *core.Error, toFilePath string, writeToFile bool) (interface{}, error) { // maybe we should return a map if writeToFile { - return b.Message, ioutil.WriteFile(toFilePath, []byte(b.Message), os.ModePerm) + return b.GetMessage(), os.WriteFile(toFilePath, []byte(b.GetMessage()), 
os.ModePerm) // #nosec G306 } - return b.Message, nil + return b.GetMessage(), nil } func (d Downloader) handleGeneric(ctx context.Context, b *structpb.Struct, toFilePath string, writeToFile bool) (interface{}, error) { @@ -259,7 +258,7 @@ func (d Downloader) handlePrimitive(primitive *core.Primitive, toFilePath string var v interface{} var err error - switch primitive.Value.(type) { + switch primitive.GetValue().(type) { case *core.Primitive_StringValue: v = primitive.GetStringValue() toByteArray = func() ([]byte, error) { @@ -307,7 +306,7 @@ func (d Downloader) handlePrimitive(primitive *core.Primitive, toFilePath string if err != nil { return nil, err } - return v, ioutil.WriteFile(toFilePath, b, os.ModePerm) + return v, os.WriteFile(toFilePath, b, os.ModePerm) // #nosec G306 } return v, nil } @@ -321,11 +320,11 @@ func (d Downloader) handleScalar(ctx context.Context, scalar *core.Scalar, toFil case *core.Scalar_Blob: b := scalar.GetBlob() i, err := d.handleBlob(ctx, b, toFilePath) - return i, &core.Scalar{Value: &core.Scalar_Blob{Blob: &core.Blob{Metadata: b.Metadata, Uri: toFilePath}}}, err + return i, &core.Scalar{Value: &core.Scalar_Blob{Blob: &core.Blob{Metadata: b.GetMetadata(), Uri: toFilePath}}}, err case *core.Scalar_Schema: b := scalar.GetSchema() i, err := d.handleSchema(ctx, b, toFilePath) - return i, &core.Scalar{Value: &core.Scalar_Schema{Schema: &core.Schema{Type: b.Type, Uri: toFilePath}}}, err + return i, &core.Scalar{Value: &core.Scalar_Schema{Schema: &core.Schema{Type: b.GetType(), Uri: toFilePath}}}, err case *core.Scalar_Binary: b := scalar.GetBinary() i, err := d.handleBinary(ctx, b, toFilePath, writeToFile) @@ -340,7 +339,7 @@ func (d Downloader) handleScalar(ctx context.Context, scalar *core.Scalar, toFil return i, scalar, err case *core.Scalar_NoneType: if writeToFile { - return nil, scalar, ioutil.WriteFile(toFilePath, []byte("null"), os.ModePerm) + return nil, scalar, os.WriteFile(toFilePath, []byte("null"), os.ModePerm) // #nosec G306 
} return nil, scalar, nil default: @@ -359,6 +358,10 @@ func (d Downloader) handleLiteral(ctx context.Context, lit *core.Literal, filePa Scalar: s, }}, nil case *core.Literal_Collection: + err := os.MkdirAll(filePath, os.ModePerm) + if err != nil { + return nil, nil, errors.Wrapf(err, "failed to create directory [%s]", filePath) + } v, c2, err := d.handleCollection(ctx, lit.GetCollection(), filePath, writeToFile) if err != nil { return nil, nil, err @@ -367,6 +370,10 @@ func (d Downloader) handleLiteral(ctx context.Context, lit *core.Literal, filePa Collection: c2, }}, nil case *core.Literal_Map: + err := os.MkdirAll(filePath, os.ModePerm) + if err != nil { + return nil, nil, errors.Wrapf(err, "failed to create directory [%s]", filePath) + } v, m, err := d.RecursiveDownload(ctx, lit.GetMap(), filePath, writeToFile) if err != nil { return nil, nil, err @@ -381,12 +388,12 @@ func (d Downloader) handleLiteral(ctx context.Context, lit *core.Literal, filePa // Collection should be stored as a top level list file and may have accompanying files? 
func (d Downloader) handleCollection(ctx context.Context, c *core.LiteralCollection, dir string, writePrimitiveToFile bool) ([]interface{}, *core.LiteralCollection, error) { - if c == nil || len(c.Literals) == 0 { + if c == nil || len(c.GetLiterals()) == 0 { return []interface{}{}, c, nil } var collection []interface{} litCollection := &core.LiteralCollection{} - for i, lit := range c.Literals { + for i, lit := range c.GetLiterals() { filePath := path.Join(dir, strconv.Itoa(i)) v, lit, err := d.handleLiteral(ctx, lit, filePath, writePrimitiveToFile) if err != nil { @@ -406,11 +413,21 @@ type downloadedResult struct { func (d Downloader) RecursiveDownload(ctx context.Context, inputs *core.LiteralMap, dir string, writePrimitiveToFile bool) (VarMap, *core.LiteralMap, error) { childCtx, cancel := context.WithCancel(ctx) defer cancel() - if inputs == nil || len(inputs.Literals) == 0 { + if inputs == nil || len(inputs.GetLiterals()) == 0 { return VarMap{}, nil, nil } - f := make(FutureMap, len(inputs.Literals)) - for variable, literal := range inputs.Literals { + f := make(FutureMap, len(inputs.GetLiterals())) + for variable, literal := range inputs.GetLiterals() { + if literal.GetOffloadedMetadata() != nil { + offloadedMetadataURI := literal.GetOffloadedMetadata().GetUri() + // literal will be overwritten with the contents of the offloaded data which contains the actual large literal. 
+ if err := d.store.ReadProtobuf(ctx, storage.DataReference(offloadedMetadataURI), literal); err != nil { + errString := fmt.Sprintf("Failed to read the object at location [%s] with error [%s]", offloadedMetadataURI, err) + logger.Error(ctx, errString) + return nil, nil, fmt.Errorf("%s", errString) + } + logger.Infof(ctx, "read object at location [%s]", offloadedMetadataURI) + } varPath := path.Join(dir, variable) lit := literal f[variable] = futures.NewAsyncFuture(childCtx, func(ctx2 context.Context) (interface{}, error) { @@ -468,7 +485,8 @@ func (d Downloader) DownloadInputs(ctx context.Context, inputRef storage.DataRef if err != nil { return err } - if err := ioutil.WriteFile(path.Join(outputDir, "inputs.pb"), b, os.ModePerm); err != nil { + // #nosec G306 + if err := os.WriteFile(path.Join(outputDir, "inputs.pb"), b, os.ModePerm); err != nil { return err } @@ -477,14 +495,14 @@ func (d Downloader) DownloadInputs(ctx context.Context, inputRef storage.DataRef if err != nil { return errors.Wrapf(err, "failed to marshal out inputs") } - return ioutil.WriteFile(path.Join(outputDir, "inputs.json"), m, os.ModePerm) + return os.WriteFile(path.Join(outputDir, "inputs.json"), m, os.ModePerm) // #nosec G306 } if d.format == core.DataLoadingConfig_YAML { m, err := yaml.Marshal(varMap) if err != nil { return errors.Wrapf(err, "failed to marshal out inputs") } - return ioutil.WriteFile(path.Join(outputDir, "inputs.yaml"), m, os.ModePerm) + return os.WriteFile(path.Join(outputDir, "inputs.yaml"), m, os.ModePerm) // #nosec G306 } return nil } diff --git a/flytecopilot/data/download_test.go b/flytecopilot/data/download_test.go index 1f3b3a7be6..dbc7cb33e7 100644 --- a/flytecopilot/data/download_test.go +++ b/flytecopilot/data/download_test.go @@ -7,11 +7,11 @@ import ( "path/filepath" "testing" + "github.com/stretchr/testify/assert" + "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core" "github.com/flyteorg/flyte/flytestdlib/promutils" 
"github.com/flyteorg/flyte/flytestdlib/storage" - - "github.com/stretchr/testify/assert" ) func TestHandleBlobMultipart(t *testing.T) { @@ -19,9 +19,11 @@ func TestHandleBlobMultipart(t *testing.T) { s, err := storage.NewDataStore(&storage.Config{Type: storage.TypeMemory}, promutils.NewTestScope()) assert.NoError(t, err) ref := storage.DataReference("s3://container/folder/file1") - s.WriteRaw(context.Background(), ref, 0, storage.Options{}, bytes.NewReader([]byte{})) + err = s.WriteRaw(context.Background(), ref, 0, storage.Options{}, bytes.NewReader([]byte{})) + assert.NoError(t, err) ref = storage.DataReference("s3://container/folder/file2") - s.WriteRaw(context.Background(), ref, 0, storage.Options{}, bytes.NewReader([]byte{})) + err = s.WriteRaw(context.Background(), ref, 0, storage.Options{}, bytes.NewReader([]byte{})) + assert.NoError(t, err) d := Downloader{store: s} @@ -87,7 +89,8 @@ func TestHandleBlobSinglePart(t *testing.T) { s, err := storage.NewDataStore(&storage.Config{Type: storage.TypeMemory}, promutils.NewTestScope()) assert.NoError(t, err) ref := storage.DataReference("s3://container/file") - s.WriteRaw(context.Background(), ref, 0, storage.Options{}, bytes.NewReader([]byte{})) + err = s.WriteRaw(context.Background(), ref, 0, storage.Options{}, bytes.NewReader([]byte{})) + assert.NoError(t, err) d := Downloader{store: s} @@ -149,3 +152,162 @@ func TestHandleBlobHTTP(t *testing.T) { t.Errorf("expected file %s to exist", toPath) } } + +func TestRecursiveDownload(t *testing.T) { + t.Run("OffloadedMetadataContainsCollectionOfStrings", func(t *testing.T) { + s, err := storage.NewDataStore(&storage.Config{Type: storage.TypeMemory}, promutils.NewTestScope()) + assert.NoError(t, err) + + d := Downloader{store: s} + + offloadedLiteral := &core.Literal{ + Value: &core.Literal_OffloadedMetadata{ + OffloadedMetadata: &core.LiteralOffloadedMetadata{ + Uri: "s3://container/offloaded", + }, + }, + } + + inputs := &core.LiteralMap{ + Literals: 
map[string]*core.Literal{ + "input1": offloadedLiteral, + }, + } + + // Mock reading the offloaded metadata + err = s.WriteProtobuf(context.Background(), storage.DataReference("s3://container/offloaded"), storage.Options{}, &core.Literal{ + Value: &core.Literal_Collection{ + Collection: &core.LiteralCollection{ + Literals: []*core.Literal{ + { + Value: &core.Literal_Scalar{ + Scalar: &core.Scalar{ + Value: &core.Scalar_Primitive{ + Primitive: &core.Primitive{ + Value: &core.Primitive_StringValue{ + StringValue: "string1", + }, + }, + }, + }, + }, + }, + { + Value: &core.Literal_Scalar{ + Scalar: &core.Scalar{ + Value: &core.Scalar_Primitive{ + Primitive: &core.Primitive{ + Value: &core.Primitive_StringValue{ + StringValue: "string2", + }, + }, + }, + }, + }, + }, + }, + }, + }, + }) + assert.NoError(t, err) + + toPath := "./inputs" + defer func() { + err := os.RemoveAll(toPath) + if err != nil { + t.Errorf("Failed to delete directory: %v", err) + } + }() + + varMap, lMap, err := d.RecursiveDownload(context.Background(), inputs, toPath, true) + assert.NoError(t, err) + assert.NotNil(t, varMap) + assert.NotNil(t, lMap) + assert.Equal(t, []interface{}{"string1", "string2"}, varMap["input1"]) + // Check if files were created and data written + for _, file := range []string{"0", "1"} { + if _, err := os.Stat(filepath.Join(toPath, "input1", file)); os.IsNotExist(err) { + t.Errorf("expected file %s to exist", file) + } + } + }) + + t.Run("OffloadedMetadataContainsMapOfStringString", func(t *testing.T) { + s, err := storage.NewDataStore(&storage.Config{Type: storage.TypeMemory}, promutils.NewTestScope()) + assert.NoError(t, err) + + d := Downloader{store: s} + + offloadedLiteral := &core.Literal{ + Value: &core.Literal_OffloadedMetadata{ + OffloadedMetadata: &core.LiteralOffloadedMetadata{ + Uri: "s3://container/offloaded", + }, + }, + } + + inputs := &core.LiteralMap{ + Literals: map[string]*core.Literal{ + "input1": offloadedLiteral, + }, + } + + // Mock reading the 
offloaded metadata + err = s.WriteProtobuf(context.Background(), storage.DataReference("s3://container/offloaded"), storage.Options{}, &core.Literal{ + Value: &core.Literal_Map{ + Map: &core.LiteralMap{ + Literals: map[string]*core.Literal{ + "key1": { + Value: &core.Literal_Scalar{ + Scalar: &core.Scalar{ + Value: &core.Scalar_Primitive{ + Primitive: &core.Primitive{ + Value: &core.Primitive_StringValue{ + StringValue: "value1", + }, + }, + }, + }, + }, + }, + "key2": { + Value: &core.Literal_Scalar{ + Scalar: &core.Scalar{ + Value: &core.Scalar_Primitive{ + Primitive: &core.Primitive{ + Value: &core.Primitive_StringValue{ + StringValue: "value2", + }, + }, + }, + }, + }, + }, + }, + }, + }, + }) + assert.NoError(t, err) + + toPath := "./inputs" + defer func() { + err := os.RemoveAll(toPath) + if err != nil { + t.Errorf("Failed to delete directory: %v", err) + } + }() + + varMap, lMap, err := d.RecursiveDownload(context.Background(), inputs, toPath, true) + assert.NoError(t, err) + assert.NotNil(t, varMap) + assert.NotNil(t, lMap) + assert.Equal(t, "value1", varMap["input1"].(VarMap)["key1"]) + assert.Equal(t, "value2", varMap["input1"].(VarMap)["key2"]) + + for _, file := range []string{"key1", "key2"} { + if _, err := os.Stat(filepath.Join(toPath, "input1", file)); os.IsNotExist(err) { + t.Errorf("expected file %s to exist", file) + } + } + }) +} diff --git a/flytecopilot/data/upload.go b/flytecopilot/data/upload.go index 6cb3831b4c..2103028d28 100644 --- a/flytecopilot/data/upload.go +++ b/flytecopilot/data/upload.go @@ -135,8 +135,8 @@ func (u Uploader) RecursiveUpload(ctx context.Context, vars *core.VariableMap, f return errors.Errorf("User Error: %s", string(b)) } - varFutures := make(map[string]futures.Future, len(vars.Variables)) - for varName, variable := range vars.Variables { + varFutures := make(map[string]futures.Future, len(vars.GetVariables())) + for varName, variable := range vars.GetVariables() { varPath := path.Join(fromPath, varName) varType := 
variable.GetType() switch varType.GetType().(type) { diff --git a/flytecopilot/data/upload_test.go b/flytecopilot/data/upload_test.go index a98595a918..0d51d3c8df 100644 --- a/flytecopilot/data/upload_test.go +++ b/flytecopilot/data/upload_test.go @@ -3,7 +3,7 @@ package data import ( "context" "fmt" - "io/ioutil" + "io" "os" "path" "testing" @@ -21,7 +21,7 @@ func TestUploader_RecursiveUpload(t *testing.T) { tmpPrefix := "upload_test" t.Run("upload-blob", func(t *testing.T) { - tmpDir, err := ioutil.TempDir(tmpFolderLocation, tmpPrefix) + tmpDir, err := os.MkdirTemp(tmpFolderLocation, tmpPrefix) assert.NoError(t, err) defer func() { assert.NoError(t, os.RemoveAll(tmpDir)) @@ -36,7 +36,7 @@ func TestUploader_RecursiveUpload(t *testing.T) { } data := []byte("data") - assert.NoError(t, ioutil.WriteFile(path.Join(tmpDir, "x"), data, os.ModePerm)) + assert.NoError(t, os.WriteFile(path.Join(tmpDir, "x"), data, os.ModePerm)) // #nosec G306 fmt.Printf("Written to %s ", path.Join(tmpDir, "x")) store, err := storage.NewDataStore(&storage.Config{Type: storage.TypeMemory}, promutils.NewTestScope()) @@ -49,15 +49,15 @@ func TestUploader_RecursiveUpload(t *testing.T) { outputs := &core.LiteralMap{} assert.NoError(t, store.ReadProtobuf(context.TODO(), outputRef, outputs)) - assert.Len(t, outputs.Literals, 1) - assert.NotNil(t, outputs.Literals["x"]) - assert.NotNil(t, outputs.Literals["x"].GetScalar()) - assert.NotNil(t, outputs.Literals["x"].GetScalar().GetBlob()) - ref := storage.DataReference(outputs.Literals["x"].GetScalar().GetBlob().GetUri()) + assert.Len(t, outputs.GetLiterals(), 1) + assert.NotNil(t, outputs.GetLiterals()["x"]) + assert.NotNil(t, outputs.GetLiterals()["x"].GetScalar()) + assert.NotNil(t, outputs.GetLiterals()["x"].GetScalar().GetBlob()) + ref := storage.DataReference(outputs.GetLiterals()["x"].GetScalar().GetBlob().GetUri()) r, err := store.ReadRaw(context.TODO(), ref) assert.NoError(t, err, "%s does not exist", ref) defer r.Close() - b, err := 
ioutil.ReadAll(r) + b, err := io.ReadAll(r) assert.NoError(t, err) assert.Equal(t, string(data), string(b), "content dont match") }) diff --git a/flytecopilot/data/utils_test.go b/flytecopilot/data/utils_test.go index 56cc3cc426..517f9d30ef 100644 --- a/flytecopilot/data/utils_test.go +++ b/flytecopilot/data/utils_test.go @@ -3,7 +3,6 @@ package data import ( "bytes" "context" - "io/ioutil" "os" "path" "testing" @@ -19,7 +18,7 @@ func TestIsFileReadable(t *testing.T) { tmpFolderLocation := "" tmpPrefix := "util_test" - tmpDir, err := ioutil.TempDir(tmpFolderLocation, tmpPrefix) + tmpDir, err := os.MkdirTemp(tmpFolderLocation, tmpPrefix) assert.NoError(t, err) defer func() { assert.NoError(t, os.RemoveAll(tmpDir)) @@ -30,7 +29,7 @@ func TestIsFileReadable(t *testing.T) { assert.Empty(t, f) assert.Nil(t, i) - assert.NoError(t, ioutil.WriteFile(p, []byte("data"), os.ModePerm)) + assert.NoError(t, os.WriteFile(p, []byte("data"), os.ModePerm)) // #nosec G306 f, i, err = IsFileReadable(p, false) assert.NoError(t, err) assert.Equal(t, p, f) @@ -42,7 +41,7 @@ func TestIsFileReadable(t *testing.T) { _, _, err = IsFileReadable(noExt, false) assert.Error(t, err) - assert.NoError(t, ioutil.WriteFile(p, []byte("data"), os.ModePerm)) + assert.NoError(t, os.WriteFile(p, []byte("data"), os.ModePerm)) // #nosec G306 _, _, err = IsFileReadable(noExt, false) assert.Error(t, err) @@ -57,7 +56,7 @@ func TestUploadFile(t *testing.T) { tmpFolderLocation := "" tmpPrefix := "util_test" - tmpDir, err := ioutil.TempDir(tmpFolderLocation, tmpPrefix) + tmpDir, err := os.MkdirTemp(tmpFolderLocation, tmpPrefix) assert.NoError(t, err) defer func() { assert.NoError(t, os.RemoveAll(tmpDir)) @@ -66,7 +65,7 @@ func TestUploadFile(t *testing.T) { exist := path.Join(tmpDir, "exist-file") data := []byte("data") l := int64(len(data)) - assert.NoError(t, ioutil.WriteFile(exist, data, os.ModePerm)) + assert.NoError(t, os.WriteFile(exist, data, os.ModePerm)) // #nosec G306 nonExist := path.Join(tmpDir, 
"non-exist-file") store, err := storage.NewDataStore(&storage.Config{Type: storage.TypeMemory}, promutils.NewTestScope()) diff --git a/flytectl/.golangci.yml b/flytectl/.golangci.yml index b7e8525336..12de11bf56 100644 --- a/flytectl/.golangci.yml +++ b/flytectl/.golangci.yml @@ -1,37 +1,30 @@ -# WARNING: THIS FILE IS MANAGED IN THE 'BOILERPLATE' REPO AND COPIED TO OTHER REPOSITORIES. -# ONLY EDIT THIS FILE FROM WITHIN THE 'FLYTEORG/BOILERPLATE' REPOSITORY: -# -# TO OPT OUT OF UPDATES, SEE https://github.com/flyteorg/boilerplate/blob/master/Readme.rst - run: skip-dirs: - pkg/client - linters: disable-all: true enable: - - deadcode - errcheck - - gas + - gosec - gci - goconst - goimports - - golint - gosimple - govet - ineffassign - misspell - nakedret - staticcheck - - structcheck - typecheck - unconvert - unparam - unused - - varcheck + - protogetter linters-settings: gci: skip-generated: true + goconst: + ignore-tests: true issues: exclude: - copylocks diff --git a/flytectl/cmd/compile/compile.go b/flytectl/cmd/compile/compile.go index 22c4796e1d..7c5adcf56f 100644 --- a/flytectl/cmd/compile/compile.go +++ b/flytectl/cmd/compile/compile.go @@ -61,16 +61,16 @@ func compileFromPackage(packagePath string) error { case *admin.TaskSpec: tasks = append(tasks, v) case *admin.WorkflowSpec: - workflows[v.Template.Id.Name] = v + workflows[v.GetTemplate().GetId().GetName()] = v case *admin.LaunchPlan: - plans[v.Id.Name] = v + plans[v.GetId().GetName()] = v } } // compile tasks taskTemplates := []*core.TaskTemplate{} for _, task := range tasks { - taskTemplates = append(taskTemplates, task.Template) + taskTemplates = append(taskTemplates, task.GetTemplate()) } fmt.Println("\nCompiling tasks...") @@ -107,13 +107,13 @@ func handleWorkflow( compiledLaunchPlanProviders []common.InterfaceProvider, plans map[string]*admin.LaunchPlan, workflows map[string]*admin.WorkflowSpec) ([]common.InterfaceProvider, error) { - reqs, _ := compiler.GetRequirements(workflow.Template, 
workflow.SubWorkflows) - wfName := workflow.Template.Id.Name + reqs, _ := compiler.GetRequirements(workflow.GetTemplate(), workflow.GetSubWorkflows()) + wfName := workflow.GetTemplate().GetId().GetName() // Check if all the subworkflows referenced by launchplan are compiled for i := range reqs.GetRequiredLaunchPlanIds() { lpID := reqs.GetRequiredLaunchPlanIds()[i] - lpWfName := plans[lpID.Name].Spec.WorkflowId.Name + lpWfName := plans[lpID.GetName()].GetSpec().GetWorkflowId().GetName() missingWorkflow := workflows[lpWfName] if compiledWorkflows[lpWfName] == nil { // Recursively compile the missing workflow first @@ -127,8 +127,8 @@ func handleWorkflow( fmt.Println("\nCompiling workflow:", wfName) - wf, err := compiler.CompileWorkflow(workflow.Template, - workflow.SubWorkflows, + wf, err := compiler.CompileWorkflow(workflow.GetTemplate(), + workflow.GetSubWorkflows(), compiledTasks, compiledLaunchPlanProviders) @@ -140,11 +140,11 @@ func handleWorkflow( // Update the expected inputs and outputs for the launchplans which reference this workflow for _, plan := range plans { - if plan.Spec.WorkflowId.Name == wfName { - plan.Closure.ExpectedOutputs = wf.Primary.Template.Interface.Outputs + if plan.GetSpec().GetWorkflowId().GetName() == wfName { + plan.Closure.ExpectedOutputs = wf.GetPrimary().GetTemplate().GetInterface().GetOutputs() newMap := make(map[string]*core.Parameter) - for key, value := range wf.Primary.Template.Interface.Inputs.Variables { + for key, value := range wf.GetPrimary().GetTemplate().GetInterface().GetInputs().GetVariables() { newMap[key] = &core.Parameter{ Var: value, } diff --git a/flytectl/cmd/config/subcommand/project/project_config.go b/flytectl/cmd/config/subcommand/project/project_config.go index d76030faa7..fd71628448 100644 --- a/flytectl/cmd/config/subcommand/project/project_config.go +++ b/flytectl/cmd/config/subcommand/project/project_config.go @@ -73,18 +73,18 @@ func (c *ConfigProject) GetProjectSpec(cf *config.Config) (*admin.Project, 
error } project := cf.Project - if len(projectSpec.Id) == 0 && len(project) == 0 { - err := fmt.Errorf(clierrors.ErrProjectNotPassed) + if len(projectSpec.GetId()) == 0 && len(project) == 0 { + err := fmt.Errorf(clierrors.ErrProjectNotPassed) //nolint return nil, err } - if len(projectSpec.Id) > 0 && len(project) > 0 { - err := fmt.Errorf(clierrors.ErrProjectIDBothPassed) + if len(projectSpec.GetId()) > 0 && len(project) > 0 { + err := fmt.Errorf(clierrors.ErrProjectIDBothPassed) //nolint return nil, err } // Get projectId from file, if not provided, fall back to project - if len(projectSpec.Id) == 0 { + if len(projectSpec.GetId()) == 0 { projectSpec.Id = project } return &projectSpec, nil @@ -104,7 +104,7 @@ func (c *ConfigProject) MapToAdminState() (admin.Project_ProjectState, error) { if activate || archive { if activate == archive { - return admin.Project_ACTIVE, fmt.Errorf(clierrors.ErrInvalidStateUpdate) + return admin.Project_ACTIVE, fmt.Errorf(clierrors.ErrInvalidStateUpdate) //nolint } if archive { return admin.Project_ARCHIVED, nil diff --git a/flytectl/cmd/config/subcommand/project/project_config_test.go b/flytectl/cmd/config/subcommand/project/project_config_test.go index bbaf521375..b111cace09 100644 --- a/flytectl/cmd/config/subcommand/project/project_config_test.go +++ b/flytectl/cmd/config/subcommand/project/project_config_test.go @@ -20,7 +20,7 @@ func TestGetProjectSpec(t *testing.T) { } response, err := c.GetProjectSpec(cf) assert.Nil(t, err) - assert.Equal(t, "flytesnacks1", response.Id) + assert.Equal(t, "flytesnacks1", response.GetId()) }) t.Run("Error if project and ID both exist", func(t *testing.T) { @@ -38,8 +38,8 @@ func TestGetProjectSpec(t *testing.T) { } response, err := c.GetProjectSpec(&config.Config{}) assert.Nil(t, err) - assert.Equal(t, "flytesnacks", response.Name) - assert.Equal(t, "flytesnacks test", response.Description) + assert.Equal(t, "flytesnacks", response.GetName()) + assert.Equal(t, "flytesnacks test", 
response.GetDescription()) }) } diff --git a/flytectl/cmd/config/subcommand/project_domain_workflow_getter.go b/flytectl/cmd/config/subcommand/project_domain_workflow_getter.go index d8fd83f043..fbf43964bd 100644 --- a/flytectl/cmd/config/subcommand/project_domain_workflow_getter.go +++ b/flytectl/cmd/config/subcommand/project_domain_workflow_getter.go @@ -33,7 +33,7 @@ func (g PDWGetterCommandLine) GetDomain() string { // GetWorkflow returns the first argument from the commandline func (g PDWGetterCommandLine) GetWorkflow() string { - if g.Args == nil || len(g.Args) == 0 { + if len(g.Args) == 0 { return "" } return g.Args[0] diff --git a/flytectl/cmd/config/subcommand/sandbox/config_flags.go b/flytectl/cmd/config/subcommand/sandbox/config_flags.go index 32e1423057..4359b5db3c 100755 --- a/flytectl/cmd/config/subcommand/sandbox/config_flags.go +++ b/flytectl/cmd/config/subcommand/sandbox/config_flags.go @@ -62,5 +62,6 @@ func (cfg Config) GetPFlagSet(prefix string) *pflag.FlagSet { cmdFlags.BoolVar(&DefaultConfig.Dev, fmt.Sprintf("%v%v", prefix, "dev"), DefaultConfig.Dev, "Optional. Only start minio and postgres in the sandbox.") cmdFlags.BoolVar(&DefaultConfig.DryRun, fmt.Sprintf("%v%v", prefix, "dryRun"), DefaultConfig.DryRun, "Optional. Only print the docker commands to bring up flyte sandbox/demo container.This will still call github api's to get the latest flyte release to use'") cmdFlags.BoolVar(&DefaultConfig.Force, fmt.Sprintf("%v%v", prefix, "force"), DefaultConfig.Force, "Optional. Forcefully delete existing sandbox cluster if it exists.") + cmdFlags.StringVar(&DefaultConfig.Port, fmt.Sprintf("%v%v", prefix, "port"), DefaultConfig.Port, "Optional. 
Specify the port for the Kubernetes in the sandbox.") return cmdFlags } diff --git a/flytectl/cmd/config/subcommand/sandbox/config_flags_test.go b/flytectl/cmd/config/subcommand/sandbox/config_flags_test.go index 8519a75583..436cdad43a 100755 --- a/flytectl/cmd/config/subcommand/sandbox/config_flags_test.go +++ b/flytectl/cmd/config/subcommand/sandbox/config_flags_test.go @@ -265,4 +265,18 @@ func TestConfig_SetFlags(t *testing.T) { } }) }) + t.Run("Test_port", func(t *testing.T) { + + t.Run("Override", func(t *testing.T) { + testValue := "1" + + cmdFlags.Set("port", testValue) + if vString, err := cmdFlags.GetString("port"); err == nil { + testDecodeJson_Config(t, fmt.Sprintf("%v", vString), &actual.Port) + + } else { + assert.FailNow(t, err.Error()) + } + }) + }) } diff --git a/flytectl/cmd/config/subcommand/sandbox/sandbox_config.go b/flytectl/cmd/config/subcommand/sandbox/sandbox_config.go index 47ae4918d5..a8d097ea46 100644 --- a/flytectl/cmd/config/subcommand/sandbox/sandbox_config.go +++ b/flytectl/cmd/config/subcommand/sandbox/sandbox_config.go @@ -1,6 +1,10 @@ package sandbox -import "github.com/flyteorg/flyte/flytectl/pkg/docker" +import ( + "fmt" + + "github.com/flyteorg/flyte/flytectl/pkg/docker" +) // Config holds configuration flags for sandbox command. type Config struct { @@ -36,9 +40,18 @@ type Config struct { DryRun bool `json:"dryRun" pflag:",Optional. Only print the docker commands to bring up flyte sandbox/demo container.This will still call github api's to get the latest flyte release to use'"` Force bool `json:"force" pflag:",Optional. Forcefully delete existing sandbox cluster if it exists."` + + // Allow user to specify the port for the sandbox + Port string `json:"port" pflag:",Optional. 
Specify the port for the Kubernetes in the sandbox."` } //go:generate pflags Config --default-var DefaultConfig --bind-default-var var ( - DefaultConfig = &Config{} + DefaultConfig = &Config{ + Port: "6443", // Default port for the sandbox + } ) + +func (c Config) GetK8sEndpoint() string { + return fmt.Sprintf("https://127.0.0.1:%s", c.Port) +} diff --git a/flytectl/cmd/create/execution.go b/flytectl/cmd/create/execution.go index 5da311357b..ff8b57fea8 100644 --- a/flytectl/cmd/create/execution.go +++ b/flytectl/cmd/create/execution.go @@ -260,7 +260,7 @@ func createExecutionCommand(ctx context.Context, args []string, cmdCtx cmdCore.C if _err != nil { return _err } - fmt.Printf("execution identifier %v\n", exec.Id) + fmt.Printf("execution identifier %v\n", exec.GetId()) } return nil } diff --git a/flytectl/cmd/create/execution_test.go b/flytectl/cmd/create/execution_test.go index d01b683e02..59fb2b0213 100644 --- a/flytectl/cmd/create/execution_test.go +++ b/flytectl/cmd/create/execution_test.go @@ -264,7 +264,7 @@ func (s *createSuite) Test_CreateRelaunchExecution() { Name: "f652ea3596e7f4d80a0e", }, } - executionConfig.Relaunch = relaunchExecResponse.Id.Name + executionConfig.Relaunch = relaunchExecResponse.GetId().GetName() relaunchRequest := &admin.ExecutionRelaunchRequest{ Id: &core.WorkflowExecutionIdentifier{ Name: executionConfig.Relaunch, diff --git a/flytectl/cmd/create/execution_util.go b/flytectl/cmd/create/execution_util.go index bcb5c5639f..4b5813ec1e 100644 --- a/flytectl/cmd/create/execution_util.go +++ b/flytectl/cmd/create/execution_util.go @@ -51,7 +51,7 @@ func createExecutionRequestForWorkflow(ctx context.Context, workflowName, projec } } - return createExecutionRequest(lp.Id, inputs, envs, securityContext, authRole, targetExecName, executionConfig.TargetExecutionCluster), nil + return createExecutionRequest(lp.GetId(), inputs, envs, securityContext, authRole, targetExecName, executionConfig.TargetExecutionCluster), nil } func 
createExecutionRequestForTask(ctx context.Context, taskName string, project string, domain string, @@ -95,8 +95,8 @@ func createExecutionRequestForTask(ctx context.Context, taskName string, project ResourceType: core.ResourceType_TASK, Project: project, Domain: domain, - Name: task.Id.Name, - Version: task.Id.Version, + Name: task.GetId().GetName(), + Version: task.GetId().GetVersion(), } return createExecutionRequest(id, inputs, envs, securityContext, authRole, targetExecName, executionConfig.TargetExecutionCluster), nil @@ -120,7 +120,7 @@ func relaunchExecution(ctx context.Context, executionName string, project string if err != nil { return err } - fmt.Printf("execution identifier %v\n", relaunchedExec.Id) + fmt.Printf("execution identifier %v\n", relaunchedExec.GetId()) return nil } @@ -141,7 +141,7 @@ func recoverExecution(ctx context.Context, executionName string, project string, if err != nil { return err } - fmt.Printf("execution identifier %v\n", recoveredExec.Id) + fmt.Printf("execution identifier %v\n", recoveredExec.GetId()) return nil } diff --git a/flytectl/cmd/create/execution_util_test.go b/flytectl/cmd/create/execution_util_test.go index e27ba4a96b..c77c1c194b 100644 --- a/flytectl/cmd/create/execution_util_test.go +++ b/flytectl/cmd/create/execution_util_test.go @@ -134,7 +134,7 @@ func TestCreateExecutionRequestForWorkflow(t *testing.T) { execCreateRequest, err := createExecutionRequestForWorkflow(s.Ctx, "wfName", config.GetConfig().Project, config.GetConfig().Domain, s.CmdCtx, executionConfigWithEnvs, "") assert.Nil(t, err) assert.NotNil(t, execCreateRequest) - assert.Equal(t, "cluster", execCreateRequest.Spec.ExecutionClusterLabel.Value) + assert.Equal(t, "cluster", execCreateRequest.GetSpec().GetExecutionClusterLabel().GetValue()) }) t.Run("failed literal conversion", func(t *testing.T) { s := testutils.Setup(t) diff --git a/flytectl/cmd/create/project.go b/flytectl/cmd/create/project.go index fa1046a248..777ec7a5db 100644 --- 
a/flytectl/cmd/create/project.go +++ b/flytectl/cmd/create/project.go @@ -49,11 +49,11 @@ func createProjectsCommand(ctx context.Context, args []string, cmdCtx cmdCore.Co if err != nil { return err } - if projectSpec.Id == "" { - return fmt.Errorf(clierrors.ErrProjectNotPassed) + if projectSpec.GetId() == "" { + return fmt.Errorf(clierrors.ErrProjectNotPassed) //nolint } - if projectSpec.Name == "" { - return fmt.Errorf(clierrors.ErrProjectNameNotPassed) + if projectSpec.GetName() == "" { + return fmt.Errorf(clierrors.ErrProjectNameNotPassed) //nolint } if project.DefaultProjectConfig.DryRun { @@ -61,10 +61,10 @@ func createProjectsCommand(ctx context.Context, args []string, cmdCtx cmdCore.Co } else { _, err := cmdCtx.AdminClient().RegisterProject(ctx, &admin.ProjectRegisterRequest{ Project: &admin.Project{ - Id: projectSpec.Id, - Name: projectSpec.Name, - Description: projectSpec.Description, - Labels: projectSpec.Labels, + Id: projectSpec.GetId(), + Name: projectSpec.GetName(), + Description: projectSpec.GetDescription(), + Labels: projectSpec.GetLabels(), }, }) if err != nil { diff --git a/flytectl/cmd/demo/demo.go b/flytectl/cmd/demo/demo.go index 12dcb66fc3..72f0a07ef8 100644 --- a/flytectl/cmd/demo/demo.go +++ b/flytectl/cmd/demo/demo.go @@ -6,11 +6,6 @@ import ( "github.com/spf13/cobra" ) -const ( - flyteNs = "flyte" - K8sEndpoint = "https://127.0.0.1:6443" -) - // Long descriptions are whitespace sensitive when generating docs using sphinx. 
const ( demoShort = `Helps with demo interactions like start, teardown, status, and exec.` diff --git a/flytectl/cmd/demo/reload.go b/flytectl/cmd/demo/reload.go index dee3086e35..92f06d77df 100644 --- a/flytectl/cmd/demo/reload.go +++ b/flytectl/cmd/demo/reload.go @@ -4,16 +4,14 @@ import ( "context" "fmt" + sandboxCmdConfig "github.com/flyteorg/flyte/flytectl/cmd/config/subcommand/sandbox" cmdCore "github.com/flyteorg/flyte/flytectl/cmd/core" "github.com/flyteorg/flyte/flytectl/pkg/docker" - "github.com/flyteorg/flyte/flytectl/pkg/k8s" - "github.com/flyteorg/flyte/flytestdlib/logger" - v1 "k8s.io/apimachinery/pkg/apis/meta/v1" + "github.com/flyteorg/flyte/flytectl/pkg/sandbox" ) const ( internalBootstrapAgent = "flyte-sandbox-bootstrap" - labelSelector = "app.kubernetes.io/name=flyte-binary" ) const ( reloadShort = "Power cycle the Flyte executable pod, effectively picking up an updated config." @@ -73,7 +71,7 @@ func reloadDemoCluster(ctx context.Context, args []string, cmdCtx cmdCore.Comman return err } if useLegacyMethod { - return legacyReloadDemoCluster(ctx) + return sandbox.LegacyReloadDemoCluster(ctx, sandboxCmdConfig.DefaultConfig) } // At this point we know that we are on a modern sandbox, and we can use the @@ -88,32 +86,3 @@ func reloadDemoCluster(ctx context.Context, args []string, cmdCtx cmdCore.Comman return nil } - -// legacyReloadDemoCluster will kill the flyte binary pod so the new one can pick up a new config file -func legacyReloadDemoCluster(ctx context.Context) error { - k8sClient, err := k8s.GetK8sClient(docker.Kubeconfig, K8sEndpoint) - if err != nil { - fmt.Println("Could not get K8s client") - return err - } - pi := k8sClient.CoreV1().Pods(flyteNs) - podList, err := pi.List(ctx, v1.ListOptions{LabelSelector: labelSelector}) - if err != nil { - fmt.Println("could not list pods") - return err - } - if len(podList.Items) != 1 { - return fmt.Errorf("should only have one pod running, %d found, %v", len(podList.Items), podList.Items) - } - 
logger.Debugf(ctx, "Found %d pods\n", len(podList.Items)) - var grace = int64(0) - err = pi.Delete(ctx, podList.Items[0].Name, v1.DeleteOptions{ - GracePeriodSeconds: &grace, - }) - if err != nil { - fmt.Printf("Could not delete Flyte pod, old configuration may still be in effect. Err: %s\n", err) - return err - } - - return nil -} diff --git a/flytectl/cmd/demo/start.go b/flytectl/cmd/demo/start.go index fa3de39101..234d203ca3 100644 --- a/flytectl/cmd/demo/start.go +++ b/flytectl/cmd/demo/start.go @@ -20,6 +20,11 @@ Starts the demo cluster without any source code: flytectl demo start +Starts the demo cluster with different port: +:: + + flytectl demo start --port 6443 + Runs a dev cluster, which only has minio and postgres pod. :: diff --git a/flytectl/cmd/get/execution.go b/flytectl/cmd/get/execution.go index 8bfcc36e00..dca51dd9a3 100644 --- a/flytectl/cmd/get/execution.go +++ b/flytectl/cmd/get/execution.go @@ -118,7 +118,7 @@ func getCallBack(ctx context.Context, cmdCtx cmdCore.CommandContext) bubbletea.D if err != nil { return nil, err } - return ExecutionToProtoMessages(executionList.Executions), nil + return ExecutionToProtoMessages(executionList.GetExecutions()), nil } } @@ -160,7 +160,7 @@ func getExecutionFunc(ctx context.Context, args []string, cmdCtx cmdCore.Command if err != nil { return err } - logger.Infof(ctx, "Retrieved %v executions", len(executionList.Executions)) + logger.Infof(ctx, "Retrieved %v executions", len(executionList.GetExecutions())) return adminPrinter.Print(config.GetConfig().MustOutputFormat(), executionColumns, - ExecutionToProtoMessages(executionList.Executions)...) + ExecutionToProtoMessages(executionList.GetExecutions())...) 
} diff --git a/flytectl/cmd/get/execution_util.go b/flytectl/cmd/get/execution_util.go index 45214a7ac8..ea99872f9b 100644 --- a/flytectl/cmd/get/execution_util.go +++ b/flytectl/cmd/get/execution_util.go @@ -43,7 +43,7 @@ func WriteExecConfigToFile(executionConfig ExecutionConfig, fileName string) err func CreateAndWriteExecConfigForTask(task *admin.Task, fileName string) error { var err error - executionConfig := ExecutionConfig{Task: task.Id.Name, Version: task.Id.Version} + executionConfig := ExecutionConfig{Task: task.GetId().GetName(), Version: task.GetId().GetVersion()} if executionConfig.Inputs, err = ParamMapForTask(task); err != nil { return err } @@ -52,7 +52,7 @@ func CreateAndWriteExecConfigForTask(task *admin.Task, fileName string) error { func CreateAndWriteExecConfigForWorkflow(wlp *admin.LaunchPlan, fileName string) error { var err error - executionConfig := ExecutionConfig{Workflow: wlp.Id.Name, Version: wlp.Id.Version} + executionConfig := ExecutionConfig{Workflow: wlp.GetId().GetName(), Version: wlp.GetId().GetVersion()} if executionConfig.Inputs, err = ParamMapForWorkflow(wlp); err != nil { return err } @@ -61,31 +61,31 @@ func CreateAndWriteExecConfigForWorkflow(wlp *admin.LaunchPlan, fileName string) func TaskInputs(task *admin.Task) map[string]*core.Variable { taskInputs := map[string]*core.Variable{} - if task == nil || task.Closure == nil { + if task == nil || task.GetClosure() == nil { return taskInputs } - if task.Closure.CompiledTask == nil { + if task.GetClosure().GetCompiledTask() == nil { return taskInputs } - if task.Closure.CompiledTask.Template == nil { + if task.GetClosure().GetCompiledTask().GetTemplate() == nil { return taskInputs } - if task.Closure.CompiledTask.Template.Interface == nil { + if task.GetClosure().GetCompiledTask().GetTemplate().GetInterface() == nil { return taskInputs } - if task.Closure.CompiledTask.Template.Interface.Inputs == nil { + if 
task.GetClosure().GetCompiledTask().GetTemplate().GetInterface().GetInputs() == nil { return taskInputs } - return task.Closure.CompiledTask.Template.Interface.Inputs.Variables + return task.GetClosure().GetCompiledTask().GetTemplate().GetInterface().GetInputs().GetVariables() } func ParamMapForTask(task *admin.Task) (map[string]yaml.Node, error) { taskInputs := TaskInputs(task) paramMap := make(map[string]yaml.Node, len(taskInputs)) for k, v := range taskInputs { - varTypeValue, err := coreutils.MakeDefaultLiteralForType(v.Type) + varTypeValue, err := coreutils.MakeDefaultLiteralForType(v.GetType()) if err != nil { - fmt.Println("error creating default value for literal type ", v.Type) + fmt.Println("error creating default value for literal type ", v.GetType()) return nil, err } var nativeLiteral interface{} @@ -93,11 +93,11 @@ func ParamMapForTask(task *admin.Task) (map[string]yaml.Node, error) { return nil, err } - if k == v.Description { + if k == v.GetDescription() { // a: # a isn't very helpful paramMap[k], err = getCommentedYamlNode(nativeLiteral, "") } else { - paramMap[k], err = getCommentedYamlNode(nativeLiteral, v.Description) + paramMap[k], err = getCommentedYamlNode(nativeLiteral, v.GetDescription()) } if err != nil { return nil, err @@ -108,22 +108,22 @@ func ParamMapForTask(task *admin.Task) (map[string]yaml.Node, error) { func WorkflowParams(lp *admin.LaunchPlan) map[string]*core.Parameter { workflowParams := map[string]*core.Parameter{} - if lp == nil || lp.Spec == nil { + if lp == nil || lp.GetSpec() == nil { return workflowParams } - if lp.Spec.DefaultInputs == nil { + if lp.GetSpec().GetDefaultInputs() == nil { return workflowParams } - return lp.Spec.DefaultInputs.Parameters + return lp.GetSpec().GetDefaultInputs().GetParameters() } func ParamMapForWorkflow(lp *admin.LaunchPlan) (map[string]yaml.Node, error) { workflowParams := WorkflowParams(lp) paramMap := make(map[string]yaml.Node, len(workflowParams)) for k, v := range workflowParams { - 
varTypeValue, err := coreutils.MakeDefaultLiteralForType(v.Var.Type) + varTypeValue, err := coreutils.MakeDefaultLiteralForType(v.GetVar().GetType()) if err != nil { - fmt.Println("error creating default value for literal type ", v.Var.Type) + fmt.Println("error creating default value for literal type ", v.GetVar().GetType()) return nil, err } var nativeLiteral interface{} @@ -131,16 +131,16 @@ func ParamMapForWorkflow(lp *admin.LaunchPlan) (map[string]yaml.Node, error) { return nil, err } // Override if there is a default value - if paramsDefault, ok := v.Behavior.(*core.Parameter_Default); ok { + if paramsDefault, ok := v.GetBehavior().(*core.Parameter_Default); ok { if nativeLiteral, err = coreutils.ExtractFromLiteral(paramsDefault.Default); err != nil { return nil, err } } - if k == v.Var.Description { + if k == v.GetVar().GetDescription() { // a: # a isn't very helpful paramMap[k], err = getCommentedYamlNode(nativeLiteral, "") } else { - paramMap[k], err = getCommentedYamlNode(nativeLiteral, v.Var.Description) + paramMap[k], err = getCommentedYamlNode(nativeLiteral, v.GetVar().GetDescription()) } if err != nil { diff --git a/flytectl/cmd/get/execution_util_test.go b/flytectl/cmd/get/execution_util_test.go index 2c081950f2..35b7160efb 100644 --- a/flytectl/cmd/get/execution_util_test.go +++ b/flytectl/cmd/get/execution_util_test.go @@ -18,7 +18,7 @@ func TestTaskInputs(t *testing.T) { t.Run("valid inputs", func(t *testing.T) { task := createTask() retValue := TaskInputs(task) - assert.Equal(t, task.Closure.CompiledTask.Template.Interface.Inputs.Variables, retValue) + assert.Equal(t, task.GetClosure().GetCompiledTask().GetTemplate().GetInterface().GetInputs().GetVariables(), retValue) }) t.Run("closure compiled task nil", func(t *testing.T) { task := createTask() diff --git a/flytectl/cmd/get/launch_plan.go b/flytectl/cmd/get/launch_plan.go index 79cede32c5..aaf1eacc52 100644 --- a/flytectl/cmd/get/launch_plan.go +++ b/flytectl/cmd/get/launch_plan.go @@ -152,12 
+152,12 @@ func LaunchplanToTableProtoMessages(l []*admin.LaunchPlan) []proto.Message { messages := make([]proto.Message, 0, len(l)) for _, m := range l { m := proto.Clone(m).(*admin.LaunchPlan) - if m.Closure != nil { - if m.Closure.ExpectedInputs != nil && m.Closure.ExpectedInputs.Parameters != nil { - printer.FormatParameterDescriptions(m.Closure.ExpectedInputs.Parameters) + if m.GetClosure() != nil { + if m.GetClosure().GetExpectedInputs() != nil && m.Closure.ExpectedInputs.Parameters != nil { + printer.FormatParameterDescriptions(m.GetClosure().GetExpectedInputs().GetParameters()) } - if m.Closure.ExpectedOutputs != nil && m.Closure.ExpectedOutputs.Variables != nil { - printer.FormatVariableDescriptions(m.Closure.ExpectedOutputs.Variables) + if m.GetClosure().GetExpectedOutputs() != nil && m.Closure.ExpectedOutputs.Variables != nil { + printer.FormatVariableDescriptions(m.GetClosure().GetExpectedOutputs().GetVariables()) } } messages = append(messages, m) diff --git a/flytectl/cmd/get/launch_plan_test.go b/flytectl/cmd/get/launch_plan_test.go index 7b1359b7ec..64e1e99c09 100644 --- a/flytectl/cmd/get/launch_plan_test.go +++ b/flytectl/cmd/get/launch_plan_test.go @@ -275,7 +275,7 @@ func TestGetLaunchPlanFuncWithError(t *testing.T) { func TestGetLaunchPlanFunc(t *testing.T) { s := testutils.Setup(t) getLaunchPlanSetup() - s.FetcherExt.OnFetchAllVerOfLPMatch(mock.Anything, mock.Anything, "dummyProject", "dummyDomain", filters.Filters{}).Return(launchPlanListResponse.LaunchPlans, nil) + s.FetcherExt.OnFetchAllVerOfLPMatch(mock.Anything, mock.Anything, "dummyProject", "dummyDomain", filters.Filters{}).Return(launchPlanListResponse.GetLaunchPlans(), nil) err := getLaunchPlanFunc(s.Ctx, argsLp, s.CmdCtx) assert.Nil(t, err) s.FetcherExt.AssertCalled(t, "FetchAllVerOfLP", s.Ctx, "launchplan1", "dummyProject", "dummyDomain", launchplan.DefaultConfig.Filter) @@ -308,7 +308,7 @@ func TestGetLaunchPlans(t *testing.T) { t.Run("no workflow filter", func(t *testing.T) { s := 
testutils.Setup(t) getLaunchPlanSetup() - s.FetcherExt.OnFetchAllVerOfLP(s.Ctx, "", "dummyProject", "dummyDomain", filters.Filters{}).Return(launchPlanListResponse.LaunchPlans, nil) + s.FetcherExt.OnFetchAllVerOfLP(s.Ctx, "", "dummyProject", "dummyDomain", filters.Filters{}).Return(launchPlanListResponse.GetLaunchPlans(), nil) argsLp = []string{} err := getLaunchPlanFunc(s.Ctx, argsLp, s.CmdCtx) assert.Nil(t, err) @@ -319,7 +319,7 @@ func TestGetLaunchPlans(t *testing.T) { getLaunchPlanSetup() s.FetcherExt.OnFetchAllVerOfLP(s.Ctx, "", "dummyProject", "dummyDomain", filters.Filters{ FieldSelector: "workflow.name=workflow2", - }).Return(launchPlanListResponse.LaunchPlans, nil) + }).Return(launchPlanListResponse.GetLaunchPlans(), nil) argsLp = []string{} launchplan.DefaultConfig.Workflow = "workflow2" err := getLaunchPlanFunc(s.Ctx, argsLp, s.CmdCtx) @@ -379,7 +379,7 @@ func TestGetLaunchPlanTableFunc(t *testing.T) { s.MockAdminClient.OnListLaunchPlansMatch(s.Ctx, resourceGetRequest).Return(launchPlanListResponse, nil) s.MockAdminClient.OnGetLaunchPlanMatch(s.Ctx, objectGetRequest).Return(launchPlan2, nil) s.MockAdminClient.OnListLaunchPlanIdsMatch(s.Ctx, namedIDRequest).Return(namedIdentifierList, nil) - s.FetcherExt.OnFetchAllVerOfLP(s.Ctx, "launchplan1", "dummyProject", "dummyDomain", filters.Filters{}).Return(launchPlanListResponse.LaunchPlans, nil) + s.FetcherExt.OnFetchAllVerOfLP(s.Ctx, "launchplan1", "dummyProject", "dummyDomain", filters.Filters{}).Return(launchPlanListResponse.GetLaunchPlans(), nil) config.GetConfig().Output = printer.OutputFormatTABLE.String() err := getLaunchPlanFunc(s.Ctx, argsLp, s.CmdCtx) assert.Nil(t, err) diff --git a/flytectl/cmd/get/node_execution.go b/flytectl/cmd/get/node_execution.go index 89c902ddbd..2ebe23df1e 100644 --- a/flytectl/cmd/get/node_execution.go +++ b/flytectl/cmd/get/node_execution.go @@ -110,7 +110,7 @@ func getExecutionDetails(ctx context.Context, project, domain, execName, nodeNam } 
sort.Slice(nExecDetailsForView[:], func(i, j int) bool { - return nExecDetailsForView[i].NodeExec.Closure.CreatedAt.AsTime().Before(nExecDetailsForView[j].NodeExec.Closure.CreatedAt.AsTime()) + return nExecDetailsForView[i].NodeExec.Closure.GetCreatedAt().AsTime().Before(nExecDetailsForView[j].NodeExec.Closure.GetCreatedAt().AsTime()) }) return nExecDetailsForView, nil @@ -125,49 +125,49 @@ func getNodeExecDetailsInt(ctx context.Context, project, domain, execName, nodeN } var nodeExecClosures []*NodeExecutionClosure - for _, nodeExec := range nExecDetails.NodeExecutions { + for _, nodeExec := range nExecDetails.GetNodeExecutions() { nodeExecClosure := &NodeExecutionClosure{ NodeExec: &NodeExecution{nodeExec}, } nodeExecClosures = append(nodeExecClosures, nodeExecClosure) // Check if this is parent node. If yes do recursive call to get child nodes. - if nodeExec.Metadata != nil && nodeExec.Metadata.IsParentNode { - nodeExecClosure.ChildNodes, err = getNodeExecDetailsInt(ctx, project, domain, execName, nodeName, nodeExec.Id.NodeId, nodeExecDetailsMap, cmdCtx) + if nodeExec.GetMetadata() != nil && nodeExec.GetMetadata().GetIsParentNode() { + nodeExecClosure.ChildNodes, err = getNodeExecDetailsInt(ctx, project, domain, execName, nodeName, nodeExec.GetId().GetNodeId(), nodeExecDetailsMap, cmdCtx) if err != nil { return nil, err } } else { taskExecList, err := cmdCtx.AdminFetcherExt().FetchTaskExecutionsOnNode(ctx, - nodeExec.Id.NodeId, execName, project, domain) + nodeExec.GetId().GetNodeId(), execName, project, domain) if err != nil { return nil, err } - for _, taskExec := range taskExecList.TaskExecutions { + for _, taskExec := range taskExecList.GetTaskExecutions() { taskExecClosure := &TaskExecutionClosure{ TaskExecution: &TaskExecution{taskExec}, } nodeExecClosure.TaskExecutions = append(nodeExecClosure.TaskExecutions, taskExecClosure) } // Fetch the node inputs and outputs - nExecDataResp, err := cmdCtx.AdminFetcherExt().FetchNodeExecutionData(ctx, 
nodeExec.Id.NodeId, execName, project, domain) + nExecDataResp, err := cmdCtx.AdminFetcherExt().FetchNodeExecutionData(ctx, nodeExec.GetId().GetNodeId(), execName, project, domain) if err != nil { return nil, err } // Extract the inputs from the literal map - nodeExecClosure.Inputs, err = extractLiteralMap(nExecDataResp.FullInputs) + nodeExecClosure.Inputs, err = extractLiteralMap(nExecDataResp.GetFullInputs()) if err != nil { return nil, err } // Extract the outputs from the literal map - nodeExecClosure.Outputs, err = extractLiteralMap(nExecDataResp.FullOutputs) + nodeExecClosure.Outputs, err = extractLiteralMap(nExecDataResp.GetFullOutputs()) if err != nil { return nil, err } } - nodeExecDetailsMap[nodeExec.Id.NodeId] = nodeExecClosure + nodeExecDetailsMap[nodeExec.GetId().GetNodeId()] = nodeExecClosure // Found the node - if len(nodeName) > 0 && nodeName == nodeExec.Id.NodeId { + if len(nodeName) > 0 && nodeName == nodeExec.GetId().GetNodeId() { return nodeExecClosures, err } } @@ -183,38 +183,38 @@ func createNodeTaskExecTreeView(rootView gotree.Tree, taskExecClosures []*TaskEx } // TODO: Replace this by filter to sort in the admin sort.Slice(taskExecClosures[:], func(i, j int) bool { - return taskExecClosures[i].Id.RetryAttempt < taskExecClosures[j].Id.RetryAttempt + return taskExecClosures[i].Id.GetRetryAttempt() < taskExecClosures[j].Id.GetRetryAttempt() }) for _, taskExecClosure := range taskExecClosures { - attemptView := rootView.Add(taskAttemptPrefix + strconv.Itoa(int(taskExecClosure.Id.RetryAttempt))) - attemptView.Add(taskExecPrefix + taskExecClosure.Closure.Phase.String() + - hyphenPrefix + taskExecClosure.Closure.CreatedAt.AsTime().String() + - hyphenPrefix + taskExecClosure.Closure.UpdatedAt.AsTime().String()) - attemptView.Add(taskTypePrefix + taskExecClosure.Closure.TaskType) - attemptView.Add(taskReasonPrefix + taskExecClosure.Closure.Reason) - if taskExecClosure.Closure.Metadata != nil { + attemptView := rootView.Add(taskAttemptPrefix + 
strconv.Itoa(int(taskExecClosure.Id.GetRetryAttempt()))) + attemptView.Add(taskExecPrefix + taskExecClosure.Closure.GetPhase().String() + + hyphenPrefix + taskExecClosure.Closure.GetCreatedAt().AsTime().String() + + hyphenPrefix + taskExecClosure.Closure.GetUpdatedAt().AsTime().String()) + attemptView.Add(taskTypePrefix + taskExecClosure.Closure.GetTaskType()) + attemptView.Add(taskReasonPrefix + taskExecClosure.Closure.GetReason()) + if taskExecClosure.Closure.GetMetadata() != nil { metadata := attemptView.Add(taskMetadataPrefix) - metadata.Add(taskGeneratedNamePrefix + taskExecClosure.Closure.Metadata.GeneratedName) - metadata.Add(taskPluginIDPrefix + taskExecClosure.Closure.Metadata.PluginIdentifier) + metadata.Add(taskGeneratedNamePrefix + taskExecClosure.Closure.GetMetadata().GetGeneratedName()) + metadata.Add(taskPluginIDPrefix + taskExecClosure.Closure.GetMetadata().GetPluginIdentifier()) extResourcesView := metadata.Add(taskExtResourcesPrefix) - for _, extResource := range taskExecClosure.Closure.Metadata.ExternalResources { - extResourcesView.Add(taskExtResourcePrefix + extResource.ExternalId) + for _, extResource := range taskExecClosure.Closure.GetMetadata().GetExternalResources() { + extResourcesView.Add(taskExtResourcePrefix + extResource.GetExternalId()) } resourcePoolInfoView := metadata.Add(taskResourcePrefix) - for _, rsPool := range taskExecClosure.Closure.Metadata.ResourcePoolInfo { - resourcePoolInfoView.Add(taskExtResourcePrefix + rsPool.Namespace) - resourcePoolInfoView.Add(taskExtResourceTokenPrefix + rsPool.AllocationToken) + for _, rsPool := range taskExecClosure.Closure.GetMetadata().GetResourcePoolInfo() { + resourcePoolInfoView.Add(taskExtResourcePrefix + rsPool.GetNamespace()) + resourcePoolInfoView.Add(taskExtResourceTokenPrefix + rsPool.GetAllocationToken()) } } - sort.Slice(taskExecClosure.Closure.Logs[:], func(i, j int) bool { - return taskExecClosure.Closure.Logs[i].Name < taskExecClosure.Closure.Logs[j].Name + 
sort.Slice(taskExecClosure.Closure.GetLogs()[:], func(i, j int) bool { + return taskExecClosure.Closure.GetLogs()[i].GetName() < taskExecClosure.Closure.GetLogs()[j].GetName() }) logsView := attemptView.Add(taskLogsPrefix) - for _, logData := range taskExecClosure.Closure.Logs { - logsView.Add(taskLogsNamePrefix + logData.Name) - logsView.Add(taskLogURIPrefix + logData.Uri) + for _, logData := range taskExecClosure.Closure.GetLogs() { + logsView.Add(taskLogsNamePrefix + logData.GetName()) + logsView.Add(taskLogURIPrefix + logData.GetUri()) } } } @@ -228,13 +228,13 @@ func createNodeDetailsTreeView(rootView gotree.Tree, nodeExecutionClosures []*No } // TODO : Move to sorting using filters. sort.Slice(nodeExecutionClosures[:], func(i, j int) bool { - return nodeExecutionClosures[i].NodeExec.Closure.CreatedAt.AsTime().Before(nodeExecutionClosures[j].NodeExec.Closure.CreatedAt.AsTime()) + return nodeExecutionClosures[i].NodeExec.Closure.GetCreatedAt().AsTime().Before(nodeExecutionClosures[j].NodeExec.Closure.GetCreatedAt().AsTime()) }) for _, nodeExecWrapper := range nodeExecutionClosures { - nExecView := rootView.Add(nodeExecWrapper.NodeExec.Id.NodeId + hyphenPrefix + nodeExecWrapper.NodeExec.Closure.Phase.String() + - hyphenPrefix + nodeExecWrapper.NodeExec.Closure.CreatedAt.AsTime().String() + - hyphenPrefix + nodeExecWrapper.NodeExec.Closure.UpdatedAt.AsTime().String()) + nExecView := rootView.Add(nodeExecWrapper.NodeExec.Id.GetNodeId() + hyphenPrefix + nodeExecWrapper.NodeExec.Closure.GetPhase().String() + + hyphenPrefix + nodeExecWrapper.NodeExec.Closure.GetCreatedAt().AsTime().String() + + hyphenPrefix + nodeExecWrapper.NodeExec.Closure.GetUpdatedAt().AsTime().String()) if len(nodeExecWrapper.ChildNodes) > 0 { createNodeDetailsTreeView(nExecView, nodeExecWrapper.ChildNodes) } @@ -254,7 +254,7 @@ func extractLiteralMap(literalMap *core.LiteralMap) (map[string]interface{}, err if literalMap == nil || literalMap.Literals == nil { return m, nil } - for key, 
literalVal := range literalMap.Literals { + for key, literalVal := range literalMap.GetLiterals() { extractedLiteralVal, err := coreutils.ExtractFromLiteral(literalVal) if err != nil { return nil, err diff --git a/flytectl/cmd/get/project.go b/flytectl/cmd/get/project.go index 96b68c56fb..3d1bd87ce2 100644 --- a/flytectl/cmd/get/project.go +++ b/flytectl/cmd/get/project.go @@ -84,9 +84,9 @@ func getProjectsFunc(ctx context.Context, args []string, cmdCtx cmdCore.CommandC if len(args) == 1 { id := args[0] - logger.Debugf(ctx, "Retrieved %v projects", len(projects.Projects)) - for _, v := range projects.Projects { - if v.Id == id { + logger.Debugf(ctx, "Retrieved %v projects", len(projects.GetProjects())) + for _, v := range projects.GetProjects() { + if v.GetId() == id { err := adminPrinter.Print(config.GetConfig().MustOutputFormat(), projectColumns, v) if err != nil { return err @@ -97,6 +97,6 @@ func getProjectsFunc(ctx context.Context, args []string, cmdCtx cmdCore.CommandC return nil } - logger.Debugf(ctx, "Retrieved %v projects", len(projects.Projects)) - return adminPrinter.Print(config.GetConfig().MustOutputFormat(), projectColumns, ProjectToProtoMessages(projects.Projects)...) + logger.Debugf(ctx, "Retrieved %v projects", len(projects.GetProjects())) + return adminPrinter.Print(config.GetConfig().MustOutputFormat(), projectColumns, ProjectToProtoMessages(projects.GetProjects())...) 
} diff --git a/flytectl/cmd/get/task.go b/flytectl/cmd/get/task.go index 0b050f5546..fcbb00b684 100644 --- a/flytectl/cmd/get/task.go +++ b/flytectl/cmd/get/task.go @@ -125,14 +125,14 @@ func TaskToTableProtoMessages(l []*admin.Task) []proto.Message { messages := make([]proto.Message, 0, len(l)) for _, m := range l { m := proto.Clone(m).(*admin.Task) - if m.Closure != nil && m.Closure.CompiledTask != nil { - if m.Closure.CompiledTask.Template != nil { - if m.Closure.CompiledTask.Template.Interface != nil { - if m.Closure.CompiledTask.Template.Interface.Inputs != nil && m.Closure.CompiledTask.Template.Interface.Inputs.Variables != nil { - printer.FormatVariableDescriptions(m.Closure.CompiledTask.Template.Interface.Inputs.Variables) + if m.GetClosure() != nil && m.GetClosure().GetCompiledTask() != nil { + if m.GetClosure().GetCompiledTask().GetTemplate() != nil { + if m.GetClosure().GetCompiledTask().GetTemplate().GetInterface() != nil { + if m.GetClosure().GetCompiledTask().GetTemplate().GetInterface().GetInputs() != nil && m.Closure.CompiledTask.Template.Interface.Inputs.Variables != nil { + printer.FormatVariableDescriptions(m.GetClosure().GetCompiledTask().GetTemplate().GetInterface().GetInputs().GetVariables()) } - if m.Closure.CompiledTask.Template.Interface.Outputs != nil && m.Closure.CompiledTask.Template.Interface.Outputs.Variables != nil { - printer.FormatVariableDescriptions(m.Closure.CompiledTask.Template.Interface.Outputs.Variables) + if m.GetClosure().GetCompiledTask().GetTemplate().GetInterface().GetOutputs() != nil && m.Closure.CompiledTask.Template.Interface.Outputs.Variables != nil { + printer.FormatVariableDescriptions(m.GetClosure().GetCompiledTask().GetTemplate().GetInterface().GetOutputs().GetVariables()) } } } diff --git a/flytectl/cmd/get/task_test.go b/flytectl/cmd/get/task_test.go index d0f817fd1e..e5c2dafc94 100644 --- a/flytectl/cmd/get/task_test.go +++ b/flytectl/cmd/get/task_test.go @@ -244,7 +244,7 @@ func TestGetTaskFunc(t *testing.T) 
{ s.MockAdminClient.OnListTasksMatch(s.Ctx, resourceListRequestTask).Return(taskListResponse, nil) s.MockAdminClient.OnGetTaskMatch(s.Ctx, objectGetRequestTask).Return(task2, nil) s.FetcherExt.OnFetchAllVerOfTaskMatch(mock.Anything, mock.Anything, mock.Anything, - mock.Anything, mock.Anything).Return(taskListResponse.Tasks, nil) + mock.Anything, mock.Anything).Return(taskListResponse.GetTasks(), nil) err := getTaskFunc(s.Ctx, argsTask, s.CmdCtx) assert.Nil(t, err) s.FetcherExt.AssertCalled(t, "FetchAllVerOfTask", s.Ctx, "task1", "dummyProject", "dummyDomain", filters.Filters{}) @@ -329,7 +329,7 @@ func TestGetTaskFuncWithTable(t *testing.T) { taskConfig.DefaultConfig.Filter = filters.Filters{} s.MockAdminClient.OnListTasksMatch(s.Ctx, resourceListRequestTask).Return(taskListResponse, nil) s.MockAdminClient.OnGetTaskMatch(s.Ctx, objectGetRequestTask).Return(task2, nil) - s.FetcherExt.OnFetchAllVerOfTask(s.Ctx, "task1", "dummyProject", "dummyDomain", filters.Filters{}).Return(taskListResponse.Tasks, nil) + s.FetcherExt.OnFetchAllVerOfTask(s.Ctx, "task1", "dummyProject", "dummyDomain", filters.Filters{}).Return(taskListResponse.GetTasks(), nil) config.GetConfig().Output = "table" err := getTaskFunc(s.Ctx, argsTask, s.CmdCtx) assert.Nil(t, err) @@ -455,7 +455,7 @@ func TestGetTasks(t *testing.T) { taskConfig.DefaultConfig.Filter = filters.Filters{} s.MockAdminClient.OnListTasksMatch(s.Ctx, resourceListRequestTask).Return(taskListResponse, nil) s.MockAdminClient.OnGetTaskMatch(s.Ctx, objectGetRequestTask).Return(task2, nil) - s.FetcherExt.OnFetchAllVerOfTask(s.Ctx, "task1", "dummyProject", "dummyDomain", filters.Filters{}).Return(taskListResponse.Tasks, nil) + s.FetcherExt.OnFetchAllVerOfTask(s.Ctx, "task1", "dummyProject", "dummyDomain", filters.Filters{}).Return(taskListResponse.GetTasks(), nil) err := getTaskFunc(s.Ctx, argsTask, s.CmdCtx) assert.Nil(t, err) @@ -471,8 +471,8 @@ func TestGetTasksFilters(t *testing.T) { } s.MockAdminClient.OnListTasksMatch(s.Ctx, 
resourceListFilterRequestTask).Return(taskListFilterResponse, nil) filteredTasks := []*admin.Task{} - for _, task := range taskListResponse.Tasks { - if task.Id.Name == "task1" && task.Id.Version == "v1" { + for _, task := range taskListResponse.GetTasks() { + if task.GetId().GetName() == "task1" && task.GetId().GetVersion() == "v1" { filteredTasks = append(filteredTasks, task) } } diff --git a/flytectl/cmd/get/workflow.go b/flytectl/cmd/get/workflow.go index 624e8d2ba8..ce6dc4db8d 100644 --- a/flytectl/cmd/get/workflow.go +++ b/flytectl/cmd/get/workflow.go @@ -129,15 +129,15 @@ func WorkflowToTableProtoMessages(l []*admin.Workflow) []proto.Message { messages := make([]proto.Message, 0, len(l)) for _, m := range l { m := proto.Clone(m).(*admin.Workflow) - if m.Closure != nil && m.Closure.CompiledWorkflow != nil { - if m.Closure.CompiledWorkflow.Primary != nil { - if m.Closure.CompiledWorkflow.Primary.Template != nil { - if m.Closure.CompiledWorkflow.Primary.Template.Interface != nil { - if m.Closure.CompiledWorkflow.Primary.Template.Interface.Inputs != nil && m.Closure.CompiledWorkflow.Primary.Template.Interface.Inputs.Variables != nil { - printer.FormatVariableDescriptions(m.Closure.CompiledWorkflow.Primary.Template.Interface.Inputs.Variables) + if m.GetClosure() != nil && m.GetClosure().GetCompiledWorkflow() != nil { + if m.GetClosure().GetCompiledWorkflow().GetPrimary() != nil { + if m.GetClosure().GetCompiledWorkflow().GetPrimary().GetTemplate() != nil { + if m.GetClosure().GetCompiledWorkflow().GetPrimary().GetTemplate().GetInterface() != nil { + if m.GetClosure().GetCompiledWorkflow().GetPrimary().GetTemplate().GetInterface().GetInputs() != nil && m.Closure.CompiledWorkflow.Primary.Template.Interface.Inputs.Variables != nil { + printer.FormatVariableDescriptions(m.GetClosure().GetCompiledWorkflow().GetPrimary().GetTemplate().GetInterface().GetInputs().GetVariables()) } - if m.Closure.CompiledWorkflow.Primary.Template.Interface.Outputs != nil && 
m.Closure.CompiledWorkflow.Primary.Template.Interface.Outputs.Variables != nil { - printer.FormatVariableDescriptions(m.Closure.CompiledWorkflow.Primary.Template.Interface.Outputs.Variables) + if m.GetClosure().GetCompiledWorkflow().GetPrimary().GetTemplate().GetInterface().GetOutputs() != nil && m.Closure.CompiledWorkflow.Primary.Template.Interface.Outputs.Variables != nil { + printer.FormatVariableDescriptions(m.GetClosure().GetCompiledWorkflow().GetPrimary().GetTemplate().GetInterface().GetOutputs().GetVariables()) } } } diff --git a/flytectl/cmd/register/register_util.go b/flytectl/cmd/register/register_util.go index b7b419e611..4ef1bab1c1 100644 --- a/flytectl/cmd/register/register_util.go +++ b/flytectl/cmd/register/register_util.go @@ -171,10 +171,10 @@ func register(ctx context.Context, message proto.Message, cmdCtx cmdCore.Command ResourceType: core.ResourceType_LAUNCH_PLAN, Project: config.GetConfig().Project, Domain: config.GetConfig().Domain, - Name: launchPlan.Id.Name, - Version: launchPlan.Id.Version, + Name: launchPlan.GetId().GetName(), + Version: launchPlan.GetId().GetVersion(), }, - Spec: launchPlan.Spec, + Spec: launchPlan.GetSpec(), }) if err != nil { return err @@ -185,8 +185,8 @@ func register(ctx context.Context, message proto.Message, cmdCtx cmdCore.Command Id: &core.Identifier{ Project: config.GetConfig().Project, Domain: config.GetConfig().Domain, - Name: launchPlan.Id.Name, - Version: launchPlan.Id.Version, + Name: launchPlan.GetId().GetName(), + Version: launchPlan.GetId().GetVersion(), }, State: admin.LaunchPlanState_ACTIVE, }) @@ -205,8 +205,8 @@ func register(ctx context.Context, message proto.Message, cmdCtx cmdCore.Command ResourceType: core.ResourceType_WORKFLOW, Project: config.GetConfig().Project, Domain: config.GetConfig().Domain, - Name: workflowSpec.Template.Id.Name, - Version: workflowSpec.Template.Id.Version, + Name: workflowSpec.GetTemplate().GetId().GetName(), + Version: workflowSpec.GetTemplate().GetId().GetVersion(), }, 
Spec: workflowSpec, }) @@ -223,8 +223,8 @@ func register(ctx context.Context, message proto.Message, cmdCtx cmdCore.Command ResourceType: core.ResourceType_TASK, Project: config.GetConfig().Project, Domain: config.GetConfig().Domain, - Name: taskSpec.Template.Id.Name, - Version: taskSpec.Template.Id.Version, + Name: taskSpec.GetTemplate().GetId().GetName(), + Version: taskSpec.GetTemplate().GetId().GetVersion(), }, Spec: taskSpec, }) @@ -235,39 +235,39 @@ func register(ctx context.Context, message proto.Message, cmdCtx cmdCore.Command } func hydrateNode(node *core.Node, version string, force bool) error { - targetNode := node.Target + targetNode := node.GetTarget() switch v := targetNode.(type) { case *core.Node_TaskNode: taskNodeWrapper := targetNode.(*core.Node_TaskNode) - taskNodeReference := taskNodeWrapper.TaskNode.Reference.(*core.TaskNode_ReferenceId) + taskNodeReference := taskNodeWrapper.TaskNode.GetReference().(*core.TaskNode_ReferenceId) hydrateIdentifier(taskNodeReference.ReferenceId, version, force) case *core.Node_WorkflowNode: workflowNodeWrapper := targetNode.(*core.Node_WorkflowNode) - switch workflowNodeWrapper.WorkflowNode.Reference.(type) { + switch workflowNodeWrapper.WorkflowNode.GetReference().(type) { case *core.WorkflowNode_SubWorkflowRef: - subWorkflowNodeReference := workflowNodeWrapper.WorkflowNode.Reference.(*core.WorkflowNode_SubWorkflowRef) + subWorkflowNodeReference := workflowNodeWrapper.WorkflowNode.GetReference().(*core.WorkflowNode_SubWorkflowRef) hydrateIdentifier(subWorkflowNodeReference.SubWorkflowRef, version, force) case *core.WorkflowNode_LaunchplanRef: - launchPlanNodeReference := workflowNodeWrapper.WorkflowNode.Reference.(*core.WorkflowNode_LaunchplanRef) + launchPlanNodeReference := workflowNodeWrapper.WorkflowNode.GetReference().(*core.WorkflowNode_LaunchplanRef) hydrateIdentifier(launchPlanNodeReference.LaunchplanRef, version, force) default: - return fmt.Errorf("unknown type %T", 
workflowNodeWrapper.WorkflowNode.Reference) + return fmt.Errorf("unknown type %T", workflowNodeWrapper.WorkflowNode.GetReference()) } case *core.Node_BranchNode: branchNodeWrapper := targetNode.(*core.Node_BranchNode) - if err := hydrateNode(branchNodeWrapper.BranchNode.IfElse.Case.ThenNode, version, force); err != nil { + if err := hydrateNode(branchNodeWrapper.BranchNode.GetIfElse().GetCase().GetThenNode(), version, force); err != nil { return fmt.Errorf("failed to hydrateNode") } - if len(branchNodeWrapper.BranchNode.IfElse.Other) > 0 { - for _, ifBlock := range branchNodeWrapper.BranchNode.IfElse.Other { - if err := hydrateNode(ifBlock.ThenNode, version, force); err != nil { + if len(branchNodeWrapper.BranchNode.GetIfElse().GetOther()) > 0 { + for _, ifBlock := range branchNodeWrapper.BranchNode.GetIfElse().GetOther() { + if err := hydrateNode(ifBlock.GetThenNode(), version, force); err != nil { return fmt.Errorf("failed to hydrateNode") } } } - switch branchNodeWrapper.BranchNode.IfElse.Default.(type) { + switch branchNodeWrapper.BranchNode.GetIfElse().GetDefault().(type) { case *core.IfElseBlock_ElseNode: - elseNodeReference := branchNodeWrapper.BranchNode.IfElse.Default.(*core.IfElseBlock_ElseNode) + elseNodeReference := branchNodeWrapper.BranchNode.GetIfElse().GetDefault().(*core.IfElseBlock_ElseNode) if err := hydrateNode(elseNodeReference.ElseNode, version, force); err != nil { return fmt.Errorf("failed to hydrateNode") } @@ -275,12 +275,12 @@ func hydrateNode(node *core.Node, version string, force bool) error { case *core.IfElseBlock_Error: // Do nothing. default: - return fmt.Errorf("unknown type %T", branchNodeWrapper.BranchNode.IfElse.Default) + return fmt.Errorf("unknown type %T", branchNodeWrapper.BranchNode.GetIfElse().GetDefault()) } case *core.Node_GateNode: // Do nothing. 
case *core.Node_ArrayNode: - if err := hydrateNode(v.ArrayNode.Node, version, force); err != nil { + if err := hydrateNode(v.ArrayNode.GetNode(), version, force); err != nil { return fmt.Errorf("failed to hydrateNode") } default: @@ -290,33 +290,33 @@ func hydrateNode(node *core.Node, version string, force bool) error { } func hydrateIdentifier(identifier *core.Identifier, version string, force bool) { - if identifier.Project == "" || identifier.Project == registrationProjectPattern { + if identifier.GetProject() == "" || identifier.GetProject() == registrationProjectPattern { identifier.Project = config.GetConfig().Project } - if identifier.Domain == "" || identifier.Domain == registrationDomainPattern { + if identifier.GetDomain() == "" || identifier.GetDomain() == registrationDomainPattern { identifier.Domain = config.GetConfig().Domain } - if force || identifier.Version == "" || identifier.Version == registrationVersionPattern { + if force || identifier.GetVersion() == "" || identifier.GetVersion() == registrationVersionPattern { identifier.Version = version } } func hydrateTaskSpec(task *admin.TaskSpec, sourceUploadedLocation storage.DataReference, destinationDir string) error { - if task.Template.GetContainer() != nil { - for k := range task.Template.GetContainer().Args { - if task.Template.GetContainer().Args[k] == registrationRemotePackagePattern { + if task.GetTemplate().GetContainer() != nil { + for k := range task.GetTemplate().GetContainer().GetArgs() { + if task.GetTemplate().GetContainer().GetArgs()[k] == registrationRemotePackagePattern { task.Template.GetContainer().Args[k] = sourceUploadedLocation.String() } - if task.Template.GetContainer().Args[k] == registrationDestDirPattern { + if task.GetTemplate().GetContainer().GetArgs()[k] == registrationDestDirPattern { task.Template.GetContainer().Args[k] = "." 
if len(destinationDir) > 0 { task.Template.GetContainer().Args[k] = destinationDir } } } - } else if task.Template.GetK8SPod() != nil && task.Template.GetK8SPod().PodSpec != nil { + } else if task.GetTemplate().GetK8SPod() != nil && task.GetTemplate().GetK8SPod().GetPodSpec() != nil { var podSpec = v1.PodSpec{} - err := utils.UnmarshalStructToObj(task.Template.GetK8SPod().PodSpec, &podSpec) + err := utils.UnmarshalStructToObj(task.GetTemplate().GetK8SPod().GetPodSpec(), &podSpec) if err != nil { return err } @@ -339,9 +339,9 @@ func hydrateTaskSpec(task *admin.TaskSpec, sourceUploadedLocation storage.DataRe } task.Template.Target = &core.TaskTemplate_K8SPod{ K8SPod: &core.K8SPod{ - Metadata: task.Template.GetK8SPod().Metadata, + Metadata: task.GetTemplate().GetK8SPod().GetMetadata(), PodSpec: podSpecStruct, - DataConfig: task.Template.GetK8SPod().DataConfig, + DataConfig: task.GetTemplate().GetK8SPod().GetDataConfig(), }, } } @@ -349,15 +349,15 @@ func hydrateTaskSpec(task *admin.TaskSpec, sourceUploadedLocation storage.DataRe } func validateLPWithSchedule(lpSpec *admin.LaunchPlanSpec, wf *admin.Workflow) error { - schedule := lpSpec.EntityMetadata.Schedule + schedule := lpSpec.GetEntityMetadata().GetSchedule() var scheduleRequiredParams []string - if wf != nil && wf.Closure != nil && wf.Closure.CompiledWorkflow != nil && - wf.Closure.CompiledWorkflow.Primary != nil && wf.Closure.CompiledWorkflow.Primary.Template != nil && - wf.Closure.CompiledWorkflow.Primary.Template.Interface != nil && - wf.Closure.CompiledWorkflow.Primary.Template.Interface.Inputs != nil { - variables := wf.Closure.CompiledWorkflow.Primary.Template.Interface.Inputs.Variables + if wf != nil && wf.GetClosure() != nil && wf.GetClosure().GetCompiledWorkflow() != nil && + wf.GetClosure().GetCompiledWorkflow().GetPrimary() != nil && wf.GetClosure().GetCompiledWorkflow().GetPrimary().GetTemplate() != nil && + wf.GetClosure().GetCompiledWorkflow().GetPrimary().GetTemplate().GetInterface() != nil && + 
wf.GetClosure().GetCompiledWorkflow().GetPrimary().GetTemplate().GetInterface().GetInputs() != nil { + variables := wf.GetClosure().GetCompiledWorkflow().GetPrimary().GetTemplate().GetInterface().GetInputs().GetVariables() for varName := range variables { - if varName != schedule.KickoffTimeInputArg { + if varName != schedule.GetKickoffTimeInputArg() { scheduleRequiredParams = append(scheduleRequiredParams, varName) } } @@ -366,16 +366,16 @@ func validateLPWithSchedule(lpSpec *admin.LaunchPlanSpec, wf *admin.Workflow) er // Either the scheduled param should have default or fixed values var scheduleParamsWithValues []string // Check for default values - if lpSpec.DefaultInputs != nil { - for paramName, paramValue := range lpSpec.DefaultInputs.Parameters { - if paramName != schedule.KickoffTimeInputArg && paramValue.GetDefault() != nil { + if lpSpec.GetDefaultInputs() != nil { + for paramName, paramValue := range lpSpec.GetDefaultInputs().GetParameters() { + if paramName != schedule.GetKickoffTimeInputArg() && paramValue.GetDefault() != nil { scheduleParamsWithValues = append(scheduleParamsWithValues, paramName) } } } // Check for fixed values - if lpSpec.FixedInputs != nil && lpSpec.FixedInputs.Literals != nil { - for fixedLiteralName := range lpSpec.FixedInputs.Literals { + if lpSpec.GetFixedInputs() != nil && lpSpec.FixedInputs.Literals != nil { + for fixedLiteralName := range lpSpec.GetFixedInputs().GetLiterals() { scheduleParamsWithValues = append(scheduleParamsWithValues, fixedLiteralName) } } @@ -389,14 +389,14 @@ func validateLPWithSchedule(lpSpec *admin.LaunchPlanSpec, wf *admin.Workflow) er } func validateLaunchSpec(ctx context.Context, lpSpec *admin.LaunchPlanSpec, cmdCtx cmdCore.CommandContext) error { - if lpSpec == nil || lpSpec.WorkflowId == nil || lpSpec.EntityMetadata == nil || - lpSpec.EntityMetadata.Schedule == nil { + if lpSpec == nil || lpSpec.GetWorkflowId() == nil || lpSpec.GetEntityMetadata() == nil || + 
lpSpec.GetEntityMetadata().GetSchedule() == nil { return nil } // Fetch the workflow spec using the identifier - workflowID := lpSpec.WorkflowId - wf, err := cmdCtx.AdminFetcherExt().FetchWorkflowVersion(ctx, workflowID.Name, workflowID.Version, - workflowID.Project, workflowID.Domain) + workflowID := lpSpec.GetWorkflowId() + wf, err := cmdCtx.AdminFetcherExt().FetchWorkflowVersion(ctx, workflowID.GetName(), workflowID.GetVersion(), + workflowID.GetProject(), workflowID.GetDomain()) if err != nil { return err } @@ -464,7 +464,7 @@ func validateSpec(ctx context.Context, message proto.Message, cmdCtx cmdCore.Com switch v := message.(type) { case *admin.LaunchPlan: launchPlan := v - if err := validateLaunchSpec(ctx, launchPlan.Spec, cmdCtx); err != nil { + if err := validateLaunchSpec(ctx, launchPlan.GetSpec(), cmdCtx); err != nil { return err } } @@ -475,26 +475,26 @@ func hydrateSpec(message proto.Message, uploadLocation storage.DataReference, co switch v := message.(type) { case *admin.LaunchPlan: launchPlan := message.(*admin.LaunchPlan) - hydrateIdentifier(launchPlan.Id, config.Version, config.Force) - hydrateIdentifier(launchPlan.Spec.WorkflowId, config.Version, config.Force) - if err := hydrateLaunchPlanSpec(config.AssumableIamRole, config.K8sServiceAccount, config.OutputLocationPrefix, launchPlan.Spec); err != nil { + hydrateIdentifier(launchPlan.GetId(), config.Version, config.Force) + hydrateIdentifier(launchPlan.GetSpec().GetWorkflowId(), config.Version, config.Force) + if err := hydrateLaunchPlanSpec(config.AssumableIamRole, config.K8sServiceAccount, config.OutputLocationPrefix, launchPlan.GetSpec()); err != nil { return err } case *admin.WorkflowSpec: workflowSpec := message.(*admin.WorkflowSpec) - for _, Noderef := range workflowSpec.Template.Nodes { + for _, Noderef := range workflowSpec.GetTemplate().GetNodes() { if err := hydrateNode(Noderef, config.Version, config.Force); err != nil { return err } } - if workflowSpec.Template.GetFailureNode() != nil 
{ - if err := hydrateNode(workflowSpec.Template.GetFailureNode(), config.Version, config.Force); err != nil { + if workflowSpec.GetTemplate().GetFailureNode() != nil { + if err := hydrateNode(workflowSpec.GetTemplate().GetFailureNode(), config.Version, config.Force); err != nil { return err } } - hydrateIdentifier(workflowSpec.Template.Id, config.Version, config.Force) - for _, subWorkflow := range workflowSpec.SubWorkflows { - for _, Noderef := range subWorkflow.Nodes { + hydrateIdentifier(workflowSpec.GetTemplate().GetId(), config.Version, config.Force) + for _, subWorkflow := range workflowSpec.GetSubWorkflows() { + for _, Noderef := range subWorkflow.GetNodes() { if err := hydrateNode(Noderef, config.Version, config.Force); err != nil { return err } @@ -504,11 +504,11 @@ func hydrateSpec(message proto.Message, uploadLocation storage.DataReference, co return err } } - hydrateIdentifier(subWorkflow.Id, config.Version, config.Force) + hydrateIdentifier(subWorkflow.GetId(), config.Version, config.Force) } case *admin.TaskSpec: taskSpec := message.(*admin.TaskSpec) - hydrateIdentifier(taskSpec.Template.Id, config.Version, config.Force) + hydrateIdentifier(taskSpec.GetTemplate().GetId(), config.Version, config.Force) // In case of fast serialize input proto also have on additional variable to substitute i.e destination bucket for source code if err := hydrateTaskSpec(taskSpec, uploadLocation, config.DestinationDirectory); err != nil { return err @@ -607,7 +607,7 @@ func readAndCopyArchive(src io.Reader, tempDir string, unarchivedFiles []string) } } } else if header.Typeflag == tar.TypeReg { - dest, err := os.OpenFile(target, os.O_CREATE|os.O_RDWR, os.FileMode(header.Mode)) + dest, err := os.OpenFile(target, os.O_CREATE|os.O_RDWR, os.FileMode(header.Mode)) // #nosec G115 if err != nil { return unarchivedFiles, err } @@ -814,8 +814,8 @@ func uploadFastRegisterArtifact(ctx context.Context, project, domain, sourceCode } } - if resp != nil && len(resp.SignedUrl) > 0 { - 
return storage.DataReference(resp.NativeUrl), DirectUpload(resp.SignedUrl, h, size, dataRefReaderCloser) + if resp != nil && len(resp.GetSignedUrl()) > 0 { + return storage.DataReference(resp.GetNativeUrl()), DirectUpload(resp.GetSignedUrl(), h, size, dataRefReaderCloser) } dataStore, err := getStorageClient(ctx) diff --git a/flytectl/cmd/register/register_util_test.go b/flytectl/cmd/register/register_util_test.go index e068c0f64a..fbe6e8b6f2 100644 --- a/flytectl/cmd/register/register_util_test.go +++ b/flytectl/cmd/register/register_util_test.go @@ -359,8 +359,8 @@ func TestHydrateLaunchPlanSpec(t *testing.T) { lpSpec := &admin.LaunchPlanSpec{} err := hydrateLaunchPlanSpec(rconfig.DefaultFilesConfig.AssumableIamRole, rconfig.DefaultFilesConfig.K8sServiceAccount, rconfig.DefaultFilesConfig.OutputLocationPrefix, lpSpec) assert.Nil(t, err) - assert.Equal(t, &admin.AuthRole{AssumableIamRole: "iamRole"}, lpSpec.AuthRole) - assert.Equal(t, &core.SecurityContext{RunAs: &core.Identity{IamRole: "iamRole"}}, lpSpec.SecurityContext) + assert.Equal(t, &admin.AuthRole{AssumableIamRole: "iamRole"}, lpSpec.GetAuthRole()) + assert.Equal(t, &core.SecurityContext{RunAs: &core.Identity{IamRole: "iamRole"}}, lpSpec.GetSecurityContext()) }) t.Run("k8sService account override", func(t *testing.T) { registerFilesSetup() @@ -368,8 +368,8 @@ func TestHydrateLaunchPlanSpec(t *testing.T) { lpSpec := &admin.LaunchPlanSpec{} err := hydrateLaunchPlanSpec(rconfig.DefaultFilesConfig.AssumableIamRole, rconfig.DefaultFilesConfig.K8sServiceAccount, rconfig.DefaultFilesConfig.OutputLocationPrefix, lpSpec) assert.Nil(t, err) - assert.Equal(t, &admin.AuthRole{KubernetesServiceAccount: "k8Account"}, lpSpec.AuthRole) - assert.Equal(t, &core.SecurityContext{RunAs: &core.Identity{K8SServiceAccount: "k8Account"}}, lpSpec.SecurityContext) + assert.Equal(t, &admin.AuthRole{KubernetesServiceAccount: "k8Account"}, lpSpec.GetAuthRole()) + assert.Equal(t, &core.SecurityContext{RunAs: 
&core.Identity{K8SServiceAccount: "k8Account"}}, lpSpec.GetSecurityContext()) }) t.Run("Both k8sService and IamRole", func(t *testing.T) { registerFilesSetup() @@ -379,8 +379,8 @@ func TestHydrateLaunchPlanSpec(t *testing.T) { err := hydrateLaunchPlanSpec(rconfig.DefaultFilesConfig.AssumableIamRole, rconfig.DefaultFilesConfig.K8sServiceAccount, rconfig.DefaultFilesConfig.OutputLocationPrefix, lpSpec) assert.Nil(t, err) assert.Equal(t, &admin.AuthRole{AssumableIamRole: "iamRole", - KubernetesServiceAccount: "k8Account"}, lpSpec.AuthRole) - assert.Equal(t, &core.SecurityContext{RunAs: &core.Identity{IamRole: "iamRole", K8SServiceAccount: "k8Account"}}, lpSpec.SecurityContext) + KubernetesServiceAccount: "k8Account"}, lpSpec.GetAuthRole()) + assert.Equal(t, &core.SecurityContext{RunAs: &core.Identity{IamRole: "iamRole", K8SServiceAccount: "k8Account"}}, lpSpec.GetSecurityContext()) }) t.Run("Output prefix", func(t *testing.T) { registerFilesSetup() @@ -388,7 +388,7 @@ func TestHydrateLaunchPlanSpec(t *testing.T) { lpSpec := &admin.LaunchPlanSpec{} err := hydrateLaunchPlanSpec(rconfig.DefaultFilesConfig.AssumableIamRole, rconfig.DefaultFilesConfig.K8sServiceAccount, rconfig.DefaultFilesConfig.OutputLocationPrefix, lpSpec) assert.Nil(t, err) - assert.Equal(t, &admin.RawOutputDataConfig{OutputLocationPrefix: "prefix"}, lpSpec.RawOutputDataConfig) + assert.Equal(t, &admin.RawOutputDataConfig{OutputLocationPrefix: "prefix"}, lpSpec.GetRawOutputDataConfig()) }) } @@ -648,7 +648,7 @@ func TestHydrateTaskSpec(t *testing.T) { err = hydrateTaskSpec(task, storage.DataReference("file://somewhere"), "sourcey") assert.NoError(t, err) var hydratedPodSpec = v1.PodSpec{} - err = utils.UnmarshalStructToObj(task.Template.GetK8SPod().PodSpec, &hydratedPodSpec) + err = utils.UnmarshalStructToObj(task.GetTemplate().GetK8SPod().GetPodSpec(), &hydratedPodSpec) assert.NoError(t, err) assert.Len(t, hydratedPodSpec.Containers[1].Args, 2) assert.Contains(t, hydratedPodSpec.Containers[1].Args[1], 
"somewhere") diff --git a/flytectl/cmd/update/execution.go b/flytectl/cmd/update/execution.go index 70f34b342f..efe8e64be7 100644 --- a/flytectl/cmd/update/execution.go +++ b/flytectl/cmd/update/execution.go @@ -36,13 +36,13 @@ func updateExecutionFunc(ctx context.Context, args []string, cmdCtx cmdCore.Comm project := config.GetConfig().Project domain := config.GetConfig().Domain if len(args) != 1 { - return fmt.Errorf(clierrors.ErrExecutionNotPassed) + return fmt.Errorf(clierrors.ErrExecutionNotPassed) //nolint } executionName := args[0] activate := execution.UConfig.Activate archive := execution.UConfig.Archive if activate && archive { - return fmt.Errorf(clierrors.ErrInvalidStateUpdate) + return fmt.Errorf(clierrors.ErrInvalidStateUpdate) //nolint } var newState admin.ExecutionState diff --git a/flytectl/cmd/update/execution_test.go b/flytectl/cmd/update/execution_test.go index fbcb0b02e9..4fde5683a5 100644 --- a/flytectl/cmd/update/execution_test.go +++ b/flytectl/cmd/update/execution_test.go @@ -28,7 +28,7 @@ func TestExecutionCanBeActivated(t *testing.T) { t, "UpdateExecution", s.Ctx, mock.MatchedBy( func(r *admin.ExecutionUpdateRequest) bool { - return r.State == admin.ExecutionState_EXECUTION_ACTIVE + return r.GetState() == admin.ExecutionState_EXECUTION_ACTIVE })) }) } @@ -47,7 +47,7 @@ func TestExecutionCanBeArchived(t *testing.T) { t, "UpdateExecution", s.Ctx, mock.MatchedBy( func(r *admin.ExecutionUpdateRequest) bool { - return r.State == admin.ExecutionState_EXECUTION_ARCHIVED + return r.GetState() == admin.ExecutionState_EXECUTION_ARCHIVED })) }) } @@ -146,7 +146,7 @@ func TestExecutionUpdateFailsWhenExecutionDoesNotExist(t *testing.T) { t, /* mockSetup */ func(s *testutils.TestStruct, execution *admin.Execution) { s.FetcherExt. - OnFetchExecution(s.Ctx, execution.Id.Name, execution.Id.Project, execution.Id.Domain). + OnFetchExecution(s.Ctx, execution.GetId().GetName(), execution.GetId().GetProject(), execution.GetId().GetDomain()). 
Return(nil, ext.NewNotFoundError("execution not found")) s.MockAdminClient. OnUpdateExecutionMatch(s.Ctx, mock.Anything). @@ -165,7 +165,7 @@ func TestExecutionUpdateFailsWhenAdminClientFails(t *testing.T) { t, /* mockSetup */ func(s *testutils.TestStruct, execution *admin.Execution) { s.FetcherExt. - OnFetchExecution(s.Ctx, execution.Id.Name, execution.Id.Project, execution.Id.Domain). + OnFetchExecution(s.Ctx, execution.GetId().GetName(), execution.GetId().GetProject(), execution.GetId().GetDomain()). Return(execution, nil) s.MockAdminClient. OnUpdateExecutionMatch(s.Ctx, mock.Anything). @@ -200,7 +200,7 @@ func testExecutionUpdate( t, /* mockSetup */ func(s *testutils.TestStruct, execution *admin.Execution) { s.FetcherExt. - OnFetchExecution(s.Ctx, execution.Id.Name, execution.Id.Project, execution.Id.Domain). + OnFetchExecution(s.Ctx, execution.GetId().GetName(), execution.GetId().GetProject(), execution.GetId().GetDomain()). Return(execution, nil) s.MockAdminClient. OnUpdateExecutionMatch(s.Ctx, mock.Anything). 
@@ -230,7 +230,7 @@ func testExecutionUpdateWithMockSetup( setup(&s, execution.UConfig, target) } - args := []string{target.Id.Name} + args := []string{target.GetId().GetName()} err := updateExecutionFunc(s.Ctx, args, s.CmdCtx) if asserter != nil { diff --git a/flytectl/cmd/update/launch_plan.go b/flytectl/cmd/update/launch_plan.go index 466551667e..c847e92e6b 100644 --- a/flytectl/cmd/update/launch_plan.go +++ b/flytectl/cmd/update/launch_plan.go @@ -36,12 +36,12 @@ func updateLPFunc(ctx context.Context, args []string, cmdCtx cmdCore.CommandCont project := config.GetConfig().Project domain := config.GetConfig().Domain if len(args) != 1 { - return fmt.Errorf(clierrors.ErrLPNotPassed) + return fmt.Errorf(clierrors.ErrLPNotPassed) //nolint } name := args[0] version := launchplan.UConfig.Version if len(version) == 0 { - return fmt.Errorf(clierrors.ErrLPVersionNotPassed) + return fmt.Errorf(clierrors.ErrLPVersionNotPassed) //nolint } activate := launchplan.UConfig.Activate @@ -55,7 +55,7 @@ func updateLPFunc(ctx context.Context, args []string, cmdCtx cmdCore.CommandCont deactivate = launchplan.UConfig.Deactivate } if activate == deactivate && deactivate { - return fmt.Errorf(clierrors.ErrInvalidBothStateUpdate) + return fmt.Errorf(clierrors.ErrInvalidBothStateUpdate) //nolint } var newState admin.LaunchPlanState diff --git a/flytectl/cmd/update/launch_plan_meta.go b/flytectl/cmd/update/launch_plan_meta.go index 7b1c93fd85..51b6c6769e 100644 --- a/flytectl/cmd/update/launch_plan_meta.go +++ b/flytectl/cmd/update/launch_plan_meta.go @@ -37,7 +37,7 @@ func getUpdateLPMetaFunc(namedEntityConfig *NamedEntityConfig) func(ctx context. 
project := config.GetConfig().Project domain := config.GetConfig().Domain if len(args) != 1 { - return fmt.Errorf(clierrors.ErrLPNotPassed) + return fmt.Errorf(clierrors.ErrLPNotPassed) //nolint } name := args[0] err := namedEntityConfig.UpdateNamedEntity(ctx, name, project, domain, core.ResourceType_LAUNCH_PLAN, cmdCtx) diff --git a/flytectl/cmd/update/launch_plan_test.go b/flytectl/cmd/update/launch_plan_test.go index 249a810118..4238a205d5 100644 --- a/flytectl/cmd/update/launch_plan_test.go +++ b/flytectl/cmd/update/launch_plan_test.go @@ -28,7 +28,7 @@ func TestLaunchPlanCanBeActivated(t *testing.T) { t, "UpdateLaunchPlan", s.Ctx, mock.MatchedBy( func(r *admin.LaunchPlanUpdateRequest) bool { - return r.State == admin.LaunchPlanState_ACTIVE + return r.GetState() == admin.LaunchPlanState_ACTIVE })) }) } @@ -47,7 +47,7 @@ func TestLaunchPlanCanBeArchived(t *testing.T) { t, "UpdateLaunchPlan", s.Ctx, mock.MatchedBy( func(r *admin.LaunchPlanUpdateRequest) bool { - return r.State == admin.LaunchPlanState_INACTIVE + return r.GetState() == admin.LaunchPlanState_INACTIVE })) }) } @@ -66,7 +66,7 @@ func TestLaunchPlanCanBeDeactivated(t *testing.T) { t, "UpdateLaunchPlan", s.Ctx, mock.MatchedBy( func(r *admin.LaunchPlanUpdateRequest) bool { - return r.State == admin.LaunchPlanState_INACTIVE + return r.GetState() == admin.LaunchPlanState_INACTIVE })) }) } @@ -275,8 +275,8 @@ func testLaunchPlanUpdateWithMockSetup( setup(&s, launchplan.UConfig, target) } - args := []string{target.Id.Name} - launchplan.UConfig.Version = target.Id.Version + args := []string{target.GetId().GetName()} + launchplan.UConfig.Version = target.GetId().GetVersion() err := updateLPFunc(s.Ctx, args, s.CmdCtx) if asserter != nil { diff --git a/flytectl/cmd/update/matchable_cluster_resource_attribute_test.go b/flytectl/cmd/update/matchable_cluster_resource_attribute_test.go index b7288d6dcc..90d4fca9f7 100644 --- a/flytectl/cmd/update/matchable_cluster_resource_attribute_test.go +++ 
b/flytectl/cmd/update/matchable_cluster_resource_attribute_test.go @@ -274,10 +274,10 @@ func TestClusterResourceAttributeUpdateSucceedsWhenAttributesDoNotExist(t *testi t, /* mockSetup */ func(s *testutils.TestStruct, target *admin.WorkflowAttributes) { s.FetcherExt. - OnFetchWorkflowAttributesMatch(s.Ctx, target.Project, target.Domain, target.Workflow, admin.MatchableResource_CLUSTER_RESOURCE). + OnFetchWorkflowAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), target.GetWorkflow(), admin.MatchableResource_CLUSTER_RESOURCE). Return(nil, ext.NewNotFoundError("attribute")) s.UpdaterExt. - OnUpdateWorkflowAttributesMatch(s.Ctx, target.Project, target.Domain, target.Workflow, mock.Anything). + OnUpdateWorkflowAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), target.GetWorkflow(), mock.Anything). Return(nil) }, /* setup */ func(s *testutils.TestStruct, config *clusterresourceattribute.AttrUpdateConfig, target *admin.WorkflowAttributes) { @@ -296,10 +296,10 @@ func TestClusterResourceAttributeUpdateSucceedsWhenAttributesDoNotExist(t *testi t, /* mockSetup */ func(s *testutils.TestStruct, target *admin.ProjectDomainAttributes) { s.FetcherExt. - OnFetchProjectDomainAttributesMatch(s.Ctx, target.Project, target.Domain, admin.MatchableResource_CLUSTER_RESOURCE). + OnFetchProjectDomainAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), admin.MatchableResource_CLUSTER_RESOURCE). Return(nil, ext.NewNotFoundError("attribute")) s.UpdaterExt. - OnUpdateProjectDomainAttributesMatch(s.Ctx, target.Project, target.Domain, mock.Anything). + OnUpdateProjectDomainAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), mock.Anything). 
Return(nil) }, /* setup */ func(s *testutils.TestStruct, config *clusterresourceattribute.AttrUpdateConfig, target *admin.ProjectDomainAttributes) { @@ -318,10 +318,10 @@ func TestClusterResourceAttributeUpdateSucceedsWhenAttributesDoNotExist(t *testi t, /* mockSetup */ func(s *testutils.TestStruct, target *admin.ProjectAttributes) { s.FetcherExt. - OnFetchProjectAttributesMatch(s.Ctx, target.Project, admin.MatchableResource_CLUSTER_RESOURCE). + OnFetchProjectAttributesMatch(s.Ctx, target.GetProject(), admin.MatchableResource_CLUSTER_RESOURCE). Return(nil, ext.NewNotFoundError("attribute")) s.UpdaterExt. - OnUpdateProjectAttributesMatch(s.Ctx, target.Project, mock.Anything). + OnUpdateProjectAttributesMatch(s.Ctx, target.GetProject(), mock.Anything). Return(nil) }, /* setup */ func(s *testutils.TestStruct, config *clusterresourceattribute.AttrUpdateConfig, target *admin.ProjectAttributes) { @@ -342,10 +342,10 @@ func TestClusterResourceAttributeUpdateFailsWhenAdminClientFails(t *testing.T) { t, /* mockSetup */ func(s *testutils.TestStruct, target *admin.WorkflowAttributes) { s.FetcherExt. - OnFetchWorkflowAttributesMatch(s.Ctx, target.Project, target.Domain, target.Workflow, admin.MatchableResource_CLUSTER_RESOURCE). + OnFetchWorkflowAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), target.GetWorkflow(), admin.MatchableResource_CLUSTER_RESOURCE). Return(&admin.WorkflowAttributesGetResponse{Attributes: target}, nil) s.UpdaterExt. - OnUpdateWorkflowAttributesMatch(s.Ctx, target.Project, target.Domain, target.Workflow, mock.Anything). + OnUpdateWorkflowAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), target.GetWorkflow(), mock.Anything). 
Return(fmt.Errorf("network error")) }, /* setup */ func(s *testutils.TestStruct, config *clusterresourceattribute.AttrUpdateConfig, target *admin.WorkflowAttributes) { @@ -363,10 +363,10 @@ func TestClusterResourceAttributeUpdateFailsWhenAdminClientFails(t *testing.T) { t, /* mockSetup */ func(s *testutils.TestStruct, target *admin.ProjectDomainAttributes) { s.FetcherExt. - OnFetchProjectDomainAttributesMatch(s.Ctx, target.Project, target.Domain, admin.MatchableResource_CLUSTER_RESOURCE). + OnFetchProjectDomainAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), admin.MatchableResource_CLUSTER_RESOURCE). Return(&admin.ProjectDomainAttributesGetResponse{Attributes: target}, nil) s.UpdaterExt. - OnUpdateProjectDomainAttributesMatch(s.Ctx, target.Project, target.Domain, mock.Anything). + OnUpdateProjectDomainAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), mock.Anything). Return(fmt.Errorf("network error")) }, /* setup */ func(s *testutils.TestStruct, config *clusterresourceattribute.AttrUpdateConfig, target *admin.ProjectDomainAttributes) { @@ -384,10 +384,10 @@ func TestClusterResourceAttributeUpdateFailsWhenAdminClientFails(t *testing.T) { t, /* mockSetup */ func(s *testutils.TestStruct, target *admin.ProjectAttributes) { s.FetcherExt. - OnFetchProjectAttributesMatch(s.Ctx, target.Project, admin.MatchableResource_CLUSTER_RESOURCE). + OnFetchProjectAttributesMatch(s.Ctx, target.GetProject(), admin.MatchableResource_CLUSTER_RESOURCE). Return(&admin.ProjectAttributesGetResponse{Attributes: target}, nil) s.UpdaterExt. - OnUpdateProjectAttributesMatch(s.Ctx, target.Project, mock.Anything). + OnUpdateProjectAttributesMatch(s.Ctx, target.GetProject(), mock.Anything). 
Return(fmt.Errorf("network error")) }, /* setup */ func(s *testutils.TestStruct, config *clusterresourceattribute.AttrUpdateConfig, target *admin.ProjectAttributes) { @@ -410,10 +410,10 @@ func testWorkflowClusterResourceAttributeUpdate( t, /* mockSetup */ func(s *testutils.TestStruct, target *admin.WorkflowAttributes) { s.FetcherExt. - OnFetchWorkflowAttributesMatch(s.Ctx, target.Project, target.Domain, target.Workflow, admin.MatchableResource_CLUSTER_RESOURCE). + OnFetchWorkflowAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), target.GetWorkflow(), admin.MatchableResource_CLUSTER_RESOURCE). Return(&admin.WorkflowAttributesGetResponse{Attributes: target}, nil) s.UpdaterExt. - OnUpdateWorkflowAttributesMatch(s.Ctx, target.Project, target.Domain, target.Workflow, mock.Anything). + OnUpdateWorkflowAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), target.GetWorkflow(), mock.Anything). Return(nil) }, setup, @@ -479,10 +479,10 @@ func testProjectClusterResourceAttributeUpdate( t, /* mockSetup */ func(s *testutils.TestStruct, target *admin.ProjectAttributes) { s.FetcherExt. - OnFetchProjectAttributesMatch(s.Ctx, target.Project, admin.MatchableResource_CLUSTER_RESOURCE). + OnFetchProjectAttributesMatch(s.Ctx, target.GetProject(), admin.MatchableResource_CLUSTER_RESOURCE). Return(&admin.ProjectAttributesGetResponse{Attributes: target}, nil) s.UpdaterExt. - OnUpdateProjectAttributesMatch(s.Ctx, target.Project, mock.Anything). + OnUpdateProjectAttributesMatch(s.Ctx, target.GetProject(), mock.Anything). Return(nil) }, setup, @@ -546,10 +546,10 @@ func testProjectDomainClusterResourceAttributeUpdate( t, /* mockSetup */ func(s *testutils.TestStruct, target *admin.ProjectDomainAttributes) { s.FetcherExt. - OnFetchProjectDomainAttributesMatch(s.Ctx, target.Project, target.Domain, admin.MatchableResource_CLUSTER_RESOURCE). + OnFetchProjectDomainAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), admin.MatchableResource_CLUSTER_RESOURCE). 
Return(&admin.ProjectDomainAttributesGetResponse{Attributes: target}, nil) s.UpdaterExt. - OnUpdateProjectDomainAttributesMatch(s.Ctx, target.Project, target.Domain, mock.Anything). + OnUpdateProjectDomainAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), mock.Anything). Return(nil) }, setup, diff --git a/flytectl/cmd/update/matchable_execution_cluster_label_test.go b/flytectl/cmd/update/matchable_execution_cluster_label_test.go index 1006234626..ba14dbe535 100644 --- a/flytectl/cmd/update/matchable_execution_cluster_label_test.go +++ b/flytectl/cmd/update/matchable_execution_cluster_label_test.go @@ -274,10 +274,10 @@ func TestExecutionClusterLabelUpdateSucceedsWhenAttributesDoNotExist(t *testing. t, /* mockSetup */ func(s *testutils.TestStruct, target *admin.WorkflowAttributes) { s.FetcherExt. - OnFetchWorkflowAttributesMatch(s.Ctx, target.Project, target.Domain, target.Workflow, admin.MatchableResource_EXECUTION_CLUSTER_LABEL). + OnFetchWorkflowAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), target.GetWorkflow(), admin.MatchableResource_EXECUTION_CLUSTER_LABEL). Return(nil, ext.NewNotFoundError("attribute")) s.UpdaterExt. - OnUpdateWorkflowAttributesMatch(s.Ctx, target.Project, target.Domain, target.Workflow, mock.Anything). + OnUpdateWorkflowAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), target.GetWorkflow(), mock.Anything). Return(nil) }, /* setup */ func(s *testutils.TestStruct, config *executionclusterlabel.AttrUpdateConfig, target *admin.WorkflowAttributes) { @@ -296,10 +296,10 @@ func TestExecutionClusterLabelUpdateSucceedsWhenAttributesDoNotExist(t *testing. t, /* mockSetup */ func(s *testutils.TestStruct, target *admin.ProjectDomainAttributes) { s.FetcherExt. - OnFetchProjectDomainAttributesMatch(s.Ctx, target.Project, target.Domain, admin.MatchableResource_EXECUTION_CLUSTER_LABEL). 
+ OnFetchProjectDomainAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), admin.MatchableResource_EXECUTION_CLUSTER_LABEL). Return(nil, ext.NewNotFoundError("attribute")) s.UpdaterExt. - OnUpdateProjectDomainAttributesMatch(s.Ctx, target.Project, target.Domain, mock.Anything). + OnUpdateProjectDomainAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), mock.Anything). Return(nil) }, /* setup */ func(s *testutils.TestStruct, config *executionclusterlabel.AttrUpdateConfig, target *admin.ProjectDomainAttributes) { @@ -318,10 +318,10 @@ func TestExecutionClusterLabelUpdateSucceedsWhenAttributesDoNotExist(t *testing. t, /* mockSetup */ func(s *testutils.TestStruct, target *admin.ProjectAttributes) { s.FetcherExt. - OnFetchProjectAttributesMatch(s.Ctx, target.Project, admin.MatchableResource_EXECUTION_CLUSTER_LABEL). + OnFetchProjectAttributesMatch(s.Ctx, target.GetProject(), admin.MatchableResource_EXECUTION_CLUSTER_LABEL). Return(nil, ext.NewNotFoundError("attribute")) s.UpdaterExt. - OnUpdateProjectAttributesMatch(s.Ctx, target.Project, mock.Anything). + OnUpdateProjectAttributesMatch(s.Ctx, target.GetProject(), mock.Anything). Return(nil) }, /* setup */ func(s *testutils.TestStruct, config *executionclusterlabel.AttrUpdateConfig, target *admin.ProjectAttributes) { @@ -342,10 +342,10 @@ func TestExecutionClusterLabelUpdateFailsWhenAdminClientFails(t *testing.T) { t, /* mockSetup */ func(s *testutils.TestStruct, target *admin.WorkflowAttributes) { s.FetcherExt. - OnFetchWorkflowAttributesMatch(s.Ctx, target.Project, target.Domain, target.Workflow, admin.MatchableResource_EXECUTION_CLUSTER_LABEL). + OnFetchWorkflowAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), target.GetWorkflow(), admin.MatchableResource_EXECUTION_CLUSTER_LABEL). Return(&admin.WorkflowAttributesGetResponse{Attributes: target}, nil) s.UpdaterExt. - OnUpdateWorkflowAttributesMatch(s.Ctx, target.Project, target.Domain, target.Workflow, mock.Anything). 
+ OnUpdateWorkflowAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), target.GetWorkflow(), mock.Anything). Return(fmt.Errorf("network error")) }, /* setup */ func(s *testutils.TestStruct, config *executionclusterlabel.AttrUpdateConfig, target *admin.WorkflowAttributes) { @@ -363,10 +363,10 @@ func TestExecutionClusterLabelUpdateFailsWhenAdminClientFails(t *testing.T) { t, /* mockSetup */ func(s *testutils.TestStruct, target *admin.ProjectDomainAttributes) { s.FetcherExt. - OnFetchProjectDomainAttributesMatch(s.Ctx, target.Project, target.Domain, admin.MatchableResource_EXECUTION_CLUSTER_LABEL). + OnFetchProjectDomainAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), admin.MatchableResource_EXECUTION_CLUSTER_LABEL). Return(&admin.ProjectDomainAttributesGetResponse{Attributes: target}, nil) s.UpdaterExt. - OnUpdateProjectDomainAttributesMatch(s.Ctx, target.Project, target.Domain, mock.Anything). + OnUpdateProjectDomainAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), mock.Anything). Return(fmt.Errorf("network error")) }, /* setup */ func(s *testutils.TestStruct, config *executionclusterlabel.AttrUpdateConfig, target *admin.ProjectDomainAttributes) { @@ -384,10 +384,10 @@ func TestExecutionClusterLabelUpdateFailsWhenAdminClientFails(t *testing.T) { t, /* mockSetup */ func(s *testutils.TestStruct, target *admin.ProjectAttributes) { s.FetcherExt. - OnFetchProjectAttributesMatch(s.Ctx, target.Project, admin.MatchableResource_EXECUTION_CLUSTER_LABEL). + OnFetchProjectAttributesMatch(s.Ctx, target.GetProject(), admin.MatchableResource_EXECUTION_CLUSTER_LABEL). Return(&admin.ProjectAttributesGetResponse{Attributes: target}, nil) s.UpdaterExt. - OnUpdateProjectAttributesMatch(s.Ctx, target.Project, mock.Anything). + OnUpdateProjectAttributesMatch(s.Ctx, target.GetProject(), mock.Anything). 
Return(fmt.Errorf("network error")) }, /* setup */ func(s *testutils.TestStruct, config *executionclusterlabel.AttrUpdateConfig, target *admin.ProjectAttributes) { @@ -410,10 +410,10 @@ func testWorkflowExecutionClusterLabelUpdate( t, /* mockSetup */ func(s *testutils.TestStruct, target *admin.WorkflowAttributes) { s.FetcherExt. - OnFetchWorkflowAttributesMatch(s.Ctx, target.Project, target.Domain, target.Workflow, admin.MatchableResource_EXECUTION_CLUSTER_LABEL). + OnFetchWorkflowAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), target.GetWorkflow(), admin.MatchableResource_EXECUTION_CLUSTER_LABEL). Return(&admin.WorkflowAttributesGetResponse{Attributes: target}, nil) s.UpdaterExt. - OnUpdateWorkflowAttributesMatch(s.Ctx, target.Project, target.Domain, target.Workflow, mock.Anything). + OnUpdateWorkflowAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), target.GetWorkflow(), mock.Anything). Return(nil) }, setup, @@ -475,10 +475,10 @@ func testProjectExecutionClusterLabelUpdate( t, /* mockSetup */ func(s *testutils.TestStruct, target *admin.ProjectAttributes) { s.FetcherExt. - OnFetchProjectAttributesMatch(s.Ctx, target.Project, admin.MatchableResource_EXECUTION_CLUSTER_LABEL). + OnFetchProjectAttributesMatch(s.Ctx, target.GetProject(), admin.MatchableResource_EXECUTION_CLUSTER_LABEL). Return(&admin.ProjectAttributesGetResponse{Attributes: target}, nil) s.UpdaterExt. - OnUpdateProjectAttributesMatch(s.Ctx, target.Project, mock.Anything). + OnUpdateProjectAttributesMatch(s.Ctx, target.GetProject(), mock.Anything). Return(nil) }, setup, @@ -538,10 +538,10 @@ func testProjectDomainExecutionClusterLabelUpdate( t, /* mockSetup */ func(s *testutils.TestStruct, target *admin.ProjectDomainAttributes) { s.FetcherExt. - OnFetchProjectDomainAttributesMatch(s.Ctx, target.Project, target.Domain, admin.MatchableResource_EXECUTION_CLUSTER_LABEL). 
+ OnFetchProjectDomainAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), admin.MatchableResource_EXECUTION_CLUSTER_LABEL). Return(&admin.ProjectDomainAttributesGetResponse{Attributes: target}, nil) s.UpdaterExt. - OnUpdateProjectDomainAttributesMatch(s.Ctx, target.Project, target.Domain, mock.Anything). + OnUpdateProjectDomainAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), mock.Anything). Return(nil) }, setup, diff --git a/flytectl/cmd/update/matchable_execution_queue_attribute_test.go b/flytectl/cmd/update/matchable_execution_queue_attribute_test.go index e16526faa6..a240dfdd98 100644 --- a/flytectl/cmd/update/matchable_execution_queue_attribute_test.go +++ b/flytectl/cmd/update/matchable_execution_queue_attribute_test.go @@ -274,10 +274,10 @@ func TestExecutionQueueAttributeUpdateSucceedsWhenAttributesDoNotExist(t *testin t, /* mockSetup */ func(s *testutils.TestStruct, target *admin.WorkflowAttributes) { s.FetcherExt. - OnFetchWorkflowAttributesMatch(s.Ctx, target.Project, target.Domain, target.Workflow, admin.MatchableResource_EXECUTION_QUEUE). + OnFetchWorkflowAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), target.GetWorkflow(), admin.MatchableResource_EXECUTION_QUEUE). Return(nil, ext.NewNotFoundError("attribute")) s.UpdaterExt. - OnUpdateWorkflowAttributesMatch(s.Ctx, target.Project, target.Domain, target.Workflow, mock.Anything). + OnUpdateWorkflowAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), target.GetWorkflow(), mock.Anything). Return(nil) }, /* setup */ func(s *testutils.TestStruct, config *executionqueueattribute.AttrUpdateConfig, target *admin.WorkflowAttributes) { @@ -296,10 +296,10 @@ func TestExecutionQueueAttributeUpdateSucceedsWhenAttributesDoNotExist(t *testin t, /* mockSetup */ func(s *testutils.TestStruct, target *admin.ProjectDomainAttributes) { s.FetcherExt. - OnFetchProjectDomainAttributesMatch(s.Ctx, target.Project, target.Domain, admin.MatchableResource_EXECUTION_QUEUE). 
+ OnFetchProjectDomainAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), admin.MatchableResource_EXECUTION_QUEUE). Return(nil, ext.NewNotFoundError("attribute")) s.UpdaterExt. - OnUpdateProjectDomainAttributesMatch(s.Ctx, target.Project, target.Domain, mock.Anything). + OnUpdateProjectDomainAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), mock.Anything). Return(nil) }, /* setup */ func(s *testutils.TestStruct, config *executionqueueattribute.AttrUpdateConfig, target *admin.ProjectDomainAttributes) { @@ -318,10 +318,10 @@ func TestExecutionQueueAttributeUpdateSucceedsWhenAttributesDoNotExist(t *testin t, /* mockSetup */ func(s *testutils.TestStruct, target *admin.ProjectAttributes) { s.FetcherExt. - OnFetchProjectAttributesMatch(s.Ctx, target.Project, admin.MatchableResource_EXECUTION_QUEUE). + OnFetchProjectAttributesMatch(s.Ctx, target.GetProject(), admin.MatchableResource_EXECUTION_QUEUE). Return(nil, ext.NewNotFoundError("attribute")) s.UpdaterExt. - OnUpdateProjectAttributesMatch(s.Ctx, target.Project, mock.Anything). + OnUpdateProjectAttributesMatch(s.Ctx, target.GetProject(), mock.Anything). Return(nil) }, /* setup */ func(s *testutils.TestStruct, config *executionqueueattribute.AttrUpdateConfig, target *admin.ProjectAttributes) { @@ -342,10 +342,10 @@ func TestExecutionQueueAttributeUpdateFailsWhenAdminClientFails(t *testing.T) { t, /* mockSetup */ func(s *testutils.TestStruct, target *admin.WorkflowAttributes) { s.FetcherExt. - OnFetchWorkflowAttributesMatch(s.Ctx, target.Project, target.Domain, target.Workflow, admin.MatchableResource_EXECUTION_QUEUE). + OnFetchWorkflowAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), target.GetWorkflow(), admin.MatchableResource_EXECUTION_QUEUE). Return(&admin.WorkflowAttributesGetResponse{Attributes: target}, nil) s.UpdaterExt. - OnUpdateWorkflowAttributesMatch(s.Ctx, target.Project, target.Domain, target.Workflow, mock.Anything). 
+ OnUpdateWorkflowAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), target.GetWorkflow(), mock.Anything). Return(fmt.Errorf("network error")) }, /* setup */ func(s *testutils.TestStruct, config *executionqueueattribute.AttrUpdateConfig, target *admin.WorkflowAttributes) { @@ -363,10 +363,10 @@ func TestExecutionQueueAttributeUpdateFailsWhenAdminClientFails(t *testing.T) { t, /* mockSetup */ func(s *testutils.TestStruct, target *admin.ProjectDomainAttributes) { s.FetcherExt. - OnFetchProjectDomainAttributesMatch(s.Ctx, target.Project, target.Domain, admin.MatchableResource_EXECUTION_QUEUE). + OnFetchProjectDomainAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), admin.MatchableResource_EXECUTION_QUEUE). Return(&admin.ProjectDomainAttributesGetResponse{Attributes: target}, nil) s.UpdaterExt. - OnUpdateProjectDomainAttributesMatch(s.Ctx, target.Project, target.Domain, mock.Anything). + OnUpdateProjectDomainAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), mock.Anything). Return(fmt.Errorf("network error")) }, /* setup */ func(s *testutils.TestStruct, config *executionqueueattribute.AttrUpdateConfig, target *admin.ProjectDomainAttributes) { @@ -384,10 +384,10 @@ func TestExecutionQueueAttributeUpdateFailsWhenAdminClientFails(t *testing.T) { t, /* mockSetup */ func(s *testutils.TestStruct, target *admin.ProjectAttributes) { s.FetcherExt. - OnFetchProjectAttributesMatch(s.Ctx, target.Project, admin.MatchableResource_EXECUTION_QUEUE). + OnFetchProjectAttributesMatch(s.Ctx, target.GetProject(), admin.MatchableResource_EXECUTION_QUEUE). Return(&admin.ProjectAttributesGetResponse{Attributes: target}, nil) s.UpdaterExt. - OnUpdateProjectAttributesMatch(s.Ctx, target.Project, mock.Anything). + OnUpdateProjectAttributesMatch(s.Ctx, target.GetProject(), mock.Anything). 
Return(fmt.Errorf("network error")) }, /* setup */ func(s *testutils.TestStruct, config *executionqueueattribute.AttrUpdateConfig, target *admin.ProjectAttributes) { @@ -410,10 +410,10 @@ func testWorkflowExecutionQueueAttributeUpdate( t, /* mockSetup */ func(s *testutils.TestStruct, target *admin.WorkflowAttributes) { s.FetcherExt. - OnFetchWorkflowAttributesMatch(s.Ctx, target.Project, target.Domain, target.Workflow, admin.MatchableResource_EXECUTION_QUEUE). + OnFetchWorkflowAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), target.GetWorkflow(), admin.MatchableResource_EXECUTION_QUEUE). Return(&admin.WorkflowAttributesGetResponse{Attributes: target}, nil) s.UpdaterExt. - OnUpdateWorkflowAttributesMatch(s.Ctx, target.Project, target.Domain, target.Workflow, mock.Anything). + OnUpdateWorkflowAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), target.GetWorkflow(), mock.Anything). Return(nil) }, setup, @@ -479,10 +479,10 @@ func testProjectExecutionQueueAttributeUpdate( t, /* mockSetup */ func(s *testutils.TestStruct, target *admin.ProjectAttributes) { s.FetcherExt. - OnFetchProjectAttributesMatch(s.Ctx, target.Project, admin.MatchableResource_EXECUTION_QUEUE). + OnFetchProjectAttributesMatch(s.Ctx, target.GetProject(), admin.MatchableResource_EXECUTION_QUEUE). Return(&admin.ProjectAttributesGetResponse{Attributes: target}, nil) s.UpdaterExt. - OnUpdateProjectAttributesMatch(s.Ctx, target.Project, mock.Anything). + OnUpdateProjectAttributesMatch(s.Ctx, target.GetProject(), mock.Anything). Return(nil) }, setup, @@ -546,10 +546,10 @@ func testProjectDomainExecutionQueueAttributeUpdate( t, /* mockSetup */ func(s *testutils.TestStruct, target *admin.ProjectDomainAttributes) { s.FetcherExt. - OnFetchProjectDomainAttributesMatch(s.Ctx, target.Project, target.Domain, admin.MatchableResource_EXECUTION_QUEUE). + OnFetchProjectDomainAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), admin.MatchableResource_EXECUTION_QUEUE). 
Return(&admin.ProjectDomainAttributesGetResponse{Attributes: target}, nil) s.UpdaterExt. - OnUpdateProjectDomainAttributesMatch(s.Ctx, target.Project, target.Domain, mock.Anything). + OnUpdateProjectDomainAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), mock.Anything). Return(nil) }, setup, diff --git a/flytectl/cmd/update/matchable_plugin_override_test.go b/flytectl/cmd/update/matchable_plugin_override_test.go index 3b0181392b..649619be03 100644 --- a/flytectl/cmd/update/matchable_plugin_override_test.go +++ b/flytectl/cmd/update/matchable_plugin_override_test.go @@ -274,10 +274,10 @@ func TestPluginOverrideUpdateSucceedsWhenAttributesDoNotExist(t *testing.T) { t, /* mockSetup */ func(s *testutils.TestStruct, target *admin.WorkflowAttributes) { s.FetcherExt. - OnFetchWorkflowAttributesMatch(s.Ctx, target.Project, target.Domain, target.Workflow, admin.MatchableResource_PLUGIN_OVERRIDE). + OnFetchWorkflowAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), target.GetWorkflow(), admin.MatchableResource_PLUGIN_OVERRIDE). Return(nil, ext.NewNotFoundError("attribute")) s.UpdaterExt. - OnUpdateWorkflowAttributesMatch(s.Ctx, target.Project, target.Domain, target.Workflow, mock.Anything). + OnUpdateWorkflowAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), target.GetWorkflow(), mock.Anything). Return(nil) }, /* setup */ func(s *testutils.TestStruct, config *pluginoverride.AttrUpdateConfig, target *admin.WorkflowAttributes) { @@ -296,10 +296,10 @@ func TestPluginOverrideUpdateSucceedsWhenAttributesDoNotExist(t *testing.T) { t, /* mockSetup */ func(s *testutils.TestStruct, target *admin.ProjectDomainAttributes) { s.FetcherExt. - OnFetchProjectDomainAttributesMatch(s.Ctx, target.Project, target.Domain, admin.MatchableResource_PLUGIN_OVERRIDE). + OnFetchProjectDomainAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), admin.MatchableResource_PLUGIN_OVERRIDE). Return(nil, ext.NewNotFoundError("attribute")) s.UpdaterExt. 
- OnUpdateProjectDomainAttributesMatch(s.Ctx, target.Project, target.Domain, mock.Anything). + OnUpdateProjectDomainAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), mock.Anything). Return(nil) }, /* setup */ func(s *testutils.TestStruct, config *pluginoverride.AttrUpdateConfig, target *admin.ProjectDomainAttributes) { @@ -318,10 +318,10 @@ func TestPluginOverrideUpdateSucceedsWhenAttributesDoNotExist(t *testing.T) { t, /* mockSetup */ func(s *testutils.TestStruct, target *admin.ProjectAttributes) { s.FetcherExt. - OnFetchProjectAttributesMatch(s.Ctx, target.Project, admin.MatchableResource_PLUGIN_OVERRIDE). + OnFetchProjectAttributesMatch(s.Ctx, target.GetProject(), admin.MatchableResource_PLUGIN_OVERRIDE). Return(nil, ext.NewNotFoundError("attribute")) s.UpdaterExt. - OnUpdateProjectAttributesMatch(s.Ctx, target.Project, mock.Anything). + OnUpdateProjectAttributesMatch(s.Ctx, target.GetProject(), mock.Anything). Return(nil) }, /* setup */ func(s *testutils.TestStruct, config *pluginoverride.AttrUpdateConfig, target *admin.ProjectAttributes) { @@ -342,10 +342,10 @@ func TestPluginOverrideUpdateFailsWhenAdminClientFails(t *testing.T) { t, /* mockSetup */ func(s *testutils.TestStruct, target *admin.WorkflowAttributes) { s.FetcherExt. - OnFetchWorkflowAttributesMatch(s.Ctx, target.Project, target.Domain, target.Workflow, admin.MatchableResource_PLUGIN_OVERRIDE). + OnFetchWorkflowAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), target.GetWorkflow(), admin.MatchableResource_PLUGIN_OVERRIDE). Return(&admin.WorkflowAttributesGetResponse{Attributes: target}, nil) s.UpdaterExt. - OnUpdateWorkflowAttributesMatch(s.Ctx, target.Project, target.Domain, target.Workflow, mock.Anything). + OnUpdateWorkflowAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), target.GetWorkflow(), mock.Anything). 
Return(fmt.Errorf("network error")) }, /* setup */ func(s *testutils.TestStruct, config *pluginoverride.AttrUpdateConfig, target *admin.WorkflowAttributes) { @@ -363,10 +363,10 @@ func TestPluginOverrideUpdateFailsWhenAdminClientFails(t *testing.T) { t, /* mockSetup */ func(s *testutils.TestStruct, target *admin.ProjectDomainAttributes) { s.FetcherExt. - OnFetchProjectDomainAttributesMatch(s.Ctx, target.Project, target.Domain, admin.MatchableResource_PLUGIN_OVERRIDE). + OnFetchProjectDomainAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), admin.MatchableResource_PLUGIN_OVERRIDE). Return(&admin.ProjectDomainAttributesGetResponse{Attributes: target}, nil) s.UpdaterExt. - OnUpdateProjectDomainAttributesMatch(s.Ctx, target.Project, target.Domain, mock.Anything). + OnUpdateProjectDomainAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), mock.Anything). Return(fmt.Errorf("network error")) }, /* setup */ func(s *testutils.TestStruct, config *pluginoverride.AttrUpdateConfig, target *admin.ProjectDomainAttributes) { @@ -384,10 +384,10 @@ func TestPluginOverrideUpdateFailsWhenAdminClientFails(t *testing.T) { t, /* mockSetup */ func(s *testutils.TestStruct, target *admin.ProjectAttributes) { s.FetcherExt. - OnFetchProjectAttributesMatch(s.Ctx, target.Project, admin.MatchableResource_PLUGIN_OVERRIDE). + OnFetchProjectAttributesMatch(s.Ctx, target.GetProject(), admin.MatchableResource_PLUGIN_OVERRIDE). Return(&admin.ProjectAttributesGetResponse{Attributes: target}, nil) s.UpdaterExt. - OnUpdateProjectAttributesMatch(s.Ctx, target.Project, mock.Anything). + OnUpdateProjectAttributesMatch(s.Ctx, target.GetProject(), mock.Anything). Return(fmt.Errorf("network error")) }, /* setup */ func(s *testutils.TestStruct, config *pluginoverride.AttrUpdateConfig, target *admin.ProjectAttributes) { @@ -410,10 +410,10 @@ func testWorkflowPluginOverrideUpdate( t, /* mockSetup */ func(s *testutils.TestStruct, target *admin.WorkflowAttributes) { s.FetcherExt. 
- OnFetchWorkflowAttributesMatch(s.Ctx, target.Project, target.Domain, target.Workflow, admin.MatchableResource_PLUGIN_OVERRIDE). + OnFetchWorkflowAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), target.GetWorkflow(), admin.MatchableResource_PLUGIN_OVERRIDE). Return(&admin.WorkflowAttributesGetResponse{Attributes: target}, nil) s.UpdaterExt. - OnUpdateWorkflowAttributesMatch(s.Ctx, target.Project, target.Domain, target.Workflow, mock.Anything). + OnUpdateWorkflowAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), target.GetWorkflow(), mock.Anything). Return(nil) }, setup, @@ -485,10 +485,10 @@ func testProjectPluginOverrideUpdate( t, /* mockSetup */ func(s *testutils.TestStruct, target *admin.ProjectAttributes) { s.FetcherExt. - OnFetchProjectAttributesMatch(s.Ctx, target.Project, admin.MatchableResource_PLUGIN_OVERRIDE). + OnFetchProjectAttributesMatch(s.Ctx, target.GetProject(), admin.MatchableResource_PLUGIN_OVERRIDE). Return(&admin.ProjectAttributesGetResponse{Attributes: target}, nil) s.UpdaterExt. - OnUpdateProjectAttributesMatch(s.Ctx, target.Project, mock.Anything). + OnUpdateProjectAttributesMatch(s.Ctx, target.GetProject(), mock.Anything). Return(nil) }, setup, @@ -558,10 +558,10 @@ func testProjectDomainPluginOverrideUpdate( t, /* mockSetup */ func(s *testutils.TestStruct, target *admin.ProjectDomainAttributes) { s.FetcherExt. - OnFetchProjectDomainAttributesMatch(s.Ctx, target.Project, target.Domain, admin.MatchableResource_PLUGIN_OVERRIDE). + OnFetchProjectDomainAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), admin.MatchableResource_PLUGIN_OVERRIDE). Return(&admin.ProjectDomainAttributesGetResponse{Attributes: target}, nil) s.UpdaterExt. - OnUpdateProjectDomainAttributesMatch(s.Ctx, target.Project, target.Domain, mock.Anything). + OnUpdateProjectDomainAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), mock.Anything). 
Return(nil) }, setup, diff --git a/flytectl/cmd/update/matchable_task_resource_attribute_test.go b/flytectl/cmd/update/matchable_task_resource_attribute_test.go index 42c2c3ab4f..2fffe2b5ec 100644 --- a/flytectl/cmd/update/matchable_task_resource_attribute_test.go +++ b/flytectl/cmd/update/matchable_task_resource_attribute_test.go @@ -274,10 +274,10 @@ func TestTaskResourceAttributeUpdateSucceedsWhenAttributesDoNotExist(t *testing. t, /* mockSetup */ func(s *testutils.TestStruct, target *admin.WorkflowAttributes) { s.FetcherExt. - OnFetchWorkflowAttributesMatch(s.Ctx, target.Project, target.Domain, target.Workflow, admin.MatchableResource_TASK_RESOURCE). + OnFetchWorkflowAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), target.GetWorkflow(), admin.MatchableResource_TASK_RESOURCE). Return(nil, ext.NewNotFoundError("attribute")) s.UpdaterExt. - OnUpdateWorkflowAttributesMatch(s.Ctx, target.Project, target.Domain, target.Workflow, mock.Anything). + OnUpdateWorkflowAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), target.GetWorkflow(), mock.Anything). Return(nil) }, /* setup */ func(s *testutils.TestStruct, config *taskresourceattribute.AttrUpdateConfig, target *admin.WorkflowAttributes) { @@ -296,10 +296,10 @@ func TestTaskResourceAttributeUpdateSucceedsWhenAttributesDoNotExist(t *testing. t, /* mockSetup */ func(s *testutils.TestStruct, target *admin.ProjectDomainAttributes) { s.FetcherExt. - OnFetchProjectDomainAttributesMatch(s.Ctx, target.Project, target.Domain, admin.MatchableResource_TASK_RESOURCE). + OnFetchProjectDomainAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), admin.MatchableResource_TASK_RESOURCE). Return(nil, ext.NewNotFoundError("attribute")) s.UpdaterExt. - OnUpdateProjectDomainAttributesMatch(s.Ctx, target.Project, target.Domain, mock.Anything). + OnUpdateProjectDomainAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), mock.Anything). 
Return(nil) }, /* setup */ func(s *testutils.TestStruct, config *taskresourceattribute.AttrUpdateConfig, target *admin.ProjectDomainAttributes) { @@ -318,10 +318,10 @@ func TestTaskResourceAttributeUpdateSucceedsWhenAttributesDoNotExist(t *testing. t, /* mockSetup */ func(s *testutils.TestStruct, target *admin.ProjectAttributes) { s.FetcherExt. - OnFetchProjectAttributesMatch(s.Ctx, target.Project, admin.MatchableResource_TASK_RESOURCE). + OnFetchProjectAttributesMatch(s.Ctx, target.GetProject(), admin.MatchableResource_TASK_RESOURCE). Return(nil, ext.NewNotFoundError("attribute")) s.UpdaterExt. - OnUpdateProjectAttributesMatch(s.Ctx, target.Project, mock.Anything). + OnUpdateProjectAttributesMatch(s.Ctx, target.GetProject(), mock.Anything). Return(nil) }, /* setup */ func(s *testutils.TestStruct, config *taskresourceattribute.AttrUpdateConfig, target *admin.ProjectAttributes) { @@ -342,10 +342,10 @@ func TestTaskResourceAttributeUpdateFailsWhenAdminClientFails(t *testing.T) { t, /* mockSetup */ func(s *testutils.TestStruct, target *admin.WorkflowAttributes) { s.FetcherExt. - OnFetchWorkflowAttributesMatch(s.Ctx, target.Project, target.Domain, target.Workflow, admin.MatchableResource_TASK_RESOURCE). + OnFetchWorkflowAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), target.GetWorkflow(), admin.MatchableResource_TASK_RESOURCE). Return(&admin.WorkflowAttributesGetResponse{Attributes: target}, nil) s.UpdaterExt. - OnUpdateWorkflowAttributesMatch(s.Ctx, target.Project, target.Domain, target.Workflow, mock.Anything). + OnUpdateWorkflowAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), target.GetWorkflow(), mock.Anything). 
Return(fmt.Errorf("network error")) }, /* setup */ func(s *testutils.TestStruct, config *taskresourceattribute.AttrUpdateConfig, target *admin.WorkflowAttributes) { @@ -363,10 +363,10 @@ func TestTaskResourceAttributeUpdateFailsWhenAdminClientFails(t *testing.T) { t, /* mockSetup */ func(s *testutils.TestStruct, target *admin.ProjectDomainAttributes) { s.FetcherExt. - OnFetchProjectDomainAttributesMatch(s.Ctx, target.Project, target.Domain, admin.MatchableResource_TASK_RESOURCE). + OnFetchProjectDomainAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), admin.MatchableResource_TASK_RESOURCE). Return(&admin.ProjectDomainAttributesGetResponse{Attributes: target}, nil) s.UpdaterExt. - OnUpdateProjectDomainAttributesMatch(s.Ctx, target.Project, target.Domain, mock.Anything). + OnUpdateProjectDomainAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), mock.Anything). Return(fmt.Errorf("network error")) }, /* setup */ func(s *testutils.TestStruct, config *taskresourceattribute.AttrUpdateConfig, target *admin.ProjectDomainAttributes) { @@ -384,10 +384,10 @@ func TestTaskResourceAttributeUpdateFailsWhenAdminClientFails(t *testing.T) { t, /* mockSetup */ func(s *testutils.TestStruct, target *admin.ProjectAttributes) { s.FetcherExt. - OnFetchProjectAttributesMatch(s.Ctx, target.Project, admin.MatchableResource_TASK_RESOURCE). + OnFetchProjectAttributesMatch(s.Ctx, target.GetProject(), admin.MatchableResource_TASK_RESOURCE). Return(&admin.ProjectAttributesGetResponse{Attributes: target}, nil) s.UpdaterExt. - OnUpdateProjectAttributesMatch(s.Ctx, target.Project, mock.Anything). + OnUpdateProjectAttributesMatch(s.Ctx, target.GetProject(), mock.Anything). 
Return(fmt.Errorf("network error")) }, /* setup */ func(s *testutils.TestStruct, config *taskresourceattribute.AttrUpdateConfig, target *admin.ProjectAttributes) { @@ -410,10 +410,10 @@ func testWorkflowTaskResourceAttributeUpdate( t, /* mockSetup */ func(s *testutils.TestStruct, target *admin.WorkflowAttributes) { s.FetcherExt. - OnFetchWorkflowAttributesMatch(s.Ctx, target.Project, target.Domain, target.Workflow, admin.MatchableResource_TASK_RESOURCE). + OnFetchWorkflowAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), target.GetWorkflow(), admin.MatchableResource_TASK_RESOURCE). Return(&admin.WorkflowAttributesGetResponse{Attributes: target}, nil) s.UpdaterExt. - OnUpdateWorkflowAttributesMatch(s.Ctx, target.Project, target.Domain, target.Workflow, mock.Anything). + OnUpdateWorkflowAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), target.GetWorkflow(), mock.Anything). Return(nil) }, setup, @@ -478,10 +478,10 @@ func testProjectTaskResourceAttributeUpdate( t, /* mockSetup */ func(s *testutils.TestStruct, target *admin.ProjectAttributes) { s.FetcherExt. - OnFetchProjectAttributesMatch(s.Ctx, target.Project, admin.MatchableResource_TASK_RESOURCE). + OnFetchProjectAttributesMatch(s.Ctx, target.GetProject(), admin.MatchableResource_TASK_RESOURCE). Return(&admin.ProjectAttributesGetResponse{Attributes: target}, nil) s.UpdaterExt. - OnUpdateProjectAttributesMatch(s.Ctx, target.Project, mock.Anything). + OnUpdateProjectAttributesMatch(s.Ctx, target.GetProject(), mock.Anything). Return(nil) }, setup, @@ -544,10 +544,10 @@ func testProjectDomainTaskResourceAttributeUpdate( t, /* mockSetup */ func(s *testutils.TestStruct, target *admin.ProjectDomainAttributes) { s.FetcherExt. - OnFetchProjectDomainAttributesMatch(s.Ctx, target.Project, target.Domain, admin.MatchableResource_TASK_RESOURCE). + OnFetchProjectDomainAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), admin.MatchableResource_TASK_RESOURCE). 
Return(&admin.ProjectDomainAttributesGetResponse{Attributes: target}, nil) s.UpdaterExt. - OnUpdateProjectDomainAttributesMatch(s.Ctx, target.Project, target.Domain, mock.Anything). + OnUpdateProjectDomainAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), mock.Anything). Return(nil) }, setup, diff --git a/flytectl/cmd/update/matchable_workflow_execution_config_test.go b/flytectl/cmd/update/matchable_workflow_execution_config_test.go index c75b2fd58f..e026a91a9b 100644 --- a/flytectl/cmd/update/matchable_workflow_execution_config_test.go +++ b/flytectl/cmd/update/matchable_workflow_execution_config_test.go @@ -274,10 +274,10 @@ func TestWorkflowExecutionConfigUpdateSucceedsWhenAttributesDoNotExist(t *testin t, /* mockSetup */ func(s *testutils.TestStruct, target *admin.WorkflowAttributes) { s.FetcherExt. - OnFetchWorkflowAttributesMatch(s.Ctx, target.Project, target.Domain, target.Workflow, admin.MatchableResource_WORKFLOW_EXECUTION_CONFIG). + OnFetchWorkflowAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), target.GetWorkflow(), admin.MatchableResource_WORKFLOW_EXECUTION_CONFIG). Return(nil, ext.NewNotFoundError("attribute")) s.UpdaterExt. - OnUpdateWorkflowAttributesMatch(s.Ctx, target.Project, target.Domain, target.Workflow, mock.Anything). + OnUpdateWorkflowAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), target.GetWorkflow(), mock.Anything). Return(nil) }, /* setup */ func(s *testutils.TestStruct, config *workflowexecutionconfig.AttrUpdateConfig, target *admin.WorkflowAttributes) { @@ -296,10 +296,10 @@ func TestWorkflowExecutionConfigUpdateSucceedsWhenAttributesDoNotExist(t *testin t, /* mockSetup */ func(s *testutils.TestStruct, target *admin.ProjectDomainAttributes) { s.FetcherExt. - OnFetchProjectDomainAttributesMatch(s.Ctx, target.Project, target.Domain, admin.MatchableResource_WORKFLOW_EXECUTION_CONFIG). 
+ OnFetchProjectDomainAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), admin.MatchableResource_WORKFLOW_EXECUTION_CONFIG). Return(nil, ext.NewNotFoundError("attribute")) s.UpdaterExt. - OnUpdateProjectDomainAttributesMatch(s.Ctx, target.Project, target.Domain, mock.Anything). + OnUpdateProjectDomainAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), mock.Anything). Return(nil) }, /* setup */ func(s *testutils.TestStruct, config *workflowexecutionconfig.AttrUpdateConfig, target *admin.ProjectDomainAttributes) { @@ -318,10 +318,10 @@ func TestWorkflowExecutionConfigUpdateSucceedsWhenAttributesDoNotExist(t *testin t, /* mockSetup */ func(s *testutils.TestStruct, target *admin.ProjectAttributes) { s.FetcherExt. - OnFetchProjectAttributesMatch(s.Ctx, target.Project, admin.MatchableResource_WORKFLOW_EXECUTION_CONFIG). + OnFetchProjectAttributesMatch(s.Ctx, target.GetProject(), admin.MatchableResource_WORKFLOW_EXECUTION_CONFIG). Return(nil, ext.NewNotFoundError("attribute")) s.UpdaterExt. - OnUpdateProjectAttributesMatch(s.Ctx, target.Project, mock.Anything). + OnUpdateProjectAttributesMatch(s.Ctx, target.GetProject(), mock.Anything). Return(nil) }, /* setup */ func(s *testutils.TestStruct, config *workflowexecutionconfig.AttrUpdateConfig, target *admin.ProjectAttributes) { @@ -342,10 +342,10 @@ func TestWorkflowExecutionConfigUpdateFailsWhenAdminClientFails(t *testing.T) { t, /* mockSetup */ func(s *testutils.TestStruct, target *admin.WorkflowAttributes) { s.FetcherExt. - OnFetchWorkflowAttributesMatch(s.Ctx, target.Project, target.Domain, target.Workflow, admin.MatchableResource_WORKFLOW_EXECUTION_CONFIG). + OnFetchWorkflowAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), target.GetWorkflow(), admin.MatchableResource_WORKFLOW_EXECUTION_CONFIG). Return(&admin.WorkflowAttributesGetResponse{Attributes: target}, nil) s.UpdaterExt. - OnUpdateWorkflowAttributesMatch(s.Ctx, target.Project, target.Domain, target.Workflow, mock.Anything). 
+ OnUpdateWorkflowAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), target.GetWorkflow(), mock.Anything). Return(fmt.Errorf("network error")) }, /* setup */ func(s *testutils.TestStruct, config *workflowexecutionconfig.AttrUpdateConfig, target *admin.WorkflowAttributes) { @@ -363,10 +363,10 @@ func TestWorkflowExecutionConfigUpdateFailsWhenAdminClientFails(t *testing.T) { t, /* mockSetup */ func(s *testutils.TestStruct, target *admin.ProjectDomainAttributes) { s.FetcherExt. - OnFetchProjectDomainAttributesMatch(s.Ctx, target.Project, target.Domain, admin.MatchableResource_WORKFLOW_EXECUTION_CONFIG). + OnFetchProjectDomainAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), admin.MatchableResource_WORKFLOW_EXECUTION_CONFIG). Return(&admin.ProjectDomainAttributesGetResponse{Attributes: target}, nil) s.UpdaterExt. - OnUpdateProjectDomainAttributesMatch(s.Ctx, target.Project, target.Domain, mock.Anything). + OnUpdateProjectDomainAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), mock.Anything). Return(fmt.Errorf("network error")) }, /* setup */ func(s *testutils.TestStruct, config *workflowexecutionconfig.AttrUpdateConfig, target *admin.ProjectDomainAttributes) { @@ -384,10 +384,10 @@ func TestWorkflowExecutionConfigUpdateFailsWhenAdminClientFails(t *testing.T) { t, /* mockSetup */ func(s *testutils.TestStruct, target *admin.ProjectAttributes) { s.FetcherExt. - OnFetchProjectAttributesMatch(s.Ctx, target.Project, admin.MatchableResource_WORKFLOW_EXECUTION_CONFIG). + OnFetchProjectAttributesMatch(s.Ctx, target.GetProject(), admin.MatchableResource_WORKFLOW_EXECUTION_CONFIG). Return(&admin.ProjectAttributesGetResponse{Attributes: target}, nil) s.UpdaterExt. - OnUpdateProjectAttributesMatch(s.Ctx, target.Project, mock.Anything). + OnUpdateProjectAttributesMatch(s.Ctx, target.GetProject(), mock.Anything). 
Return(fmt.Errorf("network error")) }, /* setup */ func(s *testutils.TestStruct, config *workflowexecutionconfig.AttrUpdateConfig, target *admin.ProjectAttributes) { @@ -410,10 +410,10 @@ func testWorkflowExecutionConfigUpdate( t, /* mockSetup */ func(s *testutils.TestStruct, target *admin.WorkflowAttributes) { s.FetcherExt. - OnFetchWorkflowAttributesMatch(s.Ctx, target.Project, target.Domain, target.Workflow, admin.MatchableResource_WORKFLOW_EXECUTION_CONFIG). + OnFetchWorkflowAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), target.GetWorkflow(), admin.MatchableResource_WORKFLOW_EXECUTION_CONFIG). Return(&admin.WorkflowAttributesGetResponse{Attributes: target}, nil) s.UpdaterExt. - OnUpdateWorkflowAttributesMatch(s.Ctx, target.Project, target.Domain, target.Workflow, mock.Anything). + OnUpdateWorkflowAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), target.GetWorkflow(), mock.Anything). Return(nil) }, setup, @@ -482,10 +482,10 @@ func testProjectWorkflowExecutionConfigUpdate( t, /* mockSetup */ func(s *testutils.TestStruct, target *admin.ProjectAttributes) { s.FetcherExt. - OnFetchProjectAttributesMatch(s.Ctx, target.Project, admin.MatchableResource_WORKFLOW_EXECUTION_CONFIG). + OnFetchProjectAttributesMatch(s.Ctx, target.GetProject(), admin.MatchableResource_WORKFLOW_EXECUTION_CONFIG). Return(&admin.ProjectAttributesGetResponse{Attributes: target}, nil) s.UpdaterExt. - OnUpdateProjectAttributesMatch(s.Ctx, target.Project, mock.Anything). + OnUpdateProjectAttributesMatch(s.Ctx, target.GetProject(), mock.Anything). Return(nil) }, setup, @@ -552,10 +552,10 @@ func testProjectDomainWorkflowExecutionConfigUpdate( t, /* mockSetup */ func(s *testutils.TestStruct, target *admin.ProjectDomainAttributes) { s.FetcherExt. - OnFetchProjectDomainAttributesMatch(s.Ctx, target.Project, target.Domain, admin.MatchableResource_WORKFLOW_EXECUTION_CONFIG). 
+ OnFetchProjectDomainAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), admin.MatchableResource_WORKFLOW_EXECUTION_CONFIG). Return(&admin.ProjectDomainAttributesGetResponse{Attributes: target}, nil) s.UpdaterExt. - OnUpdateProjectDomainAttributesMatch(s.Ctx, target.Project, target.Domain, mock.Anything). + OnUpdateProjectDomainAttributesMatch(s.Ctx, target.GetProject(), target.GetDomain(), mock.Anything). Return(nil) }, setup, diff --git a/flytectl/cmd/update/named_entity.go b/flytectl/cmd/update/named_entity.go index 61bbffc705..5e99775e14 100644 --- a/flytectl/cmd/update/named_entity.go +++ b/flytectl/cmd/update/named_entity.go @@ -28,7 +28,7 @@ type NamedEntityConfig struct { func (cfg NamedEntityConfig) UpdateNamedEntity(ctx context.Context, name string, project string, domain string, rsType core.ResourceType, cmdCtx cmdCore.CommandContext) error { if cfg.Activate && cfg.Archive { - return fmt.Errorf(clierrors.ErrInvalidStateUpdate) + return fmt.Errorf(clierrors.ErrInvalidStateUpdate) //nolint } id := &admin.NamedEntityIdentifier{ @@ -45,7 +45,7 @@ func (cfg NamedEntityConfig) UpdateNamedEntity(ctx context.Context, name string, return fmt.Errorf("update metadata for %s: could not fetch metadata: %w", name, err) } - oldMetadata, newMetadata := composeNamedMetadataEdits(cfg, namedEntity.Metadata) + oldMetadata, newMetadata := composeNamedMetadataEdits(cfg, namedEntity.GetMetadata()) patch, err := DiffAsYaml(diffPathBefore, diffPathAfter, oldMetadata, newMetadata) if err != nil { panic(err) @@ -86,15 +86,15 @@ func composeNamedMetadataEdits(config NamedEntityConfig, current *admin.NamedEnt case config.Activate && config.Archive: panic("cannot both activate and archive") case config.Activate: - old.State = current.State + old.State = current.GetState() new.State = admin.NamedEntityState_NAMED_ENTITY_ACTIVE case config.Archive: - old.State = current.State + old.State = current.GetState() new.State = admin.NamedEntityState_NAMED_ENTITY_ARCHIVED } if 
config.Description != "" { - old.Description = current.Description + old.Description = current.GetDescription() new.Description = config.Description } diff --git a/flytectl/cmd/update/named_entity_test.go b/flytectl/cmd/update/named_entity_test.go index 4d4e5b2783..1f8e28a525 100644 --- a/flytectl/cmd/update/named_entity_test.go +++ b/flytectl/cmd/update/named_entity_test.go @@ -59,7 +59,7 @@ func testNamedEntityUpdateWithMockSetup( updateMetadataFactory := getUpdateMetadataFactory(resourceType) - args := []string{target.Id.Name} + args := []string{target.GetId().GetName()} err := updateMetadataFactory(config)(s.Ctx, args, s.CmdCtx) if asserter != nil { diff --git a/flytectl/cmd/update/project.go b/flytectl/cmd/update/project.go index f6196e35ff..3a779df476 100644 --- a/flytectl/cmd/update/project.go +++ b/flytectl/cmd/update/project.go @@ -103,13 +103,13 @@ func updateProjectsFunc(ctx context.Context, args []string, cmdCtx cmdCore.Comma return err } - if edits.Id == "" { - return fmt.Errorf(clierrors.ErrProjectNotPassed) + if edits.GetId() == "" { + return fmt.Errorf(clierrors.ErrProjectNotPassed) //nolint } - currentProject, err := cmdCtx.AdminFetcherExt().GetProjectByID(ctx, edits.Id) + currentProject, err := cmdCtx.AdminFetcherExt().GetProjectByID(ctx, edits.GetId()) if err != nil { - return fmt.Errorf("update project %s: could not fetch project: %w", edits.Id, err) + return fmt.Errorf("update project %s: could not fetch project: %w", edits.GetId(), err) } // We do not compare currentProject against edits directly, because edits does not @@ -139,10 +139,10 @@ func updateProjectsFunc(ctx context.Context, args []string, cmdCtx cmdCore.Comma _, err = cmdCtx.AdminClient().UpdateProject(ctx, edits) if err != nil { - return fmt.Errorf(clierrors.ErrFailedProjectUpdate, edits.Id, err) + return fmt.Errorf(clierrors.ErrFailedProjectUpdate, edits.GetId(), err) } - fmt.Printf("project %s updated\n", edits.Id) + fmt.Printf("project %s updated\n", edits.GetId()) return nil } 
@@ -152,14 +152,14 @@ func updateProjectsFunc(ctx context.Context, args []string, cmdCtx cmdCore.Comma func copyProjectWithEdits(target *admin.Project, edited *admin.Project, projectConfig *project.ConfigProject) *admin.Project { copy := *target - if edited.Name != "" { - copy.Name = edited.Name + if edited.GetName() != "" { + copy.Name = edited.GetName() } - if edited.Description != "" { - copy.Description = edited.Description + if edited.GetDescription() != "" { + copy.Description = edited.GetDescription() } if len(edited.GetLabels().GetValues()) != 0 { - copy.Labels = edited.Labels + copy.Labels = edited.GetLabels() } // `edited` comes with `admin.Project_ACTIVE` state by default @@ -182,9 +182,9 @@ func copyProjectWithEdits(target *admin.Project, edited *admin.Project, projectC // YAML file input, and the flags for `ConfigProject` would also // be good. if projectConfig.Archive || projectConfig.Activate { - copy.State = edited.State + copy.State = edited.GetState() } else { - edited.State = copy.State + edited.State = copy.GetState() } return &copy } diff --git a/flytectl/cmd/update/project_test.go b/flytectl/cmd/update/project_test.go index 0ca41c4309..2451163942 100644 --- a/flytectl/cmd/update/project_test.go +++ b/flytectl/cmd/update/project_test.go @@ -27,7 +27,7 @@ func TestProjectCanBeActivated(t *testing.T) { t, "UpdateProject", s.Ctx, mock.MatchedBy( func(r *admin.Project) bool { - return r.State == admin.Project_ACTIVE + return r.GetState() == admin.Project_ACTIVE })) }) } @@ -46,7 +46,7 @@ func TestProjectCanBeArchived(t *testing.T) { t, "UpdateProject", s.Ctx, mock.MatchedBy( func(r *admin.Project) bool { - return r.State == admin.Project_ARCHIVED + return r.GetState() == admin.Project_ARCHIVED })) }) } @@ -145,7 +145,7 @@ func TestProjectUpdateFailsWhenProjectDoesNotExist(t *testing.T) { t, /* mockSetup */ func(s *testutils.TestStruct, project *admin.Project) { s.FetcherExt. - OnGetProjectByID(s.Ctx, project.Id). 
+ OnGetProjectByID(s.Ctx, project.GetId()). Return(nil, ext.NewNotFoundError("project not found")) s.MockAdminClient. OnUpdateProjectMatch(s.Ctx, mock.Anything). @@ -164,7 +164,7 @@ func TestProjectUpdateFailsWhenAdminClientFails(t *testing.T) { t, /* mockSetup */ func(s *testutils.TestStruct, project *admin.Project) { s.FetcherExt. - OnGetProjectByID(s.Ctx, project.Id). + OnGetProjectByID(s.Ctx, project.GetId()). Return(project, nil) s.MockAdminClient. OnUpdateProjectMatch(s.Ctx, mock.Anything). @@ -209,7 +209,7 @@ func TestProjectUpdateDoesNotActivateArchivedProject(t *testing.T) { t, "UpdateProject", s.Ctx, mock.MatchedBy( func(r *admin.Project) bool { - return r.State == admin.Project_ARCHIVED + return r.GetState() == admin.Project_ARCHIVED })) }) } @@ -223,7 +223,7 @@ func testProjectUpdate( t, /* mockSetup */ func(s *testutils.TestStruct, project *admin.Project) { s.FetcherExt. - OnGetProjectByID(s.Ctx, project.Id). + OnGetProjectByID(s.Ctx, project.GetId()). Return(project, nil) s.MockAdminClient. OnUpdateProjectMatch(s.Ctx, mock.Anything). 
@@ -249,7 +249,7 @@ func testProjectUpdateWithMockSetup( } project.DefaultProjectConfig = &project.ConfigProject{ - ID: target.Id, + ID: target.GetId(), } config.GetConfig().Project = "" config.GetConfig().Domain = "" diff --git a/flytectl/cmd/update/task_meta.go b/flytectl/cmd/update/task_meta.go index 3783c2dcfc..8e68778c99 100644 --- a/flytectl/cmd/update/task_meta.go +++ b/flytectl/cmd/update/task_meta.go @@ -37,7 +37,7 @@ func getUpdateTaskFunc(namedEntityConfig *NamedEntityConfig) func(ctx context.Co project := config.GetConfig().Project domain := config.GetConfig().Domain if len(args) != 1 { - return fmt.Errorf(clierrors.ErrTaskNotPassed) + return fmt.Errorf(clierrors.ErrTaskNotPassed) //nolint } name := args[0] diff --git a/flytectl/cmd/update/workflow_meta.go b/flytectl/cmd/update/workflow_meta.go index e2a416e0aa..c6604bfb86 100644 --- a/flytectl/cmd/update/workflow_meta.go +++ b/flytectl/cmd/update/workflow_meta.go @@ -37,7 +37,7 @@ func getUpdateWorkflowFunc(namedEntityConfig *NamedEntityConfig) func(ctx contex project := config.GetConfig().Project domain := config.GetConfig().Domain if len(args) != 1 { - return fmt.Errorf(clierrors.ErrWorkflowNotPassed) + return fmt.Errorf(clierrors.ErrWorkflowNotPassed) //nolint } name := args[0] err := namedEntityConfig.UpdateNamedEntity(ctx, name, project, domain, core.ResourceType_WORKFLOW, cmdCtx) diff --git a/flytectl/cmd/version/version.go b/flytectl/cmd/version/version.go index 88da1330a2..67a28f3531 100644 --- a/flytectl/cmd/version/version.go +++ b/flytectl/cmd/version/version.go @@ -103,9 +103,9 @@ func getControlPlaneVersion(ctx context.Context, cmdCtx cmdCore.CommandContext) } // Print FlyteAdmin if err := printVersion(versionOutput{ - Build: v.ControlPlaneVersion.Build, - BuildTime: v.ControlPlaneVersion.BuildTime, - Version: v.ControlPlaneVersion.Version, + Build: v.GetControlPlaneVersion().GetBuild(), + BuildTime: v.GetControlPlaneVersion().GetBuildTime(), + Version: 
v.GetControlPlaneVersion().GetVersion(), App: controlPlanAppName, }); err != nil { return fmt.Errorf("Unable to get the control plane version. Please try again: %v", err) diff --git a/flytectl/docs/docs-requirements.in b/flytectl/docs/docs-requirements.in new file mode 100644 index 0000000000..9d2c7c0a35 --- /dev/null +++ b/flytectl/docs/docs-requirements.in @@ -0,0 +1,8 @@ +sphinx +sphinx-panels +sphinx-prompt +furo +sphinx-copybutton +sphinx-reredirects +sphinx-fontawesome +sphinxcontrib-youtube diff --git a/flytectl/docs/docs-requirements.txt b/flytectl/docs/docs-requirements.txt new file mode 100644 index 0000000000..2f17c12754 --- /dev/null +++ b/flytectl/docs/docs-requirements.txt @@ -0,0 +1,87 @@ +# This file was autogenerated by uv via the following command: +# uv pip compile docs-requirements.in -o docs-requirements.txt +alabaster==1.0.0 + # via sphinx +babel==2.16.0 + # via sphinx +beautifulsoup4==4.12.3 + # via furo +certifi==2024.8.30 + # via + # requests + # sphinx-prompt +charset-normalizer==3.4.0 + # via requests +docutils==0.21.2 + # via + # sphinx + # sphinx-panels + # sphinx-prompt +furo==2024.8.6 + # via -r docs-requirements.in +idna==3.10 + # via + # requests + # sphinx-prompt +imagesize==1.4.1 + # via sphinx +jinja2==3.1.4 + # via sphinx +markupsafe==3.0.2 + # via jinja2 +packaging==24.1 + # via sphinx +pygments==2.18.0 + # via + # furo + # sphinx + # sphinx-prompt +requests==2.32.3 + # via + # sphinx + # sphinxcontrib-youtube +snowballstemmer==2.2.0 + # via sphinx +soupsieve==2.6 + # via beautifulsoup4 +sphinx==8.1.3 + # via + # -r docs-requirements.in + # furo + # sphinx-basic-ng + # sphinx-copybutton + # sphinx-fontawesome + # sphinx-panels + # sphinx-prompt + # sphinx-reredirects + # sphinxcontrib-youtube +sphinx-basic-ng==1.0.0b2 + # via furo +sphinx-copybutton==0.5.2 + # via -r docs-requirements.in +sphinx-fontawesome==0.0.6 + # via -r docs-requirements.in +sphinx-panels==0.4.1 + # via -r docs-requirements.in +sphinx-prompt==1.9.0 + # via 
-r docs-requirements.in +sphinx-reredirects==0.1.5 + # via -r docs-requirements.in +sphinxcontrib-applehelp==2.0.0 + # via sphinx +sphinxcontrib-devhelp==2.0.0 + # via sphinx +sphinxcontrib-htmlhelp==2.1.0 + # via sphinx +sphinxcontrib-jsmath==1.0.1 + # via sphinx +sphinxcontrib-qthelp==2.0.0 + # via sphinx +sphinxcontrib-serializinghtml==2.0.0 + # via sphinx +sphinxcontrib-youtube==1.4.1 + # via -r docs-requirements.in +urllib3==2.2.3 + # via + # requests + # sphinx-prompt diff --git a/flytectl/docs/source/gen/flytectl.rst b/flytectl/docs/source/gen/flytectl.rst index 9109b234bd..b42c57d922 100644 --- a/flytectl/docs/source/gen/flytectl.rst +++ b/flytectl/docs/source/gen/flytectl.rst @@ -34,10 +34,12 @@ Options --admin.insecure Use insecure connection. --admin.insecureSkipVerify InsecureSkipVerify controls whether a client verifies the server's certificate chain and host name. Caution : shouldn't be use for production usecases' --admin.maxBackoffDelay string Max delay for grpc backoff (default "8s") + --admin.maxMessageSizeBytes int The max size in bytes for incoming gRPC messages --admin.maxRetries int Max number of gRPC retries (default 4) --admin.perRetryTimeout string gRPC per retry timeout (default "15s") --admin.pkceConfig.refreshTime string grace period from the token expiry after which it would refresh the token. (default "5m0s") --admin.pkceConfig.timeout string Amount of time the browser session would be active for authentication from client app. (default "2m0s") + --admin.proxyCommand strings Command for external proxy-authorization token generation --admin.scopes strings List of scopes to request --admin.tokenRefreshWindow string Max duration between token refresh attempt and token expiry. (default "0s") --admin.tokenUrl string OPTIONAL: Your IdP's token endpoint. It'll be discovered from flyte admin's OAuth Metadata endpoint if not provided. 
@@ -59,10 +61,17 @@ Options --files.sourceUploadPath string Deprecated: Update flyte admin to avoid having to configure storage access from flytectl. --files.version string Version of the entity to be registered with flyte which are un-versioned after serialization. -h, --help help for flytectl + -i, --interactive Set this flag to use an interactive CLI --logger.formatter.type string Sets logging format type. (default "json") --logger.level int Sets the minimum logging level. (default 3) --logger.mute Mutes all logs regardless of severity. Intended for benchmarks/tests only. --logger.show-source Includes source code location in logs. + --otel.file.filename string Filename to store exported telemetry traces (default "/tmp/trace.txt") + --otel.jaeger.endpoint string Endpoint for the jaeger telemetry trace ingestor (default "http://localhost:14268/api/traces") + --otel.otlpgrpc.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4317") + --otel.otlphttp.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4318/v1/traces") + --otel.sampler.parentSampler string Sets the parent sampler to use for the tracer (default "always") + --otel.type string Sets the type of exporter to configure [noop/file/jaeger/otlpgrpc/otlphttp]. (default "noop") -o, --output string Specifies the output type - supported formats [TABLE JSON YAML DOT DOTURL]. NOTE: dot, doturl are only supported for Workflow (default "TABLE") -p, --project string Specifies the Flyte project. --storage.cache.max_size_mbs int Maximum size of the cache where the Blob store data is cached in-memory. 
If not specified or set to 0, cache is not used diff --git a/flytectl/docs/source/gen/flytectl_compile.rst b/flytectl/docs/source/gen/flytectl_compile.rst index 6ce685df5b..0df8a2c70a 100644 --- a/flytectl/docs/source/gen/flytectl_compile.rst +++ b/flytectl/docs/source/gen/flytectl_compile.rst @@ -60,10 +60,12 @@ Options inherited from parent commands --admin.insecure Use insecure connection. --admin.insecureSkipVerify InsecureSkipVerify controls whether a client verifies the server's certificate chain and host name. Caution : shouldn't be use for production usecases' --admin.maxBackoffDelay string Max delay for grpc backoff (default "8s") + --admin.maxMessageSizeBytes int The max size in bytes for incoming gRPC messages --admin.maxRetries int Max number of gRPC retries (default 4) --admin.perRetryTimeout string gRPC per retry timeout (default "15s") --admin.pkceConfig.refreshTime string grace period from the token expiry after which it would refresh the token. (default "5m0s") --admin.pkceConfig.timeout string Amount of time the browser session would be active for authentication from client app. (default "2m0s") + --admin.proxyCommand strings Command for external proxy-authorization token generation --admin.scopes strings List of scopes to request --admin.tokenRefreshWindow string Max duration between token refresh attempt and token expiry. (default "0s") --admin.tokenUrl string OPTIONAL: Your IdP's token endpoint. It'll be discovered from flyte admin's OAuth Metadata endpoint if not provided. @@ -84,10 +86,17 @@ Options inherited from parent commands --files.outputLocationPrefix string Custom output location prefix for offloaded types (files/schemas). --files.sourceUploadPath string Deprecated: Update flyte admin to avoid having to configure storage access from flytectl. --files.version string Version of the entity to be registered with flyte which are un-versioned after serialization. 
+ -i, --interactive Set this flag to use an interactive CLI --logger.formatter.type string Sets logging format type. (default "json") --logger.level int Sets the minimum logging level. (default 3) --logger.mute Mutes all logs regardless of severity. Intended for benchmarks/tests only. --logger.show-source Includes source code location in logs. + --otel.file.filename string Filename to store exported telemetry traces (default "/tmp/trace.txt") + --otel.jaeger.endpoint string Endpoint for the jaeger telemetry trace ingestor (default "http://localhost:14268/api/traces") + --otel.otlpgrpc.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4317") + --otel.otlphttp.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4318/v1/traces") + --otel.sampler.parentSampler string Sets the parent sampler to use for the tracer (default "always") + --otel.type string Sets the type of exporter to configure [noop/file/jaeger/otlpgrpc/otlphttp]. (default "noop") -o, --output string Specifies the output type - supported formats [TABLE JSON YAML DOT DOTURL]. NOTE: dot, doturl are only supported for Workflow (default "TABLE") -p, --project string Specifies the Flyte project. --storage.cache.max_size_mbs int Maximum size of the cache where the Blob store data is cached in-memory. If not specified or set to 0, cache is not used diff --git a/flytectl/docs/source/gen/flytectl_completion.rst b/flytectl/docs/source/gen/flytectl_completion.rst index 5671d3453b..01d8a2e800 100644 --- a/flytectl/docs/source/gen/flytectl_completion.rst +++ b/flytectl/docs/source/gen/flytectl_completion.rst @@ -107,10 +107,12 @@ Options inherited from parent commands --admin.insecure Use insecure connection. --admin.insecureSkipVerify InsecureSkipVerify controls whether a client verifies the server's certificate chain and host name. 
Caution : shouldn't be use for production usecases' --admin.maxBackoffDelay string Max delay for grpc backoff (default "8s") + --admin.maxMessageSizeBytes int The max size in bytes for incoming gRPC messages --admin.maxRetries int Max number of gRPC retries (default 4) --admin.perRetryTimeout string gRPC per retry timeout (default "15s") --admin.pkceConfig.refreshTime string grace period from the token expiry after which it would refresh the token. (default "5m0s") --admin.pkceConfig.timeout string Amount of time the browser session would be active for authentication from client app. (default "2m0s") + --admin.proxyCommand strings Command for external proxy-authorization token generation --admin.scopes strings List of scopes to request --admin.tokenRefreshWindow string Max duration between token refresh attempt and token expiry. (default "0s") --admin.tokenUrl string OPTIONAL: Your IdP's token endpoint. It'll be discovered from flyte admin's OAuth Metadata endpoint if not provided. @@ -131,10 +133,17 @@ Options inherited from parent commands --files.outputLocationPrefix string Custom output location prefix for offloaded types (files/schemas). --files.sourceUploadPath string Deprecated: Update flyte admin to avoid having to configure storage access from flytectl. --files.version string Version of the entity to be registered with flyte which are un-versioned after serialization. + -i, --interactive Set this flag to use an interactive CLI --logger.formatter.type string Sets logging format type. (default "json") --logger.level int Sets the minimum logging level. (default 3) --logger.mute Mutes all logs regardless of severity. Intended for benchmarks/tests only. --logger.show-source Includes source code location in logs. 
+ --otel.file.filename string Filename to store exported telemetry traces (default "/tmp/trace.txt") + --otel.jaeger.endpoint string Endpoint for the jaeger telemetry trace ingestor (default "http://localhost:14268/api/traces") + --otel.otlpgrpc.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4317") + --otel.otlphttp.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4318/v1/traces") + --otel.sampler.parentSampler string Sets the parent sampler to use for the tracer (default "always") + --otel.type string Sets the type of exporter to configure [noop/file/jaeger/otlpgrpc/otlphttp]. (default "noop") -o, --output string Specifies the output type - supported formats [TABLE JSON YAML DOT DOTURL]. NOTE: dot, doturl are only supported for Workflow (default "TABLE") -p, --project string Specifies the Flyte project. --storage.cache.max_size_mbs int Maximum size of the cache where the Blob store data is cached in-memory. If not specified or set to 0, cache is not used diff --git a/flytectl/docs/source/gen/flytectl_config.rst b/flytectl/docs/source/gen/flytectl_config.rst index be0ef53651..ba559e4fdf 100644 --- a/flytectl/docs/source/gen/flytectl_config.rst +++ b/flytectl/docs/source/gen/flytectl_config.rst @@ -44,10 +44,12 @@ Options inherited from parent commands --admin.insecure Use insecure connection. --admin.insecureSkipVerify InsecureSkipVerify controls whether a client verifies the server's certificate chain and host name. Caution : shouldn't be use for production usecases' --admin.maxBackoffDelay string Max delay for grpc backoff (default "8s") + --admin.maxMessageSizeBytes int The max size in bytes for incoming gRPC messages --admin.maxRetries int Max number of gRPC retries (default 4) --admin.perRetryTimeout string gRPC per retry timeout (default "15s") --admin.pkceConfig.refreshTime string grace period from the token expiry after which it would refresh the token. 
(default "5m0s") --admin.pkceConfig.timeout string Amount of time the browser session would be active for authentication from client app. (default "2m0s") + --admin.proxyCommand strings Command for external proxy-authorization token generation --admin.scopes strings List of scopes to request --admin.tokenRefreshWindow string Max duration between token refresh attempt and token expiry. (default "0s") --admin.tokenUrl string OPTIONAL: Your IdP's token endpoint. It'll be discovered from flyte admin's OAuth Metadata endpoint if not provided. @@ -68,10 +70,17 @@ Options inherited from parent commands --files.outputLocationPrefix string Custom output location prefix for offloaded types (files/schemas). --files.sourceUploadPath string Deprecated: Update flyte admin to avoid having to configure storage access from flytectl. --files.version string Version of the entity to be registered with flyte which are un-versioned after serialization. + -i, --interactive Set this flag to use an interactive CLI --logger.formatter.type string Sets logging format type. (default "json") --logger.level int Sets the minimum logging level. (default 3) --logger.mute Mutes all logs regardless of severity. Intended for benchmarks/tests only. --logger.show-source Includes source code location in logs. + --otel.file.filename string Filename to store exported telemetry traces (default "/tmp/trace.txt") + --otel.jaeger.endpoint string Endpoint for the jaeger telemetry trace ingestor (default "http://localhost:14268/api/traces") + --otel.otlpgrpc.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4317") + --otel.otlphttp.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4318/v1/traces") + --otel.sampler.parentSampler string Sets the parent sampler to use for the tracer (default "always") + --otel.type string Sets the type of exporter to configure [noop/file/jaeger/otlpgrpc/otlphttp]. 
(default "noop") -o, --output string Specifies the output type - supported formats [TABLE JSON YAML DOT DOTURL]. NOTE: dot, doturl are only supported for Workflow (default "TABLE") -p, --project string Specifies the Flyte project. --storage.cache.max_size_mbs int Maximum size of the cache where the Blob store data is cached in-memory. If not specified or set to 0, cache is not used diff --git a/flytectl/docs/source/gen/flytectl_config_discover.rst b/flytectl/docs/source/gen/flytectl_config_discover.rst index 6727e00da8..d09fbaf241 100644 --- a/flytectl/docs/source/gen/flytectl_config_discover.rst +++ b/flytectl/docs/source/gen/flytectl_config_discover.rst @@ -45,10 +45,12 @@ Options inherited from parent commands --admin.insecure Use insecure connection. --admin.insecureSkipVerify InsecureSkipVerify controls whether a client verifies the server's certificate chain and host name. Caution : shouldn't be use for production usecases' --admin.maxBackoffDelay string Max delay for grpc backoff (default "8s") + --admin.maxMessageSizeBytes int The max size in bytes for incoming gRPC messages --admin.maxRetries int Max number of gRPC retries (default 4) --admin.perRetryTimeout string gRPC per retry timeout (default "15s") --admin.pkceConfig.refreshTime string grace period from the token expiry after which it would refresh the token. (default "5m0s") --admin.pkceConfig.timeout string Amount of time the browser session would be active for authentication from client app. (default "2m0s") + --admin.proxyCommand strings Command for external proxy-authorization token generation --admin.scopes strings List of scopes to request --admin.tokenRefreshWindow string Max duration between token refresh attempt and token expiry. (default "0s") --admin.tokenUrl string OPTIONAL: Your IdP's token endpoint. It'll be discovered from flyte admin's OAuth Metadata endpoint if not provided. 
@@ -71,10 +73,17 @@ Options inherited from parent commands --files.outputLocationPrefix string Custom output location prefix for offloaded types (files/schemas). --files.sourceUploadPath string Deprecated: Update flyte admin to avoid having to configure storage access from flytectl. --files.version string Version of the entity to be registered with flyte which are un-versioned after serialization. + -i, --interactive Set this flag to use an interactive CLI --logger.formatter.type string Sets logging format type. (default "json") --logger.level int Sets the minimum logging level. (default 3) --logger.mute Mutes all logs regardless of severity. Intended for benchmarks/tests only. --logger.show-source Includes source code location in logs. + --otel.file.filename string Filename to store exported telemetry traces (default "/tmp/trace.txt") + --otel.jaeger.endpoint string Endpoint for the jaeger telemetry trace ingestor (default "http://localhost:14268/api/traces") + --otel.otlpgrpc.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4317") + --otel.otlphttp.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4318/v1/traces") + --otel.sampler.parentSampler string Sets the parent sampler to use for the tracer (default "always") + --otel.type string Sets the type of exporter to configure [noop/file/jaeger/otlpgrpc/otlphttp]. (default "noop") -o, --output string Specifies the output type - supported formats [TABLE JSON YAML DOT DOTURL]. NOTE: dot, doturl are only supported for Workflow (default "TABLE") -p, --project string Specifies the Flyte project. --storage.cache.max_size_mbs int Maximum size of the cache where the Blob store data is cached in-memory. 
If not specified or set to 0, cache is not used diff --git a/flytectl/docs/source/gen/flytectl_config_docs.rst b/flytectl/docs/source/gen/flytectl_config_docs.rst index 06d0969e83..30f0bfe1a8 100644 --- a/flytectl/docs/source/gen/flytectl_config_docs.rst +++ b/flytectl/docs/source/gen/flytectl_config_docs.rst @@ -45,10 +45,12 @@ Options inherited from parent commands --admin.insecure Use insecure connection. --admin.insecureSkipVerify InsecureSkipVerify controls whether a client verifies the server's certificate chain and host name. Caution : shouldn't be use for production usecases' --admin.maxBackoffDelay string Max delay for grpc backoff (default "8s") + --admin.maxMessageSizeBytes int The max size in bytes for incoming gRPC messages --admin.maxRetries int Max number of gRPC retries (default 4) --admin.perRetryTimeout string gRPC per retry timeout (default "15s") --admin.pkceConfig.refreshTime string grace period from the token expiry after which it would refresh the token. (default "5m0s") --admin.pkceConfig.timeout string Amount of time the browser session would be active for authentication from client app. (default "2m0s") + --admin.proxyCommand strings Command for external proxy-authorization token generation --admin.scopes strings List of scopes to request --admin.tokenRefreshWindow string Max duration between token refresh attempt and token expiry. (default "0s") --admin.tokenUrl string OPTIONAL: Your IdP's token endpoint. It'll be discovered from flyte admin's OAuth Metadata endpoint if not provided. @@ -71,10 +73,17 @@ Options inherited from parent commands --files.outputLocationPrefix string Custom output location prefix for offloaded types (files/schemas). --files.sourceUploadPath string Deprecated: Update flyte admin to avoid having to configure storage access from flytectl. --files.version string Version of the entity to be registered with flyte which are un-versioned after serialization. 
+ -i, --interactive Set this flag to use an interactive CLI --logger.formatter.type string Sets logging format type. (default "json") --logger.level int Sets the minimum logging level. (default 3) --logger.mute Mutes all logs regardless of severity. Intended for benchmarks/tests only. --logger.show-source Includes source code location in logs. + --otel.file.filename string Filename to store exported telemetry traces (default "/tmp/trace.txt") + --otel.jaeger.endpoint string Endpoint for the jaeger telemetry trace ingestor (default "http://localhost:14268/api/traces") + --otel.otlpgrpc.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4317") + --otel.otlphttp.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4318/v1/traces") + --otel.sampler.parentSampler string Sets the parent sampler to use for the tracer (default "always") + --otel.type string Sets the type of exporter to configure [noop/file/jaeger/otlpgrpc/otlphttp]. (default "noop") -o, --output string Specifies the output type - supported formats [TABLE JSON YAML DOT DOTURL]. NOTE: dot, doturl are only supported for Workflow (default "TABLE") -p, --project string Specifies the Flyte project. --storage.cache.max_size_mbs int Maximum size of the cache where the Blob store data is cached in-memory. If not specified or set to 0, cache is not used diff --git a/flytectl/docs/source/gen/flytectl_config_init.rst b/flytectl/docs/source/gen/flytectl_config_init.rst index ea2a964d67..27b43bbe52 100644 --- a/flytectl/docs/source/gen/flytectl_config_init.rst +++ b/flytectl/docs/source/gen/flytectl_config_init.rst @@ -81,10 +81,12 @@ Options inherited from parent commands --admin.insecure Use insecure connection. --admin.insecureSkipVerify InsecureSkipVerify controls whether a client verifies the server's certificate chain and host name. 
Caution : shouldn't be use for production usecases' --admin.maxBackoffDelay string Max delay for grpc backoff (default "8s") + --admin.maxMessageSizeBytes int The max size in bytes for incoming gRPC messages --admin.maxRetries int Max number of gRPC retries (default 4) --admin.perRetryTimeout string gRPC per retry timeout (default "15s") --admin.pkceConfig.refreshTime string grace period from the token expiry after which it would refresh the token. (default "5m0s") --admin.pkceConfig.timeout string Amount of time the browser session would be active for authentication from client app. (default "2m0s") + --admin.proxyCommand strings Command for external proxy-authorization token generation --admin.scopes strings List of scopes to request --admin.tokenRefreshWindow string Max duration between token refresh attempt and token expiry. (default "0s") --admin.tokenUrl string OPTIONAL: Your IdP's token endpoint. It'll be discovered from flyte admin's OAuth Metadata endpoint if not provided. @@ -107,10 +109,17 @@ Options inherited from parent commands --files.outputLocationPrefix string Custom output location prefix for offloaded types (files/schemas). --files.sourceUploadPath string Deprecated: Update flyte admin to avoid having to configure storage access from flytectl. --files.version string Version of the entity to be registered with flyte which are un-versioned after serialization. + -i, --interactive Set this flag to use an interactive CLI --logger.formatter.type string Sets logging format type. (default "json") --logger.level int Sets the minimum logging level. (default 3) --logger.mute Mutes all logs regardless of severity. Intended for benchmarks/tests only. --logger.show-source Includes source code location in logs. 
+ --otel.file.filename string Filename to store exported telemetry traces (default "/tmp/trace.txt") + --otel.jaeger.endpoint string Endpoint for the jaeger telemetry trace ingestor (default "http://localhost:14268/api/traces") + --otel.otlpgrpc.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4317") + --otel.otlphttp.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4318/v1/traces") + --otel.sampler.parentSampler string Sets the parent sampler to use for the tracer (default "always") + --otel.type string Sets the type of exporter to configure [noop/file/jaeger/otlpgrpc/otlphttp]. (default "noop") -o, --output string Specifies the output type - supported formats [TABLE JSON YAML DOT DOTURL]. NOTE: dot, doturl are only supported for Workflow (default "TABLE") -p, --project string Specifies the Flyte project. --storage.cache.max_size_mbs int Maximum size of the cache where the Blob store data is cached in-memory. If not specified or set to 0, cache is not used diff --git a/flytectl/docs/source/gen/flytectl_config_validate.rst b/flytectl/docs/source/gen/flytectl_config_validate.rst index 41a5511b11..d0ed7d1b1a 100644 --- a/flytectl/docs/source/gen/flytectl_config_validate.rst +++ b/flytectl/docs/source/gen/flytectl_config_validate.rst @@ -47,10 +47,12 @@ Options inherited from parent commands --admin.insecure Use insecure connection. --admin.insecureSkipVerify InsecureSkipVerify controls whether a client verifies the server's certificate chain and host name. 
Caution : shouldn't be use for production usecases' --admin.maxBackoffDelay string Max delay for grpc backoff (default "8s") + --admin.maxMessageSizeBytes int The max size in bytes for incoming gRPC messages --admin.maxRetries int Max number of gRPC retries (default 4) --admin.perRetryTimeout string gRPC per retry timeout (default "15s") --admin.pkceConfig.refreshTime string grace period from the token expiry after which it would refresh the token. (default "5m0s") --admin.pkceConfig.timeout string Amount of time the browser session would be active for authentication from client app. (default "2m0s") + --admin.proxyCommand strings Command for external proxy-authorization token generation --admin.scopes strings List of scopes to request --admin.tokenRefreshWindow string Max duration between token refresh attempt and token expiry. (default "0s") --admin.tokenUrl string OPTIONAL: Your IdP's token endpoint. It'll be discovered from flyte admin's OAuth Metadata endpoint if not provided. @@ -73,10 +75,17 @@ Options inherited from parent commands --files.outputLocationPrefix string Custom output location prefix for offloaded types (files/schemas). --files.sourceUploadPath string Deprecated: Update flyte admin to avoid having to configure storage access from flytectl. --files.version string Version of the entity to be registered with flyte which are un-versioned after serialization. + -i, --interactive Set this flag to use an interactive CLI --logger.formatter.type string Sets logging format type. (default "json") --logger.level int Sets the minimum logging level. (default 3) --logger.mute Mutes all logs regardless of severity. Intended for benchmarks/tests only. --logger.show-source Includes source code location in logs. 
+ --otel.file.filename string Filename to store exported telemetry traces (default "/tmp/trace.txt") + --otel.jaeger.endpoint string Endpoint for the jaeger telemetry trace ingestor (default "http://localhost:14268/api/traces") + --otel.otlpgrpc.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4317") + --otel.otlphttp.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4318/v1/traces") + --otel.sampler.parentSampler string Sets the parent sampler to use for the tracer (default "always") + --otel.type string Sets the type of exporter to configure [noop/file/jaeger/otlpgrpc/otlphttp]. (default "noop") -o, --output string Specifies the output type - supported formats [TABLE JSON YAML DOT DOTURL]. NOTE: dot, doturl are only supported for Workflow (default "TABLE") -p, --project string Specifies the Flyte project. --storage.cache.max_size_mbs int Maximum size of the cache where the Blob store data is cached in-memory. If not specified or set to 0, cache is not used diff --git a/flytectl/docs/source/gen/flytectl_create.rst b/flytectl/docs/source/gen/flytectl_create.rst index 8827dc20e9..6dc452ec4b 100644 --- a/flytectl/docs/source/gen/flytectl_create.rst +++ b/flytectl/docs/source/gen/flytectl_create.rst @@ -46,10 +46,12 @@ Options inherited from parent commands --admin.insecure Use insecure connection. --admin.insecureSkipVerify InsecureSkipVerify controls whether a client verifies the server's certificate chain and host name. Caution : shouldn't be use for production usecases' --admin.maxBackoffDelay string Max delay for grpc backoff (default "8s") + --admin.maxMessageSizeBytes int The max size in bytes for incoming gRPC messages --admin.maxRetries int Max number of gRPC retries (default 4) --admin.perRetryTimeout string gRPC per retry timeout (default "15s") --admin.pkceConfig.refreshTime string grace period from the token expiry after which it would refresh the token. 
(default "5m0s") --admin.pkceConfig.timeout string Amount of time the browser session would be active for authentication from client app. (default "2m0s") + --admin.proxyCommand strings Command for external proxy-authorization token generation --admin.scopes strings List of scopes to request --admin.tokenRefreshWindow string Max duration between token refresh attempt and token expiry. (default "0s") --admin.tokenUrl string OPTIONAL: Your IdP's token endpoint. It'll be discovered from flyte admin's OAuth Metadata endpoint if not provided. @@ -70,10 +72,17 @@ Options inherited from parent commands --files.outputLocationPrefix string Custom output location prefix for offloaded types (files/schemas). --files.sourceUploadPath string Deprecated: Update flyte admin to avoid having to configure storage access from flytectl. --files.version string Version of the entity to be registered with flyte which are un-versioned after serialization. + -i, --interactive Set this flag to use an interactive CLI --logger.formatter.type string Sets logging format type. (default "json") --logger.level int Sets the minimum logging level. (default 3) --logger.mute Mutes all logs regardless of severity. Intended for benchmarks/tests only. --logger.show-source Includes source code location in logs. + --otel.file.filename string Filename to store exported telemetry traces (default "/tmp/trace.txt") + --otel.jaeger.endpoint string Endpoint for the jaeger telemetry trace ingestor (default "http://localhost:14268/api/traces") + --otel.otlpgrpc.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4317") + --otel.otlphttp.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4318/v1/traces") + --otel.sampler.parentSampler string Sets the parent sampler to use for the tracer (default "always") + --otel.type string Sets the type of exporter to configure [noop/file/jaeger/otlpgrpc/otlphttp]. 
(default "noop") -o, --output string Specifies the output type - supported formats [TABLE JSON YAML DOT DOTURL]. NOTE: dot, doturl are only supported for Workflow (default "TABLE") -p, --project string Specifies the Flyte project. --storage.cache.max_size_mbs int Maximum size of the cache where the Blob store data is cached in-memory. If not specified or set to 0, cache is not used diff --git a/flytectl/docs/source/gen/flytectl_create_execution.rst b/flytectl/docs/source/gen/flytectl_create_execution.rst index a51529dcbd..91c2ab15d4 100644 --- a/flytectl/docs/source/gen/flytectl_create_execution.rst +++ b/flytectl/docs/source/gen/flytectl_create_execution.rst @@ -75,20 +75,40 @@ The generated spec file can be modified to change the envs values, as shown belo task: core.control_flow.merge_sort.merge version: "v2" -4. Run the execution by passing the generated YAML file. +4. [Optional] Update the TargetExecutionCluster, if needed. +The generated spec file can be modified to change the TargetExecutionCluster values, as shown below: + +.. code-block:: yaml + + iamRoleARN: "" + inputs: + sorted_list1: + - 0 + sorted_list2: + - 0 + envs: + foo: bar + kubeServiceAcct: "" + targetDomain: "" + targetProject: "" + targetExecutionCluster: "" + task: core.control_flow.merge_sort.merge + version: "v2" + +5. Run the execution by passing the generated YAML file. The file can then be passed through the command line. It is worth noting that the source's and target's project and domain can be different. :: flytectl create execution --execFile execution_spec.yaml -p flytesnacks -d staging --targetProject flytesnacks -5. To relaunch an execution, pass the current execution ID as follows: +6. To relaunch an execution, pass the current execution ID as follows: :: flytectl create execution --relaunch ffb31066a0f8b4d52b77 -p flytesnacks -d development -6. To recover an execution, i.e., recreate it from the last known failure point for previously-run workflow execution, run: +7. 
To recover an execution, i.e., recreate it from the last known failure point for previously-run workflow execution, run: :: @@ -96,7 +116,7 @@ It is worth noting that the source's and target's project and domain can be diff See :ref:`ref_flyteidl.admin.ExecutionRecoverRequest` for more details. -7. You can create executions idempotently by naming them. This is also a way to *name* an execution for discovery. Note, +8. You can create executions idempotently by naming them. This is also a way to *name* an execution for discovery. Note, an execution id has to be unique within a project domain. So if the *name* matches an existing execution an already exists exceptioj will be raised. @@ -104,7 +124,7 @@ will be raised. flytectl create execution --recover ffb31066a0f8b4d52b77 -p flytesnacks -d development custom_name -8. Generic/Struct/Dataclass/JSON types are supported for execution in a similar manner. +9. Generic/Struct/Dataclass/JSON types are supported for execution in a similar manner. The following is an example of how generic data can be specified while creating the execution. :: @@ -124,7 +144,7 @@ The generated file would look similar to this. Here, empty values have been dump task: core.type_system.custom_objects.add version: v3 -9. Modified file with struct data populated for 'x' and 'y' parameters for the task "core.type_system.custom_objects.add": +10. Modified file with struct data populated for 'x' and 'y' parameters for the task "core.type_system.custom_objects.add": :: @@ -148,7 +168,7 @@ The generated file would look similar to this. Here, empty values have been dump task: core.type_system.custom_objects.add version: v3 -10. If you have configured a plugin that implements github.com/flyteorg/flyteadmin/pkg/workflowengine/interfaces/WorkflowExecutor +11. 
If you have configured a plugin that implements github.com/flyteorg/flyteadmin/pkg/workflowengine/interfaces/WorkflowExecutor that supports cluster pools, then when creating a new execution, you can assign it to a specific cluster pool: :: @@ -165,20 +185,21 @@ Options :: - --clusterPool string specify which cluster pool to assign execution to. - --dryRun execute command without making any modifications. - --execFile string file for the execution params. If not specified defaults to <_name>.execution_spec.yaml - -h, --help help for execution - --iamRoleARN string iam role ARN AuthRole for launching execution. - --kubeServiceAcct string kubernetes service account AuthRole for launching execution. - --overwriteCache skip cached results when performing execution, causing all outputs to be re-calculated and stored data to be overwritten. Does not work for recovered executions. - --recover string execution id to be recreated from the last known failure point. - --relaunch string execution id to be relaunched. - --targetDomain string project where execution needs to be created. If not specified configured domain would be used. - --targetProject string project where execution needs to be created. If not specified configured project would be used. - --task string - --version string specify version of execution workflow/task. - --workflow string + --clusterPool string specify which cluster pool to assign execution to. + --dryRun execute command without making any modifications. + --execFile string file for the execution params. If not specified defaults to <_name>.execution_spec.yaml + -h, --help help for execution + --iamRoleARN string iam role ARN AuthRole for launching execution. + --kubeServiceAcct string kubernetes service account AuthRole for launching execution. + --overwriteCache skip cached results when performing execution, causing all outputs to be re-calculated and stored data to be overwritten. Does not work for recovered executions. 
+ --recover string execution id to be recreated from the last known failure point. + --relaunch string execution id to be relaunched. + --targetDomain string domain where execution needs to be created. If not specified configured domain would be used. + --targetExecutionCluster string cluster where execution needs to be created. If not specified the default would be used. + --targetProject string project where execution needs to be created. If not specified configured project would be used. + --task string + --version string specify version of execution workflow/task. + --workflow string Options inherited from parent commands ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -203,10 +224,12 @@ Options inherited from parent commands --admin.insecure Use insecure connection. --admin.insecureSkipVerify InsecureSkipVerify controls whether a client verifies the server's certificate chain and host name. Caution : shouldn't be use for production usecases' --admin.maxBackoffDelay string Max delay for grpc backoff (default "8s") + --admin.maxMessageSizeBytes int The max size in bytes for incoming gRPC messages --admin.maxRetries int Max number of gRPC retries (default 4) --admin.perRetryTimeout string gRPC per retry timeout (default "15s") --admin.pkceConfig.refreshTime string grace period from the token expiry after which it would refresh the token. (default "5m0s") --admin.pkceConfig.timeout string Amount of time the browser session would be active for authentication from client app. (default "2m0s") + --admin.proxyCommand strings Command for external proxy-authorization token generation --admin.scopes strings List of scopes to request --admin.tokenRefreshWindow string Max duration between token refresh attempt and token expiry. (default "0s") --admin.tokenUrl string OPTIONAL: Your IdP's token endpoint. It'll be discovered from flyte admin's OAuth Metadata endpoint if not provided.
@@ -227,10 +250,17 @@ Options inherited from parent commands --files.outputLocationPrefix string Custom output location prefix for offloaded types (files/schemas). --files.sourceUploadPath string Deprecated: Update flyte admin to avoid having to configure storage access from flytectl. --files.version string Version of the entity to be registered with flyte which are un-versioned after serialization. + -i, --interactive Set this flag to use an interactive CLI --logger.formatter.type string Sets logging format type. (default "json") --logger.level int Sets the minimum logging level. (default 3) --logger.mute Mutes all logs regardless of severity. Intended for benchmarks/tests only. --logger.show-source Includes source code location in logs. + --otel.file.filename string Filename to store exported telemetry traces (default "/tmp/trace.txt") + --otel.jaeger.endpoint string Endpoint for the jaeger telemetry trace ingestor (default "http://localhost:14268/api/traces") + --otel.otlpgrpc.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4317") + --otel.otlphttp.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4318/v1/traces") + --otel.sampler.parentSampler string Sets the parent sampler to use for the tracer (default "always") + --otel.type string Sets the type of exporter to configure [noop/file/jaeger/otlpgrpc/otlphttp]. (default "noop") -o, --output string Specifies the output type - supported formats [TABLE JSON YAML DOT DOTURL]. NOTE: dot, doturl are only supported for Workflow (default "TABLE") -p, --project string Specifies the Flyte project. --storage.cache.max_size_mbs int Maximum size of the cache where the Blob store data is cached in-memory. 
If not specified or set to 0, cache is not used diff --git a/flytectl/docs/source/gen/flytectl_create_project.rst b/flytectl/docs/source/gen/flytectl_create_project.rst index 16dedc8af4..a7a0e829f4 100644 --- a/flytectl/docs/source/gen/flytectl_create_project.rst +++ b/flytectl/docs/source/gen/flytectl_create_project.rst @@ -83,10 +83,12 @@ Options inherited from parent commands --admin.insecure Use insecure connection. --admin.insecureSkipVerify InsecureSkipVerify controls whether a client verifies the server's certificate chain and host name. Caution : shouldn't be use for production usecases' --admin.maxBackoffDelay string Max delay for grpc backoff (default "8s") + --admin.maxMessageSizeBytes int The max size in bytes for incoming gRPC messages --admin.maxRetries int Max number of gRPC retries (default 4) --admin.perRetryTimeout string gRPC per retry timeout (default "15s") --admin.pkceConfig.refreshTime string grace period from the token expiry after which it would refresh the token. (default "5m0s") --admin.pkceConfig.timeout string Amount of time the browser session would be active for authentication from client app. (default "2m0s") + --admin.proxyCommand strings Command for external proxy-authorization token generation --admin.scopes strings List of scopes to request --admin.tokenRefreshWindow string Max duration between token refresh attempt and token expiry. (default "0s") --admin.tokenUrl string OPTIONAL: Your IdP's token endpoint. It'll be discovered from flyte admin's OAuth Metadata endpoint if not provided. @@ -107,10 +109,17 @@ Options inherited from parent commands --files.outputLocationPrefix string Custom output location prefix for offloaded types (files/schemas). --files.sourceUploadPath string Deprecated: Update flyte admin to avoid having to configure storage access from flytectl. --files.version string Version of the entity to be registered with flyte which are un-versioned after serialization. 
+ -i, --interactive Set this flag to use an interactive CLI --logger.formatter.type string Sets logging format type. (default "json") --logger.level int Sets the minimum logging level. (default 3) --logger.mute Mutes all logs regardless of severity. Intended for benchmarks/tests only. --logger.show-source Includes source code location in logs. + --otel.file.filename string Filename to store exported telemetry traces (default "/tmp/trace.txt") + --otel.jaeger.endpoint string Endpoint for the jaeger telemetry trace ingestor (default "http://localhost:14268/api/traces") + --otel.otlpgrpc.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4317") + --otel.otlphttp.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4318/v1/traces") + --otel.sampler.parentSampler string Sets the parent sampler to use for the tracer (default "always") + --otel.type string Sets the type of exporter to configure [noop/file/jaeger/otlpgrpc/otlphttp]. (default "noop") -o, --output string Specifies the output type - supported formats [TABLE JSON YAML DOT DOTURL]. NOTE: dot, doturl are only supported for Workflow (default "TABLE") -p, --project string Specifies the Flyte project. --storage.cache.max_size_mbs int Maximum size of the cache where the Blob store data is cached in-memory. If not specified or set to 0, cache is not used diff --git a/flytectl/docs/source/gen/flytectl_delete.rst b/flytectl/docs/source/gen/flytectl_delete.rst index 485404ade9..2d87f99afc 100644 --- a/flytectl/docs/source/gen/flytectl_delete.rst +++ b/flytectl/docs/source/gen/flytectl_delete.rst @@ -46,10 +46,12 @@ Options inherited from parent commands --admin.insecure Use insecure connection. --admin.insecureSkipVerify InsecureSkipVerify controls whether a client verifies the server's certificate chain and host name. 
Caution : shouldn't be use for production usecases' --admin.maxBackoffDelay string Max delay for grpc backoff (default "8s") + --admin.maxMessageSizeBytes int The max size in bytes for incoming gRPC messages --admin.maxRetries int Max number of gRPC retries (default 4) --admin.perRetryTimeout string gRPC per retry timeout (default "15s") --admin.pkceConfig.refreshTime string grace period from the token expiry after which it would refresh the token. (default "5m0s") --admin.pkceConfig.timeout string Amount of time the browser session would be active for authentication from client app. (default "2m0s") + --admin.proxyCommand strings Command for external proxy-authorization token generation --admin.scopes strings List of scopes to request --admin.tokenRefreshWindow string Max duration between token refresh attempt and token expiry. (default "0s") --admin.tokenUrl string OPTIONAL: Your IdP's token endpoint. It'll be discovered from flyte admin's OAuth Metadata endpoint if not provided. @@ -70,10 +72,17 @@ Options inherited from parent commands --files.outputLocationPrefix string Custom output location prefix for offloaded types (files/schemas). --files.sourceUploadPath string Deprecated: Update flyte admin to avoid having to configure storage access from flytectl. --files.version string Version of the entity to be registered with flyte which are un-versioned after serialization. + -i, --interactive Set this flag to use an interactive CLI --logger.formatter.type string Sets logging format type. (default "json") --logger.level int Sets the minimum logging level. (default 3) --logger.mute Mutes all logs regardless of severity. Intended for benchmarks/tests only. --logger.show-source Includes source code location in logs. 
+ --otel.file.filename string Filename to store exported telemetry traces (default "/tmp/trace.txt") + --otel.jaeger.endpoint string Endpoint for the jaeger telemetry trace ingestor (default "http://localhost:14268/api/traces") + --otel.otlpgrpc.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4317") + --otel.otlphttp.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4318/v1/traces") + --otel.sampler.parentSampler string Sets the parent sampler to use for the tracer (default "always") + --otel.type string Sets the type of exporter to configure [noop/file/jaeger/otlpgrpc/otlphttp]. (default "noop") -o, --output string Specifies the output type - supported formats [TABLE JSON YAML DOT DOTURL]. NOTE: dot, doturl are only supported for Workflow (default "TABLE") -p, --project string Specifies the Flyte project. --storage.cache.max_size_mbs int Maximum size of the cache where the Blob store data is cached in-memory. If not specified or set to 0, cache is not used diff --git a/flytectl/docs/source/gen/flytectl_delete_cluster-resource-attribute.rst b/flytectl/docs/source/gen/flytectl_delete_cluster-resource-attribute.rst index 5264b9046e..50d732c682 100644 --- a/flytectl/docs/source/gen/flytectl_delete_cluster-resource-attribute.rst +++ b/flytectl/docs/source/gen/flytectl_delete_cluster-resource-attribute.rst @@ -81,10 +81,12 @@ Options inherited from parent commands --admin.insecure Use insecure connection. --admin.insecureSkipVerify InsecureSkipVerify controls whether a client verifies the server's certificate chain and host name. 
Caution : shouldn't be use for production usecases' --admin.maxBackoffDelay string Max delay for grpc backoff (default "8s") + --admin.maxMessageSizeBytes int The max size in bytes for incoming gRPC messages --admin.maxRetries int Max number of gRPC retries (default 4) --admin.perRetryTimeout string gRPC per retry timeout (default "15s") --admin.pkceConfig.refreshTime string grace period from the token expiry after which it would refresh the token. (default "5m0s") --admin.pkceConfig.timeout string Amount of time the browser session would be active for authentication from client app. (default "2m0s") + --admin.proxyCommand strings Command for external proxy-authorization token generation --admin.scopes strings List of scopes to request --admin.tokenRefreshWindow string Max duration between token refresh attempt and token expiry. (default "0s") --admin.tokenUrl string OPTIONAL: Your IdP's token endpoint. It'll be discovered from flyte admin's OAuth Metadata endpoint if not provided. @@ -105,10 +107,17 @@ Options inherited from parent commands --files.outputLocationPrefix string Custom output location prefix for offloaded types (files/schemas). --files.sourceUploadPath string Deprecated: Update flyte admin to avoid having to configure storage access from flytectl. --files.version string Version of the entity to be registered with flyte which are un-versioned after serialization. + -i, --interactive Set this flag to use an interactive CLI --logger.formatter.type string Sets logging format type. (default "json") --logger.level int Sets the minimum logging level. (default 3) --logger.mute Mutes all logs regardless of severity. Intended for benchmarks/tests only. --logger.show-source Includes source code location in logs. 
+ --otel.file.filename string Filename to store exported telemetry traces (default "/tmp/trace.txt") + --otel.jaeger.endpoint string Endpoint for the jaeger telemetry trace ingestor (default "http://localhost:14268/api/traces") + --otel.otlpgrpc.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4317") + --otel.otlphttp.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4318/v1/traces") + --otel.sampler.parentSampler string Sets the parent sampler to use for the tracer (default "always") + --otel.type string Sets the type of exporter to configure [noop/file/jaeger/otlpgrpc/otlphttp]. (default "noop") -o, --output string Specifies the output type - supported formats [TABLE JSON YAML DOT DOTURL]. NOTE: dot, doturl are only supported for Workflow (default "TABLE") -p, --project string Specifies the Flyte project. --storage.cache.max_size_mbs int Maximum size of the cache where the Blob store data is cached in-memory. If not specified or set to 0, cache is not used diff --git a/flytectl/docs/source/gen/flytectl_delete_execution-cluster-label.rst b/flytectl/docs/source/gen/flytectl_delete_execution-cluster-label.rst index 3314aba82e..c99be61780 100644 --- a/flytectl/docs/source/gen/flytectl_delete_execution-cluster-label.rst +++ b/flytectl/docs/source/gen/flytectl_delete_execution-cluster-label.rst @@ -78,10 +78,12 @@ Options inherited from parent commands --admin.insecure Use insecure connection. --admin.insecureSkipVerify InsecureSkipVerify controls whether a client verifies the server's certificate chain and host name. 
Caution : shouldn't be use for production usecases' --admin.maxBackoffDelay string Max delay for grpc backoff (default "8s") + --admin.maxMessageSizeBytes int The max size in bytes for incoming gRPC messages --admin.maxRetries int Max number of gRPC retries (default 4) --admin.perRetryTimeout string gRPC per retry timeout (default "15s") --admin.pkceConfig.refreshTime string grace period from the token expiry after which it would refresh the token. (default "5m0s") --admin.pkceConfig.timeout string Amount of time the browser session would be active for authentication from client app. (default "2m0s") + --admin.proxyCommand strings Command for external proxy-authorization token generation --admin.scopes strings List of scopes to request --admin.tokenRefreshWindow string Max duration between token refresh attempt and token expiry. (default "0s") --admin.tokenUrl string OPTIONAL: Your IdP's token endpoint. It'll be discovered from flyte admin's OAuth Metadata endpoint if not provided. @@ -102,10 +104,17 @@ Options inherited from parent commands --files.outputLocationPrefix string Custom output location prefix for offloaded types (files/schemas). --files.sourceUploadPath string Deprecated: Update flyte admin to avoid having to configure storage access from flytectl. --files.version string Version of the entity to be registered with flyte which are un-versioned after serialization. + -i, --interactive Set this flag to use an interactive CLI --logger.formatter.type string Sets logging format type. (default "json") --logger.level int Sets the minimum logging level. (default 3) --logger.mute Mutes all logs regardless of severity. Intended for benchmarks/tests only. --logger.show-source Includes source code location in logs. 
+ --otel.file.filename string Filename to store exported telemetry traces (default "/tmp/trace.txt") + --otel.jaeger.endpoint string Endpoint for the jaeger telemetry trace ingestor (default "http://localhost:14268/api/traces") + --otel.otlpgrpc.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4317") + --otel.otlphttp.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4318/v1/traces") + --otel.sampler.parentSampler string Sets the parent sampler to use for the tracer (default "always") + --otel.type string Sets the type of exporter to configure [noop/file/jaeger/otlpgrpc/otlphttp]. (default "noop") -o, --output string Specifies the output type - supported formats [TABLE JSON YAML DOT DOTURL]. NOTE: dot, doturl are only supported for Workflow (default "TABLE") -p, --project string Specifies the Flyte project. --storage.cache.max_size_mbs int Maximum size of the cache where the Blob store data is cached in-memory. If not specified or set to 0, cache is not used diff --git a/flytectl/docs/source/gen/flytectl_delete_execution-queue-attribute.rst b/flytectl/docs/source/gen/flytectl_delete_execution-queue-attribute.rst index 57bd1c6d04..5bae8553eb 100644 --- a/flytectl/docs/source/gen/flytectl_delete_execution-queue-attribute.rst +++ b/flytectl/docs/source/gen/flytectl_delete_execution-queue-attribute.rst @@ -82,10 +82,12 @@ Options inherited from parent commands --admin.insecure Use insecure connection. --admin.insecureSkipVerify InsecureSkipVerify controls whether a client verifies the server's certificate chain and host name. 
Caution : shouldn't be use for production usecases' --admin.maxBackoffDelay string Max delay for grpc backoff (default "8s") + --admin.maxMessageSizeBytes int The max size in bytes for incoming gRPC messages --admin.maxRetries int Max number of gRPC retries (default 4) --admin.perRetryTimeout string gRPC per retry timeout (default "15s") --admin.pkceConfig.refreshTime string grace period from the token expiry after which it would refresh the token. (default "5m0s") --admin.pkceConfig.timeout string Amount of time the browser session would be active for authentication from client app. (default "2m0s") + --admin.proxyCommand strings Command for external proxy-authorization token generation --admin.scopes strings List of scopes to request --admin.tokenRefreshWindow string Max duration between token refresh attempt and token expiry. (default "0s") --admin.tokenUrl string OPTIONAL: Your IdP's token endpoint. It'll be discovered from flyte admin's OAuth Metadata endpoint if not provided. @@ -106,10 +108,17 @@ Options inherited from parent commands --files.outputLocationPrefix string Custom output location prefix for offloaded types (files/schemas). --files.sourceUploadPath string Deprecated: Update flyte admin to avoid having to configure storage access from flytectl. --files.version string Version of the entity to be registered with flyte which are un-versioned after serialization. + -i, --interactive Set this flag to use an interactive CLI --logger.formatter.type string Sets logging format type. (default "json") --logger.level int Sets the minimum logging level. (default 3) --logger.mute Mutes all logs regardless of severity. Intended for benchmarks/tests only. --logger.show-source Includes source code location in logs. 
+ --otel.file.filename string Filename to store exported telemetry traces (default "/tmp/trace.txt") + --otel.jaeger.endpoint string Endpoint for the jaeger telemetry trace ingestor (default "http://localhost:14268/api/traces") + --otel.otlpgrpc.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4317") + --otel.otlphttp.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4318/v1/traces") + --otel.sampler.parentSampler string Sets the parent sampler to use for the tracer (default "always") + --otel.type string Sets the type of exporter to configure [noop/file/jaeger/otlpgrpc/otlphttp]. (default "noop") -o, --output string Specifies the output type - supported formats [TABLE JSON YAML DOT DOTURL]. NOTE: dot, doturl are only supported for Workflow (default "TABLE") -p, --project string Specifies the Flyte project. --storage.cache.max_size_mbs int Maximum size of the cache where the Blob store data is cached in-memory. If not specified or set to 0, cache is not used diff --git a/flytectl/docs/source/gen/flytectl_delete_execution.rst b/flytectl/docs/source/gen/flytectl_delete_execution.rst index c336e66abf..8f325aeb0c 100644 --- a/flytectl/docs/source/gen/flytectl_delete_execution.rst +++ b/flytectl/docs/source/gen/flytectl_delete_execution.rst @@ -89,10 +89,12 @@ Options inherited from parent commands --admin.insecure Use insecure connection. --admin.insecureSkipVerify InsecureSkipVerify controls whether a client verifies the server's certificate chain and host name. 
Caution : shouldn't be use for production usecases' --admin.maxBackoffDelay string Max delay for grpc backoff (default "8s") + --admin.maxMessageSizeBytes int The max size in bytes for incoming gRPC messages --admin.maxRetries int Max number of gRPC retries (default 4) --admin.perRetryTimeout string gRPC per retry timeout (default "15s") --admin.pkceConfig.refreshTime string grace period from the token expiry after which it would refresh the token. (default "5m0s") --admin.pkceConfig.timeout string Amount of time the browser session would be active for authentication from client app. (default "2m0s") + --admin.proxyCommand strings Command for external proxy-authorization token generation --admin.scopes strings List of scopes to request --admin.tokenRefreshWindow string Max duration between token refresh attempt and token expiry. (default "0s") --admin.tokenUrl string OPTIONAL: Your IdP's token endpoint. It'll be discovered from flyte admin's OAuth Metadata endpoint if not provided. @@ -113,10 +115,17 @@ Options inherited from parent commands --files.outputLocationPrefix string Custom output location prefix for offloaded types (files/schemas). --files.sourceUploadPath string Deprecated: Update flyte admin to avoid having to configure storage access from flytectl. --files.version string Version of the entity to be registered with flyte which are un-versioned after serialization. + -i, --interactive Set this flag to use an interactive CLI --logger.formatter.type string Sets logging format type. (default "json") --logger.level int Sets the minimum logging level. (default 3) --logger.mute Mutes all logs regardless of severity. Intended for benchmarks/tests only. --logger.show-source Includes source code location in logs. 
+ --otel.file.filename string Filename to store exported telemetry traces (default "/tmp/trace.txt") + --otel.jaeger.endpoint string Endpoint for the jaeger telemetry trace ingestor (default "http://localhost:14268/api/traces") + --otel.otlpgrpc.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4317") + --otel.otlphttp.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4318/v1/traces") + --otel.sampler.parentSampler string Sets the parent sampler to use for the tracer (default "always") + --otel.type string Sets the type of exporter to configure [noop/file/jaeger/otlpgrpc/otlphttp]. (default "noop") -o, --output string Specifies the output type - supported formats [TABLE JSON YAML DOT DOTURL]. NOTE: dot, doturl are only supported for Workflow (default "TABLE") -p, --project string Specifies the Flyte project. --storage.cache.max_size_mbs int Maximum size of the cache where the Blob store data is cached in-memory. If not specified or set to 0, cache is not used diff --git a/flytectl/docs/source/gen/flytectl_delete_plugin-override.rst b/flytectl/docs/source/gen/flytectl_delete_plugin-override.rst index 58e26d4457..902a175aff 100644 --- a/flytectl/docs/source/gen/flytectl_delete_plugin-override.rst +++ b/flytectl/docs/source/gen/flytectl_delete_plugin-override.rst @@ -83,10 +83,12 @@ Options inherited from parent commands --admin.insecure Use insecure connection. --admin.insecureSkipVerify InsecureSkipVerify controls whether a client verifies the server's certificate chain and host name. 
Caution : shouldn't be use for production usecases' --admin.maxBackoffDelay string Max delay for grpc backoff (default "8s") + --admin.maxMessageSizeBytes int The max size in bytes for incoming gRPC messages --admin.maxRetries int Max number of gRPC retries (default 4) --admin.perRetryTimeout string gRPC per retry timeout (default "15s") --admin.pkceConfig.refreshTime string grace period from the token expiry after which it would refresh the token. (default "5m0s") --admin.pkceConfig.timeout string Amount of time the browser session would be active for authentication from client app. (default "2m0s") + --admin.proxyCommand strings Command for external proxy-authorization token generation --admin.scopes strings List of scopes to request --admin.tokenRefreshWindow string Max duration between token refresh attempt and token expiry. (default "0s") --admin.tokenUrl string OPTIONAL: Your IdP's token endpoint. It'll be discovered from flyte admin's OAuth Metadata endpoint if not provided. @@ -107,10 +109,17 @@ Options inherited from parent commands --files.outputLocationPrefix string Custom output location prefix for offloaded types (files/schemas). --files.sourceUploadPath string Deprecated: Update flyte admin to avoid having to configure storage access from flytectl. --files.version string Version of the entity to be registered with flyte which are un-versioned after serialization. + -i, --interactive Set this flag to use an interactive CLI --logger.formatter.type string Sets logging format type. (default "json") --logger.level int Sets the minimum logging level. (default 3) --logger.mute Mutes all logs regardless of severity. Intended for benchmarks/tests only. --logger.show-source Includes source code location in logs. 
+ --otel.file.filename string Filename to store exported telemetry traces (default "/tmp/trace.txt") + --otel.jaeger.endpoint string Endpoint for the jaeger telemetry trace ingestor (default "http://localhost:14268/api/traces") + --otel.otlpgrpc.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4317") + --otel.otlphttp.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4318/v1/traces") + --otel.sampler.parentSampler string Sets the parent sampler to use for the tracer (default "always") + --otel.type string Sets the type of exporter to configure [noop/file/jaeger/otlpgrpc/otlphttp]. (default "noop") -o, --output string Specifies the output type - supported formats [TABLE JSON YAML DOT DOTURL]. NOTE: dot, doturl are only supported for Workflow (default "TABLE") -p, --project string Specifies the Flyte project. --storage.cache.max_size_mbs int Maximum size of the cache where the Blob store data is cached in-memory. If not specified or set to 0, cache is not used diff --git a/flytectl/docs/source/gen/flytectl_delete_task-resource-attribute.rst b/flytectl/docs/source/gen/flytectl_delete_task-resource-attribute.rst index f523a7717e..ee00a740c4 100644 --- a/flytectl/docs/source/gen/flytectl_delete_task-resource-attribute.rst +++ b/flytectl/docs/source/gen/flytectl_delete_task-resource-attribute.rst @@ -83,10 +83,12 @@ Options inherited from parent commands --admin.insecure Use insecure connection. --admin.insecureSkipVerify InsecureSkipVerify controls whether a client verifies the server's certificate chain and host name. 
Caution : shouldn't be use for production usecases' --admin.maxBackoffDelay string Max delay for grpc backoff (default "8s") + --admin.maxMessageSizeBytes int The max size in bytes for incoming gRPC messages --admin.maxRetries int Max number of gRPC retries (default 4) --admin.perRetryTimeout string gRPC per retry timeout (default "15s") --admin.pkceConfig.refreshTime string grace period from the token expiry after which it would refresh the token. (default "5m0s") --admin.pkceConfig.timeout string Amount of time the browser session would be active for authentication from client app. (default "2m0s") + --admin.proxyCommand strings Command for external proxy-authorization token generation --admin.scopes strings List of scopes to request --admin.tokenRefreshWindow string Max duration between token refresh attempt and token expiry. (default "0s") --admin.tokenUrl string OPTIONAL: Your IdP's token endpoint. It'll be discovered from flyte admin's OAuth Metadata endpoint if not provided. @@ -107,10 +109,17 @@ Options inherited from parent commands --files.outputLocationPrefix string Custom output location prefix for offloaded types (files/schemas). --files.sourceUploadPath string Deprecated: Update flyte admin to avoid having to configure storage access from flytectl. --files.version string Version of the entity to be registered with flyte which are un-versioned after serialization. + -i, --interactive Set this flag to use an interactive CLI --logger.formatter.type string Sets logging format type. (default "json") --logger.level int Sets the minimum logging level. (default 3) --logger.mute Mutes all logs regardless of severity. Intended for benchmarks/tests only. --logger.show-source Includes source code location in logs. 
+ --otel.file.filename string Filename to store exported telemetry traces (default "/tmp/trace.txt") + --otel.jaeger.endpoint string Endpoint for the jaeger telemetry trace ingestor (default "http://localhost:14268/api/traces") + --otel.otlpgrpc.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4317") + --otel.otlphttp.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4318/v1/traces") + --otel.sampler.parentSampler string Sets the parent sampler to use for the tracer (default "always") + --otel.type string Sets the type of exporter to configure [noop/file/jaeger/otlpgrpc/otlphttp]. (default "noop") -o, --output string Specifies the output type - supported formats [TABLE JSON YAML DOT DOTURL]. NOTE: dot, doturl are only supported for Workflow (default "TABLE") -p, --project string Specifies the Flyte project. --storage.cache.max_size_mbs int Maximum size of the cache where the Blob store data is cached in-memory. If not specified or set to 0, cache is not used diff --git a/flytectl/docs/source/gen/flytectl_delete_workflow-execution-config.rst b/flytectl/docs/source/gen/flytectl_delete_workflow-execution-config.rst index 389dad93be..5403ab1a26 100644 --- a/flytectl/docs/source/gen/flytectl_delete_workflow-execution-config.rst +++ b/flytectl/docs/source/gen/flytectl_delete_workflow-execution-config.rst @@ -81,10 +81,12 @@ Options inherited from parent commands --admin.insecure Use insecure connection. --admin.insecureSkipVerify InsecureSkipVerify controls whether a client verifies the server's certificate chain and host name. 
Caution : shouldn't be use for production usecases' --admin.maxBackoffDelay string Max delay for grpc backoff (default "8s") + --admin.maxMessageSizeBytes int The max size in bytes for incoming gRPC messages --admin.maxRetries int Max number of gRPC retries (default 4) --admin.perRetryTimeout string gRPC per retry timeout (default "15s") --admin.pkceConfig.refreshTime string grace period from the token expiry after which it would refresh the token. (default "5m0s") --admin.pkceConfig.timeout string Amount of time the browser session would be active for authentication from client app. (default "2m0s") + --admin.proxyCommand strings Command for external proxy-authorization token generation --admin.scopes strings List of scopes to request --admin.tokenRefreshWindow string Max duration between token refresh attempt and token expiry. (default "0s") --admin.tokenUrl string OPTIONAL: Your IdP's token endpoint. It'll be discovered from flyte admin's OAuth Metadata endpoint if not provided. @@ -105,10 +107,17 @@ Options inherited from parent commands --files.outputLocationPrefix string Custom output location prefix for offloaded types (files/schemas). --files.sourceUploadPath string Deprecated: Update flyte admin to avoid having to configure storage access from flytectl. --files.version string Version of the entity to be registered with flyte which are un-versioned after serialization. + -i, --interactive Set this flag to use an interactive CLI --logger.formatter.type string Sets logging format type. (default "json") --logger.level int Sets the minimum logging level. (default 3) --logger.mute Mutes all logs regardless of severity. Intended for benchmarks/tests only. --logger.show-source Includes source code location in logs. 
+ --otel.file.filename string Filename to store exported telemetry traces (default "/tmp/trace.txt") + --otel.jaeger.endpoint string Endpoint for the jaeger telemetry trace ingestor (default "http://localhost:14268/api/traces") + --otel.otlpgrpc.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4317") + --otel.otlphttp.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4318/v1/traces") + --otel.sampler.parentSampler string Sets the parent sampler to use for the tracer (default "always") + --otel.type string Sets the type of exporter to configure [noop/file/jaeger/otlpgrpc/otlphttp]. (default "noop") -o, --output string Specifies the output type - supported formats [TABLE JSON YAML DOT DOTURL]. NOTE: dot, doturl are only supported for Workflow (default "TABLE") -p, --project string Specifies the Flyte project. --storage.cache.max_size_mbs int Maximum size of the cache where the Blob store data is cached in-memory. If not specified or set to 0, cache is not used diff --git a/flytectl/docs/source/gen/flytectl_demo.rst b/flytectl/docs/source/gen/flytectl_demo.rst index 2176c7b95d..d0b10b555c 100644 --- a/flytectl/docs/source/gen/flytectl_demo.rst +++ b/flytectl/docs/source/gen/flytectl_demo.rst @@ -64,10 +64,12 @@ Options inherited from parent commands --admin.insecure Use insecure connection. --admin.insecureSkipVerify InsecureSkipVerify controls whether a client verifies the server's certificate chain and host name. Caution : shouldn't be use for production usecases' --admin.maxBackoffDelay string Max delay for grpc backoff (default "8s") + --admin.maxMessageSizeBytes int The max size in bytes for incoming gRPC messages --admin.maxRetries int Max number of gRPC retries (default 4) --admin.perRetryTimeout string gRPC per retry timeout (default "15s") --admin.pkceConfig.refreshTime string grace period from the token expiry after which it would refresh the token. 
(default "5m0s") --admin.pkceConfig.timeout string Amount of time the browser session would be active for authentication from client app. (default "2m0s") + --admin.proxyCommand strings Command for external proxy-authorization token generation --admin.scopes strings List of scopes to request --admin.tokenRefreshWindow string Max duration between token refresh attempt and token expiry. (default "0s") --admin.tokenUrl string OPTIONAL: Your IdP's token endpoint. It'll be discovered from flyte admin's OAuth Metadata endpoint if not provided. @@ -88,10 +90,17 @@ Options inherited from parent commands --files.outputLocationPrefix string Custom output location prefix for offloaded types (files/schemas). --files.sourceUploadPath string Deprecated: Update flyte admin to avoid having to configure storage access from flytectl. --files.version string Version of the entity to be registered with flyte which are un-versioned after serialization. + -i, --interactive Set this flag to use an interactive CLI --logger.formatter.type string Sets logging format type. (default "json") --logger.level int Sets the minimum logging level. (default 3) --logger.mute Mutes all logs regardless of severity. Intended for benchmarks/tests only. --logger.show-source Includes source code location in logs. + --otel.file.filename string Filename to store exported telemetry traces (default "/tmp/trace.txt") + --otel.jaeger.endpoint string Endpoint for the jaeger telemetry trace ingestor (default "http://localhost:14268/api/traces") + --otel.otlpgrpc.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4317") + --otel.otlphttp.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4318/v1/traces") + --otel.sampler.parentSampler string Sets the parent sampler to use for the tracer (default "always") + --otel.type string Sets the type of exporter to configure [noop/file/jaeger/otlpgrpc/otlphttp]. 
(default "noop") -o, --output string Specifies the output type - supported formats [TABLE JSON YAML DOT DOTURL]. NOTE: dot, doturl are only supported for Workflow (default "TABLE") -p, --project string Specifies the Flyte project. --storage.cache.max_size_mbs int Maximum size of the cache where the Blob store data is cached in-memory. If not specified or set to 0, cache is not used diff --git a/flytectl/docs/source/gen/flytectl_demo_exec.rst b/flytectl/docs/source/gen/flytectl_demo_exec.rst index 8a0c9c4861..ff649fa3ff 100644 --- a/flytectl/docs/source/gen/flytectl_demo_exec.rst +++ b/flytectl/docs/source/gen/flytectl_demo_exec.rst @@ -53,10 +53,12 @@ Options inherited from parent commands --admin.insecure Use insecure connection. --admin.insecureSkipVerify InsecureSkipVerify controls whether a client verifies the server's certificate chain and host name. Caution : shouldn't be use for production usecases' --admin.maxBackoffDelay string Max delay for grpc backoff (default "8s") + --admin.maxMessageSizeBytes int The max size in bytes for incoming gRPC messages --admin.maxRetries int Max number of gRPC retries (default 4) --admin.perRetryTimeout string gRPC per retry timeout (default "15s") --admin.pkceConfig.refreshTime string grace period from the token expiry after which it would refresh the token. (default "5m0s") --admin.pkceConfig.timeout string Amount of time the browser session would be active for authentication from client app. (default "2m0s") + --admin.proxyCommand strings Command for external proxy-authorization token generation --admin.scopes strings List of scopes to request --admin.tokenRefreshWindow string Max duration between token refresh attempt and token expiry. (default "0s") --admin.tokenUrl string OPTIONAL: Your IdP's token endpoint. It'll be discovered from flyte admin's OAuth Metadata endpoint if not provided. 
@@ -77,10 +79,17 @@ Options inherited from parent commands --files.outputLocationPrefix string Custom output location prefix for offloaded types (files/schemas). --files.sourceUploadPath string Deprecated: Update flyte admin to avoid having to configure storage access from flytectl. --files.version string Version of the entity to be registered with flyte which are un-versioned after serialization. + -i, --interactive Set this flag to use an interactive CLI --logger.formatter.type string Sets logging format type. (default "json") --logger.level int Sets the minimum logging level. (default 3) --logger.mute Mutes all logs regardless of severity. Intended for benchmarks/tests only. --logger.show-source Includes source code location in logs. + --otel.file.filename string Filename to store exported telemetry traces (default "/tmp/trace.txt") + --otel.jaeger.endpoint string Endpoint for the jaeger telemetry trace ingestor (default "http://localhost:14268/api/traces") + --otel.otlpgrpc.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4317") + --otel.otlphttp.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4318/v1/traces") + --otel.sampler.parentSampler string Sets the parent sampler to use for the tracer (default "always") + --otel.type string Sets the type of exporter to configure [noop/file/jaeger/otlpgrpc/otlphttp]. (default "noop") -o, --output string Specifies the output type - supported formats [TABLE JSON YAML DOT DOTURL]. NOTE: dot, doturl are only supported for Workflow (default "TABLE") -p, --project string Specifies the Flyte project. --storage.cache.max_size_mbs int Maximum size of the cache where the Blob store data is cached in-memory. 
If not specified or set to 0, cache is not used diff --git a/flytectl/docs/source/gen/flytectl_demo_reload.rst b/flytectl/docs/source/gen/flytectl_demo_reload.rst index 9fccacec1c..a971d11a4e 100644 --- a/flytectl/docs/source/gen/flytectl_demo_reload.rst +++ b/flytectl/docs/source/gen/flytectl_demo_reload.rst @@ -66,10 +66,12 @@ Options inherited from parent commands --admin.insecure Use insecure connection. --admin.insecureSkipVerify InsecureSkipVerify controls whether a client verifies the server's certificate chain and host name. Caution : shouldn't be use for production usecases' --admin.maxBackoffDelay string Max delay for grpc backoff (default "8s") + --admin.maxMessageSizeBytes int The max size in bytes for incoming gRPC messages --admin.maxRetries int Max number of gRPC retries (default 4) --admin.perRetryTimeout string gRPC per retry timeout (default "15s") --admin.pkceConfig.refreshTime string grace period from the token expiry after which it would refresh the token. (default "5m0s") --admin.pkceConfig.timeout string Amount of time the browser session would be active for authentication from client app. (default "2m0s") + --admin.proxyCommand strings Command for external proxy-authorization token generation --admin.scopes strings List of scopes to request --admin.tokenRefreshWindow string Max duration between token refresh attempt and token expiry. (default "0s") --admin.tokenUrl string OPTIONAL: Your IdP's token endpoint. It'll be discovered from flyte admin's OAuth Metadata endpoint if not provided. @@ -90,10 +92,17 @@ Options inherited from parent commands --files.outputLocationPrefix string Custom output location prefix for offloaded types (files/schemas). --files.sourceUploadPath string Deprecated: Update flyte admin to avoid having to configure storage access from flytectl. --files.version string Version of the entity to be registered with flyte which are un-versioned after serialization. 
+ -i, --interactive Set this flag to use an interactive CLI --logger.formatter.type string Sets logging format type. (default "json") --logger.level int Sets the minimum logging level. (default 3) --logger.mute Mutes all logs regardless of severity. Intended for benchmarks/tests only. --logger.show-source Includes source code location in logs. + --otel.file.filename string Filename to store exported telemetry traces (default "/tmp/trace.txt") + --otel.jaeger.endpoint string Endpoint for the jaeger telemetry trace ingestor (default "http://localhost:14268/api/traces") + --otel.otlpgrpc.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4317") + --otel.otlphttp.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4318/v1/traces") + --otel.sampler.parentSampler string Sets the parent sampler to use for the tracer (default "always") + --otel.type string Sets the type of exporter to configure [noop/file/jaeger/otlpgrpc/otlphttp]. (default "noop") -o, --output string Specifies the output type - supported formats [TABLE JSON YAML DOT DOTURL]. NOTE: dot, doturl are only supported for Workflow (default "TABLE") -p, --project string Specifies the Flyte project. --storage.cache.max_size_mbs int Maximum size of the cache where the Blob store data is cached in-memory. If not specified or set to 0, cache is not used diff --git a/flytectl/docs/source/gen/flytectl_demo_start.rst b/flytectl/docs/source/gen/flytectl_demo_start.rst index 89ed10f585..669aac6409 100644 --- a/flytectl/docs/source/gen/flytectl_demo_start.rst +++ b/flytectl/docs/source/gen/flytectl_demo_start.rst @@ -127,10 +127,12 @@ Options inherited from parent commands --admin.insecure Use insecure connection. --admin.insecureSkipVerify InsecureSkipVerify controls whether a client verifies the server's certificate chain and host name. 
Caution : shouldn't be use for production usecases' --admin.maxBackoffDelay string Max delay for grpc backoff (default "8s") + --admin.maxMessageSizeBytes int The max size in bytes for incoming gRPC messages --admin.maxRetries int Max number of gRPC retries (default 4) --admin.perRetryTimeout string gRPC per retry timeout (default "15s") --admin.pkceConfig.refreshTime string grace period from the token expiry after which it would refresh the token. (default "5m0s") --admin.pkceConfig.timeout string Amount of time the browser session would be active for authentication from client app. (default "2m0s") + --admin.proxyCommand strings Command for external proxy-authorization token generation --admin.scopes strings List of scopes to request --admin.tokenRefreshWindow string Max duration between token refresh attempt and token expiry. (default "0s") --admin.tokenUrl string OPTIONAL: Your IdP's token endpoint. It'll be discovered from flyte admin's OAuth Metadata endpoint if not provided. @@ -151,10 +153,17 @@ Options inherited from parent commands --files.outputLocationPrefix string Custom output location prefix for offloaded types (files/schemas). --files.sourceUploadPath string Deprecated: Update flyte admin to avoid having to configure storage access from flytectl. --files.version string Version of the entity to be registered with flyte which are un-versioned after serialization. + -i, --interactive Set this flag to use an interactive CLI --logger.formatter.type string Sets logging format type. (default "json") --logger.level int Sets the minimum logging level. (default 3) --logger.mute Mutes all logs regardless of severity. Intended for benchmarks/tests only. --logger.show-source Includes source code location in logs. 
+ --otel.file.filename string Filename to store exported telemetry traces (default "/tmp/trace.txt") + --otel.jaeger.endpoint string Endpoint for the jaeger telemetry trace ingestor (default "http://localhost:14268/api/traces") + --otel.otlpgrpc.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4317") + --otel.otlphttp.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4318/v1/traces") + --otel.sampler.parentSampler string Sets the parent sampler to use for the tracer (default "always") + --otel.type string Sets the type of exporter to configure [noop/file/jaeger/otlpgrpc/otlphttp]. (default "noop") -o, --output string Specifies the output type - supported formats [TABLE JSON YAML DOT DOTURL]. NOTE: dot, doturl are only supported for Workflow (default "TABLE") -p, --project string Specifies the Flyte project. --storage.cache.max_size_mbs int Maximum size of the cache where the Blob store data is cached in-memory. If not specified or set to 0, cache is not used diff --git a/flytectl/docs/source/gen/flytectl_demo_status.rst b/flytectl/docs/source/gen/flytectl_demo_status.rst index 3d21334326..0647451f30 100644 --- a/flytectl/docs/source/gen/flytectl_demo_status.rst +++ b/flytectl/docs/source/gen/flytectl_demo_status.rst @@ -53,10 +53,12 @@ Options inherited from parent commands --admin.insecure Use insecure connection. --admin.insecureSkipVerify InsecureSkipVerify controls whether a client verifies the server's certificate chain and host name. Caution : shouldn't be use for production usecases' --admin.maxBackoffDelay string Max delay for grpc backoff (default "8s") + --admin.maxMessageSizeBytes int The max size in bytes for incoming gRPC messages --admin.maxRetries int Max number of gRPC retries (default 4) --admin.perRetryTimeout string gRPC per retry timeout (default "15s") --admin.pkceConfig.refreshTime string grace period from the token expiry after which it would refresh the token. 
(default "5m0s") --admin.pkceConfig.timeout string Amount of time the browser session would be active for authentication from client app. (default "2m0s") + --admin.proxyCommand strings Command for external proxy-authorization token generation --admin.scopes strings List of scopes to request --admin.tokenRefreshWindow string Max duration between token refresh attempt and token expiry. (default "0s") --admin.tokenUrl string OPTIONAL: Your IdP's token endpoint. It'll be discovered from flyte admin's OAuth Metadata endpoint if not provided. @@ -77,10 +79,17 @@ Options inherited from parent commands --files.outputLocationPrefix string Custom output location prefix for offloaded types (files/schemas). --files.sourceUploadPath string Deprecated: Update flyte admin to avoid having to configure storage access from flytectl. --files.version string Version of the entity to be registered with flyte which are un-versioned after serialization. + -i, --interactive Set this flag to use an interactive CLI --logger.formatter.type string Sets logging format type. (default "json") --logger.level int Sets the minimum logging level. (default 3) --logger.mute Mutes all logs regardless of severity. Intended for benchmarks/tests only. --logger.show-source Includes source code location in logs. + --otel.file.filename string Filename to store exported telemetry traces (default "/tmp/trace.txt") + --otel.jaeger.endpoint string Endpoint for the jaeger telemetry trace ingestor (default "http://localhost:14268/api/traces") + --otel.otlpgrpc.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4317") + --otel.otlphttp.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4318/v1/traces") + --otel.sampler.parentSampler string Sets the parent sampler to use for the tracer (default "always") + --otel.type string Sets the type of exporter to configure [noop/file/jaeger/otlpgrpc/otlphttp]. 
(default "noop") -o, --output string Specifies the output type - supported formats [TABLE JSON YAML DOT DOTURL]. NOTE: dot, doturl are only supported for Workflow (default "TABLE") -p, --project string Specifies the Flyte project. --storage.cache.max_size_mbs int Maximum size of the cache where the Blob store data is cached in-memory. If not specified or set to 0, cache is not used diff --git a/flytectl/docs/source/gen/flytectl_demo_teardown.rst b/flytectl/docs/source/gen/flytectl_demo_teardown.rst index 42d9c22630..05c9e1a462 100644 --- a/flytectl/docs/source/gen/flytectl_demo_teardown.rst +++ b/flytectl/docs/source/gen/flytectl_demo_teardown.rst @@ -54,10 +54,12 @@ Options inherited from parent commands --admin.insecure Use insecure connection. --admin.insecureSkipVerify InsecureSkipVerify controls whether a client verifies the server's certificate chain and host name. Caution : shouldn't be use for production usecases' --admin.maxBackoffDelay string Max delay for grpc backoff (default "8s") + --admin.maxMessageSizeBytes int The max size in bytes for incoming gRPC messages --admin.maxRetries int Max number of gRPC retries (default 4) --admin.perRetryTimeout string gRPC per retry timeout (default "15s") --admin.pkceConfig.refreshTime string grace period from the token expiry after which it would refresh the token. (default "5m0s") --admin.pkceConfig.timeout string Amount of time the browser session would be active for authentication from client app. (default "2m0s") + --admin.proxyCommand strings Command for external proxy-authorization token generation --admin.scopes strings List of scopes to request --admin.tokenRefreshWindow string Max duration between token refresh attempt and token expiry. (default "0s") --admin.tokenUrl string OPTIONAL: Your IdP's token endpoint. It'll be discovered from flyte admin's OAuth Metadata endpoint if not provided. 
@@ -78,10 +80,17 @@ Options inherited from parent commands --files.outputLocationPrefix string Custom output location prefix for offloaded types (files/schemas). --files.sourceUploadPath string Deprecated: Update flyte admin to avoid having to configure storage access from flytectl. --files.version string Version of the entity to be registered with flyte which are un-versioned after serialization. + -i, --interactive Set this flag to use an interactive CLI --logger.formatter.type string Sets logging format type. (default "json") --logger.level int Sets the minimum logging level. (default 3) --logger.mute Mutes all logs regardless of severity. Intended for benchmarks/tests only. --logger.show-source Includes source code location in logs. + --otel.file.filename string Filename to store exported telemetry traces (default "/tmp/trace.txt") + --otel.jaeger.endpoint string Endpoint for the jaeger telemetry trace ingestor (default "http://localhost:14268/api/traces") + --otel.otlpgrpc.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4317") + --otel.otlphttp.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4318/v1/traces") + --otel.sampler.parentSampler string Sets the parent sampler to use for the tracer (default "always") + --otel.type string Sets the type of exporter to configure [noop/file/jaeger/otlpgrpc/otlphttp]. (default "noop") -o, --output string Specifies the output type - supported formats [TABLE JSON YAML DOT DOTURL]. NOTE: dot, doturl are only supported for Workflow (default "TABLE") -p, --project string Specifies the Flyte project. --storage.cache.max_size_mbs int Maximum size of the cache where the Blob store data is cached in-memory. 
If not specified or set to 0, cache is not used diff --git a/flytectl/docs/source/gen/flytectl_get.rst b/flytectl/docs/source/gen/flytectl_get.rst index f48ddf3ef8..9bae30c3f5 100644 --- a/flytectl/docs/source/gen/flytectl_get.rst +++ b/flytectl/docs/source/gen/flytectl_get.rst @@ -46,10 +46,12 @@ Options inherited from parent commands --admin.insecure Use insecure connection. --admin.insecureSkipVerify InsecureSkipVerify controls whether a client verifies the server's certificate chain and host name. Caution : shouldn't be use for production usecases' --admin.maxBackoffDelay string Max delay for grpc backoff (default "8s") + --admin.maxMessageSizeBytes int The max size in bytes for incoming gRPC messages --admin.maxRetries int Max number of gRPC retries (default 4) --admin.perRetryTimeout string gRPC per retry timeout (default "15s") --admin.pkceConfig.refreshTime string grace period from the token expiry after which it would refresh the token. (default "5m0s") --admin.pkceConfig.timeout string Amount of time the browser session would be active for authentication from client app. (default "2m0s") + --admin.proxyCommand strings Command for external proxy-authorization token generation --admin.scopes strings List of scopes to request --admin.tokenRefreshWindow string Max duration between token refresh attempt and token expiry. (default "0s") --admin.tokenUrl string OPTIONAL: Your IdP's token endpoint. It'll be discovered from flyte admin's OAuth Metadata endpoint if not provided. @@ -70,10 +72,17 @@ Options inherited from parent commands --files.outputLocationPrefix string Custom output location prefix for offloaded types (files/schemas). --files.sourceUploadPath string Deprecated: Update flyte admin to avoid having to configure storage access from flytectl. --files.version string Version of the entity to be registered with flyte which are un-versioned after serialization. 
+ -i, --interactive Set this flag to use an interactive CLI --logger.formatter.type string Sets logging format type. (default "json") --logger.level int Sets the minimum logging level. (default 3) --logger.mute Mutes all logs regardless of severity. Intended for benchmarks/tests only. --logger.show-source Includes source code location in logs. + --otel.file.filename string Filename to store exported telemetry traces (default "/tmp/trace.txt") + --otel.jaeger.endpoint string Endpoint for the jaeger telemetry trace ingestor (default "http://localhost:14268/api/traces") + --otel.otlpgrpc.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4317") + --otel.otlphttp.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4318/v1/traces") + --otel.sampler.parentSampler string Sets the parent sampler to use for the tracer (default "always") + --otel.type string Sets the type of exporter to configure [noop/file/jaeger/otlpgrpc/otlphttp]. (default "noop") -o, --output string Specifies the output type - supported formats [TABLE JSON YAML DOT DOTURL]. NOTE: dot, doturl are only supported for Workflow (default "TABLE") -p, --project string Specifies the Flyte project. --storage.cache.max_size_mbs int Maximum size of the cache where the Blob store data is cached in-memory. If not specified or set to 0, cache is not used diff --git a/flytectl/docs/source/gen/flytectl_get_cluster-resource-attribute.rst b/flytectl/docs/source/gen/flytectl_get_cluster-resource-attribute.rst index b242491a88..0046a883d2 100644 --- a/flytectl/docs/source/gen/flytectl_get_cluster-resource-attribute.rst +++ b/flytectl/docs/source/gen/flytectl_get_cluster-resource-attribute.rst @@ -89,10 +89,12 @@ Options inherited from parent commands --admin.insecure Use insecure connection. --admin.insecureSkipVerify InsecureSkipVerify controls whether a client verifies the server's certificate chain and host name. 
Caution : shouldn't be use for production usecases' --admin.maxBackoffDelay string Max delay for grpc backoff (default "8s") + --admin.maxMessageSizeBytes int The max size in bytes for incoming gRPC messages --admin.maxRetries int Max number of gRPC retries (default 4) --admin.perRetryTimeout string gRPC per retry timeout (default "15s") --admin.pkceConfig.refreshTime string grace period from the token expiry after which it would refresh the token. (default "5m0s") --admin.pkceConfig.timeout string Amount of time the browser session would be active for authentication from client app. (default "2m0s") + --admin.proxyCommand strings Command for external proxy-authorization token generation --admin.scopes strings List of scopes to request --admin.tokenRefreshWindow string Max duration between token refresh attempt and token expiry. (default "0s") --admin.tokenUrl string OPTIONAL: Your IdP's token endpoint. It'll be discovered from flyte admin's OAuth Metadata endpoint if not provided. @@ -113,10 +115,17 @@ Options inherited from parent commands --files.outputLocationPrefix string Custom output location prefix for offloaded types (files/schemas). --files.sourceUploadPath string Deprecated: Update flyte admin to avoid having to configure storage access from flytectl. --files.version string Version of the entity to be registered with flyte which are un-versioned after serialization. + -i, --interactive Set this flag to use an interactive CLI --logger.formatter.type string Sets logging format type. (default "json") --logger.level int Sets the minimum logging level. (default 3) --logger.mute Mutes all logs regardless of severity. Intended for benchmarks/tests only. --logger.show-source Includes source code location in logs. 
+ --otel.file.filename string Filename to store exported telemetry traces (default "/tmp/trace.txt") + --otel.jaeger.endpoint string Endpoint for the jaeger telemetry trace ingestor (default "http://localhost:14268/api/traces") + --otel.otlpgrpc.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4317") + --otel.otlphttp.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4318/v1/traces") + --otel.sampler.parentSampler string Sets the parent sampler to use for the tracer (default "always") + --otel.type string Sets the type of exporter to configure [noop/file/jaeger/otlpgrpc/otlphttp]. (default "noop") -o, --output string Specifies the output type - supported formats [TABLE JSON YAML DOT DOTURL]. NOTE: dot, doturl are only supported for Workflow (default "TABLE") -p, --project string Specifies the Flyte project. --storage.cache.max_size_mbs int Maximum size of the cache where the Blob store data is cached in-memory. If not specified or set to 0, cache is not used diff --git a/flytectl/docs/source/gen/flytectl_get_execution-cluster-label.rst b/flytectl/docs/source/gen/flytectl_get_execution-cluster-label.rst index 9147ff0736..31837c402d 100644 --- a/flytectl/docs/source/gen/flytectl_get_execution-cluster-label.rst +++ b/flytectl/docs/source/gen/flytectl_get_execution-cluster-label.rst @@ -88,10 +88,12 @@ Options inherited from parent commands --admin.insecure Use insecure connection. --admin.insecureSkipVerify InsecureSkipVerify controls whether a client verifies the server's certificate chain and host name. 
Caution : shouldn't be use for production usecases' --admin.maxBackoffDelay string Max delay for grpc backoff (default "8s") + --admin.maxMessageSizeBytes int The max size in bytes for incoming gRPC messages --admin.maxRetries int Max number of gRPC retries (default 4) --admin.perRetryTimeout string gRPC per retry timeout (default "15s") --admin.pkceConfig.refreshTime string grace period from the token expiry after which it would refresh the token. (default "5m0s") --admin.pkceConfig.timeout string Amount of time the browser session would be active for authentication from client app. (default "2m0s") + --admin.proxyCommand strings Command for external proxy-authorization token generation --admin.scopes strings List of scopes to request --admin.tokenRefreshWindow string Max duration between token refresh attempt and token expiry. (default "0s") --admin.tokenUrl string OPTIONAL: Your IdP's token endpoint. It'll be discovered from flyte admin's OAuth Metadata endpoint if not provided. @@ -112,10 +114,17 @@ Options inherited from parent commands --files.outputLocationPrefix string Custom output location prefix for offloaded types (files/schemas). --files.sourceUploadPath string Deprecated: Update flyte admin to avoid having to configure storage access from flytectl. --files.version string Version of the entity to be registered with flyte which are un-versioned after serialization. + -i, --interactive Set this flag to use an interactive CLI --logger.formatter.type string Sets logging format type. (default "json") --logger.level int Sets the minimum logging level. (default 3) --logger.mute Mutes all logs regardless of severity. Intended for benchmarks/tests only. --logger.show-source Includes source code location in logs. 
+ --otel.file.filename string Filename to store exported telemetry traces (default "/tmp/trace.txt") + --otel.jaeger.endpoint string Endpoint for the jaeger telemetry trace ingestor (default "http://localhost:14268/api/traces") + --otel.otlpgrpc.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4317") + --otel.otlphttp.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4318/v1/traces") + --otel.sampler.parentSampler string Sets the parent sampler to use for the tracer (default "always") + --otel.type string Sets the type of exporter to configure [noop/file/jaeger/otlpgrpc/otlphttp]. (default "noop") -o, --output string Specifies the output type - supported formats [TABLE JSON YAML DOT DOTURL]. NOTE: dot, doturl are only supported for Workflow (default "TABLE") -p, --project string Specifies the Flyte project. --storage.cache.max_size_mbs int Maximum size of the cache where the Blob store data is cached in-memory. If not specified or set to 0, cache is not used diff --git a/flytectl/docs/source/gen/flytectl_get_execution-queue-attribute.rst b/flytectl/docs/source/gen/flytectl_get_execution-queue-attribute.rst index 71929da6ae..0a4e14d01b 100644 --- a/flytectl/docs/source/gen/flytectl_get_execution-queue-attribute.rst +++ b/flytectl/docs/source/gen/flytectl_get_execution-queue-attribute.rst @@ -91,10 +91,12 @@ Options inherited from parent commands --admin.insecure Use insecure connection. --admin.insecureSkipVerify InsecureSkipVerify controls whether a client verifies the server's certificate chain and host name. 
Caution : shouldn't be use for production usecases' --admin.maxBackoffDelay string Max delay for grpc backoff (default "8s") + --admin.maxMessageSizeBytes int The max size in bytes for incoming gRPC messages --admin.maxRetries int Max number of gRPC retries (default 4) --admin.perRetryTimeout string gRPC per retry timeout (default "15s") --admin.pkceConfig.refreshTime string grace period from the token expiry after which it would refresh the token. (default "5m0s") --admin.pkceConfig.timeout string Amount of time the browser session would be active for authentication from client app. (default "2m0s") + --admin.proxyCommand strings Command for external proxy-authorization token generation --admin.scopes strings List of scopes to request --admin.tokenRefreshWindow string Max duration between token refresh attempt and token expiry. (default "0s") --admin.tokenUrl string OPTIONAL: Your IdP's token endpoint. It'll be discovered from flyte admin's OAuth Metadata endpoint if not provided. @@ -115,10 +117,17 @@ Options inherited from parent commands --files.outputLocationPrefix string Custom output location prefix for offloaded types (files/schemas). --files.sourceUploadPath string Deprecated: Update flyte admin to avoid having to configure storage access from flytectl. --files.version string Version of the entity to be registered with flyte which are un-versioned after serialization. + -i, --interactive Set this flag to use an interactive CLI --logger.formatter.type string Sets logging format type. (default "json") --logger.level int Sets the minimum logging level. (default 3) --logger.mute Mutes all logs regardless of severity. Intended for benchmarks/tests only. --logger.show-source Includes source code location in logs. 
+ --otel.file.filename string Filename to store exported telemetry traces (default "/tmp/trace.txt") + --otel.jaeger.endpoint string Endpoint for the jaeger telemetry trace ingestor (default "http://localhost:14268/api/traces") + --otel.otlpgrpc.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4317") + --otel.otlphttp.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4318/v1/traces") + --otel.sampler.parentSampler string Sets the parent sampler to use for the tracer (default "always") + --otel.type string Sets the type of exporter to configure [noop/file/jaeger/otlpgrpc/otlphttp]. (default "noop") -o, --output string Specifies the output type - supported formats [TABLE JSON YAML DOT DOTURL]. NOTE: dot, doturl are only supported for Workflow (default "TABLE") -p, --project string Specifies the Flyte project. --storage.cache.max_size_mbs int Maximum size of the cache where the Blob store data is cached in-memory. If not specified or set to 0, cache is not used diff --git a/flytectl/docs/source/gen/flytectl_get_execution.rst b/flytectl/docs/source/gen/flytectl_get_execution.rst index 56ed2a6a70..98898d2872 100644 --- a/flytectl/docs/source/gen/flytectl_get_execution.rst +++ b/flytectl/docs/source/gen/flytectl_get_execution.rst @@ -121,10 +121,12 @@ Options inherited from parent commands --admin.insecure Use insecure connection. --admin.insecureSkipVerify InsecureSkipVerify controls whether a client verifies the server's certificate chain and host name. 
Caution : shouldn't be use for production usecases' --admin.maxBackoffDelay string Max delay for grpc backoff (default "8s") + --admin.maxMessageSizeBytes int The max size in bytes for incoming gRPC messages --admin.maxRetries int Max number of gRPC retries (default 4) --admin.perRetryTimeout string gRPC per retry timeout (default "15s") --admin.pkceConfig.refreshTime string grace period from the token expiry after which it would refresh the token. (default "5m0s") --admin.pkceConfig.timeout string Amount of time the browser session would be active for authentication from client app. (default "2m0s") + --admin.proxyCommand strings Command for external proxy-authorization token generation --admin.scopes strings List of scopes to request --admin.tokenRefreshWindow string Max duration between token refresh attempt and token expiry. (default "0s") --admin.tokenUrl string OPTIONAL: Your IdP's token endpoint. It'll be discovered from flyte admin's OAuth Metadata endpoint if not provided. @@ -145,10 +147,17 @@ Options inherited from parent commands --files.outputLocationPrefix string Custom output location prefix for offloaded types (files/schemas). --files.sourceUploadPath string Deprecated: Update flyte admin to avoid having to configure storage access from flytectl. --files.version string Version of the entity to be registered with flyte which are un-versioned after serialization. + -i, --interactive Set this flag to use an interactive CLI --logger.formatter.type string Sets logging format type. (default "json") --logger.level int Sets the minimum logging level. (default 3) --logger.mute Mutes all logs regardless of severity. Intended for benchmarks/tests only. --logger.show-source Includes source code location in logs. 
+ --otel.file.filename string Filename to store exported telemetry traces (default "/tmp/trace.txt") + --otel.jaeger.endpoint string Endpoint for the jaeger telemetry trace ingestor (default "http://localhost:14268/api/traces") + --otel.otlpgrpc.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4317") + --otel.otlphttp.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4318/v1/traces") + --otel.sampler.parentSampler string Sets the parent sampler to use for the tracer (default "always") + --otel.type string Sets the type of exporter to configure [noop/file/jaeger/otlpgrpc/otlphttp]. (default "noop") -o, --output string Specifies the output type - supported formats [TABLE JSON YAML DOT DOTURL]. NOTE: dot, doturl are only supported for Workflow (default "TABLE") -p, --project string Specifies the Flyte project. --storage.cache.max_size_mbs int Maximum size of the cache where the Blob store data is cached in-memory. If not specified or set to 0, cache is not used diff --git a/flytectl/docs/source/gen/flytectl_get_launchplan.rst b/flytectl/docs/source/gen/flytectl_get_launchplan.rst index e340b86636..d8855340b5 100644 --- a/flytectl/docs/source/gen/flytectl_get_launchplan.rst +++ b/flytectl/docs/source/gen/flytectl_get_launchplan.rst @@ -156,10 +156,12 @@ Options inherited from parent commands --admin.insecure Use insecure connection. --admin.insecureSkipVerify InsecureSkipVerify controls whether a client verifies the server's certificate chain and host name. 
Caution : shouldn't be use for production usecases' --admin.maxBackoffDelay string Max delay for grpc backoff (default "8s") + --admin.maxMessageSizeBytes int The max size in bytes for incoming gRPC messages --admin.maxRetries int Max number of gRPC retries (default 4) --admin.perRetryTimeout string gRPC per retry timeout (default "15s") --admin.pkceConfig.refreshTime string grace period from the token expiry after which it would refresh the token. (default "5m0s") --admin.pkceConfig.timeout string Amount of time the browser session would be active for authentication from client app. (default "2m0s") + --admin.proxyCommand strings Command for external proxy-authorization token generation --admin.scopes strings List of scopes to request --admin.tokenRefreshWindow string Max duration between token refresh attempt and token expiry. (default "0s") --admin.tokenUrl string OPTIONAL: Your IdP's token endpoint. It'll be discovered from flyte admin's OAuth Metadata endpoint if not provided. @@ -180,10 +182,17 @@ Options inherited from parent commands --files.outputLocationPrefix string Custom output location prefix for offloaded types (files/schemas). --files.sourceUploadPath string Deprecated: Update flyte admin to avoid having to configure storage access from flytectl. --files.version string Version of the entity to be registered with flyte which are un-versioned after serialization. + -i, --interactive Set this flag to use an interactive CLI --logger.formatter.type string Sets logging format type. (default "json") --logger.level int Sets the minimum logging level. (default 3) --logger.mute Mutes all logs regardless of severity. Intended for benchmarks/tests only. --logger.show-source Includes source code location in logs. 
+ --otel.file.filename string Filename to store exported telemetry traces (default "/tmp/trace.txt") + --otel.jaeger.endpoint string Endpoint for the jaeger telemetry trace ingestor (default "http://localhost:14268/api/traces") + --otel.otlpgrpc.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4317") + --otel.otlphttp.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4318/v1/traces") + --otel.sampler.parentSampler string Sets the parent sampler to use for the tracer (default "always") + --otel.type string Sets the type of exporter to configure [noop/file/jaeger/otlpgrpc/otlphttp]. (default "noop") -o, --output string Specifies the output type - supported formats [TABLE JSON YAML DOT DOTURL]. NOTE: dot, doturl are only supported for Workflow (default "TABLE") -p, --project string Specifies the Flyte project. --storage.cache.max_size_mbs int Maximum size of the cache where the Blob store data is cached in-memory. If not specified or set to 0, cache is not used diff --git a/flytectl/docs/source/gen/flytectl_get_plugin-override.rst b/flytectl/docs/source/gen/flytectl_get_plugin-override.rst index bf9437513c..ef5c0f899a 100644 --- a/flytectl/docs/source/gen/flytectl_get_plugin-override.rst +++ b/flytectl/docs/source/gen/flytectl_get_plugin-override.rst @@ -110,10 +110,12 @@ Options inherited from parent commands --admin.insecure Use insecure connection. --admin.insecureSkipVerify InsecureSkipVerify controls whether a client verifies the server's certificate chain and host name. 
Caution : shouldn't be use for production usecases' --admin.maxBackoffDelay string Max delay for grpc backoff (default "8s") + --admin.maxMessageSizeBytes int The max size in bytes for incoming gRPC messages --admin.maxRetries int Max number of gRPC retries (default 4) --admin.perRetryTimeout string gRPC per retry timeout (default "15s") --admin.pkceConfig.refreshTime string grace period from the token expiry after which it would refresh the token. (default "5m0s") --admin.pkceConfig.timeout string Amount of time the browser session would be active for authentication from client app. (default "2m0s") + --admin.proxyCommand strings Command for external proxy-authorization token generation --admin.scopes strings List of scopes to request --admin.tokenRefreshWindow string Max duration between token refresh attempt and token expiry. (default "0s") --admin.tokenUrl string OPTIONAL: Your IdP's token endpoint. It'll be discovered from flyte admin's OAuth Metadata endpoint if not provided. @@ -134,10 +136,17 @@ Options inherited from parent commands --files.outputLocationPrefix string Custom output location prefix for offloaded types (files/schemas). --files.sourceUploadPath string Deprecated: Update flyte admin to avoid having to configure storage access from flytectl. --files.version string Version of the entity to be registered with flyte which are un-versioned after serialization. + -i, --interactive Set this flag to use an interactive CLI --logger.formatter.type string Sets logging format type. (default "json") --logger.level int Sets the minimum logging level. (default 3) --logger.mute Mutes all logs regardless of severity. Intended for benchmarks/tests only. --logger.show-source Includes source code location in logs. 
+ --otel.file.filename string Filename to store exported telemetry traces (default "/tmp/trace.txt") + --otel.jaeger.endpoint string Endpoint for the jaeger telemetry trace ingestor (default "http://localhost:14268/api/traces") + --otel.otlpgrpc.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4317") + --otel.otlphttp.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4318/v1/traces") + --otel.sampler.parentSampler string Sets the parent sampler to use for the tracer (default "always") + --otel.type string Sets the type of exporter to configure [noop/file/jaeger/otlpgrpc/otlphttp]. (default "noop") -o, --output string Specifies the output type - supported formats [TABLE JSON YAML DOT DOTURL]. NOTE: dot, doturl are only supported for Workflow (default "TABLE") -p, --project string Specifies the Flyte project. --storage.cache.max_size_mbs int Maximum size of the cache where the Blob store data is cached in-memory. If not specified or set to 0, cache is not used diff --git a/flytectl/docs/source/gen/flytectl_get_project.rst b/flytectl/docs/source/gen/flytectl_get_project.rst index 07d25570a2..f9f6d4b3b0 100644 --- a/flytectl/docs/source/gen/flytectl_get_project.rst +++ b/flytectl/docs/source/gen/flytectl_get_project.rst @@ -93,10 +93,12 @@ Options inherited from parent commands --admin.insecure Use insecure connection. --admin.insecureSkipVerify InsecureSkipVerify controls whether a client verifies the server's certificate chain and host name. Caution : shouldn't be use for production usecases' --admin.maxBackoffDelay string Max delay for grpc backoff (default "8s") + --admin.maxMessageSizeBytes int The max size in bytes for incoming gRPC messages --admin.maxRetries int Max number of gRPC retries (default 4) --admin.perRetryTimeout string gRPC per retry timeout (default "15s") --admin.pkceConfig.refreshTime string grace period from the token expiry after which it would refresh the token. 
(default "5m0s") --admin.pkceConfig.timeout string Amount of time the browser session would be active for authentication from client app. (default "2m0s") + --admin.proxyCommand strings Command for external proxy-authorization token generation --admin.scopes strings List of scopes to request --admin.tokenRefreshWindow string Max duration between token refresh attempt and token expiry. (default "0s") --admin.tokenUrl string OPTIONAL: Your IdP's token endpoint. It'll be discovered from flyte admin's OAuth Metadata endpoint if not provided. @@ -117,10 +119,17 @@ Options inherited from parent commands --files.outputLocationPrefix string Custom output location prefix for offloaded types (files/schemas). --files.sourceUploadPath string Deprecated: Update flyte admin to avoid having to configure storage access from flytectl. --files.version string Version of the entity to be registered with flyte which are un-versioned after serialization. + -i, --interactive Set this flag to use an interactive CLI --logger.formatter.type string Sets logging format type. (default "json") --logger.level int Sets the minimum logging level. (default 3) --logger.mute Mutes all logs regardless of severity. Intended for benchmarks/tests only. --logger.show-source Includes source code location in logs. + --otel.file.filename string Filename to store exported telemetry traces (default "/tmp/trace.txt") + --otel.jaeger.endpoint string Endpoint for the jaeger telemetry trace ingestor (default "http://localhost:14268/api/traces") + --otel.otlpgrpc.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4317") + --otel.otlphttp.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4318/v1/traces") + --otel.sampler.parentSampler string Sets the parent sampler to use for the tracer (default "always") + --otel.type string Sets the type of exporter to configure [noop/file/jaeger/otlpgrpc/otlphttp]. 
(default "noop") -o, --output string Specifies the output type - supported formats [TABLE JSON YAML DOT DOTURL]. NOTE: dot, doturl are only supported for Workflow (default "TABLE") -p, --project string Specifies the Flyte project. --storage.cache.max_size_mbs int Maximum size of the cache where the Blob store data is cached in-memory. If not specified or set to 0, cache is not used diff --git a/flytectl/docs/source/gen/flytectl_get_task-resource-attribute.rst b/flytectl/docs/source/gen/flytectl_get_task-resource-attribute.rst index 2b2f369afe..b55906720b 100644 --- a/flytectl/docs/source/gen/flytectl_get_task-resource-attribute.rst +++ b/flytectl/docs/source/gen/flytectl_get_task-resource-attribute.rst @@ -41,7 +41,7 @@ Example: content of tra.yaml: :: - flytectl get task-resource-attribute --attrFile tra.yaml + flytectl get -p flytesnacks -d development task-resource-attribute --attrFile tra.yaml .. code-block:: yaml @@ -93,10 +93,12 @@ Options inherited from parent commands --admin.insecure Use insecure connection. --admin.insecureSkipVerify InsecureSkipVerify controls whether a client verifies the server's certificate chain and host name. Caution : shouldn't be use for production usecases' --admin.maxBackoffDelay string Max delay for grpc backoff (default "8s") + --admin.maxMessageSizeBytes int The max size in bytes for incoming gRPC messages --admin.maxRetries int Max number of gRPC retries (default 4) --admin.perRetryTimeout string gRPC per retry timeout (default "15s") --admin.pkceConfig.refreshTime string grace period from the token expiry after which it would refresh the token. (default "5m0s") --admin.pkceConfig.timeout string Amount of time the browser session would be active for authentication from client app. 
(default "2m0s") + --admin.proxyCommand strings Command for external proxy-authorization token generation --admin.scopes strings List of scopes to request --admin.tokenRefreshWindow string Max duration between token refresh attempt and token expiry. (default "0s") --admin.tokenUrl string OPTIONAL: Your IdP's token endpoint. It'll be discovered from flyte admin's OAuth Metadata endpoint if not provided. @@ -117,10 +119,17 @@ Options inherited from parent commands --files.outputLocationPrefix string Custom output location prefix for offloaded types (files/schemas). --files.sourceUploadPath string Deprecated: Update flyte admin to avoid having to configure storage access from flytectl. --files.version string Version of the entity to be registered with flyte which are un-versioned after serialization. + -i, --interactive Set this flag to use an interactive CLI --logger.formatter.type string Sets logging format type. (default "json") --logger.level int Sets the minimum logging level. (default 3) --logger.mute Mutes all logs regardless of severity. Intended for benchmarks/tests only. --logger.show-source Includes source code location in logs. + --otel.file.filename string Filename to store exported telemetry traces (default "/tmp/trace.txt") + --otel.jaeger.endpoint string Endpoint for the jaeger telemetry trace ingestor (default "http://localhost:14268/api/traces") + --otel.otlpgrpc.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4317") + --otel.otlphttp.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4318/v1/traces") + --otel.sampler.parentSampler string Sets the parent sampler to use for the tracer (default "always") + --otel.type string Sets the type of exporter to configure [noop/file/jaeger/otlpgrpc/otlphttp]. (default "noop") -o, --output string Specifies the output type - supported formats [TABLE JSON YAML DOT DOTURL]. 
NOTE: dot, doturl are only supported for Workflow (default "TABLE") -p, --project string Specifies the Flyte project. --storage.cache.max_size_mbs int Maximum size of the cache where the Blob store data is cached in-memory. If not specified or set to 0, cache is not used diff --git a/flytectl/docs/source/gen/flytectl_get_task.rst b/flytectl/docs/source/gen/flytectl_get_task.rst index 383645221e..92d0f7521a 100644 --- a/flytectl/docs/source/gen/flytectl_get_task.rst +++ b/flytectl/docs/source/gen/flytectl_get_task.rst @@ -137,10 +137,12 @@ Options inherited from parent commands --admin.insecure Use insecure connection. --admin.insecureSkipVerify InsecureSkipVerify controls whether a client verifies the server's certificate chain and host name. Caution : shouldn't be use for production usecases' --admin.maxBackoffDelay string Max delay for grpc backoff (default "8s") + --admin.maxMessageSizeBytes int The max size in bytes for incoming gRPC messages --admin.maxRetries int Max number of gRPC retries (default 4) --admin.perRetryTimeout string gRPC per retry timeout (default "15s") --admin.pkceConfig.refreshTime string grace period from the token expiry after which it would refresh the token. (default "5m0s") --admin.pkceConfig.timeout string Amount of time the browser session would be active for authentication from client app. (default "2m0s") + --admin.proxyCommand strings Command for external proxy-authorization token generation --admin.scopes strings List of scopes to request --admin.tokenRefreshWindow string Max duration between token refresh attempt and token expiry. (default "0s") --admin.tokenUrl string OPTIONAL: Your IdP's token endpoint. It'll be discovered from flyte admin's OAuth Metadata endpoint if not provided. @@ -161,10 +163,17 @@ Options inherited from parent commands --files.outputLocationPrefix string Custom output location prefix for offloaded types (files/schemas). 
--files.sourceUploadPath string Deprecated: Update flyte admin to avoid having to configure storage access from flytectl. --files.version string Version of the entity to be registered with flyte which are un-versioned after serialization. + -i, --interactive Set this flag to use an interactive CLI --logger.formatter.type string Sets logging format type. (default "json") --logger.level int Sets the minimum logging level. (default 3) --logger.mute Mutes all logs regardless of severity. Intended for benchmarks/tests only. --logger.show-source Includes source code location in logs. + --otel.file.filename string Filename to store exported telemetry traces (default "/tmp/trace.txt") + --otel.jaeger.endpoint string Endpoint for the jaeger telemetry trace ingestor (default "http://localhost:14268/api/traces") + --otel.otlpgrpc.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4317") + --otel.otlphttp.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4318/v1/traces") + --otel.sampler.parentSampler string Sets the parent sampler to use for the tracer (default "always") + --otel.type string Sets the type of exporter to configure [noop/file/jaeger/otlpgrpc/otlphttp]. (default "noop") -o, --output string Specifies the output type - supported formats [TABLE JSON YAML DOT DOTURL]. NOTE: dot, doturl are only supported for Workflow (default "TABLE") -p, --project string Specifies the Flyte project. --storage.cache.max_size_mbs int Maximum size of the cache where the Blob store data is cached in-memory. 
If not specified or set to 0, cache is not used diff --git a/flytectl/docs/source/gen/flytectl_get_workflow-execution-config.rst b/flytectl/docs/source/gen/flytectl_get_workflow-execution-config.rst index 8c332c3ada..3f0820838a 100644 --- a/flytectl/docs/source/gen/flytectl_get_workflow-execution-config.rst +++ b/flytectl/docs/source/gen/flytectl_get_workflow-execution-config.rst @@ -150,10 +150,12 @@ Options inherited from parent commands --admin.insecure Use insecure connection. --admin.insecureSkipVerify InsecureSkipVerify controls whether a client verifies the server's certificate chain and host name. Caution : shouldn't be use for production usecases' --admin.maxBackoffDelay string Max delay for grpc backoff (default "8s") + --admin.maxMessageSizeBytes int The max size in bytes for incoming gRPC messages --admin.maxRetries int Max number of gRPC retries (default 4) --admin.perRetryTimeout string gRPC per retry timeout (default "15s") --admin.pkceConfig.refreshTime string grace period from the token expiry after which it would refresh the token. (default "5m0s") --admin.pkceConfig.timeout string Amount of time the browser session would be active for authentication from client app. (default "2m0s") + --admin.proxyCommand strings Command for external proxy-authorization token generation --admin.scopes strings List of scopes to request --admin.tokenRefreshWindow string Max duration between token refresh attempt and token expiry. (default "0s") --admin.tokenUrl string OPTIONAL: Your IdP's token endpoint. It'll be discovered from flyte admin's OAuth Metadata endpoint if not provided. @@ -174,10 +176,17 @@ Options inherited from parent commands --files.outputLocationPrefix string Custom output location prefix for offloaded types (files/schemas). --files.sourceUploadPath string Deprecated: Update flyte admin to avoid having to configure storage access from flytectl. 
--files.version string Version of the entity to be registered with flyte which are un-versioned after serialization. + -i, --interactive Set this flag to use an interactive CLI --logger.formatter.type string Sets logging format type. (default "json") --logger.level int Sets the minimum logging level. (default 3) --logger.mute Mutes all logs regardless of severity. Intended for benchmarks/tests only. --logger.show-source Includes source code location in logs. + --otel.file.filename string Filename to store exported telemetry traces (default "/tmp/trace.txt") + --otel.jaeger.endpoint string Endpoint for the jaeger telemetry trace ingestor (default "http://localhost:14268/api/traces") + --otel.otlpgrpc.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4317") + --otel.otlphttp.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4318/v1/traces") + --otel.sampler.parentSampler string Sets the parent sampler to use for the tracer (default "always") + --otel.type string Sets the type of exporter to configure [noop/file/jaeger/otlpgrpc/otlphttp]. (default "noop") -o, --output string Specifies the output type - supported formats [TABLE JSON YAML DOT DOTURL]. NOTE: dot, doturl are only supported for Workflow (default "TABLE") -p, --project string Specifies the Flyte project. --storage.cache.max_size_mbs int Maximum size of the cache where the Blob store data is cached in-memory. If not specified or set to 0, cache is not used diff --git a/flytectl/docs/source/gen/flytectl_get_workflow.rst b/flytectl/docs/source/gen/flytectl_get_workflow.rst index f446fdeb9f..1dfa2a2213 100644 --- a/flytectl/docs/source/gen/flytectl_get_workflow.rst +++ b/flytectl/docs/source/gen/flytectl_get_workflow.rst @@ -121,10 +121,12 @@ Options inherited from parent commands --admin.insecure Use insecure connection. 
--admin.insecureSkipVerify InsecureSkipVerify controls whether a client verifies the server's certificate chain and host name. Caution : shouldn't be use for production usecases' --admin.maxBackoffDelay string Max delay for grpc backoff (default "8s") + --admin.maxMessageSizeBytes int The max size in bytes for incoming gRPC messages --admin.maxRetries int Max number of gRPC retries (default 4) --admin.perRetryTimeout string gRPC per retry timeout (default "15s") --admin.pkceConfig.refreshTime string grace period from the token expiry after which it would refresh the token. (default "5m0s") --admin.pkceConfig.timeout string Amount of time the browser session would be active for authentication from client app. (default "2m0s") + --admin.proxyCommand strings Command for external proxy-authorization token generation --admin.scopes strings List of scopes to request --admin.tokenRefreshWindow string Max duration between token refresh attempt and token expiry. (default "0s") --admin.tokenUrl string OPTIONAL: Your IdP's token endpoint. It'll be discovered from flyte admin's OAuth Metadata endpoint if not provided. @@ -145,10 +147,17 @@ Options inherited from parent commands --files.outputLocationPrefix string Custom output location prefix for offloaded types (files/schemas). --files.sourceUploadPath string Deprecated: Update flyte admin to avoid having to configure storage access from flytectl. --files.version string Version of the entity to be registered with flyte which are un-versioned after serialization. + -i, --interactive Set this flag to use an interactive CLI --logger.formatter.type string Sets logging format type. (default "json") --logger.level int Sets the minimum logging level. (default 3) --logger.mute Mutes all logs regardless of severity. Intended for benchmarks/tests only. --logger.show-source Includes source code location in logs. 
+ --otel.file.filename string Filename to store exported telemetry traces (default "/tmp/trace.txt") + --otel.jaeger.endpoint string Endpoint for the jaeger telemetry trace ingestor (default "http://localhost:14268/api/traces") + --otel.otlpgrpc.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4317") + --otel.otlphttp.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4318/v1/traces") + --otel.sampler.parentSampler string Sets the parent sampler to use for the tracer (default "always") + --otel.type string Sets the type of exporter to configure [noop/file/jaeger/otlpgrpc/otlphttp]. (default "noop") -o, --output string Specifies the output type - supported formats [TABLE JSON YAML DOT DOTURL]. NOTE: dot, doturl are only supported for Workflow (default "TABLE") -p, --project string Specifies the Flyte project. --storage.cache.max_size_mbs int Maximum size of the cache where the Blob store data is cached in-memory. If not specified or set to 0, cache is not used diff --git a/flytectl/docs/source/gen/flytectl_register.rst b/flytectl/docs/source/gen/flytectl_register.rst index 745dffa9b6..5720f3915a 100644 --- a/flytectl/docs/source/gen/flytectl_register.rst +++ b/flytectl/docs/source/gen/flytectl_register.rst @@ -46,10 +46,12 @@ Options inherited from parent commands --admin.insecure Use insecure connection. --admin.insecureSkipVerify InsecureSkipVerify controls whether a client verifies the server's certificate chain and host name. Caution : shouldn't be use for production usecases' --admin.maxBackoffDelay string Max delay for grpc backoff (default "8s") + --admin.maxMessageSizeBytes int The max size in bytes for incoming gRPC messages --admin.maxRetries int Max number of gRPC retries (default 4) --admin.perRetryTimeout string gRPC per retry timeout (default "15s") --admin.pkceConfig.refreshTime string grace period from the token expiry after which it would refresh the token. 
(default "5m0s") --admin.pkceConfig.timeout string Amount of time the browser session would be active for authentication from client app. (default "2m0s") + --admin.proxyCommand strings Command for external proxy-authorization token generation --admin.scopes strings List of scopes to request --admin.tokenRefreshWindow string Max duration between token refresh attempt and token expiry. (default "0s") --admin.tokenUrl string OPTIONAL: Your IdP's token endpoint. It'll be discovered from flyte admin's OAuth Metadata endpoint if not provided. @@ -70,10 +72,17 @@ Options inherited from parent commands --files.outputLocationPrefix string Custom output location prefix for offloaded types (files/schemas). --files.sourceUploadPath string Deprecated: Update flyte admin to avoid having to configure storage access from flytectl. --files.version string Version of the entity to be registered with flyte which are un-versioned after serialization. + -i, --interactive Set this flag to use an interactive CLI --logger.formatter.type string Sets logging format type. (default "json") --logger.level int Sets the minimum logging level. (default 3) --logger.mute Mutes all logs regardless of severity. Intended for benchmarks/tests only. --logger.show-source Includes source code location in logs. + --otel.file.filename string Filename to store exported telemetry traces (default "/tmp/trace.txt") + --otel.jaeger.endpoint string Endpoint for the jaeger telemetry trace ingestor (default "http://localhost:14268/api/traces") + --otel.otlpgrpc.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4317") + --otel.otlphttp.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4318/v1/traces") + --otel.sampler.parentSampler string Sets the parent sampler to use for the tracer (default "always") + --otel.type string Sets the type of exporter to configure [noop/file/jaeger/otlpgrpc/otlphttp]. 
(default "noop") -o, --output string Specifies the output type - supported formats [TABLE JSON YAML DOT DOTURL]. NOTE: dot, doturl are only supported for Workflow (default "TABLE") -p, --project string Specifies the Flyte project. --storage.cache.max_size_mbs int Maximum size of the cache where the Blob store data is cached in-memory. If not specified or set to 0, cache is not used diff --git a/flytectl/docs/source/gen/flytectl_register_examples.rst b/flytectl/docs/source/gen/flytectl_register_examples.rst index 9c681548d1..a61c709e23 100644 --- a/flytectl/docs/source/gen/flytectl_register_examples.rst +++ b/flytectl/docs/source/gen/flytectl_register_examples.rst @@ -72,10 +72,12 @@ Options inherited from parent commands --admin.insecure Use insecure connection. --admin.insecureSkipVerify InsecureSkipVerify controls whether a client verifies the server's certificate chain and host name. Caution : shouldn't be use for production usecases' --admin.maxBackoffDelay string Max delay for grpc backoff (default "8s") + --admin.maxMessageSizeBytes int The max size in bytes for incoming gRPC messages --admin.maxRetries int Max number of gRPC retries (default 4) --admin.perRetryTimeout string gRPC per retry timeout (default "15s") --admin.pkceConfig.refreshTime string grace period from the token expiry after which it would refresh the token. (default "5m0s") --admin.pkceConfig.timeout string Amount of time the browser session would be active for authentication from client app. (default "2m0s") + --admin.proxyCommand strings Command for external proxy-authorization token generation --admin.scopes strings List of scopes to request --admin.tokenRefreshWindow string Max duration between token refresh attempt and token expiry. (default "0s") --admin.tokenUrl string OPTIONAL: Your IdP's token endpoint. It'll be discovered from flyte admin's OAuth Metadata endpoint if not provided. 
@@ -96,10 +98,17 @@ Options inherited from parent commands --files.outputLocationPrefix string Custom output location prefix for offloaded types (files/schemas). --files.sourceUploadPath string Deprecated: Update flyte admin to avoid having to configure storage access from flytectl. --files.version string Version of the entity to be registered with flyte which are un-versioned after serialization. + -i, --interactive Set this flag to use an interactive CLI --logger.formatter.type string Sets logging format type. (default "json") --logger.level int Sets the minimum logging level. (default 3) --logger.mute Mutes all logs regardless of severity. Intended for benchmarks/tests only. --logger.show-source Includes source code location in logs. + --otel.file.filename string Filename to store exported telemetry traces (default "/tmp/trace.txt") + --otel.jaeger.endpoint string Endpoint for the jaeger telemetry trace ingestor (default "http://localhost:14268/api/traces") + --otel.otlpgrpc.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4317") + --otel.otlphttp.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4318/v1/traces") + --otel.sampler.parentSampler string Sets the parent sampler to use for the tracer (default "always") + --otel.type string Sets the type of exporter to configure [noop/file/jaeger/otlpgrpc/otlphttp]. (default "noop") -o, --output string Specifies the output type - supported formats [TABLE JSON YAML DOT DOTURL]. NOTE: dot, doturl are only supported for Workflow (default "TABLE") -p, --project string Specifies the Flyte project. --storage.cache.max_size_mbs int Maximum size of the cache where the Blob store data is cached in-memory. 
If not specified or set to 0, cache is not used diff --git a/flytectl/docs/source/gen/flytectl_register_files.rst b/flytectl/docs/source/gen/flytectl_register_files.rst index 512b1166b6..2f29c612f2 100644 --- a/flytectl/docs/source/gen/flytectl_register_files.rst +++ b/flytectl/docs/source/gen/flytectl_register_files.rst @@ -149,10 +149,12 @@ Options inherited from parent commands --admin.insecure Use insecure connection. --admin.insecureSkipVerify InsecureSkipVerify controls whether a client verifies the server's certificate chain and host name. Caution : shouldn't be use for production usecases' --admin.maxBackoffDelay string Max delay for grpc backoff (default "8s") + --admin.maxMessageSizeBytes int The max size in bytes for incoming gRPC messages --admin.maxRetries int Max number of gRPC retries (default 4) --admin.perRetryTimeout string gRPC per retry timeout (default "15s") --admin.pkceConfig.refreshTime string grace period from the token expiry after which it would refresh the token. (default "5m0s") --admin.pkceConfig.timeout string Amount of time the browser session would be active for authentication from client app. (default "2m0s") + --admin.proxyCommand strings Command for external proxy-authorization token generation --admin.scopes strings List of scopes to request --admin.tokenRefreshWindow string Max duration between token refresh attempt and token expiry. (default "0s") --admin.tokenUrl string OPTIONAL: Your IdP's token endpoint. It'll be discovered from flyte admin's OAuth Metadata endpoint if not provided. @@ -173,10 +175,17 @@ Options inherited from parent commands --files.outputLocationPrefix string Custom output location prefix for offloaded types (files/schemas). --files.sourceUploadPath string Deprecated: Update flyte admin to avoid having to configure storage access from flytectl. --files.version string Version of the entity to be registered with flyte which are un-versioned after serialization. 
+ -i, --interactive Set this flag to use an interactive CLI --logger.formatter.type string Sets logging format type. (default "json") --logger.level int Sets the minimum logging level. (default 3) --logger.mute Mutes all logs regardless of severity. Intended for benchmarks/tests only. --logger.show-source Includes source code location in logs. + --otel.file.filename string Filename to store exported telemetry traces (default "/tmp/trace.txt") + --otel.jaeger.endpoint string Endpoint for the jaeger telemetry trace ingestor (default "http://localhost:14268/api/traces") + --otel.otlpgrpc.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4317") + --otel.otlphttp.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4318/v1/traces") + --otel.sampler.parentSampler string Sets the parent sampler to use for the tracer (default "always") + --otel.type string Sets the type of exporter to configure [noop/file/jaeger/otlpgrpc/otlphttp]. (default "noop") -o, --output string Specifies the output type - supported formats [TABLE JSON YAML DOT DOTURL]. NOTE: dot, doturl are only supported for Workflow (default "TABLE") -p, --project string Specifies the Flyte project. --storage.cache.max_size_mbs int Maximum size of the cache where the Blob store data is cached in-memory. If not specified or set to 0, cache is not used diff --git a/flytectl/docs/source/gen/flytectl_sandbox.rst b/flytectl/docs/source/gen/flytectl_sandbox.rst index 8cc08fc448..b609cfb1ea 100644 --- a/flytectl/docs/source/gen/flytectl_sandbox.rst +++ b/flytectl/docs/source/gen/flytectl_sandbox.rst @@ -70,10 +70,12 @@ Options inherited from parent commands --admin.insecure Use insecure connection. --admin.insecureSkipVerify InsecureSkipVerify controls whether a client verifies the server's certificate chain and host name. 
Caution : shouldn't be use for production usecases' --admin.maxBackoffDelay string Max delay for grpc backoff (default "8s") + --admin.maxMessageSizeBytes int The max size in bytes for incoming gRPC messages --admin.maxRetries int Max number of gRPC retries (default 4) --admin.perRetryTimeout string gRPC per retry timeout (default "15s") --admin.pkceConfig.refreshTime string grace period from the token expiry after which it would refresh the token. (default "5m0s") --admin.pkceConfig.timeout string Amount of time the browser session would be active for authentication from client app. (default "2m0s") + --admin.proxyCommand strings Command for external proxy-authorization token generation --admin.scopes strings List of scopes to request --admin.tokenRefreshWindow string Max duration between token refresh attempt and token expiry. (default "0s") --admin.tokenUrl string OPTIONAL: Your IdP's token endpoint. It'll be discovered from flyte admin's OAuth Metadata endpoint if not provided. @@ -94,10 +96,17 @@ Options inherited from parent commands --files.outputLocationPrefix string Custom output location prefix for offloaded types (files/schemas). --files.sourceUploadPath string Deprecated: Update flyte admin to avoid having to configure storage access from flytectl. --files.version string Version of the entity to be registered with flyte which are un-versioned after serialization. + -i, --interactive Set this flag to use an interactive CLI --logger.formatter.type string Sets logging format type. (default "json") --logger.level int Sets the minimum logging level. (default 3) --logger.mute Mutes all logs regardless of severity. Intended for benchmarks/tests only. --logger.show-source Includes source code location in logs. 
+ --otel.file.filename string Filename to store exported telemetry traces (default "/tmp/trace.txt") + --otel.jaeger.endpoint string Endpoint for the jaeger telemetry trace ingestor (default "http://localhost:14268/api/traces") + --otel.otlpgrpc.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4317") + --otel.otlphttp.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4318/v1/traces") + --otel.sampler.parentSampler string Sets the parent sampler to use for the tracer (default "always") + --otel.type string Sets the type of exporter to configure [noop/file/jaeger/otlpgrpc/otlphttp]. (default "noop") -o, --output string Specifies the output type - supported formats [TABLE JSON YAML DOT DOTURL]. NOTE: dot, doturl are only supported for Workflow (default "TABLE") -p, --project string Specifies the Flyte project. --storage.cache.max_size_mbs int Maximum size of the cache where the Blob store data is cached in-memory. If not specified or set to 0, cache is not used diff --git a/flytectl/docs/source/gen/flytectl_sandbox_exec.rst b/flytectl/docs/source/gen/flytectl_sandbox_exec.rst index f1f3c44600..7089fa97ec 100644 --- a/flytectl/docs/source/gen/flytectl_sandbox_exec.rst +++ b/flytectl/docs/source/gen/flytectl_sandbox_exec.rst @@ -53,10 +53,12 @@ Options inherited from parent commands --admin.insecure Use insecure connection. --admin.insecureSkipVerify InsecureSkipVerify controls whether a client verifies the server's certificate chain and host name. 
Caution : shouldn't be use for production usecases' --admin.maxBackoffDelay string Max delay for grpc backoff (default "8s") + --admin.maxMessageSizeBytes int The max size in bytes for incoming gRPC messages --admin.maxRetries int Max number of gRPC retries (default 4) --admin.perRetryTimeout string gRPC per retry timeout (default "15s") --admin.pkceConfig.refreshTime string grace period from the token expiry after which it would refresh the token. (default "5m0s") --admin.pkceConfig.timeout string Amount of time the browser session would be active for authentication from client app. (default "2m0s") + --admin.proxyCommand strings Command for external proxy-authorization token generation --admin.scopes strings List of scopes to request --admin.tokenRefreshWindow string Max duration between token refresh attempt and token expiry. (default "0s") --admin.tokenUrl string OPTIONAL: Your IdP's token endpoint. It'll be discovered from flyte admin's OAuth Metadata endpoint if not provided. @@ -77,10 +79,17 @@ Options inherited from parent commands --files.outputLocationPrefix string Custom output location prefix for offloaded types (files/schemas). --files.sourceUploadPath string Deprecated: Update flyte admin to avoid having to configure storage access from flytectl. --files.version string Version of the entity to be registered with flyte which are un-versioned after serialization. + -i, --interactive Set this flag to use an interactive CLI --logger.formatter.type string Sets logging format type. (default "json") --logger.level int Sets the minimum logging level. (default 3) --logger.mute Mutes all logs regardless of severity. Intended for benchmarks/tests only. --logger.show-source Includes source code location in logs. 
+ --otel.file.filename string Filename to store exported telemetry traces (default "/tmp/trace.txt") + --otel.jaeger.endpoint string Endpoint for the jaeger telemetry trace ingestor (default "http://localhost:14268/api/traces") + --otel.otlpgrpc.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4317") + --otel.otlphttp.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4318/v1/traces") + --otel.sampler.parentSampler string Sets the parent sampler to use for the tracer (default "always") + --otel.type string Sets the type of exporter to configure [noop/file/jaeger/otlpgrpc/otlphttp]. (default "noop") -o, --output string Specifies the output type - supported formats [TABLE JSON YAML DOT DOTURL]. NOTE: dot, doturl are only supported for Workflow (default "TABLE") -p, --project string Specifies the Flyte project. --storage.cache.max_size_mbs int Maximum size of the cache where the Blob store data is cached in-memory. If not specified or set to 0, cache is not used diff --git a/flytectl/docs/source/gen/flytectl_sandbox_start.rst b/flytectl/docs/source/gen/flytectl_sandbox_start.rst index 048b92b24e..e2c46b0ccc 100644 --- a/flytectl/docs/source/gen/flytectl_sandbox_start.rst +++ b/flytectl/docs/source/gen/flytectl_sandbox_start.rst @@ -122,10 +122,12 @@ Options inherited from parent commands --admin.insecure Use insecure connection. --admin.insecureSkipVerify InsecureSkipVerify controls whether a client verifies the server's certificate chain and host name. 
Caution : shouldn't be use for production usecases' --admin.maxBackoffDelay string Max delay for grpc backoff (default "8s") + --admin.maxMessageSizeBytes int The max size in bytes for incoming gRPC messages --admin.maxRetries int Max number of gRPC retries (default 4) --admin.perRetryTimeout string gRPC per retry timeout (default "15s") --admin.pkceConfig.refreshTime string grace period from the token expiry after which it would refresh the token. (default "5m0s") --admin.pkceConfig.timeout string Amount of time the browser session would be active for authentication from client app. (default "2m0s") + --admin.proxyCommand strings Command for external proxy-authorization token generation --admin.scopes strings List of scopes to request --admin.tokenRefreshWindow string Max duration between token refresh attempt and token expiry. (default "0s") --admin.tokenUrl string OPTIONAL: Your IdP's token endpoint. It'll be discovered from flyte admin's OAuth Metadata endpoint if not provided. @@ -146,10 +148,17 @@ Options inherited from parent commands --files.outputLocationPrefix string Custom output location prefix for offloaded types (files/schemas). --files.sourceUploadPath string Deprecated: Update flyte admin to avoid having to configure storage access from flytectl. --files.version string Version of the entity to be registered with flyte which are un-versioned after serialization. + -i, --interactive Set this flag to use an interactive CLI --logger.formatter.type string Sets logging format type. (default "json") --logger.level int Sets the minimum logging level. (default 3) --logger.mute Mutes all logs regardless of severity. Intended for benchmarks/tests only. --logger.show-source Includes source code location in logs. 
+ --otel.file.filename string Filename to store exported telemetry traces (default "/tmp/trace.txt") + --otel.jaeger.endpoint string Endpoint for the jaeger telemetry trace ingestor (default "http://localhost:14268/api/traces") + --otel.otlpgrpc.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4317") + --otel.otlphttp.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4318/v1/traces") + --otel.sampler.parentSampler string Sets the parent sampler to use for the tracer (default "always") + --otel.type string Sets the type of exporter to configure [noop/file/jaeger/otlpgrpc/otlphttp]. (default "noop") -o, --output string Specifies the output type - supported formats [TABLE JSON YAML DOT DOTURL]. NOTE: dot, doturl are only supported for Workflow (default "TABLE") -p, --project string Specifies the Flyte project. --storage.cache.max_size_mbs int Maximum size of the cache where the Blob store data is cached in-memory. If not specified or set to 0, cache is not used diff --git a/flytectl/docs/source/gen/flytectl_sandbox_status.rst b/flytectl/docs/source/gen/flytectl_sandbox_status.rst index abce271578..abc05313e6 100644 --- a/flytectl/docs/source/gen/flytectl_sandbox_status.rst +++ b/flytectl/docs/source/gen/flytectl_sandbox_status.rst @@ -53,10 +53,12 @@ Options inherited from parent commands --admin.insecure Use insecure connection. --admin.insecureSkipVerify InsecureSkipVerify controls whether a client verifies the server's certificate chain and host name. 
Caution : shouldn't be use for production usecases' --admin.maxBackoffDelay string Max delay for grpc backoff (default "8s") + --admin.maxMessageSizeBytes int The max size in bytes for incoming gRPC messages --admin.maxRetries int Max number of gRPC retries (default 4) --admin.perRetryTimeout string gRPC per retry timeout (default "15s") --admin.pkceConfig.refreshTime string grace period from the token expiry after which it would refresh the token. (default "5m0s") --admin.pkceConfig.timeout string Amount of time the browser session would be active for authentication from client app. (default "2m0s") + --admin.proxyCommand strings Command for external proxy-authorization token generation --admin.scopes strings List of scopes to request --admin.tokenRefreshWindow string Max duration between token refresh attempt and token expiry. (default "0s") --admin.tokenUrl string OPTIONAL: Your IdP's token endpoint. It'll be discovered from flyte admin's OAuth Metadata endpoint if not provided. @@ -77,10 +79,17 @@ Options inherited from parent commands --files.outputLocationPrefix string Custom output location prefix for offloaded types (files/schemas). --files.sourceUploadPath string Deprecated: Update flyte admin to avoid having to configure storage access from flytectl. --files.version string Version of the entity to be registered with flyte which are un-versioned after serialization. + -i, --interactive Set this flag to use an interactive CLI --logger.formatter.type string Sets logging format type. (default "json") --logger.level int Sets the minimum logging level. (default 3) --logger.mute Mutes all logs regardless of severity. Intended for benchmarks/tests only. --logger.show-source Includes source code location in logs. 
+ --otel.file.filename string Filename to store exported telemetry traces (default "/tmp/trace.txt") + --otel.jaeger.endpoint string Endpoint for the jaeger telemetry trace ingestor (default "http://localhost:14268/api/traces") + --otel.otlpgrpc.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4317") + --otel.otlphttp.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4318/v1/traces") + --otel.sampler.parentSampler string Sets the parent sampler to use for the tracer (default "always") + --otel.type string Sets the type of exporter to configure [noop/file/jaeger/otlpgrpc/otlphttp]. (default "noop") -o, --output string Specifies the output type - supported formats [TABLE JSON YAML DOT DOTURL]. NOTE: dot, doturl are only supported for Workflow (default "TABLE") -p, --project string Specifies the Flyte project. --storage.cache.max_size_mbs int Maximum size of the cache where the Blob store data is cached in-memory. If not specified or set to 0, cache is not used diff --git a/flytectl/docs/source/gen/flytectl_sandbox_teardown.rst b/flytectl/docs/source/gen/flytectl_sandbox_teardown.rst index c57c64b61a..7497ef6fcb 100644 --- a/flytectl/docs/source/gen/flytectl_sandbox_teardown.rst +++ b/flytectl/docs/source/gen/flytectl_sandbox_teardown.rst @@ -53,10 +53,12 @@ Options inherited from parent commands --admin.insecure Use insecure connection. --admin.insecureSkipVerify InsecureSkipVerify controls whether a client verifies the server's certificate chain and host name. 
Caution : shouldn't be use for production usecases' --admin.maxBackoffDelay string Max delay for grpc backoff (default "8s") + --admin.maxMessageSizeBytes int The max size in bytes for incoming gRPC messages --admin.maxRetries int Max number of gRPC retries (default 4) --admin.perRetryTimeout string gRPC per retry timeout (default "15s") --admin.pkceConfig.refreshTime string grace period from the token expiry after which it would refresh the token. (default "5m0s") --admin.pkceConfig.timeout string Amount of time the browser session would be active for authentication from client app. (default "2m0s") + --admin.proxyCommand strings Command for external proxy-authorization token generation --admin.scopes strings List of scopes to request --admin.tokenRefreshWindow string Max duration between token refresh attempt and token expiry. (default "0s") --admin.tokenUrl string OPTIONAL: Your IdP's token endpoint. It'll be discovered from flyte admin's OAuth Metadata endpoint if not provided. @@ -77,10 +79,17 @@ Options inherited from parent commands --files.outputLocationPrefix string Custom output location prefix for offloaded types (files/schemas). --files.sourceUploadPath string Deprecated: Update flyte admin to avoid having to configure storage access from flytectl. --files.version string Version of the entity to be registered with flyte which are un-versioned after serialization. + -i, --interactive Set this flag to use an interactive CLI --logger.formatter.type string Sets logging format type. (default "json") --logger.level int Sets the minimum logging level. (default 3) --logger.mute Mutes all logs regardless of severity. Intended for benchmarks/tests only. --logger.show-source Includes source code location in logs. 
+ --otel.file.filename string Filename to store exported telemetry traces (default "/tmp/trace.txt") + --otel.jaeger.endpoint string Endpoint for the jaeger telemetry trace ingestor (default "http://localhost:14268/api/traces") + --otel.otlpgrpc.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4317") + --otel.otlphttp.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4318/v1/traces") + --otel.sampler.parentSampler string Sets the parent sampler to use for the tracer (default "always") + --otel.type string Sets the type of exporter to configure [noop/file/jaeger/otlpgrpc/otlphttp]. (default "noop") -o, --output string Specifies the output type - supported formats [TABLE JSON YAML DOT DOTURL]. NOTE: dot, doturl are only supported for Workflow (default "TABLE") -p, --project string Specifies the Flyte project. --storage.cache.max_size_mbs int Maximum size of the cache where the Blob store data is cached in-memory. If not specified or set to 0, cache is not used diff --git a/flytectl/docs/source/gen/flytectl_update.rst b/flytectl/docs/source/gen/flytectl_update.rst index ffc18b7101..6ce9100531 100644 --- a/flytectl/docs/source/gen/flytectl_update.rst +++ b/flytectl/docs/source/gen/flytectl_update.rst @@ -47,10 +47,12 @@ Options inherited from parent commands --admin.insecure Use insecure connection. --admin.insecureSkipVerify InsecureSkipVerify controls whether a client verifies the server's certificate chain and host name. Caution : shouldn't be use for production usecases' --admin.maxBackoffDelay string Max delay for grpc backoff (default "8s") + --admin.maxMessageSizeBytes int The max size in bytes for incoming gRPC messages --admin.maxRetries int Max number of gRPC retries (default 4) --admin.perRetryTimeout string gRPC per retry timeout (default "15s") --admin.pkceConfig.refreshTime string grace period from the token expiry after which it would refresh the token. 
(default "5m0s") --admin.pkceConfig.timeout string Amount of time the browser session would be active for authentication from client app. (default "2m0s") + --admin.proxyCommand strings Command for external proxy-authorization token generation --admin.scopes strings List of scopes to request --admin.tokenRefreshWindow string Max duration between token refresh attempt and token expiry. (default "0s") --admin.tokenUrl string OPTIONAL: Your IdP's token endpoint. It'll be discovered from flyte admin's OAuth Metadata endpoint if not provided. @@ -71,10 +73,17 @@ Options inherited from parent commands --files.outputLocationPrefix string Custom output location prefix for offloaded types (files/schemas). --files.sourceUploadPath string Deprecated: Update flyte admin to avoid having to configure storage access from flytectl. --files.version string Version of the entity to be registered with flyte which are un-versioned after serialization. + -i, --interactive Set this flag to use an interactive CLI --logger.formatter.type string Sets logging format type. (default "json") --logger.level int Sets the minimum logging level. (default 3) --logger.mute Mutes all logs regardless of severity. Intended for benchmarks/tests only. --logger.show-source Includes source code location in logs. + --otel.file.filename string Filename to store exported telemetry traces (default "/tmp/trace.txt") + --otel.jaeger.endpoint string Endpoint for the jaeger telemetry trace ingestor (default "http://localhost:14268/api/traces") + --otel.otlpgrpc.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4317") + --otel.otlphttp.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4318/v1/traces") + --otel.sampler.parentSampler string Sets the parent sampler to use for the tracer (default "always") + --otel.type string Sets the type of exporter to configure [noop/file/jaeger/otlpgrpc/otlphttp]. 
(default "noop") -o, --output string Specifies the output type - supported formats [TABLE JSON YAML DOT DOTURL]. NOTE: dot, doturl are only supported for Workflow (default "TABLE") -p, --project string Specifies the Flyte project. --storage.cache.max_size_mbs int Maximum size of the cache where the Blob store data is cached in-memory. If not specified or set to 0, cache is not used diff --git a/flytectl/docs/source/gen/flytectl_update_cluster-resource-attribute.rst b/flytectl/docs/source/gen/flytectl_update_cluster-resource-attribute.rst index 55ea963f65..c488177371 100644 --- a/flytectl/docs/source/gen/flytectl_update_cluster-resource-attribute.rst +++ b/flytectl/docs/source/gen/flytectl_update_cluster-resource-attribute.rst @@ -89,10 +89,12 @@ Options inherited from parent commands --admin.insecure Use insecure connection. --admin.insecureSkipVerify InsecureSkipVerify controls whether a client verifies the server's certificate chain and host name. Caution : shouldn't be use for production usecases' --admin.maxBackoffDelay string Max delay for grpc backoff (default "8s") + --admin.maxMessageSizeBytes int The max size in bytes for incoming gRPC messages --admin.maxRetries int Max number of gRPC retries (default 4) --admin.perRetryTimeout string gRPC per retry timeout (default "15s") --admin.pkceConfig.refreshTime string grace period from the token expiry after which it would refresh the token. (default "5m0s") --admin.pkceConfig.timeout string Amount of time the browser session would be active for authentication from client app. (default "2m0s") + --admin.proxyCommand strings Command for external proxy-authorization token generation --admin.scopes strings List of scopes to request --admin.tokenRefreshWindow string Max duration between token refresh attempt and token expiry. (default "0s") --admin.tokenUrl string OPTIONAL: Your IdP's token endpoint. It'll be discovered from flyte admin's OAuth Metadata endpoint if not provided. 
@@ -113,10 +115,17 @@ Options inherited from parent commands --files.outputLocationPrefix string Custom output location prefix for offloaded types (files/schemas). --files.sourceUploadPath string Deprecated: Update flyte admin to avoid having to configure storage access from flytectl. --files.version string Version of the entity to be registered with flyte which are un-versioned after serialization. + -i, --interactive Set this flag to use an interactive CLI --logger.formatter.type string Sets logging format type. (default "json") --logger.level int Sets the minimum logging level. (default 3) --logger.mute Mutes all logs regardless of severity. Intended for benchmarks/tests only. --logger.show-source Includes source code location in logs. + --otel.file.filename string Filename to store exported telemetry traces (default "/tmp/trace.txt") + --otel.jaeger.endpoint string Endpoint for the jaeger telemetry trace ingestor (default "http://localhost:14268/api/traces") + --otel.otlpgrpc.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4317") + --otel.otlphttp.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4318/v1/traces") + --otel.sampler.parentSampler string Sets the parent sampler to use for the tracer (default "always") + --otel.type string Sets the type of exporter to configure [noop/file/jaeger/otlpgrpc/otlphttp]. (default "noop") -o, --output string Specifies the output type - supported formats [TABLE JSON YAML DOT DOTURL]. NOTE: dot, doturl are only supported for Workflow (default "TABLE") -p, --project string Specifies the Flyte project. --storage.cache.max_size_mbs int Maximum size of the cache where the Blob store data is cached in-memory. 
If not specified or set to 0, cache is not used diff --git a/flytectl/docs/source/gen/flytectl_update_execution-cluster-label.rst b/flytectl/docs/source/gen/flytectl_update_execution-cluster-label.rst index 0117986578..30e0d8d74b 100644 --- a/flytectl/docs/source/gen/flytectl_update_execution-cluster-label.rst +++ b/flytectl/docs/source/gen/flytectl_update_execution-cluster-label.rst @@ -82,10 +82,12 @@ Options inherited from parent commands --admin.insecure Use insecure connection. --admin.insecureSkipVerify InsecureSkipVerify controls whether a client verifies the server's certificate chain and host name. Caution : shouldn't be use for production usecases' --admin.maxBackoffDelay string Max delay for grpc backoff (default "8s") + --admin.maxMessageSizeBytes int The max size in bytes for incoming gRPC messages --admin.maxRetries int Max number of gRPC retries (default 4) --admin.perRetryTimeout string gRPC per retry timeout (default "15s") --admin.pkceConfig.refreshTime string grace period from the token expiry after which it would refresh the token. (default "5m0s") --admin.pkceConfig.timeout string Amount of time the browser session would be active for authentication from client app. (default "2m0s") + --admin.proxyCommand strings Command for external proxy-authorization token generation --admin.scopes strings List of scopes to request --admin.tokenRefreshWindow string Max duration between token refresh attempt and token expiry. (default "0s") --admin.tokenUrl string OPTIONAL: Your IdP's token endpoint. It'll be discovered from flyte admin's OAuth Metadata endpoint if not provided. @@ -106,10 +108,17 @@ Options inherited from parent commands --files.outputLocationPrefix string Custom output location prefix for offloaded types (files/schemas). --files.sourceUploadPath string Deprecated: Update flyte admin to avoid having to configure storage access from flytectl. 
--files.version string Version of the entity to be registered with flyte which are un-versioned after serialization. + -i, --interactive Set this flag to use an interactive CLI --logger.formatter.type string Sets logging format type. (default "json") --logger.level int Sets the minimum logging level. (default 3) --logger.mute Mutes all logs regardless of severity. Intended for benchmarks/tests only. --logger.show-source Includes source code location in logs. + --otel.file.filename string Filename to store exported telemetry traces (default "/tmp/trace.txt") + --otel.jaeger.endpoint string Endpoint for the jaeger telemetry trace ingestor (default "http://localhost:14268/api/traces") + --otel.otlpgrpc.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4317") + --otel.otlphttp.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4318/v1/traces") + --otel.sampler.parentSampler string Sets the parent sampler to use for the tracer (default "always") + --otel.type string Sets the type of exporter to configure [noop/file/jaeger/otlpgrpc/otlphttp]. (default "noop") -o, --output string Specifies the output type - supported formats [TABLE JSON YAML DOT DOTURL]. NOTE: dot, doturl are only supported for Workflow (default "TABLE") -p, --project string Specifies the Flyte project. --storage.cache.max_size_mbs int Maximum size of the cache where the Blob store data is cached in-memory. If not specified or set to 0, cache is not used diff --git a/flytectl/docs/source/gen/flytectl_update_execution-queue-attribute.rst b/flytectl/docs/source/gen/flytectl_update_execution-queue-attribute.rst index 5b3f080c03..edc91e18d1 100644 --- a/flytectl/docs/source/gen/flytectl_update_execution-queue-attribute.rst +++ b/flytectl/docs/source/gen/flytectl_update_execution-queue-attribute.rst @@ -93,10 +93,12 @@ Options inherited from parent commands --admin.insecure Use insecure connection. 
--admin.insecureSkipVerify InsecureSkipVerify controls whether a client verifies the server's certificate chain and host name. Caution : shouldn't be use for production usecases' --admin.maxBackoffDelay string Max delay for grpc backoff (default "8s") + --admin.maxMessageSizeBytes int The max size in bytes for incoming gRPC messages --admin.maxRetries int Max number of gRPC retries (default 4) --admin.perRetryTimeout string gRPC per retry timeout (default "15s") --admin.pkceConfig.refreshTime string grace period from the token expiry after which it would refresh the token. (default "5m0s") --admin.pkceConfig.timeout string Amount of time the browser session would be active for authentication from client app. (default "2m0s") + --admin.proxyCommand strings Command for external proxy-authorization token generation --admin.scopes strings List of scopes to request --admin.tokenRefreshWindow string Max duration between token refresh attempt and token expiry. (default "0s") --admin.tokenUrl string OPTIONAL: Your IdP's token endpoint. It'll be discovered from flyte admin's OAuth Metadata endpoint if not provided. @@ -117,10 +119,17 @@ Options inherited from parent commands --files.outputLocationPrefix string Custom output location prefix for offloaded types (files/schemas). --files.sourceUploadPath string Deprecated: Update flyte admin to avoid having to configure storage access from flytectl. --files.version string Version of the entity to be registered with flyte which are un-versioned after serialization. + -i, --interactive Set this flag to use an interactive CLI --logger.formatter.type string Sets logging format type. (default "json") --logger.level int Sets the minimum logging level. (default 3) --logger.mute Mutes all logs regardless of severity. Intended for benchmarks/tests only. --logger.show-source Includes source code location in logs. 
+ --otel.file.filename string Filename to store exported telemetry traces (default "/tmp/trace.txt") + --otel.jaeger.endpoint string Endpoint for the jaeger telemetry trace ingestor (default "http://localhost:14268/api/traces") + --otel.otlpgrpc.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4317") + --otel.otlphttp.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4318/v1/traces") + --otel.sampler.parentSampler string Sets the parent sampler to use for the tracer (default "always") + --otel.type string Sets the type of exporter to configure [noop/file/jaeger/otlpgrpc/otlphttp]. (default "noop") -o, --output string Specifies the output type - supported formats [TABLE JSON YAML DOT DOTURL]. NOTE: dot, doturl are only supported for Workflow (default "TABLE") -p, --project string Specifies the Flyte project. --storage.cache.max_size_mbs int Maximum size of the cache where the Blob store data is cached in-memory. If not specified or set to 0, cache is not used diff --git a/flytectl/docs/source/gen/flytectl_update_execution.rst b/flytectl/docs/source/gen/flytectl_update_execution.rst index 373b625c9a..7c2a28d7fa 100644 --- a/flytectl/docs/source/gen/flytectl_update_execution.rst +++ b/flytectl/docs/source/gen/flytectl_update_execution.rst @@ -62,10 +62,12 @@ Options inherited from parent commands --admin.insecure Use insecure connection. --admin.insecureSkipVerify InsecureSkipVerify controls whether a client verifies the server's certificate chain and host name. 
Caution : shouldn't be use for production usecases' --admin.maxBackoffDelay string Max delay for grpc backoff (default "8s") + --admin.maxMessageSizeBytes int The max size in bytes for incoming gRPC messages --admin.maxRetries int Max number of gRPC retries (default 4) --admin.perRetryTimeout string gRPC per retry timeout (default "15s") --admin.pkceConfig.refreshTime string grace period from the token expiry after which it would refresh the token. (default "5m0s") --admin.pkceConfig.timeout string Amount of time the browser session would be active for authentication from client app. (default "2m0s") + --admin.proxyCommand strings Command for external proxy-authorization token generation --admin.scopes strings List of scopes to request --admin.tokenRefreshWindow string Max duration between token refresh attempt and token expiry. (default "0s") --admin.tokenUrl string OPTIONAL: Your IdP's token endpoint. It'll be discovered from flyte admin's OAuth Metadata endpoint if not provided. @@ -86,10 +88,17 @@ Options inherited from parent commands --files.outputLocationPrefix string Custom output location prefix for offloaded types (files/schemas). --files.sourceUploadPath string Deprecated: Update flyte admin to avoid having to configure storage access from flytectl. --files.version string Version of the entity to be registered with flyte which are un-versioned after serialization. + -i, --interactive Set this flag to use an interactive CLI --logger.formatter.type string Sets logging format type. (default "json") --logger.level int Sets the minimum logging level. (default 3) --logger.mute Mutes all logs regardless of severity. Intended for benchmarks/tests only. --logger.show-source Includes source code location in logs. 
+ --otel.file.filename string Filename to store exported telemetry traces (default "/tmp/trace.txt") + --otel.jaeger.endpoint string Endpoint for the jaeger telemetry trace ingestor (default "http://localhost:14268/api/traces") + --otel.otlpgrpc.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4317") + --otel.otlphttp.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4318/v1/traces") + --otel.sampler.parentSampler string Sets the parent sampler to use for the tracer (default "always") + --otel.type string Sets the type of exporter to configure [noop/file/jaeger/otlpgrpc/otlphttp]. (default "noop") -o, --output string Specifies the output type - supported formats [TABLE JSON YAML DOT DOTURL]. NOTE: dot, doturl are only supported for Workflow (default "TABLE") -p, --project string Specifies the Flyte project. --storage.cache.max_size_mbs int Maximum size of the cache where the Blob store data is cached in-memory. If not specified or set to 0, cache is not used diff --git a/flytectl/docs/source/gen/flytectl_update_launchplan-meta.rst b/flytectl/docs/source/gen/flytectl_update_launchplan-meta.rst index 8e28b948c3..8ebab3cec8 100644 --- a/flytectl/docs/source/gen/flytectl_update_launchplan-meta.rst +++ b/flytectl/docs/source/gen/flytectl_update_launchplan-meta.rst @@ -67,10 +67,12 @@ Options inherited from parent commands --admin.insecure Use insecure connection. --admin.insecureSkipVerify InsecureSkipVerify controls whether a client verifies the server's certificate chain and host name. 
Caution : shouldn't be use for production usecases' --admin.maxBackoffDelay string Max delay for grpc backoff (default "8s") + --admin.maxMessageSizeBytes int The max size in bytes for incoming gRPC messages --admin.maxRetries int Max number of gRPC retries (default 4) --admin.perRetryTimeout string gRPC per retry timeout (default "15s") --admin.pkceConfig.refreshTime string grace period from the token expiry after which it would refresh the token. (default "5m0s") --admin.pkceConfig.timeout string Amount of time the browser session would be active for authentication from client app. (default "2m0s") + --admin.proxyCommand strings Command for external proxy-authorization token generation --admin.scopes strings List of scopes to request --admin.tokenRefreshWindow string Max duration between token refresh attempt and token expiry. (default "0s") --admin.tokenUrl string OPTIONAL: Your IdP's token endpoint. It'll be discovered from flyte admin's OAuth Metadata endpoint if not provided. @@ -91,10 +93,17 @@ Options inherited from parent commands --files.outputLocationPrefix string Custom output location prefix for offloaded types (files/schemas). --files.sourceUploadPath string Deprecated: Update flyte admin to avoid having to configure storage access from flytectl. --files.version string Version of the entity to be registered with flyte which are un-versioned after serialization. + -i, --interactive Set this flag to use an interactive CLI --logger.formatter.type string Sets logging format type. (default "json") --logger.level int Sets the minimum logging level. (default 3) --logger.mute Mutes all logs regardless of severity. Intended for benchmarks/tests only. --logger.show-source Includes source code location in logs. 
+ --otel.file.filename string Filename to store exported telemetry traces (default "/tmp/trace.txt") + --otel.jaeger.endpoint string Endpoint for the jaeger telemetry trace ingestor (default "http://localhost:14268/api/traces") + --otel.otlpgrpc.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4317") + --otel.otlphttp.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4318/v1/traces") + --otel.sampler.parentSampler string Sets the parent sampler to use for the tracer (default "always") + --otel.type string Sets the type of exporter to configure [noop/file/jaeger/otlpgrpc/otlphttp]. (default "noop") -o, --output string Specifies the output type - supported formats [TABLE JSON YAML DOT DOTURL]. NOTE: dot, doturl are only supported for Workflow (default "TABLE") -p, --project string Specifies the Flyte project. --storage.cache.max_size_mbs int Maximum size of the cache where the Blob store data is cached in-memory. If not specified or set to 0, cache is not used diff --git a/flytectl/docs/source/gen/flytectl_update_launchplan.rst b/flytectl/docs/source/gen/flytectl_update_launchplan.rst index bb9992861d..2446e77f4a 100644 --- a/flytectl/docs/source/gen/flytectl_update_launchplan.rst +++ b/flytectl/docs/source/gen/flytectl_update_launchplan.rst @@ -63,10 +63,12 @@ Options inherited from parent commands --admin.insecure Use insecure connection. --admin.insecureSkipVerify InsecureSkipVerify controls whether a client verifies the server's certificate chain and host name. 
Caution : shouldn't be use for production usecases' --admin.maxBackoffDelay string Max delay for grpc backoff (default "8s") + --admin.maxMessageSizeBytes int The max size in bytes for incoming gRPC messages --admin.maxRetries int Max number of gRPC retries (default 4) --admin.perRetryTimeout string gRPC per retry timeout (default "15s") --admin.pkceConfig.refreshTime string grace period from the token expiry after which it would refresh the token. (default "5m0s") --admin.pkceConfig.timeout string Amount of time the browser session would be active for authentication from client app. (default "2m0s") + --admin.proxyCommand strings Command for external proxy-authorization token generation --admin.scopes strings List of scopes to request --admin.tokenRefreshWindow string Max duration between token refresh attempt and token expiry. (default "0s") --admin.tokenUrl string OPTIONAL: Your IdP's token endpoint. It'll be discovered from flyte admin's OAuth Metadata endpoint if not provided. @@ -87,10 +89,17 @@ Options inherited from parent commands --files.outputLocationPrefix string Custom output location prefix for offloaded types (files/schemas). --files.sourceUploadPath string Deprecated: Update flyte admin to avoid having to configure storage access from flytectl. --files.version string Version of the entity to be registered with flyte which are un-versioned after serialization. + -i, --interactive Set this flag to use an interactive CLI --logger.formatter.type string Sets logging format type. (default "json") --logger.level int Sets the minimum logging level. (default 3) --logger.mute Mutes all logs regardless of severity. Intended for benchmarks/tests only. --logger.show-source Includes source code location in logs. 
+ --otel.file.filename string Filename to store exported telemetry traces (default "/tmp/trace.txt") + --otel.jaeger.endpoint string Endpoint for the jaeger telemetry trace ingestor (default "http://localhost:14268/api/traces") + --otel.otlpgrpc.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4317") + --otel.otlphttp.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4318/v1/traces") + --otel.sampler.parentSampler string Sets the parent sampler to use for the tracer (default "always") + --otel.type string Sets the type of exporter to configure [noop/file/jaeger/otlpgrpc/otlphttp]. (default "noop") -o, --output string Specifies the output type - supported formats [TABLE JSON YAML DOT DOTURL]. NOTE: dot, doturl are only supported for Workflow (default "TABLE") -p, --project string Specifies the Flyte project. --storage.cache.max_size_mbs int Maximum size of the cache where the Blob store data is cached in-memory. If not specified or set to 0, cache is not used diff --git a/flytectl/docs/source/gen/flytectl_update_plugin-override.rst b/flytectl/docs/source/gen/flytectl_update_plugin-override.rst index 95f744f85e..26c0690198 100644 --- a/flytectl/docs/source/gen/flytectl_update_plugin-override.rst +++ b/flytectl/docs/source/gen/flytectl_update_plugin-override.rst @@ -95,10 +95,12 @@ Options inherited from parent commands --admin.insecure Use insecure connection. --admin.insecureSkipVerify InsecureSkipVerify controls whether a client verifies the server's certificate chain and host name. 
Caution : shouldn't be use for production usecases' --admin.maxBackoffDelay string Max delay for grpc backoff (default "8s") + --admin.maxMessageSizeBytes int The max size in bytes for incoming gRPC messages --admin.maxRetries int Max number of gRPC retries (default 4) --admin.perRetryTimeout string gRPC per retry timeout (default "15s") --admin.pkceConfig.refreshTime string grace period from the token expiry after which it would refresh the token. (default "5m0s") --admin.pkceConfig.timeout string Amount of time the browser session would be active for authentication from client app. (default "2m0s") + --admin.proxyCommand strings Command for external proxy-authorization token generation --admin.scopes strings List of scopes to request --admin.tokenRefreshWindow string Max duration between token refresh attempt and token expiry. (default "0s") --admin.tokenUrl string OPTIONAL: Your IdP's token endpoint. It'll be discovered from flyte admin's OAuth Metadata endpoint if not provided. @@ -119,10 +121,17 @@ Options inherited from parent commands --files.outputLocationPrefix string Custom output location prefix for offloaded types (files/schemas). --files.sourceUploadPath string Deprecated: Update flyte admin to avoid having to configure storage access from flytectl. --files.version string Version of the entity to be registered with flyte which are un-versioned after serialization. + -i, --interactive Set this flag to use an interactive CLI --logger.formatter.type string Sets logging format type. (default "json") --logger.level int Sets the minimum logging level. (default 3) --logger.mute Mutes all logs regardless of severity. Intended for benchmarks/tests only. --logger.show-source Includes source code location in logs. 
+ --otel.file.filename string Filename to store exported telemetry traces (default "/tmp/trace.txt") + --otel.jaeger.endpoint string Endpoint for the jaeger telemetry trace ingestor (default "http://localhost:14268/api/traces") + --otel.otlpgrpc.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4317") + --otel.otlphttp.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4318/v1/traces") + --otel.sampler.parentSampler string Sets the parent sampler to use for the tracer (default "always") + --otel.type string Sets the type of exporter to configure [noop/file/jaeger/otlpgrpc/otlphttp]. (default "noop") -o, --output string Specifies the output type - supported formats [TABLE JSON YAML DOT DOTURL]. NOTE: dot, doturl are only supported for Workflow (default "TABLE") -p, --project string Specifies the Flyte project. --storage.cache.max_size_mbs int Maximum size of the cache where the Blob store data is cached in-memory. If not specified or set to 0, cache is not used diff --git a/flytectl/docs/source/gen/flytectl_update_project.rst b/flytectl/docs/source/gen/flytectl_update_project.rst index ce203b736b..b405166eba 100644 --- a/flytectl/docs/source/gen/flytectl_update_project.rst +++ b/flytectl/docs/source/gen/flytectl_update_project.rst @@ -53,7 +53,7 @@ Then, pass it in using the *file* flag: To archive or activate (unarchive) a project using a *yaml* file: -* Add a state field, with a value of *0* for activated (unarchived) or *1* for archived, at the top level of the *yaml* file. +* Add a state field, with a value of *0* for activated (unarchived) or *1* for archived, at the top level of the the *yaml* file. * Add the *archive* flag to the command. @@ -133,10 +133,12 @@ Options inherited from parent commands --admin.insecure Use insecure connection. --admin.insecureSkipVerify InsecureSkipVerify controls whether a client verifies the server's certificate chain and host name. 
Caution : shouldn't be use for production usecases' --admin.maxBackoffDelay string Max delay for grpc backoff (default "8s") + --admin.maxMessageSizeBytes int The max size in bytes for incoming gRPC messages --admin.maxRetries int Max number of gRPC retries (default 4) --admin.perRetryTimeout string gRPC per retry timeout (default "15s") --admin.pkceConfig.refreshTime string grace period from the token expiry after which it would refresh the token. (default "5m0s") --admin.pkceConfig.timeout string Amount of time the browser session would be active for authentication from client app. (default "2m0s") + --admin.proxyCommand strings Command for external proxy-authorization token generation --admin.scopes strings List of scopes to request --admin.tokenRefreshWindow string Max duration between token refresh attempt and token expiry. (default "0s") --admin.tokenUrl string OPTIONAL: Your IdP's token endpoint. It'll be discovered from flyte admin's OAuth Metadata endpoint if not provided. @@ -157,10 +159,17 @@ Options inherited from parent commands --files.outputLocationPrefix string Custom output location prefix for offloaded types (files/schemas). --files.sourceUploadPath string Deprecated: Update flyte admin to avoid having to configure storage access from flytectl. --files.version string Version of the entity to be registered with flyte which are un-versioned after serialization. + -i, --interactive Set this flag to use an interactive CLI --logger.formatter.type string Sets logging format type. (default "json") --logger.level int Sets the minimum logging level. (default 3) --logger.mute Mutes all logs regardless of severity. Intended for benchmarks/tests only. --logger.show-source Includes source code location in logs. 
+ --otel.file.filename string Filename to store exported telemetry traces (default "/tmp/trace.txt") + --otel.jaeger.endpoint string Endpoint for the jaeger telemetry trace ingestor (default "http://localhost:14268/api/traces") + --otel.otlpgrpc.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4317") + --otel.otlphttp.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4318/v1/traces") + --otel.sampler.parentSampler string Sets the parent sampler to use for the tracer (default "always") + --otel.type string Sets the type of exporter to configure [noop/file/jaeger/otlpgrpc/otlphttp]. (default "noop") -o, --output string Specifies the output type - supported formats [TABLE JSON YAML DOT DOTURL]. NOTE: dot, doturl are only supported for Workflow (default "TABLE") -p, --project string Specifies the Flyte project. --storage.cache.max_size_mbs int Maximum size of the cache where the Blob store data is cached in-memory. If not specified or set to 0, cache is not used diff --git a/flytectl/docs/source/gen/flytectl_update_task-meta.rst b/flytectl/docs/source/gen/flytectl_update_task-meta.rst index 61312af748..74f5502a4f 100644 --- a/flytectl/docs/source/gen/flytectl_update_task-meta.rst +++ b/flytectl/docs/source/gen/flytectl_update_task-meta.rst @@ -67,10 +67,12 @@ Options inherited from parent commands --admin.insecure Use insecure connection. --admin.insecureSkipVerify InsecureSkipVerify controls whether a client verifies the server's certificate chain and host name. 
Caution : shouldn't be use for production usecases' --admin.maxBackoffDelay string Max delay for grpc backoff (default "8s") + --admin.maxMessageSizeBytes int The max size in bytes for incoming gRPC messages --admin.maxRetries int Max number of gRPC retries (default 4) --admin.perRetryTimeout string gRPC per retry timeout (default "15s") --admin.pkceConfig.refreshTime string grace period from the token expiry after which it would refresh the token. (default "5m0s") --admin.pkceConfig.timeout string Amount of time the browser session would be active for authentication from client app. (default "2m0s") + --admin.proxyCommand strings Command for external proxy-authorization token generation --admin.scopes strings List of scopes to request --admin.tokenRefreshWindow string Max duration between token refresh attempt and token expiry. (default "0s") --admin.tokenUrl string OPTIONAL: Your IdP's token endpoint. It'll be discovered from flyte admin's OAuth Metadata endpoint if not provided. @@ -91,10 +93,17 @@ Options inherited from parent commands --files.outputLocationPrefix string Custom output location prefix for offloaded types (files/schemas). --files.sourceUploadPath string Deprecated: Update flyte admin to avoid having to configure storage access from flytectl. --files.version string Version of the entity to be registered with flyte which are un-versioned after serialization. + -i, --interactive Set this flag to use an interactive CLI --logger.formatter.type string Sets logging format type. (default "json") --logger.level int Sets the minimum logging level. (default 3) --logger.mute Mutes all logs regardless of severity. Intended for benchmarks/tests only. --logger.show-source Includes source code location in logs. 
+ --otel.file.filename string Filename to store exported telemetry traces (default "/tmp/trace.txt") + --otel.jaeger.endpoint string Endpoint for the jaeger telemetry trace ingestor (default "http://localhost:14268/api/traces") + --otel.otlpgrpc.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4317") + --otel.otlphttp.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4318/v1/traces") + --otel.sampler.parentSampler string Sets the parent sampler to use for the tracer (default "always") + --otel.type string Sets the type of exporter to configure [noop/file/jaeger/otlpgrpc/otlphttp]. (default "noop") -o, --output string Specifies the output type - supported formats [TABLE JSON YAML DOT DOTURL]. NOTE: dot, doturl are only supported for Workflow (default "TABLE") -p, --project string Specifies the Flyte project. --storage.cache.max_size_mbs int Maximum size of the cache where the Blob store data is cached in-memory. If not specified or set to 0, cache is not used diff --git a/flytectl/docs/source/gen/flytectl_update_task-resource-attribute.rst b/flytectl/docs/source/gen/flytectl_update_task-resource-attribute.rst index 0ef4798aab..798675b7b9 100644 --- a/flytectl/docs/source/gen/flytectl_update_task-resource-attribute.rst +++ b/flytectl/docs/source/gen/flytectl_update_task-resource-attribute.rst @@ -95,10 +95,12 @@ Options inherited from parent commands --admin.insecure Use insecure connection. --admin.insecureSkipVerify InsecureSkipVerify controls whether a client verifies the server's certificate chain and host name. 
Caution : shouldn't be use for production usecases' --admin.maxBackoffDelay string Max delay for grpc backoff (default "8s") + --admin.maxMessageSizeBytes int The max size in bytes for incoming gRPC messages --admin.maxRetries int Max number of gRPC retries (default 4) --admin.perRetryTimeout string gRPC per retry timeout (default "15s") --admin.pkceConfig.refreshTime string grace period from the token expiry after which it would refresh the token. (default "5m0s") --admin.pkceConfig.timeout string Amount of time the browser session would be active for authentication from client app. (default "2m0s") + --admin.proxyCommand strings Command for external proxy-authorization token generation --admin.scopes strings List of scopes to request --admin.tokenRefreshWindow string Max duration between token refresh attempt and token expiry. (default "0s") --admin.tokenUrl string OPTIONAL: Your IdP's token endpoint. It'll be discovered from flyte admin's OAuth Metadata endpoint if not provided. @@ -119,10 +121,17 @@ Options inherited from parent commands --files.outputLocationPrefix string Custom output location prefix for offloaded types (files/schemas). --files.sourceUploadPath string Deprecated: Update flyte admin to avoid having to configure storage access from flytectl. --files.version string Version of the entity to be registered with flyte which are un-versioned after serialization. + -i, --interactive Set this flag to use an interactive CLI --logger.formatter.type string Sets logging format type. (default "json") --logger.level int Sets the minimum logging level. (default 3) --logger.mute Mutes all logs regardless of severity. Intended for benchmarks/tests only. --logger.show-source Includes source code location in logs. 
+ --otel.file.filename string Filename to store exported telemetry traces (default "/tmp/trace.txt") + --otel.jaeger.endpoint string Endpoint for the jaeger telemetry trace ingestor (default "http://localhost:14268/api/traces") + --otel.otlpgrpc.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4317") + --otel.otlphttp.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4318/v1/traces") + --otel.sampler.parentSampler string Sets the parent sampler to use for the tracer (default "always") + --otel.type string Sets the type of exporter to configure [noop/file/jaeger/otlpgrpc/otlphttp]. (default "noop") -o, --output string Specifies the output type - supported formats [TABLE JSON YAML DOT DOTURL]. NOTE: dot, doturl are only supported for Workflow (default "TABLE") -p, --project string Specifies the Flyte project. --storage.cache.max_size_mbs int Maximum size of the cache where the Blob store data is cached in-memory. If not specified or set to 0, cache is not used diff --git a/flytectl/docs/source/gen/flytectl_update_workflow-execution-config.rst b/flytectl/docs/source/gen/flytectl_update_workflow-execution-config.rst index b025df8a57..553a7c7e7d 100644 --- a/flytectl/docs/source/gen/flytectl_update_workflow-execution-config.rst +++ b/flytectl/docs/source/gen/flytectl_update_workflow-execution-config.rst @@ -91,10 +91,12 @@ Options inherited from parent commands --admin.insecure Use insecure connection. --admin.insecureSkipVerify InsecureSkipVerify controls whether a client verifies the server's certificate chain and host name. 
Caution : shouldn't be use for production usecases' --admin.maxBackoffDelay string Max delay for grpc backoff (default "8s") + --admin.maxMessageSizeBytes int The max size in bytes for incoming gRPC messages --admin.maxRetries int Max number of gRPC retries (default 4) --admin.perRetryTimeout string gRPC per retry timeout (default "15s") --admin.pkceConfig.refreshTime string grace period from the token expiry after which it would refresh the token. (default "5m0s") --admin.pkceConfig.timeout string Amount of time the browser session would be active for authentication from client app. (default "2m0s") + --admin.proxyCommand strings Command for external proxy-authorization token generation --admin.scopes strings List of scopes to request --admin.tokenRefreshWindow string Max duration between token refresh attempt and token expiry. (default "0s") --admin.tokenUrl string OPTIONAL: Your IdP's token endpoint. It'll be discovered from flyte admin's OAuth Metadata endpoint if not provided. @@ -115,10 +117,17 @@ Options inherited from parent commands --files.outputLocationPrefix string Custom output location prefix for offloaded types (files/schemas). --files.sourceUploadPath string Deprecated: Update flyte admin to avoid having to configure storage access from flytectl. --files.version string Version of the entity to be registered with flyte which are un-versioned after serialization. + -i, --interactive Set this flag to use an interactive CLI --logger.formatter.type string Sets logging format type. (default "json") --logger.level int Sets the minimum logging level. (default 3) --logger.mute Mutes all logs regardless of severity. Intended for benchmarks/tests only. --logger.show-source Includes source code location in logs. 
+ --otel.file.filename string Filename to store exported telemetry traces (default "/tmp/trace.txt") + --otel.jaeger.endpoint string Endpoint for the jaeger telemetry trace ingestor (default "http://localhost:14268/api/traces") + --otel.otlpgrpc.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4317") + --otel.otlphttp.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4318/v1/traces") + --otel.sampler.parentSampler string Sets the parent sampler to use for the tracer (default "always") + --otel.type string Sets the type of exporter to configure [noop/file/jaeger/otlpgrpc/otlphttp]. (default "noop") -o, --output string Specifies the output type - supported formats [TABLE JSON YAML DOT DOTURL]. NOTE: dot, doturl are only supported for Workflow (default "TABLE") -p, --project string Specifies the Flyte project. --storage.cache.max_size_mbs int Maximum size of the cache where the Blob store data is cached in-memory. If not specified or set to 0, cache is not used diff --git a/flytectl/docs/source/gen/flytectl_update_workflow-meta.rst b/flytectl/docs/source/gen/flytectl_update_workflow-meta.rst index aadccfabd2..fcbf56d22e 100644 --- a/flytectl/docs/source/gen/flytectl_update_workflow-meta.rst +++ b/flytectl/docs/source/gen/flytectl_update_workflow-meta.rst @@ -67,10 +67,12 @@ Options inherited from parent commands --admin.insecure Use insecure connection. --admin.insecureSkipVerify InsecureSkipVerify controls whether a client verifies the server's certificate chain and host name. 
Caution : shouldn't be use for production usecases' --admin.maxBackoffDelay string Max delay for grpc backoff (default "8s") + --admin.maxMessageSizeBytes int The max size in bytes for incoming gRPC messages --admin.maxRetries int Max number of gRPC retries (default 4) --admin.perRetryTimeout string gRPC per retry timeout (default "15s") --admin.pkceConfig.refreshTime string grace period from the token expiry after which it would refresh the token. (default "5m0s") --admin.pkceConfig.timeout string Amount of time the browser session would be active for authentication from client app. (default "2m0s") + --admin.proxyCommand strings Command for external proxy-authorization token generation --admin.scopes strings List of scopes to request --admin.tokenRefreshWindow string Max duration between token refresh attempt and token expiry. (default "0s") --admin.tokenUrl string OPTIONAL: Your IdP's token endpoint. It'll be discovered from flyte admin's OAuth Metadata endpoint if not provided. @@ -91,10 +93,17 @@ Options inherited from parent commands --files.outputLocationPrefix string Custom output location prefix for offloaded types (files/schemas). --files.sourceUploadPath string Deprecated: Update flyte admin to avoid having to configure storage access from flytectl. --files.version string Version of the entity to be registered with flyte which are un-versioned after serialization. + -i, --interactive Set this flag to use an interactive CLI --logger.formatter.type string Sets logging format type. (default "json") --logger.level int Sets the minimum logging level. (default 3) --logger.mute Mutes all logs regardless of severity. Intended for benchmarks/tests only. --logger.show-source Includes source code location in logs. 
+ --otel.file.filename string Filename to store exported telemetry traces (default "/tmp/trace.txt") + --otel.jaeger.endpoint string Endpoint for the jaeger telemetry trace ingestor (default "http://localhost:14268/api/traces") + --otel.otlpgrpc.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4317") + --otel.otlphttp.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4318/v1/traces") + --otel.sampler.parentSampler string Sets the parent sampler to use for the tracer (default "always") + --otel.type string Sets the type of exporter to configure [noop/file/jaeger/otlpgrpc/otlphttp]. (default "noop") -o, --output string Specifies the output type - supported formats [TABLE JSON YAML DOT DOTURL]. NOTE: dot, doturl are only supported for Workflow (default "TABLE") -p, --project string Specifies the Flyte project. --storage.cache.max_size_mbs int Maximum size of the cache where the Blob store data is cached in-memory. If not specified or set to 0, cache is not used diff --git a/flytectl/docs/source/gen/flytectl_upgrade.rst b/flytectl/docs/source/gen/flytectl_upgrade.rst index a0bcedda9d..23ea76ea60 100644 --- a/flytectl/docs/source/gen/flytectl_upgrade.rst +++ b/flytectl/docs/source/gen/flytectl_upgrade.rst @@ -61,10 +61,12 @@ Options inherited from parent commands --admin.insecure Use insecure connection. --admin.insecureSkipVerify InsecureSkipVerify controls whether a client verifies the server's certificate chain and host name. Caution : shouldn't be use for production usecases' --admin.maxBackoffDelay string Max delay for grpc backoff (default "8s") + --admin.maxMessageSizeBytes int The max size in bytes for incoming gRPC messages --admin.maxRetries int Max number of gRPC retries (default 4) --admin.perRetryTimeout string gRPC per retry timeout (default "15s") --admin.pkceConfig.refreshTime string grace period from the token expiry after which it would refresh the token. 
(default "5m0s") --admin.pkceConfig.timeout string Amount of time the browser session would be active for authentication from client app. (default "2m0s") + --admin.proxyCommand strings Command for external proxy-authorization token generation --admin.scopes strings List of scopes to request --admin.tokenRefreshWindow string Max duration between token refresh attempt and token expiry. (default "0s") --admin.tokenUrl string OPTIONAL: Your IdP's token endpoint. It'll be discovered from flyte admin's OAuth Metadata endpoint if not provided. @@ -85,10 +87,17 @@ Options inherited from parent commands --files.outputLocationPrefix string Custom output location prefix for offloaded types (files/schemas). --files.sourceUploadPath string Deprecated: Update flyte admin to avoid having to configure storage access from flytectl. --files.version string Version of the entity to be registered with flyte which are un-versioned after serialization. + -i, --interactive Set this flag to use an interactive CLI --logger.formatter.type string Sets logging format type. (default "json") --logger.level int Sets the minimum logging level. (default 3) --logger.mute Mutes all logs regardless of severity. Intended for benchmarks/tests only. --logger.show-source Includes source code location in logs. + --otel.file.filename string Filename to store exported telemetry traces (default "/tmp/trace.txt") + --otel.jaeger.endpoint string Endpoint for the jaeger telemetry trace ingestor (default "http://localhost:14268/api/traces") + --otel.otlpgrpc.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4317") + --otel.otlphttp.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4318/v1/traces") + --otel.sampler.parentSampler string Sets the parent sampler to use for the tracer (default "always") + --otel.type string Sets the type of exporter to configure [noop/file/jaeger/otlpgrpc/otlphttp]. 
(default "noop") -o, --output string Specifies the output type - supported formats [TABLE JSON YAML DOT DOTURL]. NOTE: dot, doturl are only supported for Workflow (default "TABLE") -p, --project string Specifies the Flyte project. --storage.cache.max_size_mbs int Maximum size of the cache where the Blob store data is cached in-memory. If not specified or set to 0, cache is not used diff --git a/flytectl/docs/source/gen/flytectl_version.rst b/flytectl/docs/source/gen/flytectl_version.rst index 220a375741..6a075a71c8 100644 --- a/flytectl/docs/source/gen/flytectl_version.rst +++ b/flytectl/docs/source/gen/flytectl_version.rst @@ -50,10 +50,12 @@ Options inherited from parent commands --admin.insecure Use insecure connection. --admin.insecureSkipVerify InsecureSkipVerify controls whether a client verifies the server's certificate chain and host name. Caution : shouldn't be use for production usecases' --admin.maxBackoffDelay string Max delay for grpc backoff (default "8s") + --admin.maxMessageSizeBytes int The max size in bytes for incoming gRPC messages --admin.maxRetries int Max number of gRPC retries (default 4) --admin.perRetryTimeout string gRPC per retry timeout (default "15s") --admin.pkceConfig.refreshTime string grace period from the token expiry after which it would refresh the token. (default "5m0s") --admin.pkceConfig.timeout string Amount of time the browser session would be active for authentication from client app. (default "2m0s") + --admin.proxyCommand strings Command for external proxy-authorization token generation --admin.scopes strings List of scopes to request --admin.tokenRefreshWindow string Max duration between token refresh attempt and token expiry. (default "0s") --admin.tokenUrl string OPTIONAL: Your IdP's token endpoint. It'll be discovered from flyte admin's OAuth Metadata endpoint if not provided. 
@@ -74,10 +76,17 @@ Options inherited from parent commands --files.outputLocationPrefix string Custom output location prefix for offloaded types (files/schemas). --files.sourceUploadPath string Deprecated: Update flyte admin to avoid having to configure storage access from flytectl. --files.version string Version of the entity to be registered with flyte which are un-versioned after serialization. + -i, --interactive Set this flag to use an interactive CLI --logger.formatter.type string Sets logging format type. (default "json") --logger.level int Sets the minimum logging level. (default 3) --logger.mute Mutes all logs regardless of severity. Intended for benchmarks/tests only. --logger.show-source Includes source code location in logs. + --otel.file.filename string Filename to store exported telemetry traces (default "/tmp/trace.txt") + --otel.jaeger.endpoint string Endpoint for the jaeger telemetry trace ingestor (default "http://localhost:14268/api/traces") + --otel.otlpgrpc.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4317") + --otel.otlphttp.endpoint string Endpoint for the OTLP telemetry trace collector (default "http://localhost:4318/v1/traces") + --otel.sampler.parentSampler string Sets the parent sampler to use for the tracer (default "always") + --otel.type string Sets the type of exporter to configure [noop/file/jaeger/otlpgrpc/otlphttp]. (default "noop") -o, --output string Specifies the output type - supported formats [TABLE JSON YAML DOT DOTURL]. NOTE: dot, doturl are only supported for Workflow (default "TABLE") -p, --project string Specifies the Flyte project. --storage.cache.max_size_mbs int Maximum size of the cache where the Blob store data is cached in-memory. 
If not specified or set to 0, cache is not used diff --git a/flytectl/go.mod b/flytectl/go.mod index b657a02d4d..a8b0fe8bb2 100644 --- a/flytectl/go.mod +++ b/flytectl/go.mod @@ -141,6 +141,7 @@ require ( github.com/prometheus/procfs v0.15.1 // indirect github.com/rivo/uniseg v0.4.7 // indirect github.com/russross/blackfriday/v2 v2.1.0 // indirect + github.com/santhosh-tekuri/jsonschema v1.2.4 // indirect github.com/shamaton/msgpack/v2 v2.2.2 // indirect github.com/spf13/afero v1.9.2 // indirect github.com/spf13/cast v1.4.1 // indirect @@ -148,6 +149,12 @@ require ( github.com/spf13/viper v1.11.0 // indirect github.com/stretchr/objx v0.5.2 // indirect github.com/subosito/gotenv v1.2.0 // indirect + github.com/tidwall/gjson v1.17.1 // indirect + github.com/tidwall/match v1.1.1 // indirect + github.com/tidwall/pretty v1.2.1 // indirect + github.com/tidwall/sjson v1.2.5 // indirect + github.com/wI2L/jsondiff v0.6.0 // indirect + gitlab.com/yvesf/json-schema-compare v0.0.0-20190604192943-a900c04201f7 // indirect go.opencensus.io v0.24.0 // indirect go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.47.0 // indirect go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.46.1 // indirect diff --git a/flytectl/go.sum b/flytectl/go.sum index f57ca65c0a..9f81c0ec9d 100644 --- a/flytectl/go.sum +++ b/flytectl/go.sum @@ -418,6 +418,8 @@ github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4= github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/santhosh-tekuri/jsonschema v1.2.4 h1:hNhW8e7t+H1vgY+1QeEQpveR6D4+OwKPXCfD2aieJis= +github.com/santhosh-tekuri/jsonschema v1.2.4/go.mod h1:TEAUOeZSmIxTTuHatJzrvARHiuO9LYd+cIxzgEHCQI4= github.com/shamaton/msgpack/v2 v2.2.2 
h1:GOIg0c9LV04VwzOOqZSrmsv/JzjNOOMxnS/HvOHGdgs= github.com/shamaton/msgpack/v2 v2.2.2/go.mod h1:6khjYnkx73f7VQU7wjcFS9DFjs+59naVWJv1TB7qdOI= github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= @@ -457,6 +459,18 @@ github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsT github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/subosito/gotenv v1.2.0 h1:Slr1R9HxAlEKefgq5jn9U+DnETlIUa6HfgEzj0g5d7s= github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw= +github.com/tidwall/gjson v1.14.2/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= +github.com/tidwall/gjson v1.17.1 h1:wlYEnwqAHgzmhNUFfw7Xalt2JzQvsMx2Se4PcoFCT/U= +github.com/tidwall/gjson v1.17.1/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= +github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA= +github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM= +github.com/tidwall/pretty v1.2.0/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU= +github.com/tidwall/pretty v1.2.1 h1:qjsOFOWWQl+N3RsoF5/ssm1pHmJJwhjlSbZ51I6wMl4= +github.com/tidwall/pretty v1.2.1/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU= +github.com/tidwall/sjson v1.2.5 h1:kLy8mja+1c9jlljvWTlSazM7cKDRfJuR/bOJhcY5NcY= +github.com/tidwall/sjson v1.2.5/go.mod h1:Fvgq9kS/6ociJEDnK0Fk1cpYF4FIW6ZF7LAe+6jwd28= +github.com/wI2L/jsondiff v0.6.0 h1:zrsH3FbfVa3JO9llxrcDy/XLkYPLgoMX6Mz3T2PP2AI= +github.com/wI2L/jsondiff v0.6.0/go.mod h1:D6aQ5gKgPF9g17j+E9N7aasmU1O+XvfmWm1y8UMmNpw= github.com/yalp/jsonpath v0.0.0-20180802001716-5cc68e5049a0 h1:6fRhSjgLCkTD3JnJxvaJ4Sj+TYblw757bqYgZaOq5ZY= github.com/yalp/jsonpath v0.0.0-20180802001716-5cc68e5049a0/go.mod h1:/LWChgwKmvncFJFHJ7Gvn9wZArjbV5/FppcK2fKk/tI= github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= @@ -466,6 +480,8 @@ github.com/yuin/goldmark v1.2.1/go.mod 
h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9dec github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= github.com/zalando/go-keyring v0.1.1 h1:w2V9lcx/Uj4l+dzAf1m9s+DJ1O8ROkEHnynonHjTcYE= github.com/zalando/go-keyring v0.1.1/go.mod h1:OIC+OZ28XbmwFxU/Rp9V7eKzZjamBJwRzC8UFJH9+L8= +gitlab.com/yvesf/json-schema-compare v0.0.0-20190604192943-a900c04201f7 h1:BAkxmYRc1ZPl6Gap4HWqwPT8yLZMrgaAwx12Ft408sg= +gitlab.com/yvesf/json-schema-compare v0.0.0-20190604192943-a900c04201f7/go.mod h1:X40Z1OU8o1oiXWzBmkuYOaruzYGv60l0AxGiB0E9keI= go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= diff --git a/flytectl/pkg/bubbletea/bubbletea_pagination.go b/flytectl/pkg/bubbletea/bubbletea_pagination.go index 01a4b7ea98..bc76aaec1c 100644 --- a/flytectl/pkg/bubbletea/bubbletea_pagination.go +++ b/flytectl/pkg/bubbletea/bubbletea_pagination.go @@ -147,7 +147,7 @@ func Paginator(_listHeader []printer.Column, _callback DataCallback, _filter fil listHeader = _listHeader callback = _callback filter = _filter - filter.Page = int32(_max(int(filter.Page), 1)) + filter.Page = max(filter.Page, 1) firstBatchIndex = (int(filter.Page) - 1) / pagePerBatch lastBatchIndex = firstBatchIndex diff --git a/flytectl/pkg/bubbletea/bubbletea_pagination_util.go b/flytectl/pkg/bubbletea/bubbletea_pagination_util.go index dc6ddd735b..3d2a02dd09 100644 --- a/flytectl/pkg/bubbletea/bubbletea_pagination_util.go +++ b/flytectl/pkg/bubbletea/bubbletea_pagination_util.go @@ -65,23 +65,9 @@ func (p printTableProto) MarshalJSON() ([]byte, error) { return buf.Bytes(), nil } -func _max(a, b int) int { - if a > b { - return a - } - return b -} - -func _min(a, b int) int { - if a < b { - return a - } - return b -} - func getSliceBounds(m *pageModel) (start int, end int) { start = (m.paginator.Page - 
firstBatchIndex*pagePerBatch) * msgPerPage - end = _min(start+msgPerPage, len(*m.items)) + end = min(start+msgPerPage, len(*m.items)) return start, end } @@ -117,7 +103,7 @@ func getMessageList(batchIndex int) ([]proto.Message, error) { msg, err := callback(filters.Filters{ Limit: msgPerBatch, - Page: int32(batchIndex + 1), + Page: int32(batchIndex + 1), // #nosec G115 SortBy: filter.SortBy, Asc: filter.Asc, }) diff --git a/flytectl/pkg/docker/docker_util.go b/flytectl/pkg/docker/docker_util.go index f093e3d49a..a495fdc514 100644 --- a/flytectl/pkg/docker/docker_util.go +++ b/flytectl/pkg/docker/docker_util.go @@ -134,14 +134,14 @@ func GetSandboxPorts() (map[nat.Port]struct{}, map[nat.Port][]nat.PortBinding, e } // GetDemoPorts will return demo ports -func GetDemoPorts() (map[nat.Port]struct{}, map[nat.Port][]nat.PortBinding, error) { +func GetDemoPorts(k8sPort string) (map[nat.Port]struct{}, map[nat.Port][]nat.PortBinding, error) { return nat.ParsePortSpecs([]string{ - "0.0.0.0:6443:6443", // K3s API Port - "0.0.0.0:30080:30080", // HTTP Port - "0.0.0.0:30000:30000", // Registry Port - "0.0.0.0:30001:30001", // Postgres Port - "0.0.0.0:30002:30002", // Minio API Port (use HTTP port for minio console) - "0.0.0.0:30003:30003", // Buildkit Port + fmt.Sprintf("0.0.0.0:%s:6443", k8sPort), // K3s API Port + "0.0.0.0:30080:30080", // HTTP Port + "0.0.0.0:30000:30000", // Registry Port + "0.0.0.0:30001:30001", // Postgres Port + "0.0.0.0:30002:30002", // Minio API Port (use HTTP port for minio console) + "0.0.0.0:30003:30003", // Buildkit Port }) } diff --git a/flytectl/pkg/docker/docker_util_test.go b/flytectl/pkg/docker/docker_util_test.go index 8decd8824d..a03acab866 100644 --- a/flytectl/pkg/docker/docker_util_test.go +++ b/flytectl/pkg/docker/docker_util_test.go @@ -435,7 +435,7 @@ func TestGetOrCreateVolume(t *testing.T) { } func TestDemoPorts(t *testing.T) { - _, ports, _ := GetDemoPorts() + _, ports, _ := GetDemoPorts("6443") assert.Equal(t, 6, len(ports)) } diff 
--git a/flytectl/pkg/ext/launch_plan_fetcher.go b/flytectl/pkg/ext/launch_plan_fetcher.go index 5a8befc093..8f047f681c 100644 --- a/flytectl/pkg/ext/launch_plan_fetcher.go +++ b/flytectl/pkg/ext/launch_plan_fetcher.go @@ -19,10 +19,10 @@ func (a *AdminFetcherExtClient) FetchAllVerOfLP(ctx context.Context, lpName, pro if err != nil { return nil, err } - if len(tList.LaunchPlans) == 0 { + if len(tList.GetLaunchPlans()) == 0 { return nil, fmt.Errorf("no launchplans retrieved for %v", lpName) } - return tList.LaunchPlans, nil + return tList.GetLaunchPlans(), nil } // FetchLPLatestVersion fetches latest version for give launch plan name diff --git a/flytectl/pkg/ext/project_fetcher.go b/flytectl/pkg/ext/project_fetcher.go index a1e83fdf70..f6495b8ff2 100644 --- a/flytectl/pkg/ext/project_fetcher.go +++ b/flytectl/pkg/ext/project_fetcher.go @@ -33,13 +33,13 @@ func (a *AdminFetcherExtClient) GetProjectByID(ctx context.Context, projectID st return nil, err } - if len(response.Projects) == 0 { + if len(response.GetProjects()) == 0 { return nil, NewNotFoundError("project %s", projectID) } - if len(response.Projects) > 1 { - panic(fmt.Sprintf("unexpected number of projects in ListProjects response: %d - 0 or 1 expected", len(response.Projects))) + if len(response.GetProjects()) > 1 { + panic(fmt.Sprintf("unexpected number of projects in ListProjects response: %d - 0 or 1 expected", len(response.GetProjects()))) } - return response.Projects[0], nil + return response.GetProjects()[0], nil } diff --git a/flytectl/pkg/ext/task_fetcher.go b/flytectl/pkg/ext/task_fetcher.go index 53c0acccb0..d602ef59b3 100644 --- a/flytectl/pkg/ext/task_fetcher.go +++ b/flytectl/pkg/ext/task_fetcher.go @@ -18,10 +18,10 @@ func (a *AdminFetcherExtClient) FetchAllVerOfTask(ctx context.Context, name, pro if err != nil { return nil, err } - if len(tList.Tasks) == 0 { + if len(tList.GetTasks()) == 0 { return nil, fmt.Errorf("no tasks retrieved for %v", name) } - return tList.Tasks, nil + return 
tList.GetTasks(), nil } func (a *AdminFetcherExtClient) FetchTaskLatestVersion(ctx context.Context, name, project, domain string, filter filters.Filters) (*admin.Task, error) { diff --git a/flytectl/pkg/ext/workflow_fetcher.go b/flytectl/pkg/ext/workflow_fetcher.go index 69032bb998..0aacdd756a 100644 --- a/flytectl/pkg/ext/workflow_fetcher.go +++ b/flytectl/pkg/ext/workflow_fetcher.go @@ -19,10 +19,10 @@ func (a *AdminFetcherExtClient) FetchAllVerOfWorkflow(ctx context.Context, workf if err != nil { return nil, err } - if len(wList.Workflows) == 0 { + if len(wList.GetWorkflows()) == 0 { return nil, fmt.Errorf("no workflow retrieved for %v", workflowName) } - return wList.Workflows, nil + return wList.GetWorkflows(), nil } // FetchAllWorkflows fetches all workflows in project domain @@ -35,10 +35,10 @@ func (a *AdminFetcherExtClient) FetchAllWorkflows(ctx context.Context, project, if err != nil { return nil, err } - if len(wList.Entities) == 0 { + if len(wList.GetEntities()) == 0 { return nil, fmt.Errorf("no workflow retrieved for %v project %v domain", project, domain) } - return wList.Entities, nil + return wList.GetEntities(), nil } // FetchWorkflowLatestVersion fetches latest version for given workflow name @@ -53,7 +53,7 @@ func (a *AdminFetcherExtClient) FetchWorkflowLatestVersion(ctx context.Context, if err != nil { return nil, err } - return a.FetchWorkflowVersion(ctx, name, wVersions[0].Id.Version, project, domain) + return a.FetchWorkflowVersion(ctx, name, wVersions[0].GetId().GetVersion(), project, domain) } // FetchWorkflowVersion fetches particular version of workflow diff --git a/flytectl/pkg/filters/util.go b/flytectl/pkg/filters/util.go index a19481e32d..aed4d25f16 100644 --- a/flytectl/pkg/filters/util.go +++ b/flytectl/pkg/filters/util.go @@ -13,7 +13,7 @@ func BuildResourceListRequestWithName(c Filters, project, domain, name string) ( return nil, err } request := &admin.ResourceListRequest{ - Limit: uint32(c.Limit), + Limit: uint32(c.Limit), // 
#nosec G115 Token: getToken(c), Filters: fieldSelector, Id: &admin.NamedEntityIdentifier{ @@ -36,7 +36,7 @@ func BuildNamedEntityListRequest(c Filters, project, domain string, resourceType return nil, err } request := &admin.NamedEntityListRequest{ - Limit: uint32(c.Limit), + Limit: uint32(c.Limit), // #nosec G115 Token: getToken(c), Filters: fieldSelector, Project: project, @@ -55,7 +55,7 @@ func BuildProjectListRequest(c Filters) (*admin.ProjectListRequest, error) { return nil, err } request := &admin.ProjectListRequest{ - Limit: uint32(c.Limit), + Limit: uint32(c.Limit), // #nosec G115 Token: getToken(c), Filters: fieldSelector, SortBy: buildSortingRequest(c), diff --git a/flytectl/pkg/k8s/k8s_test.go b/flytectl/pkg/k8s/k8s_test.go index 84dc16923c..41ea4eaafa 100644 --- a/flytectl/pkg/k8s/k8s_test.go +++ b/flytectl/pkg/k8s/k8s_test.go @@ -45,7 +45,8 @@ users: t.Error(err) } defer os.Remove(tmpfile.Name()) - if err := ioutil.WriteFile(tmpfile.Name(), []byte(content), os.ModePerm); err != nil { + // #nosec G306 + if err := os.WriteFile(tmpfile.Name(), []byte(content), os.ModePerm); err != nil { t.Error(err) } t.Run("Create client from config", func(t *testing.T) { diff --git a/flytectl/pkg/printer/printer.go b/flytectl/pkg/printer/printer.go index df7effd8a1..bc67d75e9d 100644 --- a/flytectl/pkg/printer/printer.go +++ b/flytectl/pkg/printer/printer.go @@ -199,8 +199,8 @@ func FormatVariableDescriptions(variableMap map[string]*core.Variable) { for _, k := range keys { v := variableMap[k] // a: a isn't very helpful - if k != v.Description { - descriptions = append(descriptions, getTruncatedLine(fmt.Sprintf("%s: %s", k, v.Description))) + if k != v.GetDescription() { + descriptions = append(descriptions, getTruncatedLine(fmt.Sprintf("%s: %s", k, v.GetDescription()))) } else { descriptions = append(descriptions, getTruncatedLine(k)) } @@ -220,12 +220,12 @@ func FormatParameterDescriptions(parameterMap map[string]*core.Parameter) { var descriptions []string for _, k 
:= range keys { v := parameterMap[k] - if v.Var == nil { + if v.GetVar() == nil { continue } // a: a isn't very helpful - if k != v.Var.Description { - descriptions = append(descriptions, getTruncatedLine(fmt.Sprintf("%s: %s", k, v.Var.Description))) + if k != v.GetVar().GetDescription() { + descriptions = append(descriptions, getTruncatedLine(fmt.Sprintf("%s: %s", k, v.GetVar().GetDescription()))) } else { descriptions = append(descriptions, getTruncatedLine(k)) } @@ -272,7 +272,7 @@ func (p Printer) Print(format OutputFormat, columns []Column, messages ...proto. return fmt.Errorf("at least one workflow required for visualization") } workflow := workflows[0] - graphStr, err := visualize.RenderWorkflow(workflow.Closure.CompiledWorkflow) + graphStr, err := visualize.RenderWorkflow(workflow.GetClosure().GetCompiledWorkflow()) if err != nil { return errors.Wrapf("VisualizationError", err, "failed to visualize workflow") } diff --git a/flytectl/pkg/printer/printer_test.go b/flytectl/pkg/printer/printer_test.go index afc5edb7b1..3783e5f7ca 100644 --- a/flytectl/pkg/printer/printer_test.go +++ b/flytectl/pkg/printer/printer_test.go @@ -282,7 +282,7 @@ func TestFormatVariableDescriptions(t *testing.T) { "bar": barVar, } FormatVariableDescriptions(variableMap) - assert.Equal(t, "bar\nfoo\nvar1: foo\nvar2: bar", variableMap[DefaultFormattedDescriptionsKey].Description) + assert.Equal(t, "bar\nfoo\nvar1: foo\nvar2: bar", variableMap[DefaultFormattedDescriptionsKey].GetDescription()) } func TestFormatParameterDescriptions(t *testing.T) { @@ -305,5 +305,5 @@ func TestFormatParameterDescriptions(t *testing.T) { "empty": emptyParam, } FormatParameterDescriptions(paramMap) - assert.Equal(t, "bar\nfoo\nvar1: foo\nvar2: bar", paramMap[DefaultFormattedDescriptionsKey].Var.Description) + assert.Equal(t, "bar\nfoo\nvar1: foo\nvar2: bar", paramMap[DefaultFormattedDescriptionsKey].GetVar().GetDescription()) } diff --git a/flytectl/pkg/sandbox/reload.go b/flytectl/pkg/sandbox/reload.go 
new file mode 100644 index 0000000000..f68b385443 --- /dev/null +++ b/flytectl/pkg/sandbox/reload.go @@ -0,0 +1,47 @@ +package sandbox + +import ( + "context" + "fmt" + + sandboxCmdConfig "github.com/flyteorg/flyte/flytectl/cmd/config/subcommand/sandbox" + "github.com/flyteorg/flyte/flytectl/pkg/docker" + "github.com/flyteorg/flyte/flytectl/pkg/k8s" + "github.com/flyteorg/flyte/flytestdlib/logger" + v1 "k8s.io/apimachinery/pkg/apis/meta/v1" +) + +const ( + flyteNs = "flyte" + labelSelector = "app.kubernetes.io/name=flyte-binary" +) + +// LegacyReloadDemoCluster will kill the flyte binary pod so the new one can pick up a new config file +func LegacyReloadDemoCluster(ctx context.Context, sandboxConfig *sandboxCmdConfig.Config) error { + k8sEndpoint := sandboxConfig.GetK8sEndpoint() + k8sClient, err := k8s.GetK8sClient(docker.Kubeconfig, k8sEndpoint) + if err != nil { + fmt.Println("Could not get K8s client") + return err + } + pi := k8sClient.CoreV1().Pods(flyteNs) + podList, err := pi.List(ctx, v1.ListOptions{LabelSelector: labelSelector}) + if err != nil { + fmt.Println("could not list pods") + return err + } + if len(podList.Items) != 1 { + return fmt.Errorf("should only have one pod running, %d found, %v", len(podList.Items), podList.Items) + } + logger.Debugf(ctx, "Found %d pods\n", len(podList.Items)) + var grace = int64(0) + err = pi.Delete(ctx, podList.Items[0].Name, v1.DeleteOptions{ + GracePeriodSeconds: &grace, + }) + if err != nil { + fmt.Printf("Could not delete Flyte pod, old configuration may still be in effect. 
Err: %s\n", err) + return err + } + + return nil +} diff --git a/flytectl/pkg/sandbox/start.go b/flytectl/pkg/sandbox/start.go index 6681baf5e1..e638301741 100644 --- a/flytectl/pkg/sandbox/start.go +++ b/flytectl/pkg/sandbox/start.go @@ -36,7 +36,6 @@ const ( taintEffect = "NoSchedule" sandboxContextName = "flyte-sandbox" sandboxDockerContext = "default" - K8sEndpoint = "https://127.0.0.1:6443" sandboxK8sEndpoint = "https://127.0.0.1:30086" sandboxImageName = "cr.flyte.org/flyteorg/flyte-sandbox" demoImageName = "cr.flyte.org/flyteorg/flyte-sandbox-bundled" @@ -280,12 +279,13 @@ func StartCluster(ctx context.Context, args []string, sandboxConfig *sandboxCmdC return err } + k8sEndpoint := sandboxConfig.GetK8sEndpoint() if reader != nil { var k8sClient k8s.K8s err = retry.Do( func() error { // This should wait for the kubeconfig file being there. - k8sClient, err = k8s.GetK8sClient(docker.Kubeconfig, K8sEndpoint) + k8sClient, err = k8s.GetK8sClient(docker.Kubeconfig, k8sEndpoint) return err }, retry.Attempts(10), @@ -299,7 +299,7 @@ func StartCluster(ctx context.Context, args []string, sandboxConfig *sandboxCmdC err = retry.Do( func() error { // Have to get a new client every time because you run into x509 errors if not - k8sClient, err = k8s.GetK8sClient(docker.Kubeconfig, K8sEndpoint) + k8sClient, err = k8s.GetK8sClient(docker.Kubeconfig, k8sEndpoint) if err != nil { logger.Debugf(ctx, "Error getting K8s client in liveness check %s", err) return err @@ -398,7 +398,7 @@ func StartClusterForSandbox(ctx context.Context, args []string, sandboxConfig *s func StartDemoCluster(ctx context.Context, args []string, sandboxConfig *sandboxCmdConfig.Config) error { sandboxImagePrefix := "sha" - exposedPorts, portBindings, err := docker.GetDemoPorts() + exposedPorts, portBindings, err := docker.GetDemoPorts(sandboxConfig.Port) if err != nil { return err } diff --git a/flytectl/pkg/sandbox/start_test.go b/flytectl/pkg/sandbox/start_test.go index 9d24295758..84a0a4fd35 100644 --- 
a/flytectl/pkg/sandbox/start_test.go +++ b/flytectl/pkg/sandbox/start_test.go @@ -123,7 +123,7 @@ func TestStartFunc(t *testing.T) { config.DisableAgent = true assert.Nil(t, util.SetupFlyteDir()) assert.Nil(t, os.MkdirAll(f.FilePathJoin(f.UserHomeDir(), ".flyte", "state"), os.ModePerm)) - assert.Nil(t, ioutil.WriteFile(docker.Kubeconfig, []byte(content), os.ModePerm)) + assert.Nil(t, os.WriteFile(docker.Kubeconfig, []byte(content), os.ModePerm)) // #nosec G306 fakePod.SetName("flyte") diff --git a/flytectl/pkg/util/util.go b/flytectl/pkg/util/util.go index 18067d1702..49b1278c14 100644 --- a/flytectl/pkg/util/util.go +++ b/flytectl/pkg/util/util.go @@ -3,7 +3,6 @@ package util import ( "fmt" "io" - "io/ioutil" "net/http" "os" "path/filepath" @@ -26,7 +25,7 @@ var Ext string // WriteIntoFile will write content in a file func WriteIntoFile(data []byte, file string) error { - err := ioutil.WriteFile(file, data, os.ModePerm) + err := os.WriteFile(file, data, os.ModePerm) // #nosec G306 if err != nil { return err } @@ -38,6 +37,7 @@ func CreatePathAndFile(pathToConfig string) error { if err != nil { return err } + // #nosec G306 if err := os.MkdirAll(filepath.Dir(p), os.ModePerm); err != nil { return err } @@ -45,6 +45,7 @@ func CreatePathAndFile(pathToConfig string) error { // Created a empty file with right permission if _, err := os.Stat(p); err != nil { if os.IsNotExist(err) { + // #nosec G306 if err := os.WriteFile(p, []byte(""), os.ModePerm); err != nil { return err } @@ -62,6 +63,7 @@ func SetupFlyteDir() error { // Created a empty file with right permission if _, err := os.Stat(docker.Kubeconfig); err != nil { if os.IsNotExist(err) { + // #nosec G306 if err := os.WriteFile(docker.Kubeconfig, []byte(""), os.ModePerm); err != nil { return err } diff --git a/flytectl/pkg/visualize/graphviz.go b/flytectl/pkg/visualize/graphviz.go index 745c7ad248..be4f275fc5 100644 --- a/flytectl/pkg/visualize/graphviz.go +++ b/flytectl/pkg/visualize/graphviz.go @@ -56,11 +56,11 @@ 
func operandToString(op *core.Operand) string { } func comparisonToString(expr *core.ComparisonExpression) string { - return fmt.Sprintf("%s %s %s", operandToString(expr.LeftValue), expr.Operator.String(), operandToString(expr.RightValue)) + return fmt.Sprintf("%s %s %s", operandToString(expr.GetLeftValue()), expr.GetOperator().String(), operandToString(expr.GetRightValue())) } func conjunctionToString(expr *core.ConjunctionExpression) string { - return fmt.Sprintf("(%s) %s (%s)", booleanExprToString(expr.LeftExpression), expr.Operator.String(), booleanExprToString(expr.RightExpression)) + return fmt.Sprintf("(%s) %s (%s)", booleanExprToString(expr.GetLeftExpression()), expr.GetOperator().String(), booleanExprToString(expr.GetRightExpression())) } func booleanExprToString(expr *core.BooleanExpression) string { @@ -86,9 +86,9 @@ func constructEndNode(parentGraph string, n string, graph Graphvizer) (*graphviz func constructTaskNode(parentGraph string, name string, graph Graphvizer, n *core.Node, t *core.CompiledTask) (*graphviz.Node, error) { attrs := map[string]string{ShapeType: BoxShape} - if n.Metadata != nil && n.Metadata.Name != "" { - v := strings.LastIndexAny(n.Metadata.Name, ".") - attrs[LabelAttr] = fmt.Sprintf("\"%s [%s]\"", n.Metadata.Name[v+1:], t.Template.Type) + if n.GetMetadata() != nil && n.GetMetadata().GetName() != "" { + v := strings.LastIndexAny(n.GetMetadata().GetName(), ".") + attrs[LabelAttr] = fmt.Sprintf("\"%s [%s]\"", n.GetMetadata().GetName()[v+1:], t.GetTemplate().GetType()) } tName := strings.ReplaceAll(name, "-", "_") err := graph.AddNode(parentGraph, tName, attrs) @@ -104,8 +104,8 @@ func constructErrorNode(parentGraph string, name string, graph Graphvizer, m str func constructBranchConditionNode(parentGraph string, name string, graph Graphvizer, n *core.Node) (*graphviz.Node, error) { attrs := map[string]string{ShapeType: DiamondShape} - if n.Metadata != nil && n.Metadata.Name != "" { - attrs[LabelAttr] = fmt.Sprintf("\"[%s]\"", 
n.Metadata.Name) + if n.GetMetadata() != nil && n.GetMetadata().GetName() != "" { + attrs[LabelAttr] = fmt.Sprintf("\"[%s]\"", n.GetMetadata().GetName()) } cName := strings.ReplaceAll(name, "-", "_") err := graph.AddNode(parentGraph, cName, attrs) @@ -151,27 +151,27 @@ func (gb *graphBuilder) addBranchSubNodeEdge(graph Graphvizer, parentNode, n *gr } func (gb *graphBuilder) constructBranchNode(parentGraph string, prefix string, graph Graphvizer, n *core.Node) (*graphviz.Node, error) { - parentBranchNode, err := constructBranchConditionNode(parentGraph, getName(prefix, n.Id), graph, n) + parentBranchNode, err := constructBranchConditionNode(parentGraph, getName(prefix, n.GetId()), graph, n) if err != nil { return nil, err } - gb.graphNodes[n.Id] = parentBranchNode + gb.graphNodes[n.GetId()] = parentBranchNode if n.GetBranchNode().GetIfElse() == nil { return parentBranchNode, nil } - subNode, err := gb.constructNode(parentGraph, prefix, graph, n.GetBranchNode().GetIfElse().Case.ThenNode) + subNode, err := gb.constructNode(parentGraph, prefix, graph, n.GetBranchNode().GetIfElse().GetCase().GetThenNode()) if err != nil { return nil, err } - if err := gb.addBranchSubNodeEdge(graph, parentBranchNode, subNode, booleanExprToString(n.GetBranchNode().GetIfElse().Case.Condition)); err != nil { + if err := gb.addBranchSubNodeEdge(graph, parentBranchNode, subNode, booleanExprToString(n.GetBranchNode().GetIfElse().GetCase().GetCondition())); err != nil { return nil, err } if n.GetBranchNode().GetIfElse().GetError() != nil { name := fmt.Sprintf("%s-error", parentBranchNode.Name) - subNode, err := constructErrorNode(prefix, name, graph, n.GetBranchNode().GetIfElse().GetError().Message) + subNode, err := constructErrorNode(prefix, name, graph, n.GetBranchNode().GetIfElse().GetError().GetMessage()) if err != nil { return nil, err } @@ -191,11 +191,11 @@ func (gb *graphBuilder) constructBranchNode(parentGraph string, prefix string, g if n.GetBranchNode().GetIfElse().GetOther() != nil 
{ for _, c := range n.GetBranchNode().GetIfElse().GetOther() { - subNode, err := gb.constructNode(parentGraph, prefix, graph, c.ThenNode) + subNode, err := gb.constructNode(parentGraph, prefix, graph, c.GetThenNode()) if err != nil { return nil, err } - if err := gb.addBranchSubNodeEdge(graph, parentBranchNode, subNode, booleanExprToString(c.Condition)); err != nil { + if err := gb.addBranchSubNodeEdge(graph, parentBranchNode, subNode, booleanExprToString(c.GetCondition())); err != nil { return nil, err } } @@ -204,18 +204,18 @@ func (gb *graphBuilder) constructBranchNode(parentGraph string, prefix string, g } func (gb *graphBuilder) constructNode(parentGraphName string, prefix string, graph Graphvizer, n *core.Node) (*graphviz.Node, error) { - name := getName(prefix, n.Id) + name := getName(prefix, n.GetId()) var err error var gn *graphviz.Node - if n.Id == StartNode { + if n.GetId() == StartNode { gn, err = constructStartNode(parentGraphName, strings.ReplaceAll(name, "-", "_"), graph) gb.nodeClusters[name] = parentGraphName - } else if n.Id == EndNode { + } else if n.GetId() == EndNode { gn, err = constructEndNode(parentGraphName, strings.ReplaceAll(name, "-", "_"), graph) gb.nodeClusters[name] = parentGraphName } else { - switch n.Target.(type) { + switch n.GetTarget().(type) { case *core.Node_TaskNode: tID := n.GetTaskNode().GetReferenceId().String() t, ok := gb.tasks[tID] @@ -228,7 +228,7 @@ func (gb *graphBuilder) constructNode(parentGraphName string, prefix string, gra } gb.nodeClusters[name] = parentGraphName case *core.Node_BranchNode: - sanitizedName := strings.ReplaceAll(n.Metadata.Name, "-", "_") + sanitizedName := strings.ReplaceAll(n.GetMetadata().GetName(), "-", "_") branchSubGraphName := SubgraphPrefix + sanitizedName err := graph.AddSubGraph(parentGraphName, branchSubGraphName, map[string]string{LabelAttr: sanitizedName}) if err != nil { @@ -269,7 +269,7 @@ func (gb *graphBuilder) constructNode(parentGraphName string, prefix string, gra if err != 
nil { return nil, err } - gb.graphNodes[n.Id] = gn + gb.graphNodes[n.GetId()] = gn return gn, nil } @@ -298,27 +298,27 @@ func (gb *graphBuilder) addEdge(fromNodeName, toNodeName string, graph Graphvize } func (gb *graphBuilder) constructGraph(parentGraphName string, prefix string, graph Graphvizer, w *core.CompiledWorkflow) error { - if w == nil || w.Template == nil { + if w == nil || w.GetTemplate() == nil { return nil } - for _, n := range w.Template.Nodes { + for _, n := range w.GetTemplate().GetNodes() { if _, err := gb.constructNode(parentGraphName, prefix, graph, n); err != nil { return err } } for name := range gb.graphNodes { - upstreamNodes := w.Connections.Upstream[name] - downstreamNodes := w.Connections.Downstream[name] + upstreamNodes := w.GetConnections().GetUpstream()[name] + downstreamNodes := w.GetConnections().GetDownstream()[name] if downstreamNodes != nil { - for _, n := range downstreamNodes.Ids { + for _, n := range downstreamNodes.GetIds() { if err := gb.addEdge(name, n, graph); err != nil { return err } } } if upstreamNodes != nil { - for _, n := range upstreamNodes.Ids { + for _, n := range upstreamNodes.GetIds() { if err := gb.addEdge(n, name, graph); err != nil { return err } @@ -334,23 +334,23 @@ func (gb *graphBuilder) CompiledWorkflowClosureToGraph(w *core.CompiledWorkflowC _ = dotGraph.SetStrict(true) tLookup := make(map[string]*core.CompiledTask) - for _, t := range w.Tasks { - if t.Template == nil || t.Template.Id == nil { + for _, t := range w.GetTasks() { + if t.GetTemplate() == nil || t.GetTemplate().GetId() == nil { return FlyteGraph{}, fmt.Errorf("no template found in the workflow task %v", t) } - tLookup[t.Template.Id.String()] = t + tLookup[t.GetTemplate().GetId().String()] = t } gb.tasks = tLookup wLookup := make(map[string]*core.CompiledWorkflow) - for _, swf := range w.SubWorkflows { - if swf.Template == nil || swf.Template.Id == nil { + for _, swf := range w.GetSubWorkflows() { + if swf.GetTemplate() == nil || 
swf.GetTemplate().GetId() == nil { return FlyteGraph{}, fmt.Errorf("no template found in the sub workflow %v", swf) } - wLookup[swf.Template.Id.String()] = swf + wLookup[swf.GetTemplate().GetId().String()] = swf } gb.subWf = wLookup - return dotGraph, gb.constructGraph("", "", dotGraph, w.Primary) + return dotGraph, gb.constructGraph("", "", dotGraph, w.GetPrimary()) } func newGraphBuilder() *graphBuilder { diff --git a/flyteidl/.golangci.yml b/flyteidl/.golangci.yml index 7714cbe5a3..8eda34cffe 100644 --- a/flyteidl/.golangci.yml +++ b/flyteidl/.golangci.yml @@ -1,31 +1,22 @@ -# WARNING: THIS FILE IS MANAGED IN THE 'BOILERPLATE' REPO AND COPIED TO OTHER REPOSITORIES. -# ONLY EDIT THIS FILE FROM WITHIN THE 'FLYTEORG/BOILERPLATE' REPOSITORY: -# -# TO OPT OUT OF UPDATES, SEE https://github.com/flyteorg/boilerplate/blob/master/Readme.rst - run: skip-dirs: - - pkg/client + - pkg/client - gen - linters: disable-all: true enable: - - deadcode - errcheck - - gas + - gosec - goconst - goimports - - golint - gosimple - govet - ineffassign - misspell - nakedret - staticcheck - - structcheck - typecheck - unconvert - unparam - unused - - varcheck + - protogetter diff --git a/flyteidl/clients/go/admin/auth_interceptor.go b/flyteidl/clients/go/admin/auth_interceptor.go index 5d3d9fd92f..802db2cb0e 100644 --- a/flyteidl/clients/go/admin/auth_interceptor.go +++ b/flyteidl/clients/go/admin/auth_interceptor.go @@ -13,6 +13,7 @@ import ( "google.golang.org/grpc/status" "github.com/flyteorg/flyte/flyteidl/clients/go/admin/cache" + "github.com/flyteorg/flyte/flyteidl/clients/go/admin/utils" "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/service" "github.com/flyteorg/flyte/flytestdlib/logger" ) @@ -23,7 +24,6 @@ const ProxyAuthorizationHeader = "proxy-authorization" // Once established, it'll invoke PerRPCCredentialsFuture.Store() on perRPCCredentials to populate it with the appropriate values. 
func MaterializeCredentials(tokenSource oauth2.TokenSource, cfg *Config, authorizationMetadataKey string, perRPCCredentials *PerRPCCredentialsFuture) error { - _, err := tokenSource.Token() if err != nil { return fmt.Errorf("failed to issue token. Error: %w", err) @@ -35,6 +35,19 @@ func MaterializeCredentials(tokenSource oauth2.TokenSource, cfg *Config, authori return nil } +// MaterializeInMemoryCredentials initializes the perRPCCredentials with the token source containing in memory cached token. +// This path doesn't perform the token refresh and only build the cred source with cached token. +func MaterializeInMemoryCredentials(ctx context.Context, cfg *Config, tokenCache cache.TokenCache, + perRPCCredentials *PerRPCCredentialsFuture, authorizationMetadataKey string) error { + tokenSource, err := NewInMemoryTokenSourceProvider(tokenCache).GetTokenSource(ctx) + if err != nil { + return fmt.Errorf("failed to get token source. Error: %w", err) + } + wrappedTokenSource := NewCustomHeaderTokenSource(tokenSource, cfg.UseInsecureConnection, authorizationMetadataKey) + perRPCCredentials.Store(wrappedTokenSource) + return nil +} + func GetProxyTokenSource(ctx context.Context, cfg *Config) (oauth2.TokenSource, error) { tokenSourceProvider, err := NewExternalTokenSourceProvider(cfg.ProxyCommand) if err != nil { @@ -93,7 +106,7 @@ func setHTTPClientContext(ctx context.Context, cfg *Config, proxyCredentialsFutu transport.Proxy = http.ProxyURL(&cfg.HTTPProxyURL.URL) } - if cfg.ProxyCommand != nil && len(cfg.ProxyCommand) > 0 { + if len(cfg.ProxyCommand) > 0 { httpClient.Transport = &proxyAuthTransport{ transport: transport, proxyCredentialsFuture: proxyCredentialsFuture, @@ -130,7 +143,7 @@ func (o *OauthMetadataProvider) getTokenSourceAndMetadata(cfg *Config, tokenCach if err != nil { return fmt.Errorf("failed to fetch client metadata. 
Error: %v", err) } - authorizationMetadataKey = clientMetadata.AuthorizationMetadataKey + authorizationMetadataKey = clientMetadata.GetAuthorizationMetadataKey() } tokenSource, err := tokenSourceProvider.GetTokenSource(ctx) @@ -152,6 +165,7 @@ func (o *OauthMetadataProvider) GetOauthMetadata(cfg *Config, tokenCache cache.T if err != nil { logger.Errorf(context.Background(), "Failed to load token related config. Error: %v", err) } + logger.Debugf(context.Background(), "Successfully loaded token related metadata") }) if err != nil { return err @@ -176,22 +190,21 @@ func NewAuthInterceptor(cfg *Config, tokenCache cache.TokenCache, credentialsFut } return func(ctx context.Context, method string, req, reply interface{}, cc *grpc.ClientConn, invoker grpc.UnaryInvoker, opts ...grpc.CallOption) error { - ctx = setHTTPClientContext(ctx, cfg, proxyCredentialsFuture) - // If there is already a token in the cache (e.g. key-ring), we should use it immediately... t, _ := tokenCache.GetToken() if t != nil { + err := oauthMetadataProvider.GetOauthMetadata(cfg, tokenCache, proxyCredentialsFuture) if err != nil { return err } authorizationMetadataKey := oauthMetadataProvider.authorizationMetadataKey - tokenSource := oauthMetadataProvider.tokenSource - - err = MaterializeCredentials(tokenSource, cfg, authorizationMetadataKey, credentialsFuture) - if err != nil { - return fmt.Errorf("failed to materialize credentials. Error: %v", err) + if isValid := utils.Valid(t); isValid { + err := MaterializeInMemoryCredentials(ctx, cfg, tokenCache, credentialsFuture, authorizationMetadataKey) + if err != nil { + return fmt.Errorf("failed to materialize credentials. 
Error: %v", err) + } } } @@ -208,13 +221,11 @@ func NewAuthInterceptor(cfg *Config, tokenCache cache.TokenCache, credentialsFut } authorizationMetadataKey := oauthMetadataProvider.authorizationMetadataKey tokenSource := oauthMetadataProvider.tokenSource - err = func() error { if !tokenCache.TryLock() { tokenCache.CondWait() return nil } - defer tokenCache.Unlock() _, err := tokenCache.PurgeIfEquals(t) if err != nil && !errors.Is(err, cache.ErrNotFound) { @@ -227,7 +238,7 @@ func NewAuthInterceptor(cfg *Config, tokenCache cache.TokenCache, credentialsFut if newErr != nil { errString := fmt.Sprintf("authentication error! Original Error: %v, Auth Error: %v", err, newErr) logger.Errorf(ctx, errString) - return fmt.Errorf(errString) + return fmt.Errorf(errString) //nolint } tokenCache.CondBroadcast() @@ -237,6 +248,7 @@ func NewAuthInterceptor(cfg *Config, tokenCache cache.TokenCache, credentialsFut if err != nil { return err } + return invoker(ctx, method, req, reply, cc, opts...) } } @@ -257,6 +269,7 @@ func NewProxyAuthInterceptor(cfg *Config, proxyCredentialsFuture *PerRPCCredenti } return invoker(ctx, method, req, reply, cc, opts...) 
} + return err } } diff --git a/flyteidl/clients/go/admin/auth_interceptor_test.go b/flyteidl/clients/go/admin/auth_interceptor_test.go index b03171c825..0dee7428bc 100644 --- a/flyteidl/clients/go/admin/auth_interceptor_test.go +++ b/flyteidl/clients/go/admin/auth_interceptor_test.go @@ -2,17 +2,16 @@ package admin import ( "context" - "encoding/json" "errors" "fmt" "io" "net" "net/http" "net/url" - "os" "strings" "sync" "testing" + "time" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/mock" @@ -24,7 +23,7 @@ import ( "github.com/flyteorg/flyte/flyteidl/clients/go/admin/cache/mocks" adminMocks "github.com/flyteorg/flyte/flyteidl/clients/go/admin/mocks" - + "github.com/flyteorg/flyte/flyteidl/clients/go/admin/utils" "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/service" "github.com/flyteorg/flyte/flytestdlib/config" "github.com/flyteorg/flyte/flytestdlib/logger" @@ -137,10 +136,7 @@ func newAuthMetadataServer(t testing.TB, grpcPort int, httpPort int, impl servic } func Test_newAuthInterceptor(t *testing.T) { - plan, _ := os.ReadFile("tokenorchestrator/testdata/token.json") - var tokenData oauth2.Token - err := json.Unmarshal(plan, &tokenData) - assert.NoError(t, err) + tokenData := utils.GenTokenWithCustomExpiry(t, time.Now().Add(20*time.Minute)) t.Run("Other Error", func(t *testing.T) { ctx := context.Background() httpPort := rand.IntnRange(10000, 60000) @@ -164,7 +160,8 @@ func Test_newAuthInterceptor(t *testing.T) { f := NewPerRPCCredentialsFuture() p := NewPerRPCCredentialsFuture() mockTokenCache := &mocks.TokenCache{} - mockTokenCache.OnGetTokenMatch().Return(&tokenData, nil) + + mockTokenCache.OnGetTokenMatch().Return(tokenData, nil) mockTokenCache.OnSaveTokenMatch(mock.Anything).Return(nil) interceptor := NewAuthInterceptor(&Config{ Endpoint: config.URL{URL: *u}, diff --git a/flyteidl/clients/go/admin/client.go b/flyteidl/clients/go/admin/client.go index 9f14d49dee..6f6b5d46fb 100644 --- a/flyteidl/clients/go/admin/client.go +++ 
b/flyteidl/clients/go/admin/client.go @@ -73,7 +73,7 @@ func GetAdditionalAdminClientConfigOptions(cfg *Config) []grpc.DialOption { opts = append(opts, grpc.WithBackoffConfig(backoffConfig)) timeoutDialOption := grpcRetry.WithPerRetryTimeout(cfg.PerRetryTimeout.Duration) - maxRetriesOption := grpcRetry.WithMax(uint(cfg.MaxRetries)) + maxRetriesOption := grpcRetry.WithMax(uint(cfg.MaxRetries)) // #nosec G115 retryInterceptor := grpcRetry.UnaryClientInterceptor(timeoutDialOption, maxRetriesOption) // We only make unary calls in this client, no streaming calls. We can add a streaming interceptor if admin @@ -101,7 +101,7 @@ func getAuthenticationDialOption(ctx context.Context, cfg *Config, tokenSourcePr if err != nil { return nil, fmt.Errorf("failed to fetch client metadata. Error: %v", err) } - authorizationMetadataKey = clientMetadata.AuthorizationMetadataKey + authorizationMetadataKey = clientMetadata.GetAuthorizationMetadataKey() } tokenSource, err := tokenSourceProvider.GetTokenSource(ctx) @@ -157,7 +157,7 @@ func NewAdminConnection(ctx context.Context, cfg *Config, proxyCredentialsFuture opts = append(opts, GetAdditionalAdminClientConfigOptions(cfg)...) 
- if cfg.ProxyCommand != nil && len(cfg.ProxyCommand) > 0 { + if len(cfg.ProxyCommand) > 0 { opts = append(opts, grpc.WithChainUnaryInterceptor(NewProxyAuthInterceptor(cfg, proxyCredentialsFuture))) opts = append(opts, grpc.WithPerRPCCredentials(proxyCredentialsFuture)) } diff --git a/flyteidl/clients/go/admin/client_test.go b/flyteidl/clients/go/admin/client_test.go index 042a826692..e61f066c26 100644 --- a/flyteidl/clients/go/admin/client_test.go +++ b/flyteidl/clients/go/admin/client_test.go @@ -2,13 +2,10 @@ package admin import ( "context" - "encoding/json" "errors" "fmt" - "io/ioutil" "net/http" "net/url" - "os" "testing" "time" @@ -24,6 +21,7 @@ import ( "github.com/flyteorg/flyte/flyteidl/clients/go/admin/oauth" "github.com/flyteorg/flyte/flyteidl/clients/go/admin/pkce" "github.com/flyteorg/flyte/flyteidl/clients/go/admin/tokenorchestrator" + "github.com/flyteorg/flyte/flyteidl/clients/go/admin/utils" "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/service" "github.com/flyteorg/flyte/flytestdlib/config" "github.com/flyteorg/flyte/flytestdlib/logger" @@ -231,15 +229,11 @@ func TestGetAuthenticationDialOptionPkce(t *testing.T) { RedirectUri: "http://localhost:54545/callback", } http.DefaultServeMux = http.NewServeMux() - plan, _ := os.ReadFile("tokenorchestrator/testdata/token.json") - var tokenData oauth2.Token - err := json.Unmarshal(plan, &tokenData) - assert.NoError(t, err) - tokenData.Expiry = time.Now().Add(time.Minute) + tokenData := utils.GenTokenWithCustomExpiry(t, time.Now().Add(time.Minute)) t.Run("cache hit", func(t *testing.T) { mockTokenCache := new(cachemocks.TokenCache) mockAuthClient := new(mocks.AuthMetadataServiceClient) - mockTokenCache.OnGetTokenMatch().Return(&tokenData, nil) + mockTokenCache.OnGetTokenMatch().Return(tokenData, nil) mockTokenCache.OnSaveTokenMatch(mock.Anything).Return(nil) mockAuthClient.OnGetOAuth2MetadataMatch(mock.Anything, mock.Anything).Return(metadata, nil) 
mockAuthClient.OnGetPublicClientConfigMatch(mock.Anything, mock.Anything).Return(clientMetatadata, nil) @@ -249,11 +243,11 @@ func TestGetAuthenticationDialOptionPkce(t *testing.T) { assert.NotNil(t, dialOption) assert.Nil(t, err) }) - tokenData.Expiry = time.Now().Add(-time.Minute) t.Run("cache miss auth failure", func(t *testing.T) { + tokenData = utils.GenTokenWithCustomExpiry(t, time.Now().Add(-time.Minute)) mockTokenCache := new(cachemocks.TokenCache) mockAuthClient := new(mocks.AuthMetadataServiceClient) - mockTokenCache.OnGetTokenMatch().Return(&tokenData, nil) + mockTokenCache.OnGetTokenMatch().Return(tokenData, nil) mockTokenCache.OnSaveTokenMatch(mock.Anything).Return(nil) mockTokenCache.On("Lock").Return() mockTokenCache.On("Unlock").Return() @@ -284,14 +278,11 @@ func Test_getPkceAuthTokenSource(t *testing.T) { mockAuthClient.OnGetPublicClientConfigMatch(mock.Anything, mock.Anything).Return(clientMetatadata, nil) t.Run("cached token expired", func(t *testing.T) { - plan, _ := ioutil.ReadFile("tokenorchestrator/testdata/token.json") - var tokenData oauth2.Token - err := json.Unmarshal(plan, &tokenData) - assert.NoError(t, err) + tokenData := utils.GenTokenWithCustomExpiry(t, time.Now().Add(-time.Minute)) // populate the cache tokenCache := cache.NewTokenCacheInMemoryProvider() - assert.NoError(t, tokenCache.SaveToken(&tokenData)) + assert.NoError(t, tokenCache.SaveToken(tokenData)) baseOrchestrator := tokenorchestrator.BaseTokenOrchestrator{ ClientConfig: &oauth.Config{ diff --git a/flyteidl/clients/go/admin/oauth/config.go b/flyteidl/clients/go/admin/oauth/config.go index f0a8b9afa4..94055f678a 100644 --- a/flyteidl/clients/go/admin/oauth/config.go +++ b/flyteidl/clients/go/admin/oauth/config.go @@ -30,16 +30,16 @@ func BuildConfigFromMetadataService(ctx context.Context, authMetadataClient serv clientConf = &Config{ Config: &oauth2.Config{ - ClientID: clientResp.ClientId, - RedirectURL: clientResp.RedirectUri, - Scopes: clientResp.Scopes, + ClientID: 
clientResp.GetClientId(), + RedirectURL: clientResp.GetRedirectUri(), + Scopes: clientResp.GetScopes(), Endpoint: oauth2.Endpoint{ - TokenURL: oauthMetaResp.TokenEndpoint, - AuthURL: oauthMetaResp.AuthorizationEndpoint, + TokenURL: oauthMetaResp.GetTokenEndpoint(), + AuthURL: oauthMetaResp.GetAuthorizationEndpoint(), }, }, - DeviceEndpoint: oauthMetaResp.DeviceAuthorizationEndpoint, - Audience: clientResp.Audience, + DeviceEndpoint: oauthMetaResp.GetDeviceAuthorizationEndpoint(), + Audience: clientResp.GetAudience(), } return clientConf, nil diff --git a/flyteidl/clients/go/admin/token_source_provider.go b/flyteidl/clients/go/admin/token_source_provider.go index 4ecfa59215..b0ab0ce3e1 100644 --- a/flyteidl/clients/go/admin/token_source_provider.go +++ b/flyteidl/clients/go/admin/token_source_provider.go @@ -20,6 +20,7 @@ import ( "github.com/flyteorg/flyte/flyteidl/clients/go/admin/externalprocess" "github.com/flyteorg/flyte/flyteidl/clients/go/admin/pkce" "github.com/flyteorg/flyte/flyteidl/clients/go/admin/tokenorchestrator" + "github.com/flyteorg/flyte/flyteidl/clients/go/admin/utils" "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/service" "github.com/flyteorg/flyte/flytestdlib/logger" ) @@ -53,7 +54,7 @@ func NewTokenSourceProvider(ctx context.Context, cfg *Config, tokenCache cache.T return nil, fmt.Errorf("failed to fetch auth metadata. 
Error: %v", err) } - tokenURL = metadata.TokenEndpoint + tokenURL = metadata.GetTokenEndpoint() } scopes := cfg.Scopes @@ -66,11 +67,11 @@ func NewTokenSourceProvider(ctx context.Context, cfg *Config, tokenCache cache.T } // Update scopes from publicClientConfig if len(scopes) == 0 { - scopes = publicClientConfig.Scopes + scopes = publicClientConfig.GetScopes() } // Update audience from publicClientConfig if cfg.UseAudienceFromAdmin { - audienceValue = publicClientConfig.Audience + audienceValue = publicClientConfig.GetAudience() } } @@ -229,8 +230,14 @@ func (s *customTokenSource) Token() (*oauth2.Token, error) { s.mu.Lock() defer s.mu.Unlock() - if token, err := s.tokenCache.GetToken(); err == nil && token.Valid() { - return token, nil + token, err := s.tokenCache.GetToken() + if err != nil { + logger.Warnf(s.ctx, "failed to get token from cache: %v", err) + } else { + if isValid := utils.Valid(token); isValid { + logger.Infof(context.Background(), "retrieved token from cache with expiry %v", token.Expiry) + return token, nil + } } totalAttempts := s.cfg.MaxRetries + 1 // Add one for initial request attempt @@ -238,19 +245,21 @@ func (s *customTokenSource) Token() (*oauth2.Token, error) { Duration: s.cfg.PerRetryTimeout.Duration, Steps: totalAttempts, } - var token *oauth2.Token - err := retry.OnError(backoff, func(err error) bool { + + err = retry.OnError(backoff, func(err error) bool { return err != nil }, func() (err error) { token, err = s.new.Token() if err != nil { - logger.Infof(s.ctx, "failed to get token: %w", err) - return fmt.Errorf("failed to get token: %w", err) + logger.Infof(s.ctx, "failed to get new token: %w", err) + return fmt.Errorf("failed to get new token: %w", err) } + logger.Infof(context.Background(), "Fetched new token with expiry %v", token.Expiry) return nil }) if err != nil { - return nil, err + logger.Warnf(s.ctx, "failed to get new token: %v", err) + return nil, fmt.Errorf("failed to get new token: %w", err) } logger.Infof(s.ctx, 
"retrieved token with expiry %v", token.Expiry) @@ -262,6 +271,29 @@ func (s *customTokenSource) Token() (*oauth2.Token, error) { return token, nil } +type InMemoryTokenSourceProvider struct { + tokenCache cache.TokenCache +} + +func NewInMemoryTokenSourceProvider(tokenCache cache.TokenCache) TokenSourceProvider { + return InMemoryTokenSourceProvider{tokenCache: tokenCache} +} + +func (i InMemoryTokenSourceProvider) GetTokenSource(ctx context.Context) (oauth2.TokenSource, error) { + return GetInMemoryAuthTokenSource(ctx, i.tokenCache) +} + +// GetInMemoryAuthTokenSource Returns the token source with cached token +func GetInMemoryAuthTokenSource(ctx context.Context, tokenCache cache.TokenCache) (oauth2.TokenSource, error) { + authToken, err := tokenCache.GetToken() + if err != nil { + return nil, err + } + return &pkce.SimpleTokenSource{ + CachedToken: authToken, + }, nil +} + type DeviceFlowTokenSourceProvider struct { tokenOrchestrator deviceflow.TokenOrchestrator } diff --git a/flyteidl/clients/go/admin/token_source_provider_test.go b/flyteidl/clients/go/admin/token_source_provider_test.go index 43d0fdd928..941b697e75 100644 --- a/flyteidl/clients/go/admin/token_source_provider_test.go +++ b/flyteidl/clients/go/admin/token_source_provider_test.go @@ -13,6 +13,7 @@ import ( tokenCacheMocks "github.com/flyteorg/flyte/flyteidl/clients/go/admin/cache/mocks" adminMocks "github.com/flyteorg/flyte/flyteidl/clients/go/admin/mocks" + "github.com/flyteorg/flyte/flyteidl/clients/go/admin/utils" "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/service" ) @@ -88,9 +89,9 @@ func TestCustomTokenSource_Token(t *testing.T) { minuteAgo := time.Now().Add(-time.Minute) hourAhead := time.Now().Add(time.Hour) twoHourAhead := time.Now().Add(2 * time.Hour) - invalidToken := oauth2.Token{AccessToken: "foo", Expiry: minuteAgo} - validToken := oauth2.Token{AccessToken: "foo", Expiry: hourAhead} - newToken := oauth2.Token{AccessToken: "foo", Expiry: twoHourAhead} + invalidToken := 
utils.GenTokenWithCustomExpiry(t, minuteAgo) + validToken := utils.GenTokenWithCustomExpiry(t, hourAhead) + newToken := utils.GenTokenWithCustomExpiry(t, twoHourAhead) tests := []struct { name string @@ -101,24 +102,24 @@ func TestCustomTokenSource_Token(t *testing.T) { { name: "no cached token", token: nil, - newToken: &newToken, - expectedToken: &newToken, + newToken: newToken, + expectedToken: newToken, }, { name: "cached token valid", - token: &validToken, + token: validToken, newToken: nil, - expectedToken: &validToken, + expectedToken: validToken, }, { name: "cached token expired", - token: &invalidToken, - newToken: &newToken, - expectedToken: &newToken, + token: invalidToken, + newToken: newToken, + expectedToken: newToken, }, { name: "failed new token", - token: &invalidToken, + token: invalidToken, newToken: nil, expectedToken: nil, }, @@ -138,7 +139,7 @@ func TestCustomTokenSource_Token(t *testing.T) { assert.True(t, ok) mockSource := &adminMocks.TokenSource{} - if test.token != &validToken { + if test.token != validToken { if test.newToken != nil { mockSource.OnToken().Return(test.newToken, nil) } else { diff --git a/flyteidl/clients/go/admin/tokenorchestrator/base_token_orchestrator.go b/flyteidl/clients/go/admin/tokenorchestrator/base_token_orchestrator.go index 4fd3fa476c..441127ce07 100644 --- a/flyteidl/clients/go/admin/tokenorchestrator/base_token_orchestrator.go +++ b/flyteidl/clients/go/admin/tokenorchestrator/base_token_orchestrator.go @@ -8,6 +8,7 @@ import ( "github.com/flyteorg/flyte/flyteidl/clients/go/admin/cache" "github.com/flyteorg/flyte/flyteidl/clients/go/admin/oauth" + "github.com/flyteorg/flyte/flyteidl/clients/go/admin/utils" "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/service" "github.com/flyteorg/flyte/flytestdlib/config" "github.com/flyteorg/flyte/flytestdlib/logger" @@ -52,7 +53,8 @@ func (t BaseTokenOrchestrator) FetchTokenFromCacheOrRefreshIt(ctx context.Contex return nil, err } - if token.Valid() { + if isValid := 
utils.Valid(token); isValid { + logger.Infof(context.Background(), "retrieved token from cache with expiry %v", token.Expiry) return token, nil } diff --git a/flyteidl/clients/go/admin/tokenorchestrator/base_token_orchestrator_test.go b/flyteidl/clients/go/admin/tokenorchestrator/base_token_orchestrator_test.go index 0a1a9f4985..d7e5ca07b2 100644 --- a/flyteidl/clients/go/admin/tokenorchestrator/base_token_orchestrator_test.go +++ b/flyteidl/clients/go/admin/tokenorchestrator/base_token_orchestrator_test.go @@ -2,8 +2,6 @@ package tokenorchestrator import ( "context" - "encoding/json" - "os" "testing" "time" @@ -15,6 +13,7 @@ import ( cacheMocks "github.com/flyteorg/flyte/flyteidl/clients/go/admin/cache/mocks" "github.com/flyteorg/flyte/flyteidl/clients/go/admin/mocks" "github.com/flyteorg/flyte/flyteidl/clients/go/admin/oauth" + "github.com/flyteorg/flyte/flyteidl/clients/go/admin/utils" "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/service" "github.com/flyteorg/flyte/flytestdlib/config" ) @@ -32,12 +31,9 @@ func TestRefreshTheToken(t *testing.T) { TokenCache: tokenCacheProvider, } - plan, _ := os.ReadFile("testdata/token.json") - var tokenData oauth2.Token - err := json.Unmarshal(plan, &tokenData) - assert.Nil(t, err) t.Run("bad url in Config", func(t *testing.T) { - refreshedToken, err := orchestrator.RefreshToken(ctx, &tokenData) + tokenData := utils.GenTokenWithCustomExpiry(t, time.Now().Add(-20*time.Minute)) + refreshedToken, err := orchestrator.RefreshToken(ctx, tokenData) assert.Nil(t, refreshedToken) assert.NotNil(t, err) }) @@ -72,12 +68,8 @@ func TestFetchFromCache(t *testing.T) { tokenCacheProvider := cache.NewTokenCacheInMemoryProvider() orchestrator, err := NewBaseTokenOrchestrator(ctx, tokenCacheProvider, mockAuthClient) assert.NoError(t, err) - fileData, _ := os.ReadFile("testdata/token.json") - var tokenData oauth2.Token - err = json.Unmarshal(fileData, &tokenData) - assert.Nil(t, err) - tokenData.Expiry = time.Now().Add(20 * time.Minute) 
- err = tokenCacheProvider.SaveToken(&tokenData) + tokenData := utils.GenTokenWithCustomExpiry(t, time.Now().Add(20*time.Minute)) + err = tokenCacheProvider.SaveToken(tokenData) assert.Nil(t, err) cachedToken, err := orchestrator.FetchTokenFromCacheOrRefreshIt(ctx, config.Duration{Duration: 5 * time.Minute}) assert.Nil(t, err) @@ -89,12 +81,8 @@ func TestFetchFromCache(t *testing.T) { tokenCacheProvider := cache.NewTokenCacheInMemoryProvider() orchestrator, err := NewBaseTokenOrchestrator(ctx, tokenCacheProvider, mockAuthClient) assert.NoError(t, err) - fileData, _ := os.ReadFile("testdata/token.json") - var tokenData oauth2.Token - err = json.Unmarshal(fileData, &tokenData) - assert.Nil(t, err) - tokenData.Expiry = time.Now().Add(-20 * time.Minute) - err = tokenCacheProvider.SaveToken(&tokenData) + tokenData := utils.GenTokenWithCustomExpiry(t, time.Now().Add(-20*time.Minute)) + err = tokenCacheProvider.SaveToken(tokenData) assert.Nil(t, err) _, err = orchestrator.FetchTokenFromCacheOrRefreshIt(ctx, config.Duration{Duration: 5 * time.Minute}) assert.NotNil(t, err) @@ -104,12 +92,8 @@ func TestFetchFromCache(t *testing.T) { mockTokenCacheProvider := new(cacheMocks.TokenCache) orchestrator, err := NewBaseTokenOrchestrator(ctx, mockTokenCacheProvider, mockAuthClient) assert.NoError(t, err) - fileData, _ := os.ReadFile("testdata/token.json") - var tokenData oauth2.Token - err = json.Unmarshal(fileData, &tokenData) - assert.Nil(t, err) - tokenData.Expiry = time.Now().Add(20 * time.Minute) - mockTokenCacheProvider.OnGetTokenMatch(mock.Anything).Return(&tokenData, nil) + tokenData := utils.GenTokenWithCustomExpiry(t, time.Now().Add(20*time.Minute)) + mockTokenCacheProvider.OnGetTokenMatch(mock.Anything).Return(tokenData, nil) mockTokenCacheProvider.OnSaveTokenMatch(mock.Anything).Return(nil) assert.Nil(t, err) refreshedToken, err := orchestrator.FetchTokenFromCacheOrRefreshIt(ctx, config.Duration{Duration: 5 * time.Minute}) @@ -122,12 +106,8 @@ func TestFetchFromCache(t 
*testing.T) { mockTokenCacheProvider := new(cacheMocks.TokenCache) orchestrator, err := NewBaseTokenOrchestrator(ctx, mockTokenCacheProvider, mockAuthClient) assert.NoError(t, err) - fileData, _ := os.ReadFile("testdata/token.json") - var tokenData oauth2.Token - err = json.Unmarshal(fileData, &tokenData) - assert.Nil(t, err) - tokenData.Expiry = time.Now().Add(20 * time.Minute) - mockTokenCacheProvider.OnGetTokenMatch(mock.Anything).Return(&tokenData, nil) + tokenData := utils.GenTokenWithCustomExpiry(t, time.Now().Add(20*time.Minute)) + mockTokenCacheProvider.OnGetTokenMatch(mock.Anything).Return(tokenData, nil) assert.Nil(t, err) refreshedToken, err := orchestrator.FetchTokenFromCacheOrRefreshIt(ctx, config.Duration{Duration: 5 * time.Minute}) assert.Nil(t, err) diff --git a/flyteidl/clients/go/admin/tokenorchestrator/testdata/token.json b/flyteidl/clients/go/admin/tokenorchestrator/testdata/token.json deleted file mode 100644 index 721cecc5f6..0000000000 --- a/flyteidl/clients/go/admin/tokenorchestrator/testdata/token.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - 
"access_token":"eyJhbGciOiJSUzI1NiIsImtleV9pZCI6IjlLZlNILXphZjRjY1dmTlNPbm91YmZUbnItVW5kMHVuY3ctWF9KNUJVdWciLCJ0eXAiOiJKV1QifQ.eyJhdWQiOlsiaHR0cHM6Ly9kZW1vLm51Y2x5ZGUuaW8iXSwiY2xpZW50X2lkIjoiZmx5dGVjdGwiLCJleHAiOjE2MTk1Mjk5MjcsImZvcm0iOnsiY29kZV9jaGFsbGVuZ2UiOiJ2bWNxazArZnJRS3Vvb2FMUHZwUDJCeUtod2VKR2VaeG1mdGtkMml0T042Tk13SVBQNWwySmNpWDd3NTdlaS9iVW1LTWhPSjJVUERnK0F5RXRaTG94SFJiMDl1cWRKSSIsImNvZGVfY2hhbGxlbmdlX21ldGhvZCI6IlN2WEgyeDh2UDUrSkJxQ0NjT2dCL0hNWjdLSmE3bkdLMDBaUVA0ekd4WGcifSwiaWF0IjoxNjE5NTAyNTM1LCJpc3MiOiJodHRwczovL2RlbW8ubnVjbHlkZS5pbyIsImp0aSI6IjQzMTM1ZWY2LTA5NjEtNGFlZC1hOTYxLWQyZGI1YWJmM2U1YyIsInNjcCI6WyJvZmZsaW5lIiwiYWxsIiwiYWNjZXNzX3Rva2VuIl0sInN1YiI6IjExNDUyNzgxNTMwNTEyODk3NDQ3MCIsInVzZXJfaW5mbyI6eyJmYW1pbHlfbmFtZSI6Ik1haGluZHJha2FyIiwiZ2l2ZW5fbmFtZSI6IlByYWZ1bGxhIiwibmFtZSI6IlByYWZ1bGxhIE1haGluZHJha2FyIiwicGljdHVyZSI6Imh0dHBzOi8vbGgzLmdvb2dsZXVzZXJjb250ZW50LmNvbS9hLS9BT2gxNEdqdVQxazgtOGE1dkJHT0lGMWFEZ2hZbUZ4OGhEOUtOaVI1am5adT1zOTYtYyIsInN1YmplY3QiOiIxMTQ1Mjc4MTUzMDUxMjg5NzQ0NzAifX0.ojbUOy2tF6HL8fIp1FJAQchU2MimlVMr3EGVPxMvYyahpW5YsWh6mz7qn4vpEnBuYZDf6cTaN50pJ8krlDX9RqtxF3iEfV2ZYHwyKMThI9sWh_kEBgGwUpyHyk98ZeqQX1uFOH3iwwhR-lPPUlpgdFGzKsxfxeFLOtu1y0V7BgA08KFqgYzl0lJqDYWBkJh_wUAv5g_r0NzSQCsMqb-B3Lno5ScMnlA3SZ_Hg-XdW8hnFIlrwJj4Cv47j3fcZxpqLbTNDXWWogmRbJb3YPlgn_LEnRAyZnFERHKMCE9vaBSTu-1Qstp-gRTORjyV7l3y680dEygQS-99KV3OSBlz6g", - "token_type":"bearer", - 
"refresh_token":"eyJhbGciOiJSUzI1NiIsImtleV9pZCI6IjlLZlNILXphZjRjY1dmTlNPbm91YmZUbnItVW5kMHVuY3ctWF9KNUJVdWciLCJ0eXAiOiJKV1QifQ.eyJhdWQiOlsiaHR0cHM6Ly9kZW1vLm51Y2x5ZGUuaW8iXSwiY2xpZW50X2lkIjoiZmx5dGVjdGwiLCJleHAiOjE2MTk1MzM1MjcsImZvcm0iOnsiY29kZV9jaGFsbGVuZ2UiOiJ2bWNxazArZnJRS3Vvb2FMUHZwUDJCeUtod2VKR2VaeG1mdGtkMml0T042Tk13SVBQNWwySmNpWDd3NTdlaS9iVW1LTWhPSjJVUERnK0F5RXRaTG94SFJiMDl1cWRKSSIsImNvZGVfY2hhbGxlbmdlX21ldGhvZCI6IlN2WEgyeDh2UDUrSkJxQ0NjT2dCL0hNWjdLSmE3bkdLMDBaUVA0ekd4WGcifSwiaWF0IjoxNjE5NTAyNTM1LCJpc3MiOiJodHRwczovL2RlbW8ubnVjbHlkZS5pbyIsImp0aSI6IjQzMTM1ZWY2LTA5NjEtNGFlZC1hOTYxLWQyZGI1YWJmM2U1YyIsInNjcCI6WyJvZmZsaW5lIiwiZi5hbGwiLCJhY2Nlc3NfdG9rZW4iXSwic3ViIjoiMTE0NTI3ODE1MzA1MTI4OTc0NDcwIiwidXNlcl9pbmZvIjp7ImZhbWlseV9uYW1lIjoiTWFoaW5kcmFrYXIiLCJnaXZlbl9uYW1lIjoiUHJhZnVsbGEiLCJuYW1lIjoiUHJhZnVsbGEgTWFoaW5kcmFrYXIiLCJwaWN0dXJlIjoiaHR0cHM6Ly9saDMuZ29vZ2xldXNlcmNvbnRlbnQuY29tL2EtL0FPaDE0R2p1VDFrOC04YTV2QkdPSUYxYURnaFltRng4aEQ5S05pUjVqblp1PXM5Ni1jIiwic3ViamVjdCI6IjExNDUyNzgxNTMwNTEyODk3NDQ3MCJ9fQ.YKom5-gE4e84rJJIfxcpbMzgjZT33UZ27UTa1y8pK2BAWaPjIZtwudwDHQ5Rd3m0mJJWhBp0j0e8h9DvzBUdpsnGMXSCYKP-ag9y9k5OW59FMm9RqIakWHtj6NPnxGO1jAsaNCYePj8knR7pBLCLCse2taDHUJ8RU1F0DeHNr2y-JupgG5y1vjBcb-9eD8OwOSTp686_hm7XoJlxiKx8dj2O7HPH7M2pAHA_0bVrKKj7Y_s3fRhkm_Aq6LRdA-IiTl9xJQxgVUreejls9-RR9mSTKj6A81-Isz3qAUttVVaA4OT5OdW879_yT7OSLw_QwpXzNZ7qOR7OIpmL_xZXig", - "expiry":"2021-04-27T19:55:26.658635+05:30" -} \ No newline at end of file diff --git a/flyteidl/clients/go/admin/utils/test_utils.go b/flyteidl/clients/go/admin/utils/test_utils.go new file mode 100644 index 0000000000..000bbbebba --- /dev/null +++ b/flyteidl/clients/go/admin/utils/test_utils.go @@ -0,0 +1,24 @@ +package utils + +import ( + "testing" + "time" + + "github.com/golang-jwt/jwt/v5" + "github.com/stretchr/testify/assert" + "golang.org/x/oauth2" +) + +func GenTokenWithCustomExpiry(t *testing.T, expiry time.Time) *oauth2.Token { + var signingKey = []byte("your_secret_key") + token := jwt.New(jwt.SigningMethodHS256) + 
claims := token.Claims.(jwt.MapClaims) + claims["exp"] = expiry.Unix() + tokenString, err := token.SignedString(signingKey) + assert.NoError(t, err) + return &oauth2.Token{ + AccessToken: tokenString, + Expiry: expiry, + TokenType: "bearer", + } +} diff --git a/flyteidl/clients/go/admin/utils/token_utils.go b/flyteidl/clients/go/admin/utils/token_utils.go new file mode 100644 index 0000000000..8c34cef00e --- /dev/null +++ b/flyteidl/clients/go/admin/utils/token_utils.go @@ -0,0 +1,52 @@ +package utils + +import ( + "context" + "fmt" + "time" + + "github.com/golang-jwt/jwt/v5" + "golang.org/x/oauth2" + + "github.com/flyteorg/flyte/flytestdlib/logger" +) + +// Ref : Taken from oAuth library implementation of expiry +// defaultExpiryDelta determines how earlier a token should be considered +// expired than its actual expiration time. It is used to avoid late +// expirations due to client-server time mismatches. +const defaultExpiryDelta = 10 * time.Second + +// Valid reports whether t is non-nil, has an AccessToken, and is not expired. 
+func Valid(t *oauth2.Token) bool { + if t == nil || t.AccessToken == "" { + return false + } + expiryDelta := defaultExpiryDelta + tokenExpiry, err := parseDateClaim(t.AccessToken) + if err != nil { + logger.Errorf(context.Background(), "parseDateClaim failed due to %v", err) + return false + } + logger.Debugf(context.Background(), "Token expiry : %v, Access token expiry : %v, Are the equal : %v", t.Expiry, tokenExpiry, tokenExpiry.Equal(t.Expiry)) + return !tokenExpiry.Add(-expiryDelta).Before(time.Now()) +} + +// parseDateClaim parses the JWT token string and extracts the expiration time +func parseDateClaim(tokenString string) (time.Time, error) { + // Parse the token + token, _, err := new(jwt.Parser).ParseUnverified(tokenString, jwt.MapClaims{}) + if err != nil { + return time.Time{}, err + } + + // Extract the claims + if claims, ok := token.Claims.(jwt.MapClaims); ok { + // Get the expiration time + if exp, ok := claims["exp"].(float64); ok { + return time.Unix(int64(exp), 0), nil + } + } + + return time.Time{}, fmt.Errorf("no expiration claim found in token") +} diff --git a/flyteidl/clients/go/assets/admin.swagger.json b/flyteidl/clients/go/assets/admin.swagger.json index c4f6f3ef7f..7de9f2a697 100644 --- a/flyteidl/clients/go/assets/admin.swagger.json +++ b/flyteidl/clients/go/assets/admin.swagger.json @@ -4249,6 +4249,15 @@ }, "title": "Sets custom attributes for a project, domain and workflow combination.\nFor more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration`" }, + "ArrayNodeDataMode": { + "type": "string", + "enum": [ + "SINGLE_INPUT_FILE", + "INDIVIDUAL_INPUT_FILES" + ], + "default": "SINGLE_INPUT_FILE", + "description": " - SINGLE_INPUT_FILE: Indicates the ArrayNode's input is a list of input values that map to subNode executions.\nThe file path set for the subNode will be the ArrayNode's input file, but the in-memory\nvalue utilized in propeller will be the individual value for each subNode 
execution.\nSubNode executions need to be able to read in and parse the individual value to execute correctly.\n - INDIVIDUAL_INPUT_FILES: Indicates the ArrayNode's input is a list of input values that map to subNode executions.\nPropeller will create input files for each ArrayNode subNode by parsing the inputs and\nsetting the InputBindings on each subNodeSpec. Both the file path and in-memory input values will\nbe the individual value for each subNode execution." + }, "BlobTypeBlobDimensionality": { "type": "string", "enum": [ @@ -5705,6 +5714,10 @@ "is_array": { "type": "boolean", "description": "Boolean flag indicating if the node is an array node. This is intended to uniquely identify\narray nodes from other nodes which can have is_parent_node as true." + }, + "is_eager": { + "type": "boolean", + "description": "Whether this node is an eager node." } }, "title": "Represents additional attributes related to a Node Execution" @@ -6560,6 +6573,14 @@ "execution_mode": { "$ref": "#/definitions/coreArrayNodeExecutionMode", "description": "execution_mode determines the execution path for ArrayNode." + }, + "is_original_sub_node_interface": { + "type": "boolean", + "title": "Indicates whether the sub node's original interface was altered" + }, + "data_mode": { + "$ref": "#/definitions/ArrayNodeDataMode", + "title": "data_mode determines how input data is passed to the sub-nodes" } }, "description": "ArrayNode is a Flyte node type that simplifies the execution of a sub-node over a list of input\nvalues. An ArrayNode can be executed with configurable parallelism (separate from the parent\nworkflow) and can be configured to succeed when a certain number of sub-nodes succeed." @@ -6704,6 +6725,10 @@ "$ref": "#/definitions/coreBindingDataMap", "description": "A map of bindings. The key is always a string." 
}, + "offloaded_metadata": { + "$ref": "#/definitions/coreLiteralOffloadedMetadata", + "description": "Offloaded literal metadata\nWhen you deserialize the offloaded metadata, it would be of Literal and its type would be defined by LiteralType stored in offloaded_metadata.\nUsed for nodes that don't have promises from upstream nodes such as ArrayNode subNodes." + }, "union": { "$ref": "#/definitions/coreUnionInfo" } @@ -8269,6 +8294,10 @@ "type": "string" }, "description": "cache_ignore_input_vars is the input variables that should not be included when calculating hash for cache." + }, + "is_eager": { + "type": "boolean", + "description": "is_eager indicates whether the task is eager or not.\nThis would be used by CreateTask endpoint." } }, "title": "Task Metadata" @@ -8664,6 +8693,16 @@ "$ref": "#/definitions/coreTaskLog" }, "title": "log information for the external resource execution" +<<<<<<< HEAD +======= + }, + "workflow_node_metadata": { + "$ref": "#/definitions/flyteidleventWorkflowNodeMetadata" + }, + "custom_info": { + "type": "object", + "title": "Extensible field for custom, plugin-specific info" +>>>>>>> 33bca79f0 (Send actor worker assignment in task events (#394)(#393)) } }, "description": "This message contains metadata about external resources produced or used by a specific task execution." @@ -8764,6 +8803,10 @@ "is_in_dynamic_chain": { "type": "boolean", "description": "Tasks and subworkflows (but not launch plans) that are run within a dynamic task are effectively independent of\nthe tasks that are registered in Admin's db. Confusingly, they are often identical, but sometimes they are not\neven registered at all. Similar to the target_entity field, at the time Admin receives this event, it has no idea\nif the relevant execution entity is was registered, or dynamic. This field indicates that the target_entity ID,\nas well as task IDs in any corresponding Task Executions, should not be used to looked up the task in Admin's db." 
+ }, + "is_eager": { + "type": "boolean", + "description": "Whether this node launched an eager task." } } }, diff --git a/flyteidl/clients/go/coreutils/extract_literal.go b/flyteidl/clients/go/coreutils/extract_literal.go index 23302de9a3..08e534c5b7 100644 --- a/flyteidl/clients/go/coreutils/extract_literal.go +++ b/flyteidl/clients/go/coreutils/extract_literal.go @@ -28,11 +28,11 @@ import ( ) func ExtractFromLiteral(literal *core.Literal) (interface{}, error) { - switch literalValue := literal.Value.(type) { + switch literalValue := literal.GetValue().(type) { case *core.Literal_Scalar: - switch scalarValue := literalValue.Scalar.Value.(type) { + switch scalarValue := literalValue.Scalar.GetValue().(type) { case *core.Scalar_Primitive: - switch scalarPrimitive := scalarValue.Primitive.Value.(type) { + switch scalarPrimitive := scalarValue.Primitive.GetValue().(type) { case *core.Primitive_Integer: scalarPrimitiveInt := scalarPrimitive.Integer return scalarPrimitiveInt, nil @@ -57,16 +57,16 @@ func ExtractFromLiteral(literal *core.Literal) (interface{}, error) { case *core.Scalar_Binary: return scalarValue.Binary, nil case *core.Scalar_Blob: - return scalarValue.Blob.Uri, nil + return scalarValue.Blob.GetUri(), nil case *core.Scalar_Schema: - return scalarValue.Schema.Uri, nil + return scalarValue.Schema.GetUri(), nil case *core.Scalar_Generic: return scalarValue.Generic, nil case *core.Scalar_StructuredDataset: - return scalarValue.StructuredDataset.Uri, nil + return scalarValue.StructuredDataset.GetUri(), nil case *core.Scalar_Union: // extract the value of the union but not the actual union object - extractedVal, err := ExtractFromLiteral(scalarValue.Union.Value) + extractedVal, err := ExtractFromLiteral(scalarValue.Union.GetValue()) if err != nil { return nil, err } @@ -77,7 +77,7 @@ func ExtractFromLiteral(literal *core.Literal) (interface{}, error) { return nil, fmt.Errorf("unsupported literal scalar type %T", scalarValue) } case *core.Literal_Collection: 
- collectionValue := literalValue.Collection.Literals + collectionValue := literalValue.Collection.GetLiterals() collection := make([]interface{}, len(collectionValue)) for index, val := range collectionValue { if collectionElem, err := ExtractFromLiteral(val); err == nil { @@ -88,7 +88,7 @@ func ExtractFromLiteral(literal *core.Literal) (interface{}, error) { } return collection, nil case *core.Literal_Map: - mapLiteralValue := literalValue.Map.Literals + mapLiteralValue := literalValue.Map.GetLiterals() mapResult := make(map[string]interface{}, len(mapLiteralValue)) for key, val := range mapLiteralValue { if val, err := ExtractFromLiteral(val); err == nil { @@ -100,7 +100,7 @@ func ExtractFromLiteral(literal *core.Literal) (interface{}, error) { return mapResult, nil case *core.Literal_OffloadedMetadata: // Return the URI of the offloaded metadata to be used when displaying in flytectl - return literalValue.OffloadedMetadata.Uri, nil + return literalValue.OffloadedMetadata.GetUri(), nil } return nil, fmt.Errorf("unsupported literal type %T", literal) diff --git a/flyteidl/clients/go/coreutils/extract_literal_test.go b/flyteidl/clients/go/coreutils/extract_literal_test.go index 0cd4c2fb16..9d6e035775 100644 --- a/flyteidl/clients/go/coreutils/extract_literal_test.go +++ b/flyteidl/clients/go/coreutils/extract_literal_test.go @@ -148,9 +148,9 @@ func TestFetchLiteral(t *testing.T) { Fields: fieldsMap, } extractedStructValue := extractedLiteralVal.(*structpb.Struct) - assert.Equal(t, len(expectedStructVal.Fields), len(extractedStructValue.Fields)) - for key, val := range expectedStructVal.Fields { - assert.Equal(t, val.Kind, extractedStructValue.Fields[key].Kind) + assert.Equal(t, len(expectedStructVal.GetFields()), len(extractedStructValue.GetFields())) + for key, val := range expectedStructVal.GetFields() { + assert.Equal(t, val.GetKind(), extractedStructValue.GetFields()[key].GetKind()) } os.Unsetenv(FlyteUseOldDcFormat) }) @@ -174,9 +174,9 @@ func 
TestFetchLiteral(t *testing.T) { Fields: fieldsMap, } extractedStructValue := extractedLiteralVal.(*structpb.Struct) - assert.Equal(t, len(expectedStructVal.Fields), len(extractedStructValue.Fields)) - for key, val := range expectedStructVal.Fields { - assert.Equal(t, val.Kind, extractedStructValue.Fields[key].Kind) + assert.Equal(t, len(expectedStructVal.GetFields()), len(extractedStructValue.GetFields())) + for key, val := range expectedStructVal.GetFields() { + assert.Equal(t, val.GetKind(), extractedStructValue.GetFields()[key].GetKind()) } }) diff --git a/flyteidl/clients/go/coreutils/literals.go b/flyteidl/clients/go/coreutils/literals.go index 6f292d7118..310e389c73 100644 --- a/flyteidl/clients/go/coreutils/literals.go +++ b/flyteidl/clients/go/coreutils/literals.go @@ -306,20 +306,20 @@ func MakeDefaultLiteralForType(typ *core.LiteralType) (*core.Literal, error) { case *core.LiteralType_Schema: return MakeLiteralForType(typ, nil) case *core.LiteralType_UnionType: - if len(t.UnionType.Variants) == 0 { + if len(t.UnionType.GetVariants()) == 0 { return nil, errors.Errorf("Union type must have at least one variant") } // For union types, we just return the default for the first variant - val, err := MakeDefaultLiteralForType(t.UnionType.Variants[0]) + val, err := MakeDefaultLiteralForType(t.UnionType.GetVariants()[0]) if err != nil { - return nil, errors.Errorf("Failed to create default literal for first union type variant [%v]", t.UnionType.Variants[0]) + return nil, errors.Errorf("Failed to create default literal for first union type variant [%v]", t.UnionType.GetVariants()[0]) } res := &core.Literal{ Value: &core.Literal_Scalar{ Scalar: &core.Scalar{ Value: &core.Scalar_Union{ Union: &core.Union{ - Type: t.UnionType.Variants[0], + Type: t.UnionType.GetVariants()[0], Value: val, }, }, @@ -511,7 +511,7 @@ func MakeLiteralForBlob(path storage.DataReference, isDir bool, format string) * func MakeLiteralForType(t *core.LiteralType, v interface{}) (*core.Literal, 
error) { l := &core.Literal{} - switch newT := t.Type.(type) { + switch newT := t.GetType().(type) { case *core.LiteralType_MapValueType: newV, ok := v.(map[string]interface{}) if !ok { @@ -600,24 +600,24 @@ func MakeLiteralForType(t *core.LiteralType, v interface{}) (*core.Literal, erro return lv, nil case *core.LiteralType_Blob: - isDir := newT.Blob.Dimensionality == core.BlobType_MULTIPART - lv := MakeLiteralForBlob(storage.DataReference(fmt.Sprintf("%v", v)), isDir, newT.Blob.Format) + isDir := newT.Blob.GetDimensionality() == core.BlobType_MULTIPART + lv := MakeLiteralForBlob(storage.DataReference(fmt.Sprintf("%v", v)), isDir, newT.Blob.GetFormat()) return lv, nil case *core.LiteralType_Schema: - lv := MakeLiteralForSchema(storage.DataReference(fmt.Sprintf("%v", v)), newT.Schema.Columns) + lv := MakeLiteralForSchema(storage.DataReference(fmt.Sprintf("%v", v)), newT.Schema.GetColumns()) return lv, nil case *core.LiteralType_StructuredDatasetType: - lv := MakeLiteralForStructuredDataSet(storage.DataReference(fmt.Sprintf("%v", v)), newT.StructuredDatasetType.Columns, newT.StructuredDatasetType.Format) + lv := MakeLiteralForStructuredDataSet(storage.DataReference(fmt.Sprintf("%v", v)), newT.StructuredDatasetType.GetColumns(), newT.StructuredDatasetType.GetFormat()) return lv, nil case *core.LiteralType_EnumType: var newV string if v == nil { - if len(t.GetEnumType().Values) == 0 { + if len(t.GetEnumType().GetValues()) == 0 { return nil, fmt.Errorf("enum types need at least one value") } - newV = t.GetEnumType().Values[0] + newV = t.GetEnumType().GetValues()[0] } else { var ok bool newV, ok = v.(string) @@ -640,7 +640,7 @@ func MakeLiteralForType(t *core.LiteralType, v interface{}) (*core.Literal, erro case *core.LiteralType_UnionType: // Try different types in the variants, return the first one matched found := false - for _, subType := range newT.UnionType.Variants { + for _, subType := range newT.UnionType.GetVariants() { lv, err := MakeLiteralForType(subType, 
v) if err == nil { l = &core.Literal{ @@ -660,7 +660,7 @@ func MakeLiteralForType(t *core.LiteralType, v interface{}) (*core.Literal, erro } } if !found { - return nil, fmt.Errorf("incorrect union value [%s], supported values %+v", v, newT.UnionType.Variants) + return nil, fmt.Errorf("incorrect union value [%s], supported values %+v", v, newT.UnionType.GetVariants()) } default: return nil, fmt.Errorf("unsupported type %s", t.String()) diff --git a/flyteidl/clients/go/coreutils/literals_test.go b/flyteidl/clients/go/coreutils/literals_test.go index f2d8c9e5b2..3586e0e02f 100644 --- a/flyteidl/clients/go/coreutils/literals_test.go +++ b/flyteidl/clients/go/coreutils/literals_test.go @@ -27,42 +27,42 @@ func TestMakePrimitive(t *testing.T) { v := 1 p, err := MakePrimitive(v) assert.NoError(t, err) - assert.Equal(t, "*core.Primitive_Integer", reflect.TypeOf(p.Value).String()) + assert.Equal(t, "*core.Primitive_Integer", reflect.TypeOf(p.GetValue()).String()) assert.Equal(t, int64(v), p.GetInteger()) } { v := int64(1) p, err := MakePrimitive(v) assert.NoError(t, err) - assert.Equal(t, "*core.Primitive_Integer", reflect.TypeOf(p.Value).String()) + assert.Equal(t, "*core.Primitive_Integer", reflect.TypeOf(p.GetValue()).String()) assert.Equal(t, v, p.GetInteger()) } { v := 1.0 p, err := MakePrimitive(v) assert.NoError(t, err) - assert.Equal(t, "*core.Primitive_FloatValue", reflect.TypeOf(p.Value).String()) + assert.Equal(t, "*core.Primitive_FloatValue", reflect.TypeOf(p.GetValue()).String()) assert.Equal(t, v, p.GetFloatValue()) } { v := "blah" p, err := MakePrimitive(v) assert.NoError(t, err) - assert.Equal(t, "*core.Primitive_StringValue", reflect.TypeOf(p.Value).String()) + assert.Equal(t, "*core.Primitive_StringValue", reflect.TypeOf(p.GetValue()).String()) assert.Equal(t, v, p.GetStringValue()) } { v := true p, err := MakePrimitive(v) assert.NoError(t, err) - assert.Equal(t, "*core.Primitive_Boolean", reflect.TypeOf(p.Value).String()) + assert.Equal(t, 
"*core.Primitive_Boolean", reflect.TypeOf(p.GetValue()).String()) assert.Equal(t, v, p.GetBoolean()) } { v := time.Now() p, err := MakePrimitive(v) assert.NoError(t, err) - assert.Equal(t, "*core.Primitive_Datetime", reflect.TypeOf(p.Value).String()) + assert.Equal(t, "*core.Primitive_Datetime", reflect.TypeOf(p.GetValue()).String()) j, err := ptypes.TimestampProto(v) assert.NoError(t, err) assert.Equal(t, j, p.GetDatetime()) @@ -73,7 +73,7 @@ func TestMakePrimitive(t *testing.T) { v := time.Second * 10 p, err := MakePrimitive(v) assert.NoError(t, err) - assert.Equal(t, "*core.Primitive_Duration", reflect.TypeOf(p.Value).String()) + assert.Equal(t, "*core.Primitive_Duration", reflect.TypeOf(p.GetValue()).String()) assert.Equal(t, ptypes.DurationProto(v), p.GetDuration()) } { @@ -95,7 +95,7 @@ func TestMustMakePrimitive(t *testing.T) { { v := time.Second * 10 p := MustMakePrimitive(v) - assert.Equal(t, "*core.Primitive_Duration", reflect.TypeOf(p.Value).String()) + assert.Equal(t, "*core.Primitive_Duration", reflect.TypeOf(p.GetValue()).String()) assert.Equal(t, ptypes.DurationProto(v), p.GetDuration()) } } @@ -106,7 +106,7 @@ func TestMakePrimitiveLiteral(t *testing.T) { p, err := MakePrimitiveLiteral(v) assert.NoError(t, err) assert.NotNil(t, p.GetScalar()) - assert.Equal(t, "*core.Primitive_FloatValue", reflect.TypeOf(p.GetScalar().GetPrimitive().Value).String()) + assert.Equal(t, "*core.Primitive_FloatValue", reflect.TypeOf(p.GetScalar().GetPrimitive().GetValue()).String()) assert.Equal(t, v, p.GetScalar().GetPrimitive().GetFloatValue()) } { @@ -129,7 +129,7 @@ func TestMustMakePrimitiveLiteral(t *testing.T) { v := 1.0 p := MustMakePrimitiveLiteral(v) assert.NotNil(t, p.GetScalar()) - assert.Equal(t, "*core.Primitive_FloatValue", reflect.TypeOf(p.GetScalar().GetPrimitive().Value).String()) + assert.Equal(t, "*core.Primitive_FloatValue", reflect.TypeOf(p.GetScalar().GetPrimitive().GetValue()).String()) assert.Equal(t, v, 
p.GetScalar().GetPrimitive().GetFloatValue()) }) } @@ -138,14 +138,14 @@ func TestMakeLiteral(t *testing.T) { t.Run("Primitive", func(t *testing.T) { lit, err := MakeLiteral("test_string") assert.NoError(t, err) - assert.Equal(t, "*core.Primitive_StringValue", reflect.TypeOf(lit.GetScalar().GetPrimitive().Value).String()) + assert.Equal(t, "*core.Primitive_StringValue", reflect.TypeOf(lit.GetScalar().GetPrimitive().GetValue()).String()) }) t.Run("Array", func(t *testing.T) { lit, err := MakeLiteral([]interface{}{1, 2, 3}) assert.NoError(t, err) assert.Equal(t, "*core.Literal_Collection", reflect.TypeOf(lit.GetValue()).String()) - assert.Equal(t, "*core.Primitive_Integer", reflect.TypeOf(lit.GetCollection().Literals[0].GetScalar().GetPrimitive().Value).String()) + assert.Equal(t, "*core.Primitive_Integer", reflect.TypeOf(lit.GetCollection().GetLiterals()[0].GetScalar().GetPrimitive().GetValue()).String()) }) t.Run("Map", func(t *testing.T) { @@ -155,7 +155,7 @@ func TestMakeLiteral(t *testing.T) { }) assert.NoError(t, err) assert.Equal(t, "*core.Literal_Map", reflect.TypeOf(lit.GetValue()).String()) - assert.Equal(t, "*core.Literal_Collection", reflect.TypeOf(lit.GetMap().Literals["key1"].GetValue()).String()) + assert.Equal(t, "*core.Literal_Collection", reflect.TypeOf(lit.GetMap().GetLiterals()["key1"].GetValue()).String()) }) t.Run("Binary", func(t *testing.T) { @@ -167,7 +167,7 @@ func TestMakeLiteral(t *testing.T) { p, err := MakeLiteral(nil) assert.NoError(t, err) assert.NotNil(t, p.GetScalar()) - assert.Equal(t, "*core.Scalar_NoneType", reflect.TypeOf(p.GetScalar().Value).String()) + assert.Equal(t, "*core.Scalar_NoneType", reflect.TypeOf(p.GetScalar().GetValue()).String()) }) } @@ -205,9 +205,9 @@ func TestMakeDefaultLiteralForType(t *testing.T) { l, err := MakeDefaultLiteralForType(&core.LiteralType{Type: &core.LiteralType_Simple{Simple: test.ty}}) assert.NoError(t, err) if test.isPrimitive { - assert.Equal(t, test.tyName, 
reflect.TypeOf(l.GetScalar().GetPrimitive().Value).String()) + assert.Equal(t, test.tyName, reflect.TypeOf(l.GetScalar().GetPrimitive().GetValue()).String()) } else { - assert.Equal(t, test.tyName, reflect.TypeOf(l.GetScalar().Value).String()) + assert.Equal(t, test.tyName, reflect.TypeOf(l.GetScalar().GetValue()).String()) } }) } @@ -221,7 +221,7 @@ func TestMakeDefaultLiteralForType(t *testing.T) { t.Run("Blob", func(t *testing.T) { l, err := MakeDefaultLiteralForType(&core.LiteralType{Type: &core.LiteralType_Blob{}}) assert.NoError(t, err) - assert.Equal(t, "*core.Scalar_Blob", reflect.TypeOf(l.GetScalar().Value).String()) + assert.Equal(t, "*core.Scalar_Blob", reflect.TypeOf(l.GetScalar().GetValue()).String()) }) t.Run("Collection", func(t *testing.T) { @@ -300,7 +300,7 @@ func TestMustMakeDefaultLiteralForType(t *testing.T) { t.Run("Blob", func(t *testing.T) { l := MustMakeDefaultLiteralForType(&core.LiteralType{Type: &core.LiteralType_Blob{}}) - assert.Equal(t, "*core.Scalar_Blob", reflect.TypeOf(l.GetScalar().Value).String()) + assert.Equal(t, "*core.Scalar_Blob", reflect.TypeOf(l.GetScalar().GetValue()).String()) }) } @@ -479,9 +479,9 @@ func TestMakeLiteralForType(t *testing.T) { Fields: fieldsMap, } extractedStructValue := extractedLiteralVal.(*structpb.Struct) - assert.Equal(t, len(expectedStructVal.Fields), len(extractedStructValue.Fields)) - for key, val := range expectedStructVal.Fields { - assert.Equal(t, val.Kind, extractedStructValue.Fields[key].Kind) + assert.Equal(t, len(expectedStructVal.GetFields()), len(extractedStructValue.GetFields())) + for key, val := range expectedStructVal.GetFields() { + assert.Equal(t, val.GetKind(), extractedStructValue.GetFields()[key].GetKind()) } os.Unsetenv(FlyteUseOldDcFormat) }) @@ -539,9 +539,9 @@ func TestMakeLiteralForType(t *testing.T) { // Now check if the Binary values match var expectedVal, actualVal map[string]interface{} - err = msgpack.Unmarshal(expectedBinary.Value, &expectedVal) + err = 
msgpack.Unmarshal(expectedBinary.GetValue(), &expectedVal) assert.NoError(t, err) - err = msgpack.Unmarshal(actualBinary.Value, &actualVal) + err = msgpack.Unmarshal(actualBinary.GetValue(), &actualVal) assert.NoError(t, err) // Finally, assert that the deserialized values are equal diff --git a/flyteidl/gen/pb-es/flyteidl/admin/agent_pb.ts b/flyteidl/gen/pb-es/flyteidl/admin/agent_pb.ts index f9f6c37564..ff858642dc 100644 --- a/flyteidl/gen/pb-es/flyteidl/admin/agent_pb.ts +++ b/flyteidl/gen/pb-es/flyteidl/admin/agent_pb.ts @@ -10,7 +10,7 @@ import { TaskNodeOverrides } from "../core/workflow_pb.js"; import { Identity } from "../core/security_pb.js"; import { LiteralMap } from "../core/literals_pb.js"; import { TaskTemplate } from "../core/tasks_pb.js"; -import { TaskExecution_Phase, TaskLog } from "../core/execution_pb.js"; +import { ExecutionError_ErrorKind, TaskExecution_Phase, TaskLog } from "../core/execution_pb.js"; import { ExecutionMetricResult } from "../core/metrics_pb.js"; /** @@ -637,6 +637,13 @@ export class Resource extends Message { */ customInfo?: Struct; + /** + * The error raised during execution + * + * @generated from field: flyteidl.admin.AgentError agent_error = 7; + */ + agentError?: AgentError; + constructor(data?: PartialMessage) { super(); proto3.util.initPartial(data, this); @@ -651,6 +658,7 @@ export class Resource extends Message { { no: 4, name: "log_links", kind: "message", T: TaskLog, repeated: true }, { no: 5, name: "phase", kind: "enum", T: proto3.getEnumType(TaskExecution_Phase) }, { no: 6, name: "custom_info", kind: "message", T: Struct }, + { no: 7, name: "agent_error", kind: "message", T: AgentError }, ]); static fromBinary(bytes: Uint8Array, options?: Partial): Resource { @@ -1368,3 +1376,83 @@ export class GetTaskLogsResponse extends Message { } } +/** + * Error message to propagate detailed errors from agent executions to the execution + * engine. 
+ * + * @generated from message flyteidl.admin.AgentError + */ +export class AgentError extends Message { + /** + * A simplified code for errors, so that we can provide a glossary of all possible errors. + * + * @generated from field: string code = 1; + */ + code = ""; + + /** + * An abstract error kind for this error. Defaults to Non_Recoverable if not specified. + * + * @generated from field: flyteidl.admin.AgentError.Kind kind = 3; + */ + kind = AgentError_Kind.NON_RECOVERABLE; + + /** + * Defines the origin of the error (system, user, unknown). + * + * @generated from field: flyteidl.core.ExecutionError.ErrorKind origin = 4; + */ + origin = ExecutionError_ErrorKind.UNKNOWN; + + constructor(data?: PartialMessage) { + super(); + proto3.util.initPartial(data, this); + } + + static readonly runtime: typeof proto3 = proto3; + static readonly typeName = "flyteidl.admin.AgentError"; + static readonly fields: FieldList = proto3.util.newFieldList(() => [ + { no: 1, name: "code", kind: "scalar", T: 9 /* ScalarType.STRING */ }, + { no: 3, name: "kind", kind: "enum", T: proto3.getEnumType(AgentError_Kind) }, + { no: 4, name: "origin", kind: "enum", T: proto3.getEnumType(ExecutionError_ErrorKind) }, + ]); + + static fromBinary(bytes: Uint8Array, options?: Partial): AgentError { + return new AgentError().fromBinary(bytes, options); + } + + static fromJson(jsonValue: JsonValue, options?: Partial): AgentError { + return new AgentError().fromJson(jsonValue, options); + } + + static fromJsonString(jsonString: string, options?: Partial): AgentError { + return new AgentError().fromJsonString(jsonString, options); + } + + static equals(a: AgentError | PlainMessage | undefined, b: AgentError | PlainMessage | undefined): boolean { + return proto3.util.equals(AgentError, a, b); + } +} + +/** + * Defines a generic error type that dictates the behavior of the retry strategy. 
+ * + * @generated from enum flyteidl.admin.AgentError.Kind + */ +export enum AgentError_Kind { + /** + * @generated from enum value: NON_RECOVERABLE = 0; + */ + NON_RECOVERABLE = 0, + + /** + * @generated from enum value: RECOVERABLE = 1; + */ + RECOVERABLE = 1, +} +// Retrieve enum metadata with: proto3.getEnumType(AgentError_Kind) +proto3.util.setEnumType(AgentError_Kind, "flyteidl.admin.AgentError.Kind", [ + { no: 0, name: "NON_RECOVERABLE" }, + { no: 1, name: "RECOVERABLE" }, +]); + diff --git a/flyteidl/gen/pb-es/flyteidl/admin/node_execution_pb.ts b/flyteidl/gen/pb-es/flyteidl/admin/node_execution_pb.ts index 97b89426fe..6b6f8aa022 100644 --- a/flyteidl/gen/pb-es/flyteidl/admin/node_execution_pb.ts +++ b/flyteidl/gen/pb-es/flyteidl/admin/node_execution_pb.ts @@ -337,6 +337,13 @@ export class NodeExecutionMetaData extends Message { */ isArray = false; + /** + * Whether this node is an eager node. + * + * @generated from field: bool is_eager = 6; + */ + isEager = false; + constructor(data?: PartialMessage) { super(); proto3.util.initPartial(data, this); @@ -350,6 +357,7 @@ export class NodeExecutionMetaData extends Message { { no: 3, name: "spec_node_id", kind: "scalar", T: 9 /* ScalarType.STRING */ }, { no: 4, name: "is_dynamic", kind: "scalar", T: 8 /* ScalarType.BOOL */ }, { no: 5, name: "is_array", kind: "scalar", T: 8 /* ScalarType.BOOL */ }, + { no: 6, name: "is_eager", kind: "scalar", T: 8 /* ScalarType.BOOL */ }, ]); static fromBinary(bytes: Uint8Array, options?: Partial): NodeExecutionMetaData { diff --git a/flyteidl/gen/pb-es/flyteidl/core/literals_pb.ts b/flyteidl/gen/pb-es/flyteidl/core/literals_pb.ts index 69fcde4375..f1a671045f 100644 --- a/flyteidl/gen/pb-es/flyteidl/core/literals_pb.ts +++ b/flyteidl/gen/pb-es/flyteidl/core/literals_pb.ts @@ -922,6 +922,16 @@ export class BindingData extends Message { */ value: BindingDataMap; case: "map"; + } | { + /** + * Offloaded literal metadata + * When you deserialize the offloaded metadata, it would be 
of Literal and its type would be defined by LiteralType stored in offloaded_metadata. + * Used for nodes that don't have promises from upstream nodes such as ArrayNode subNodes. + * + * @generated from field: flyteidl.core.LiteralOffloadedMetadata offloaded_metadata = 6; + */ + value: LiteralOffloadedMetadata; + case: "offloadedMetadata"; } | { case: undefined; value?: undefined } = { case: undefined }; /** @@ -941,6 +951,7 @@ export class BindingData extends Message { { no: 2, name: "collection", kind: "message", T: BindingDataCollection, oneof: "value" }, { no: 3, name: "promise", kind: "message", T: OutputReference, oneof: "value" }, { no: 4, name: "map", kind: "message", T: BindingDataMap, oneof: "value" }, + { no: 6, name: "offloaded_metadata", kind: "message", T: LiteralOffloadedMetadata, oneof: "value" }, { no: 5, name: "union", kind: "message", T: UnionInfo }, ]); diff --git a/flyteidl/gen/pb-es/flyteidl/core/tasks_pb.ts b/flyteidl/gen/pb-es/flyteidl/core/tasks_pb.ts index 5cc011314c..eb2156cce7 100644 --- a/flyteidl/gen/pb-es/flyteidl/core/tasks_pb.ts +++ b/flyteidl/gen/pb-es/flyteidl/core/tasks_pb.ts @@ -447,6 +447,14 @@ export class TaskMetadata extends Message { */ cacheIgnoreInputVars: string[] = []; + /** + * is_eager indicates whether the task is eager or not. + * This would be used by CreateTask endpoint. 
+ * + * @generated from field: bool is_eager = 14; + */ + isEager = false; + constructor(data?: PartialMessage) { super(); proto3.util.initPartial(data, this); @@ -467,6 +475,7 @@ export class TaskMetadata extends Message { { no: 11, name: "tags", kind: "map", K: 9 /* ScalarType.STRING */, V: {kind: "scalar", T: 9 /* ScalarType.STRING */} }, { no: 12, name: "pod_template_name", kind: "scalar", T: 9 /* ScalarType.STRING */ }, { no: 13, name: "cache_ignore_input_vars", kind: "scalar", T: 9 /* ScalarType.STRING */, repeated: true }, + { no: 14, name: "is_eager", kind: "scalar", T: 8 /* ScalarType.BOOL */ }, ]); static fromBinary(bytes: Uint8Array, options?: Partial): TaskMetadata { diff --git a/flyteidl/gen/pb-es/flyteidl/core/workflow_pb.ts b/flyteidl/gen/pb-es/flyteidl/core/workflow_pb.ts index 20c235c187..04295d0a16 100644 --- a/flyteidl/gen/pb-es/flyteidl/core/workflow_pb.ts +++ b/flyteidl/gen/pb-es/flyteidl/core/workflow_pb.ts @@ -4,7 +4,7 @@ // @ts-nocheck import type { BinaryReadOptions, FieldList, JsonReadOptions, JsonValue, PartialMessage, PlainMessage } from "@bufbuild/protobuf"; -import { Duration, Message, proto3 } from "@bufbuild/protobuf"; +import { BoolValue, Duration, Message, proto3 } from "@bufbuild/protobuf"; import { BooleanExpression } from "./condition_pb.js"; import { Error, LiteralType } from "./types_pb.js"; import { Identifier } from "./identifier_pb.js"; @@ -554,6 +554,20 @@ export class ArrayNode extends Message { */ executionMode = ArrayNode_ExecutionMode.MINIMAL_STATE; + /** + * Indicates whether the sub node's original interface was altered + * + * @generated from field: google.protobuf.BoolValue is_original_sub_node_interface = 6; + */ + isOriginalSubNodeInterface?: boolean; + + /** + * data_mode determines how input data is passed to the sub-nodes + * + * @generated from field: flyteidl.core.ArrayNode.DataMode data_mode = 7; + */ + dataMode = ArrayNode_DataMode.SINGLE_INPUT_FILE; + constructor(data?: PartialMessage) { super(); 
proto3.util.initPartial(data, this); @@ -567,6 +581,8 @@ export class ArrayNode extends Message { { no: 3, name: "min_successes", kind: "scalar", T: 13 /* ScalarType.UINT32 */, oneof: "success_criteria" }, { no: 4, name: "min_success_ratio", kind: "scalar", T: 2 /* ScalarType.FLOAT */, oneof: "success_criteria" }, { no: 5, name: "execution_mode", kind: "enum", T: proto3.getEnumType(ArrayNode_ExecutionMode) }, + { no: 6, name: "is_original_sub_node_interface", kind: "message", T: BoolValue }, + { no: 7, name: "data_mode", kind: "enum", T: proto3.getEnumType(ArrayNode_DataMode) }, ]); static fromBinary(bytes: Uint8Array, options?: Partial): ArrayNode { @@ -612,6 +628,36 @@ proto3.util.setEnumType(ArrayNode_ExecutionMode, "flyteidl.core.ArrayNode.Execut { no: 1, name: "FULL_STATE" }, ]); +/** + * @generated from enum flyteidl.core.ArrayNode.DataMode + */ +export enum ArrayNode_DataMode { + /** + * Indicates the ArrayNode's input is a list of input values that map to subNode executions. + * The file path set for the subNode will be the ArrayNode's input file, but the in-memory + * value utilized in propeller will be the individual value for each subNode execution. + * SubNode executions need to be able to read in and parse the individual value to execute correctly. + * + * @generated from enum value: SINGLE_INPUT_FILE = 0; + */ + SINGLE_INPUT_FILE = 0, + + /** + * Indicates the ArrayNode's input is a list of input values that map to subNode executions. + * Propeller will create input files for each ArrayNode subNode by parsing the inputs and + * setting the InputBindings on each subNodeSpec. Both the file path and in-memory input values will + * be the individual value for each subNode execution. 
+ * + * @generated from enum value: INDIVIDUAL_INPUT_FILES = 1; + */ + INDIVIDUAL_INPUT_FILES = 1, +} +// Retrieve enum metadata with: proto3.getEnumType(ArrayNode_DataMode) +proto3.util.setEnumType(ArrayNode_DataMode, "flyteidl.core.ArrayNode.DataMode", [ + { no: 0, name: "SINGLE_INPUT_FILE" }, + { no: 1, name: "INDIVIDUAL_INPUT_FILES" }, +]); + /** * Defines extra information about the Node. * diff --git a/flyteidl/gen/pb-es/flyteidl/event/cloudevents_pb.ts b/flyteidl/gen/pb-es/flyteidl/event/cloudevents_pb.ts index 295930930a..f4b6af94cd 100644 --- a/flyteidl/gen/pb-es/flyteidl/event/cloudevents_pb.ts +++ b/flyteidl/gen/pb-es/flyteidl/event/cloudevents_pb.ts @@ -54,6 +54,13 @@ export class CloudEventWorkflowExecution extends Message labels = 7; + */ + labels: { [key: string]: string } = {}; + constructor(data?: PartialMessage) { super(); proto3.util.initPartial(data, this); @@ -68,6 +75,7 @@ export class CloudEventWorkflowExecution extends Message): CloudEventWorkflowExecution { @@ -132,6 +140,13 @@ export class CloudEventNodeExecution extends Message { */ launchPlanId?: Identifier; + /** + * We can't have the ExecutionMetadata object directly because of import cycle + * + * @generated from field: map labels = 7; + */ + labels: { [key: string]: string } = {}; + constructor(data?: PartialMessage) { super(); proto3.util.initPartial(data, this); @@ -146,6 +161,7 @@ export class CloudEventNodeExecution extends Message { { no: 4, name: "artifact_ids", kind: "message", T: ArtifactID, repeated: true }, { no: 5, name: "principal", kind: "scalar", T: 9 /* ScalarType.STRING */ }, { no: 6, name: "launch_plan_id", kind: "message", T: Identifier }, + { no: 7, name: "labels", kind: "map", K: 9 /* ScalarType.STRING */, V: {kind: "scalar", T: 9 /* ScalarType.STRING */} }, ]); static fromBinary(bytes: Uint8Array, options?: Partial): CloudEventNodeExecution { @@ -174,6 +190,13 @@ export class CloudEventTaskExecution extends Message { */ rawEvent?: TaskExecutionEvent; + /** + * We 
can't have the ExecutionMetadata object directly because of import cycle + * + * @generated from field: map labels = 2; + */ + labels: { [key: string]: string } = {}; + constructor(data?: PartialMessage) { super(); proto3.util.initPartial(data, this); @@ -183,6 +206,7 @@ export class CloudEventTaskExecution extends Message { static readonly typeName = "flyteidl.event.CloudEventTaskExecution"; static readonly fields: FieldList = proto3.util.newFieldList(() => [ { no: 1, name: "raw_event", kind: "message", T: TaskExecutionEvent }, + { no: 2, name: "labels", kind: "map", K: 9 /* ScalarType.STRING */, V: {kind: "scalar", T: 9 /* ScalarType.STRING */} }, ]); static fromBinary(bytes: Uint8Array, options?: Partial): CloudEventTaskExecution { diff --git a/flyteidl/gen/pb-es/flyteidl/event/event_pb.ts b/flyteidl/gen/pb-es/flyteidl/event/event_pb.ts index 9e5fd39c1d..d7c6d77328 100644 --- a/flyteidl/gen/pb-es/flyteidl/event/event_pb.ts +++ b/flyteidl/gen/pb-es/flyteidl/event/event_pb.ts @@ -307,6 +307,13 @@ export class NodeExecutionEvent extends Message { */ isInDynamicChain = false; + /** + * Whether this node launched an eager task. + * + * @generated from field: bool is_eager = 25; + */ + isEager = false; + constructor(data?: PartialMessage) { super(); proto3.util.initPartial(data, this); @@ -339,6 +346,7 @@ export class NodeExecutionEvent extends Message { { no: 22, name: "is_array", kind: "scalar", T: 8 /* ScalarType.BOOL */ }, { no: 23, name: "target_entity", kind: "message", T: Identifier }, { no: 24, name: "is_in_dynamic_chain", kind: "scalar", T: 8 /* ScalarType.BOOL */ }, + { no: 25, name: "is_eager", kind: "scalar", T: 8 /* ScalarType.BOOL */ }, ]); static fromBinary(bytes: Uint8Array, options?: Partial): NodeExecutionEvent { @@ -925,6 +933,28 @@ export class ExternalResourceInfo extends Message { */ logs: TaskLog[] = []; + /** + * Additional metadata to do with this event's node target based on the node type. 
We are + * explicitly not including the task_node_metadata here because it is not clear if it is needed. + * If we decide to include in the future, we should deprecate the cache_status field. + * + * @generated from oneof flyteidl.event.ExternalResourceInfo.target_metadata + */ + targetMetadata: { + /** + * @generated from field: flyteidl.event.WorkflowNodeMetadata workflow_node_metadata = 7; + */ + value: WorkflowNodeMetadata; + case: "workflowNodeMetadata"; + } | { case: undefined; value?: undefined } = { case: undefined }; + + /** + * Extensible field for custom, plugin-specific info + * + * @generated from field: google.protobuf.Struct custom_info = 8; + */ + customInfo?: Struct; + constructor(data?: PartialMessage) { super(); proto3.util.initPartial(data, this); @@ -939,6 +969,8 @@ export class ExternalResourceInfo extends Message { { no: 4, name: "phase", kind: "enum", T: proto3.getEnumType(TaskExecution_Phase) }, { no: 5, name: "cache_status", kind: "enum", T: proto3.getEnumType(CatalogCacheStatus) }, { no: 6, name: "logs", kind: "message", T: TaskLog, repeated: true }, + { no: 7, name: "workflow_node_metadata", kind: "message", T: WorkflowNodeMetadata, oneof: "target_metadata" }, + { no: 8, name: "custom_info", kind: "message", T: Struct }, ]); static fromBinary(bytes: Uint8Array, options?: Partial): ExternalResourceInfo { diff --git a/flyteidl/gen/pb-go/flyteidl/admin/agent.pb.go b/flyteidl/gen/pb-go/flyteidl/admin/agent.pb.go index 653fce6266..a49ae30acb 100644 --- a/flyteidl/gen/pb-go/flyteidl/admin/agent.pb.go +++ b/flyteidl/gen/pb-go/flyteidl/admin/agent.pb.go @@ -82,6 +82,53 @@ func (State) EnumDescriptor() ([]byte, []int) { return file_flyteidl_admin_agent_proto_rawDescGZIP(), []int{0} } +// Defines a generic error type that dictates the behavior of the retry strategy. +type AgentError_Kind int32 + +const ( + AgentError_NON_RECOVERABLE AgentError_Kind = 0 + AgentError_RECOVERABLE AgentError_Kind = 1 +) + +// Enum value maps for AgentError_Kind. 
+var ( + AgentError_Kind_name = map[int32]string{ + 0: "NON_RECOVERABLE", + 1: "RECOVERABLE", + } + AgentError_Kind_value = map[string]int32{ + "NON_RECOVERABLE": 0, + "RECOVERABLE": 1, + } +) + +func (x AgentError_Kind) Enum() *AgentError_Kind { + p := new(AgentError_Kind) + *p = x + return p +} + +func (x AgentError_Kind) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (AgentError_Kind) Descriptor() protoreflect.EnumDescriptor { + return file_flyteidl_admin_agent_proto_enumTypes[1].Descriptor() +} + +func (AgentError_Kind) Type() protoreflect.EnumType { + return &file_flyteidl_admin_agent_proto_enumTypes[1] +} + +func (x AgentError_Kind) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use AgentError_Kind.Descriptor instead. +func (AgentError_Kind) EnumDescriptor() ([]byte, []int) { + return file_flyteidl_admin_agent_proto_rawDescGZIP(), []int{24, 0} +} + // Represents a subset of runtime task execution metadata that are relevant to external plugins. type TaskExecutionMetadata struct { state protoimpl.MessageState @@ -778,6 +825,8 @@ type Resource struct { Phase core.TaskExecution_Phase `protobuf:"varint,5,opt,name=phase,proto3,enum=flyteidl.core.TaskExecution_Phase" json:"phase,omitempty"` // Custom data specific to the agent. CustomInfo *structpb.Struct `protobuf:"bytes,6,opt,name=custom_info,json=customInfo,proto3" json:"custom_info,omitempty"` + // The error raised during execution + AgentError *AgentError `protobuf:"bytes,7,opt,name=agent_error,json=agentError,proto3" json:"agent_error,omitempty"` } func (x *Resource) Reset() { @@ -855,6 +904,13 @@ func (x *Resource) GetCustomInfo() *structpb.Struct { return nil } +func (x *Resource) GetAgentError() *AgentError { + if x != nil { + return x.AgentError + } + return nil +} + // A message used to delete a task. 
type DeleteTaskRequest struct { state protoimpl.MessageState @@ -1712,6 +1768,74 @@ func (*GetTaskLogsResponse_Header) isGetTaskLogsResponse_Part() {} func (*GetTaskLogsResponse_Body) isGetTaskLogsResponse_Part() {} +// Error message to propagate detailed errors from agent executions to the execution +// engine. +type AgentError struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // A simplified code for errors, so that we can provide a glossary of all possible errors. + Code string `protobuf:"bytes,1,opt,name=code,proto3" json:"code,omitempty"` + // An abstract error kind for this error. Defaults to Non_Recoverable if not specified. + Kind AgentError_Kind `protobuf:"varint,3,opt,name=kind,proto3,enum=flyteidl.admin.AgentError_Kind" json:"kind,omitempty"` + // Defines the origin of the error (system, user, unknown). + Origin core.ExecutionError_ErrorKind `protobuf:"varint,4,opt,name=origin,proto3,enum=flyteidl.core.ExecutionError_ErrorKind" json:"origin,omitempty"` +} + +func (x *AgentError) Reset() { + *x = AgentError{} + if protoimpl.UnsafeEnabled { + mi := &file_flyteidl_admin_agent_proto_msgTypes[24] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *AgentError) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*AgentError) ProtoMessage() {} + +func (x *AgentError) ProtoReflect() protoreflect.Message { + mi := &file_flyteidl_admin_agent_proto_msgTypes[24] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use AgentError.ProtoReflect.Descriptor instead. 
+func (*AgentError) Descriptor() ([]byte, []int) { + return file_flyteidl_admin_agent_proto_rawDescGZIP(), []int{24} +} + +func (x *AgentError) GetCode() string { + if x != nil { + return x.Code + } + return "" +} + +func (x *AgentError) GetKind() AgentError_Kind { + if x != nil { + return x.Kind + } + return AgentError_NON_RECOVERABLE +} + +func (x *AgentError) GetOrigin() core.ExecutionError_ErrorKind { + if x != nil { + return x.Origin + } + return core.ExecutionError_ErrorKind(0) +} + var File_flyteidl_admin_agent_proto protoreflect.FileDescriptor var file_flyteidl_admin_agent_proto_rawDesc = []byte{ @@ -1870,7 +1994,7 @@ var file_flyteidl_admin_agent_proto_rawDesc = []byte{ 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x34, 0x0a, 0x08, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x61, 0x64, 0x6d, 0x69, 0x6e, 0x2e, 0x52, 0x65, 0x73, 0x6f, - 0x75, 0x72, 0x63, 0x65, 0x52, 0x08, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x22, 0xb3, + 0x75, 0x72, 0x63, 0x65, 0x52, 0x08, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x22, 0xf0, 0x02, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x2f, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x15, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x61, 0x64, 0x6d, 0x69, 0x6e, 0x2e, 0x53, 0x74, 0x61, 0x74, @@ -1890,122 +2014,138 @@ var file_flyteidl_admin_agent_proto_rawDesc = []byte{ 0x73, 0x74, 0x6f, 0x6d, 0x5f, 0x69, 0x6e, 0x66, 0x6f, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x53, 0x74, 0x72, 0x75, 0x63, 0x74, 0x52, 0x0a, 0x63, 0x75, 0x73, 0x74, 0x6f, 0x6d, - 0x49, 0x6e, 0x66, 0x6f, 0x22, 0x9c, 0x01, 0x0a, 0x11, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x54, - 0x61, 0x73, 0x6b, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1f, 
0x0a, 0x09, 0x74, 0x61, - 0x73, 0x6b, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x42, 0x02, 0x18, - 0x01, 0x52, 0x08, 0x74, 0x61, 0x73, 0x6b, 0x54, 0x79, 0x70, 0x65, 0x12, 0x23, 0x0a, 0x0d, 0x72, - 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x18, 0x02, 0x20, 0x01, - 0x28, 0x0c, 0x52, 0x0c, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4d, 0x65, 0x74, 0x61, - 0x12, 0x41, 0x0a, 0x0d, 0x74, 0x61, 0x73, 0x6b, 0x5f, 0x63, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, - 0x79, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, - 0x64, 0x6c, 0x2e, 0x61, 0x64, 0x6d, 0x69, 0x6e, 0x2e, 0x54, 0x61, 0x73, 0x6b, 0x43, 0x61, 0x74, - 0x65, 0x67, 0x6f, 0x72, 0x79, 0x52, 0x0c, 0x74, 0x61, 0x73, 0x6b, 0x43, 0x61, 0x74, 0x65, 0x67, - 0x6f, 0x72, 0x79, 0x22, 0x14, 0x0a, 0x12, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x54, 0x61, 0x73, - 0x6b, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0xc4, 0x01, 0x0a, 0x05, 0x41, 0x67, - 0x65, 0x6e, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x34, 0x0a, 0x14, 0x73, 0x75, 0x70, 0x70, 0x6f, - 0x72, 0x74, 0x65, 0x64, 0x5f, 0x74, 0x61, 0x73, 0x6b, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x73, 0x18, - 0x02, 0x20, 0x03, 0x28, 0x09, 0x42, 0x02, 0x18, 0x01, 0x52, 0x12, 0x73, 0x75, 0x70, 0x70, 0x6f, - 0x72, 0x74, 0x65, 0x64, 0x54, 0x61, 0x73, 0x6b, 0x54, 0x79, 0x70, 0x65, 0x73, 0x12, 0x17, 0x0a, - 0x07, 0x69, 0x73, 0x5f, 0x73, 0x79, 0x6e, 0x63, 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x52, 0x06, - 0x69, 0x73, 0x53, 0x79, 0x6e, 0x63, 0x12, 0x58, 0x0a, 0x19, 0x73, 0x75, 0x70, 0x70, 0x6f, 0x72, - 0x74, 0x65, 0x64, 0x5f, 0x74, 0x61, 0x73, 0x6b, 0x5f, 0x63, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, - 0x69, 0x65, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x66, 0x6c, 0x79, 0x74, - 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x61, 0x64, 0x6d, 0x69, 0x6e, 0x2e, 0x54, 0x61, 0x73, 0x6b, 0x43, - 0x61, 0x74, 
0x65, 0x67, 0x6f, 0x72, 0x79, 0x52, 0x17, 0x73, 0x75, 0x70, 0x70, 0x6f, 0x72, 0x74, - 0x65, 0x64, 0x54, 0x61, 0x73, 0x6b, 0x43, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x69, 0x65, 0x73, - 0x22, 0x3c, 0x0a, 0x0c, 0x54, 0x61, 0x73, 0x6b, 0x43, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x79, + 0x49, 0x6e, 0x66, 0x6f, 0x12, 0x3b, 0x0a, 0x0b, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x5f, 0x65, 0x72, + 0x72, 0x6f, 0x72, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x66, 0x6c, 0x79, 0x74, + 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x61, 0x64, 0x6d, 0x69, 0x6e, 0x2e, 0x41, 0x67, 0x65, 0x6e, 0x74, + 0x45, 0x72, 0x72, 0x6f, 0x72, 0x52, 0x0a, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x45, 0x72, 0x72, 0x6f, + 0x72, 0x22, 0x9c, 0x01, 0x0a, 0x11, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x54, 0x61, 0x73, 0x6b, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1f, 0x0a, 0x09, 0x74, 0x61, 0x73, 0x6b, 0x5f, + 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x42, 0x02, 0x18, 0x01, 0x52, 0x08, + 0x74, 0x61, 0x73, 0x6b, 0x54, 0x79, 0x70, 0x65, 0x12, 0x23, 0x0a, 0x0d, 0x72, 0x65, 0x73, 0x6f, + 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, + 0x0c, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4d, 0x65, 0x74, 0x61, 0x12, 0x41, 0x0a, + 0x0d, 0x74, 0x61, 0x73, 0x6b, 0x5f, 0x63, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x79, 0x18, 0x03, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, + 0x61, 0x64, 0x6d, 0x69, 0x6e, 0x2e, 0x54, 0x61, 0x73, 0x6b, 0x43, 0x61, 0x74, 0x65, 0x67, 0x6f, + 0x72, 0x79, 0x52, 0x0c, 0x74, 0x61, 0x73, 0x6b, 0x43, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x79, + 0x22, 0x14, 0x0a, 0x12, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x54, 0x61, 0x73, 0x6b, 0x52, 0x65, + 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0xc4, 0x01, 0x0a, 0x05, 0x41, 0x67, 0x65, 0x6e, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, - 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x76, 
0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, - 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x22, 0x25, - 0x0a, 0x0f, 0x47, 0x65, 0x74, 0x41, 0x67, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, - 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x22, 0x3f, 0x0a, 0x10, 0x47, 0x65, 0x74, 0x41, 0x67, 0x65, 0x6e, - 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x2b, 0x0a, 0x05, 0x61, 0x67, 0x65, - 0x6e, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, - 0x69, 0x64, 0x6c, 0x2e, 0x61, 0x64, 0x6d, 0x69, 0x6e, 0x2e, 0x41, 0x67, 0x65, 0x6e, 0x74, 0x52, - 0x05, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x22, 0x13, 0x0a, 0x11, 0x4c, 0x69, 0x73, 0x74, 0x41, 0x67, - 0x65, 0x6e, 0x74, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0x43, 0x0a, 0x12, 0x4c, - 0x69, 0x73, 0x74, 0x41, 0x67, 0x65, 0x6e, 0x74, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, - 0x65, 0x12, 0x2d, 0x0a, 0x06, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, - 0x0b, 0x32, 0x15, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x61, 0x64, 0x6d, - 0x69, 0x6e, 0x2e, 0x41, 0x67, 0x65, 0x6e, 0x74, 0x52, 0x06, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x73, - 0x22, 0xdb, 0x02, 0x0a, 0x15, 0x47, 0x65, 0x74, 0x54, 0x61, 0x73, 0x6b, 0x4d, 0x65, 0x74, 0x72, - 0x69, 0x63, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1f, 0x0a, 0x09, 0x74, 0x61, - 0x73, 0x6b, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x42, 0x02, 0x18, - 0x01, 0x52, 0x08, 0x74, 0x61, 0x73, 0x6b, 0x54, 0x79, 0x70, 0x65, 0x12, 0x23, 0x0a, 0x0d, 0x72, - 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x18, 0x02, 0x20, 0x01, - 0x28, 0x0c, 0x52, 0x0c, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4d, 0x65, 0x74, 0x61, - 0x12, 0x18, 0x0a, 0x07, 0x71, 0x75, 0x65, 0x72, 0x69, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 
0x28, - 0x09, 0x52, 0x07, 0x71, 0x75, 0x65, 0x72, 0x69, 0x65, 0x73, 0x12, 0x39, 0x0a, 0x0a, 0x73, 0x74, - 0x61, 0x72, 0x74, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, - 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, - 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x73, 0x74, 0x61, 0x72, - 0x74, 0x54, 0x69, 0x6d, 0x65, 0x12, 0x35, 0x0a, 0x08, 0x65, 0x6e, 0x64, 0x5f, 0x74, 0x69, 0x6d, - 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, - 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, - 0x61, 0x6d, 0x70, 0x52, 0x07, 0x65, 0x6e, 0x64, 0x54, 0x69, 0x6d, 0x65, 0x12, 0x2d, 0x0a, 0x04, - 0x73, 0x74, 0x65, 0x70, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x67, 0x6f, 0x6f, - 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x44, 0x75, 0x72, - 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x04, 0x73, 0x74, 0x65, 0x70, 0x12, 0x41, 0x0a, 0x0d, 0x74, - 0x61, 0x73, 0x6b, 0x5f, 0x63, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x79, 0x18, 0x07, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x61, 0x64, - 0x6d, 0x69, 0x6e, 0x2e, 0x54, 0x61, 0x73, 0x6b, 0x43, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x79, - 0x52, 0x0c, 0x74, 0x61, 0x73, 0x6b, 0x43, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x79, 0x22, 0x58, - 0x0a, 0x16, 0x47, 0x65, 0x74, 0x54, 0x61, 0x73, 0x6b, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, - 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x3e, 0x0a, 0x07, 0x72, 0x65, 0x73, 0x75, - 0x6c, 0x74, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x66, 0x6c, 0x79, 0x74, - 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, - 0x69, 0x6f, 0x6e, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x52, - 0x07, 0x72, 0x65, 0x73, 0x75, 
0x6c, 0x74, 0x73, 0x22, 0xc9, 0x01, 0x0a, 0x12, 0x47, 0x65, 0x74, - 0x54, 0x61, 0x73, 0x6b, 0x4c, 0x6f, 0x67, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, - 0x1f, 0x0a, 0x09, 0x74, 0x61, 0x73, 0x6b, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x09, 0x42, 0x02, 0x18, 0x01, 0x52, 0x08, 0x74, 0x61, 0x73, 0x6b, 0x54, 0x79, 0x70, 0x65, - 0x12, 0x23, 0x0a, 0x0d, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, - 0x61, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0c, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, - 0x65, 0x4d, 0x65, 0x74, 0x61, 0x12, 0x14, 0x0a, 0x05, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x18, 0x03, - 0x20, 0x01, 0x28, 0x04, 0x52, 0x05, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x12, 0x14, 0x0a, 0x05, 0x74, - 0x6f, 0x6b, 0x65, 0x6e, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x74, 0x6f, 0x6b, 0x65, - 0x6e, 0x12, 0x41, 0x0a, 0x0d, 0x74, 0x61, 0x73, 0x6b, 0x5f, 0x63, 0x61, 0x74, 0x65, 0x67, 0x6f, - 0x72, 0x79, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, - 0x69, 0x64, 0x6c, 0x2e, 0x61, 0x64, 0x6d, 0x69, 0x6e, 0x2e, 0x54, 0x61, 0x73, 0x6b, 0x43, 0x61, - 0x74, 0x65, 0x67, 0x6f, 0x72, 0x79, 0x52, 0x0c, 0x74, 0x61, 0x73, 0x6b, 0x43, 0x61, 0x74, 0x65, - 0x67, 0x6f, 0x72, 0x79, 0x22, 0x31, 0x0a, 0x19, 0x47, 0x65, 0x74, 0x54, 0x61, 0x73, 0x6b, 0x4c, - 0x6f, 0x67, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x48, 0x65, 0x61, 0x64, 0x65, - 0x72, 0x12, 0x14, 0x0a, 0x05, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x05, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x22, 0x33, 0x0a, 0x17, 0x47, 0x65, 0x74, 0x54, 0x61, - 0x73, 0x6b, 0x4c, 0x6f, 0x67, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x42, 0x6f, - 0x64, 0x79, 0x12, 0x18, 0x0a, 0x07, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x73, 0x18, 0x01, 0x20, - 0x03, 0x28, 0x09, 0x52, 0x07, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x73, 0x22, 0xa1, 0x01, 0x0a, - 0x13, 0x47, 0x65, 0x74, 0x54, 0x61, 0x73, 0x6b, 0x4c, 0x6f, 0x67, 
0x73, 0x52, 0x65, 0x73, 0x70, - 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x43, 0x0a, 0x06, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x29, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, + 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x34, 0x0a, 0x14, 0x73, 0x75, 0x70, 0x70, 0x6f, 0x72, 0x74, 0x65, + 0x64, 0x5f, 0x74, 0x61, 0x73, 0x6b, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, + 0x28, 0x09, 0x42, 0x02, 0x18, 0x01, 0x52, 0x12, 0x73, 0x75, 0x70, 0x70, 0x6f, 0x72, 0x74, 0x65, + 0x64, 0x54, 0x61, 0x73, 0x6b, 0x54, 0x79, 0x70, 0x65, 0x73, 0x12, 0x17, 0x0a, 0x07, 0x69, 0x73, + 0x5f, 0x73, 0x79, 0x6e, 0x63, 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x52, 0x06, 0x69, 0x73, 0x53, + 0x79, 0x6e, 0x63, 0x12, 0x58, 0x0a, 0x19, 0x73, 0x75, 0x70, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x64, + 0x5f, 0x74, 0x61, 0x73, 0x6b, 0x5f, 0x63, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x69, 0x65, 0x73, + 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, + 0x6c, 0x2e, 0x61, 0x64, 0x6d, 0x69, 0x6e, 0x2e, 0x54, 0x61, 0x73, 0x6b, 0x43, 0x61, 0x74, 0x65, + 0x67, 0x6f, 0x72, 0x79, 0x52, 0x17, 0x73, 0x75, 0x70, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x64, 0x54, + 0x61, 0x73, 0x6b, 0x43, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x69, 0x65, 0x73, 0x22, 0x3c, 0x0a, + 0x0c, 0x54, 0x61, 0x73, 0x6b, 0x43, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x79, 0x12, 0x12, 0x0a, + 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, + 0x65, 0x12, 0x18, 0x0a, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, + 0x28, 0x05, 0x52, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x22, 0x25, 0x0a, 0x0f, 0x47, + 0x65, 0x74, 0x41, 0x67, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x12, + 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, + 0x6d, 0x65, 0x22, 0x3f, 0x0a, 0x10, 0x47, 0x65, 0x74, 0x41, 0x67, 0x65, 0x6e, 0x74, 0x52, 0x65, + 0x73, 
0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x2b, 0x0a, 0x05, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, + 0x2e, 0x61, 0x64, 0x6d, 0x69, 0x6e, 0x2e, 0x41, 0x67, 0x65, 0x6e, 0x74, 0x52, 0x05, 0x61, 0x67, + 0x65, 0x6e, 0x74, 0x22, 0x13, 0x0a, 0x11, 0x4c, 0x69, 0x73, 0x74, 0x41, 0x67, 0x65, 0x6e, 0x74, + 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0x43, 0x0a, 0x12, 0x4c, 0x69, 0x73, 0x74, + 0x41, 0x67, 0x65, 0x6e, 0x74, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x2d, + 0x0a, 0x06, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, + 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x61, 0x64, 0x6d, 0x69, 0x6e, 0x2e, + 0x41, 0x67, 0x65, 0x6e, 0x74, 0x52, 0x06, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x73, 0x22, 0xdb, 0x02, + 0x0a, 0x15, 0x47, 0x65, 0x74, 0x54, 0x61, 0x73, 0x6b, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1f, 0x0a, 0x09, 0x74, 0x61, 0x73, 0x6b, 0x5f, + 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x42, 0x02, 0x18, 0x01, 0x52, 0x08, + 0x74, 0x61, 0x73, 0x6b, 0x54, 0x79, 0x70, 0x65, 0x12, 0x23, 0x0a, 0x0d, 0x72, 0x65, 0x73, 0x6f, + 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, + 0x0c, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4d, 0x65, 0x74, 0x61, 0x12, 0x18, 0x0a, + 0x07, 0x71, 0x75, 0x65, 0x72, 0x69, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x09, 0x52, 0x07, + 0x71, 0x75, 0x65, 0x72, 0x69, 0x65, 0x73, 0x12, 0x39, 0x0a, 0x0a, 0x73, 0x74, 0x61, 0x72, 0x74, + 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, + 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, + 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x73, 0x74, 0x61, 0x72, 0x74, 0x54, 0x69, + 0x6d, 0x65, 0x12, 0x35, 0x0a, 0x08, 0x65, 
0x6e, 0x64, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, 0x05, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, + 0x52, 0x07, 0x65, 0x6e, 0x64, 0x54, 0x69, 0x6d, 0x65, 0x12, 0x2d, 0x0a, 0x04, 0x73, 0x74, 0x65, + 0x70, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, + 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x44, 0x75, 0x72, 0x61, 0x74, 0x69, + 0x6f, 0x6e, 0x52, 0x04, 0x73, 0x74, 0x65, 0x70, 0x12, 0x41, 0x0a, 0x0d, 0x74, 0x61, 0x73, 0x6b, + 0x5f, 0x63, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x79, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x1c, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x61, 0x64, 0x6d, 0x69, 0x6e, + 0x2e, 0x54, 0x61, 0x73, 0x6b, 0x43, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x79, 0x52, 0x0c, 0x74, + 0x61, 0x73, 0x6b, 0x43, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x79, 0x22, 0x58, 0x0a, 0x16, 0x47, + 0x65, 0x74, 0x54, 0x61, 0x73, 0x6b, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x52, 0x65, 0x73, + 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x3e, 0x0a, 0x07, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x73, + 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, + 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, + 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x52, 0x07, 0x72, 0x65, + 0x73, 0x75, 0x6c, 0x74, 0x73, 0x22, 0xc9, 0x01, 0x0a, 0x12, 0x47, 0x65, 0x74, 0x54, 0x61, 0x73, + 0x6b, 0x4c, 0x6f, 0x67, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1f, 0x0a, 0x09, + 0x74, 0x61, 0x73, 0x6b, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x42, + 0x02, 0x18, 0x01, 0x52, 0x08, 0x74, 0x61, 0x73, 0x6b, 0x54, 0x79, 0x70, 0x65, 0x12, 0x23, 0x0a, + 0x0d, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x65, 0x74, 
0x61, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0c, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4d, 0x65, + 0x74, 0x61, 0x12, 0x14, 0x0a, 0x05, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, + 0x04, 0x52, 0x05, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x12, 0x14, 0x0a, 0x05, 0x74, 0x6f, 0x6b, 0x65, + 0x6e, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x12, 0x41, + 0x0a, 0x0d, 0x74, 0x61, 0x73, 0x6b, 0x5f, 0x63, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x79, 0x18, + 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, + 0x2e, 0x61, 0x64, 0x6d, 0x69, 0x6e, 0x2e, 0x54, 0x61, 0x73, 0x6b, 0x43, 0x61, 0x74, 0x65, 0x67, + 0x6f, 0x72, 0x79, 0x52, 0x0c, 0x74, 0x61, 0x73, 0x6b, 0x43, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, + 0x79, 0x22, 0x31, 0x0a, 0x19, 0x47, 0x65, 0x74, 0x54, 0x61, 0x73, 0x6b, 0x4c, 0x6f, 0x67, 0x73, + 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, 0x12, 0x14, + 0x0a, 0x05, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x74, + 0x6f, 0x6b, 0x65, 0x6e, 0x22, 0x33, 0x0a, 0x17, 0x47, 0x65, 0x74, 0x54, 0x61, 0x73, 0x6b, 0x4c, + 0x6f, 0x67, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x42, 0x6f, 0x64, 0x79, 0x12, + 0x18, 0x0a, 0x07, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, + 0x52, 0x07, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x73, 0x22, 0xa1, 0x01, 0x0a, 0x13, 0x47, 0x65, + 0x74, 0x54, 0x61, 0x73, 0x6b, 0x4c, 0x6f, 0x67, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, + 0x65, 0x12, 0x43, 0x0a, 0x06, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x29, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x61, 0x64, 0x6d, + 0x69, 0x6e, 0x2e, 0x47, 0x65, 0x74, 0x54, 0x61, 0x73, 0x6b, 0x4c, 0x6f, 0x67, 0x73, 0x52, 0x65, + 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, 0x48, 0x00, 0x52, 0x06, + 0x68, 0x65, 0x61, 
0x64, 0x65, 0x72, 0x12, 0x3d, 0x0a, 0x04, 0x62, 0x6f, 0x64, 0x79, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x61, 0x64, 0x6d, 0x69, 0x6e, 0x2e, 0x47, 0x65, 0x74, 0x54, 0x61, 0x73, 0x6b, 0x4c, 0x6f, 0x67, - 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, 0x48, - 0x00, 0x52, 0x06, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x12, 0x3d, 0x0a, 0x04, 0x62, 0x6f, 0x64, - 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, - 0x64, 0x6c, 0x2e, 0x61, 0x64, 0x6d, 0x69, 0x6e, 0x2e, 0x47, 0x65, 0x74, 0x54, 0x61, 0x73, 0x6b, - 0x4c, 0x6f, 0x67, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x42, 0x6f, 0x64, 0x79, - 0x48, 0x00, 0x52, 0x04, 0x62, 0x6f, 0x64, 0x79, 0x42, 0x06, 0x0a, 0x04, 0x70, 0x61, 0x72, 0x74, - 0x2a, 0x62, 0x0a, 0x05, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x15, 0x0a, 0x11, 0x52, 0x45, 0x54, - 0x52, 0x59, 0x41, 0x42, 0x4c, 0x45, 0x5f, 0x46, 0x41, 0x49, 0x4c, 0x55, 0x52, 0x45, 0x10, 0x00, - 0x12, 0x15, 0x0a, 0x11, 0x50, 0x45, 0x52, 0x4d, 0x41, 0x4e, 0x45, 0x4e, 0x54, 0x5f, 0x46, 0x41, - 0x49, 0x4c, 0x55, 0x52, 0x45, 0x10, 0x01, 0x12, 0x0b, 0x0a, 0x07, 0x50, 0x45, 0x4e, 0x44, 0x49, - 0x4e, 0x47, 0x10, 0x02, 0x12, 0x0b, 0x0a, 0x07, 0x52, 0x55, 0x4e, 0x4e, 0x49, 0x4e, 0x47, 0x10, - 0x03, 0x12, 0x0d, 0x0a, 0x09, 0x53, 0x55, 0x43, 0x43, 0x45, 0x45, 0x44, 0x45, 0x44, 0x10, 0x04, - 0x1a, 0x02, 0x18, 0x01, 0x42, 0xb6, 0x01, 0x0a, 0x12, 0x63, 0x6f, 0x6d, 0x2e, 0x66, 0x6c, 0x79, - 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x61, 0x64, 0x6d, 0x69, 0x6e, 0x42, 0x0a, 0x41, 0x67, 0x65, - 0x6e, 0x74, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x3b, 0x67, 0x69, 0x74, 0x68, 0x75, - 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x6f, 0x72, 0x67, 0x2f, 0x66, - 0x6c, 0x79, 0x74, 0x65, 0x2f, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2f, 0x67, 0x65, - 0x6e, 0x2f, 0x70, 0x62, 0x2d, 0x67, 0x6f, 0x2f, 0x66, 0x6c, 
0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, - 0x2f, 0x61, 0x64, 0x6d, 0x69, 0x6e, 0xa2, 0x02, 0x03, 0x46, 0x41, 0x58, 0xaa, 0x02, 0x0e, 0x46, - 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x41, 0x64, 0x6d, 0x69, 0x6e, 0xca, 0x02, 0x0e, - 0x46, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x5c, 0x41, 0x64, 0x6d, 0x69, 0x6e, 0xe2, 0x02, - 0x1a, 0x46, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x5c, 0x41, 0x64, 0x6d, 0x69, 0x6e, 0x5c, - 0x47, 0x50, 0x42, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0xea, 0x02, 0x0f, 0x46, 0x6c, - 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x3a, 0x3a, 0x41, 0x64, 0x6d, 0x69, 0x6e, 0x62, 0x06, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x42, 0x6f, 0x64, 0x79, 0x48, 0x00, 0x52, + 0x04, 0x62, 0x6f, 0x64, 0x79, 0x42, 0x06, 0x0a, 0x04, 0x70, 0x61, 0x72, 0x74, 0x22, 0xc4, 0x01, + 0x0a, 0x0a, 0x41, 0x67, 0x65, 0x6e, 0x74, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x12, 0x0a, 0x04, + 0x63, 0x6f, 0x64, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x63, 0x6f, 0x64, 0x65, + 0x12, 0x33, 0x0a, 0x04, 0x6b, 0x69, 0x6e, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x1f, + 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x61, 0x64, 0x6d, 0x69, 0x6e, 0x2e, + 0x41, 0x67, 0x65, 0x6e, 0x74, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x4b, 0x69, 0x6e, 0x64, 0x52, + 0x04, 0x6b, 0x69, 0x6e, 0x64, 0x12, 0x3f, 0x0a, 0x06, 0x6f, 0x72, 0x69, 0x67, 0x69, 0x6e, 0x18, + 0x04, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x27, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, + 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x45, + 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x4b, 0x69, 0x6e, 0x64, 0x52, 0x06, + 0x6f, 0x72, 0x69, 0x67, 0x69, 0x6e, 0x22, 0x2c, 0x0a, 0x04, 0x4b, 0x69, 0x6e, 0x64, 0x12, 0x13, + 0x0a, 0x0f, 0x4e, 0x4f, 0x4e, 0x5f, 0x52, 0x45, 0x43, 0x4f, 0x56, 0x45, 0x52, 0x41, 0x42, 0x4c, + 0x45, 0x10, 0x00, 0x12, 0x0f, 0x0a, 0x0b, 0x52, 0x45, 0x43, 0x4f, 
0x56, 0x45, 0x52, 0x41, 0x42, + 0x4c, 0x45, 0x10, 0x01, 0x2a, 0x62, 0x0a, 0x05, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x15, 0x0a, + 0x11, 0x52, 0x45, 0x54, 0x52, 0x59, 0x41, 0x42, 0x4c, 0x45, 0x5f, 0x46, 0x41, 0x49, 0x4c, 0x55, + 0x52, 0x45, 0x10, 0x00, 0x12, 0x15, 0x0a, 0x11, 0x50, 0x45, 0x52, 0x4d, 0x41, 0x4e, 0x45, 0x4e, + 0x54, 0x5f, 0x46, 0x41, 0x49, 0x4c, 0x55, 0x52, 0x45, 0x10, 0x01, 0x12, 0x0b, 0x0a, 0x07, 0x50, + 0x45, 0x4e, 0x44, 0x49, 0x4e, 0x47, 0x10, 0x02, 0x12, 0x0b, 0x0a, 0x07, 0x52, 0x55, 0x4e, 0x4e, + 0x49, 0x4e, 0x47, 0x10, 0x03, 0x12, 0x0d, 0x0a, 0x09, 0x53, 0x55, 0x43, 0x43, 0x45, 0x45, 0x44, + 0x45, 0x44, 0x10, 0x04, 0x1a, 0x02, 0x18, 0x01, 0x42, 0xb6, 0x01, 0x0a, 0x12, 0x63, 0x6f, 0x6d, + 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x61, 0x64, 0x6d, 0x69, 0x6e, 0x42, + 0x0a, 0x41, 0x67, 0x65, 0x6e, 0x74, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x3b, 0x67, + 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x6f, + 0x72, 0x67, 0x2f, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x2f, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, + 0x6c, 0x2f, 0x67, 0x65, 0x6e, 0x2f, 0x70, 0x62, 0x2d, 0x67, 0x6f, 0x2f, 0x66, 0x6c, 0x79, 0x74, + 0x65, 0x69, 0x64, 0x6c, 0x2f, 0x61, 0x64, 0x6d, 0x69, 0x6e, 0xa2, 0x02, 0x03, 0x46, 0x41, 0x58, + 0xaa, 0x02, 0x0e, 0x46, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x41, 0x64, 0x6d, 0x69, + 0x6e, 0xca, 0x02, 0x0e, 0x46, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x5c, 0x41, 0x64, 0x6d, + 0x69, 0x6e, 0xe2, 0x02, 0x1a, 0x46, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x5c, 0x41, 0x64, + 0x6d, 0x69, 0x6e, 0x5c, 0x47, 0x50, 0x42, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0xea, + 0x02, 0x0f, 0x46, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x3a, 0x3a, 0x41, 0x64, 0x6d, 0x69, + 0x6e, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( @@ -2020,90 +2160,96 @@ func file_flyteidl_admin_agent_proto_rawDescGZIP() []byte { return file_flyteidl_admin_agent_proto_rawDescData } -var 
file_flyteidl_admin_agent_proto_enumTypes = make([]protoimpl.EnumInfo, 1) -var file_flyteidl_admin_agent_proto_msgTypes = make([]protoimpl.MessageInfo, 27) +var file_flyteidl_admin_agent_proto_enumTypes = make([]protoimpl.EnumInfo, 2) +var file_flyteidl_admin_agent_proto_msgTypes = make([]protoimpl.MessageInfo, 28) var file_flyteidl_admin_agent_proto_goTypes = []interface{}{ (State)(0), // 0: flyteidl.admin.State - (*TaskExecutionMetadata)(nil), // 1: flyteidl.admin.TaskExecutionMetadata - (*CreateTaskRequest)(nil), // 2: flyteidl.admin.CreateTaskRequest - (*CreateTaskResponse)(nil), // 3: flyteidl.admin.CreateTaskResponse - (*CreateRequestHeader)(nil), // 4: flyteidl.admin.CreateRequestHeader - (*ExecuteTaskSyncRequest)(nil), // 5: flyteidl.admin.ExecuteTaskSyncRequest - (*ExecuteTaskSyncResponseHeader)(nil), // 6: flyteidl.admin.ExecuteTaskSyncResponseHeader - (*ExecuteTaskSyncResponse)(nil), // 7: flyteidl.admin.ExecuteTaskSyncResponse - (*GetTaskRequest)(nil), // 8: flyteidl.admin.GetTaskRequest - (*GetTaskResponse)(nil), // 9: flyteidl.admin.GetTaskResponse - (*Resource)(nil), // 10: flyteidl.admin.Resource - (*DeleteTaskRequest)(nil), // 11: flyteidl.admin.DeleteTaskRequest - (*DeleteTaskResponse)(nil), // 12: flyteidl.admin.DeleteTaskResponse - (*Agent)(nil), // 13: flyteidl.admin.Agent - (*TaskCategory)(nil), // 14: flyteidl.admin.TaskCategory - (*GetAgentRequest)(nil), // 15: flyteidl.admin.GetAgentRequest - (*GetAgentResponse)(nil), // 16: flyteidl.admin.GetAgentResponse - (*ListAgentsRequest)(nil), // 17: flyteidl.admin.ListAgentsRequest - (*ListAgentsResponse)(nil), // 18: flyteidl.admin.ListAgentsResponse - (*GetTaskMetricsRequest)(nil), // 19: flyteidl.admin.GetTaskMetricsRequest - (*GetTaskMetricsResponse)(nil), // 20: flyteidl.admin.GetTaskMetricsResponse - (*GetTaskLogsRequest)(nil), // 21: flyteidl.admin.GetTaskLogsRequest - (*GetTaskLogsResponseHeader)(nil), // 22: flyteidl.admin.GetTaskLogsResponseHeader - (*GetTaskLogsResponseBody)(nil), // 23: 
flyteidl.admin.GetTaskLogsResponseBody - (*GetTaskLogsResponse)(nil), // 24: flyteidl.admin.GetTaskLogsResponse - nil, // 25: flyteidl.admin.TaskExecutionMetadata.LabelsEntry - nil, // 26: flyteidl.admin.TaskExecutionMetadata.AnnotationsEntry - nil, // 27: flyteidl.admin.TaskExecutionMetadata.EnvironmentVariablesEntry - (*core.TaskExecutionIdentifier)(nil), // 28: flyteidl.core.TaskExecutionIdentifier - (*core.TaskNodeOverrides)(nil), // 29: flyteidl.core.TaskNodeOverrides - (*core.Identity)(nil), // 30: flyteidl.core.Identity - (*core.LiteralMap)(nil), // 31: flyteidl.core.LiteralMap - (*core.TaskTemplate)(nil), // 32: flyteidl.core.TaskTemplate - (*core.TaskLog)(nil), // 33: flyteidl.core.TaskLog - (core.TaskExecution_Phase)(0), // 34: flyteidl.core.TaskExecution.Phase - (*structpb.Struct)(nil), // 35: google.protobuf.Struct - (*timestamppb.Timestamp)(nil), // 36: google.protobuf.Timestamp - (*durationpb.Duration)(nil), // 37: google.protobuf.Duration - (*core.ExecutionMetricResult)(nil), // 38: flyteidl.core.ExecutionMetricResult + (AgentError_Kind)(0), // 1: flyteidl.admin.AgentError.Kind + (*TaskExecutionMetadata)(nil), // 2: flyteidl.admin.TaskExecutionMetadata + (*CreateTaskRequest)(nil), // 3: flyteidl.admin.CreateTaskRequest + (*CreateTaskResponse)(nil), // 4: flyteidl.admin.CreateTaskResponse + (*CreateRequestHeader)(nil), // 5: flyteidl.admin.CreateRequestHeader + (*ExecuteTaskSyncRequest)(nil), // 6: flyteidl.admin.ExecuteTaskSyncRequest + (*ExecuteTaskSyncResponseHeader)(nil), // 7: flyteidl.admin.ExecuteTaskSyncResponseHeader + (*ExecuteTaskSyncResponse)(nil), // 8: flyteidl.admin.ExecuteTaskSyncResponse + (*GetTaskRequest)(nil), // 9: flyteidl.admin.GetTaskRequest + (*GetTaskResponse)(nil), // 10: flyteidl.admin.GetTaskResponse + (*Resource)(nil), // 11: flyteidl.admin.Resource + (*DeleteTaskRequest)(nil), // 12: flyteidl.admin.DeleteTaskRequest + (*DeleteTaskResponse)(nil), // 13: flyteidl.admin.DeleteTaskResponse + (*Agent)(nil), // 14: 
flyteidl.admin.Agent + (*TaskCategory)(nil), // 15: flyteidl.admin.TaskCategory + (*GetAgentRequest)(nil), // 16: flyteidl.admin.GetAgentRequest + (*GetAgentResponse)(nil), // 17: flyteidl.admin.GetAgentResponse + (*ListAgentsRequest)(nil), // 18: flyteidl.admin.ListAgentsRequest + (*ListAgentsResponse)(nil), // 19: flyteidl.admin.ListAgentsResponse + (*GetTaskMetricsRequest)(nil), // 20: flyteidl.admin.GetTaskMetricsRequest + (*GetTaskMetricsResponse)(nil), // 21: flyteidl.admin.GetTaskMetricsResponse + (*GetTaskLogsRequest)(nil), // 22: flyteidl.admin.GetTaskLogsRequest + (*GetTaskLogsResponseHeader)(nil), // 23: flyteidl.admin.GetTaskLogsResponseHeader + (*GetTaskLogsResponseBody)(nil), // 24: flyteidl.admin.GetTaskLogsResponseBody + (*GetTaskLogsResponse)(nil), // 25: flyteidl.admin.GetTaskLogsResponse + (*AgentError)(nil), // 26: flyteidl.admin.AgentError + nil, // 27: flyteidl.admin.TaskExecutionMetadata.LabelsEntry + nil, // 28: flyteidl.admin.TaskExecutionMetadata.AnnotationsEntry + nil, // 29: flyteidl.admin.TaskExecutionMetadata.EnvironmentVariablesEntry + (*core.TaskExecutionIdentifier)(nil), // 30: flyteidl.core.TaskExecutionIdentifier + (*core.TaskNodeOverrides)(nil), // 31: flyteidl.core.TaskNodeOverrides + (*core.Identity)(nil), // 32: flyteidl.core.Identity + (*core.LiteralMap)(nil), // 33: flyteidl.core.LiteralMap + (*core.TaskTemplate)(nil), // 34: flyteidl.core.TaskTemplate + (*core.TaskLog)(nil), // 35: flyteidl.core.TaskLog + (core.TaskExecution_Phase)(0), // 36: flyteidl.core.TaskExecution.Phase + (*structpb.Struct)(nil), // 37: google.protobuf.Struct + (*timestamppb.Timestamp)(nil), // 38: google.protobuf.Timestamp + (*durationpb.Duration)(nil), // 39: google.protobuf.Duration + (*core.ExecutionMetricResult)(nil), // 40: flyteidl.core.ExecutionMetricResult + (core.ExecutionError_ErrorKind)(0), // 41: flyteidl.core.ExecutionError.ErrorKind } var file_flyteidl_admin_agent_proto_depIdxs = []int32{ - 28, // 0: 
flyteidl.admin.TaskExecutionMetadata.task_execution_id:type_name -> flyteidl.core.TaskExecutionIdentifier - 25, // 1: flyteidl.admin.TaskExecutionMetadata.labels:type_name -> flyteidl.admin.TaskExecutionMetadata.LabelsEntry - 26, // 2: flyteidl.admin.TaskExecutionMetadata.annotations:type_name -> flyteidl.admin.TaskExecutionMetadata.AnnotationsEntry - 27, // 3: flyteidl.admin.TaskExecutionMetadata.environment_variables:type_name -> flyteidl.admin.TaskExecutionMetadata.EnvironmentVariablesEntry - 29, // 4: flyteidl.admin.TaskExecutionMetadata.overrides:type_name -> flyteidl.core.TaskNodeOverrides - 30, // 5: flyteidl.admin.TaskExecutionMetadata.identity:type_name -> flyteidl.core.Identity - 31, // 6: flyteidl.admin.CreateTaskRequest.inputs:type_name -> flyteidl.core.LiteralMap - 32, // 7: flyteidl.admin.CreateTaskRequest.template:type_name -> flyteidl.core.TaskTemplate - 1, // 8: flyteidl.admin.CreateTaskRequest.task_execution_metadata:type_name -> flyteidl.admin.TaskExecutionMetadata - 32, // 9: flyteidl.admin.CreateRequestHeader.template:type_name -> flyteidl.core.TaskTemplate - 1, // 10: flyteidl.admin.CreateRequestHeader.task_execution_metadata:type_name -> flyteidl.admin.TaskExecutionMetadata - 4, // 11: flyteidl.admin.ExecuteTaskSyncRequest.header:type_name -> flyteidl.admin.CreateRequestHeader - 31, // 12: flyteidl.admin.ExecuteTaskSyncRequest.inputs:type_name -> flyteidl.core.LiteralMap - 10, // 13: flyteidl.admin.ExecuteTaskSyncResponseHeader.resource:type_name -> flyteidl.admin.Resource - 6, // 14: flyteidl.admin.ExecuteTaskSyncResponse.header:type_name -> flyteidl.admin.ExecuteTaskSyncResponseHeader - 31, // 15: flyteidl.admin.ExecuteTaskSyncResponse.outputs:type_name -> flyteidl.core.LiteralMap - 14, // 16: flyteidl.admin.GetTaskRequest.task_category:type_name -> flyteidl.admin.TaskCategory - 10, // 17: flyteidl.admin.GetTaskResponse.resource:type_name -> flyteidl.admin.Resource + 30, // 0: flyteidl.admin.TaskExecutionMetadata.task_execution_id:type_name 
-> flyteidl.core.TaskExecutionIdentifier + 27, // 1: flyteidl.admin.TaskExecutionMetadata.labels:type_name -> flyteidl.admin.TaskExecutionMetadata.LabelsEntry + 28, // 2: flyteidl.admin.TaskExecutionMetadata.annotations:type_name -> flyteidl.admin.TaskExecutionMetadata.AnnotationsEntry + 29, // 3: flyteidl.admin.TaskExecutionMetadata.environment_variables:type_name -> flyteidl.admin.TaskExecutionMetadata.EnvironmentVariablesEntry + 31, // 4: flyteidl.admin.TaskExecutionMetadata.overrides:type_name -> flyteidl.core.TaskNodeOverrides + 32, // 5: flyteidl.admin.TaskExecutionMetadata.identity:type_name -> flyteidl.core.Identity + 33, // 6: flyteidl.admin.CreateTaskRequest.inputs:type_name -> flyteidl.core.LiteralMap + 34, // 7: flyteidl.admin.CreateTaskRequest.template:type_name -> flyteidl.core.TaskTemplate + 2, // 8: flyteidl.admin.CreateTaskRequest.task_execution_metadata:type_name -> flyteidl.admin.TaskExecutionMetadata + 34, // 9: flyteidl.admin.CreateRequestHeader.template:type_name -> flyteidl.core.TaskTemplate + 2, // 10: flyteidl.admin.CreateRequestHeader.task_execution_metadata:type_name -> flyteidl.admin.TaskExecutionMetadata + 5, // 11: flyteidl.admin.ExecuteTaskSyncRequest.header:type_name -> flyteidl.admin.CreateRequestHeader + 33, // 12: flyteidl.admin.ExecuteTaskSyncRequest.inputs:type_name -> flyteidl.core.LiteralMap + 11, // 13: flyteidl.admin.ExecuteTaskSyncResponseHeader.resource:type_name -> flyteidl.admin.Resource + 7, // 14: flyteidl.admin.ExecuteTaskSyncResponse.header:type_name -> flyteidl.admin.ExecuteTaskSyncResponseHeader + 33, // 15: flyteidl.admin.ExecuteTaskSyncResponse.outputs:type_name -> flyteidl.core.LiteralMap + 15, // 16: flyteidl.admin.GetTaskRequest.task_category:type_name -> flyteidl.admin.TaskCategory + 11, // 17: flyteidl.admin.GetTaskResponse.resource:type_name -> flyteidl.admin.Resource 0, // 18: flyteidl.admin.Resource.state:type_name -> flyteidl.admin.State - 31, // 19: flyteidl.admin.Resource.outputs:type_name -> 
flyteidl.core.LiteralMap - 33, // 20: flyteidl.admin.Resource.log_links:type_name -> flyteidl.core.TaskLog - 34, // 21: flyteidl.admin.Resource.phase:type_name -> flyteidl.core.TaskExecution.Phase - 35, // 22: flyteidl.admin.Resource.custom_info:type_name -> google.protobuf.Struct - 14, // 23: flyteidl.admin.DeleteTaskRequest.task_category:type_name -> flyteidl.admin.TaskCategory - 14, // 24: flyteidl.admin.Agent.supported_task_categories:type_name -> flyteidl.admin.TaskCategory - 13, // 25: flyteidl.admin.GetAgentResponse.agent:type_name -> flyteidl.admin.Agent - 13, // 26: flyteidl.admin.ListAgentsResponse.agents:type_name -> flyteidl.admin.Agent - 36, // 27: flyteidl.admin.GetTaskMetricsRequest.start_time:type_name -> google.protobuf.Timestamp - 36, // 28: flyteidl.admin.GetTaskMetricsRequest.end_time:type_name -> google.protobuf.Timestamp - 37, // 29: flyteidl.admin.GetTaskMetricsRequest.step:type_name -> google.protobuf.Duration - 14, // 30: flyteidl.admin.GetTaskMetricsRequest.task_category:type_name -> flyteidl.admin.TaskCategory - 38, // 31: flyteidl.admin.GetTaskMetricsResponse.results:type_name -> flyteidl.core.ExecutionMetricResult - 14, // 32: flyteidl.admin.GetTaskLogsRequest.task_category:type_name -> flyteidl.admin.TaskCategory - 22, // 33: flyteidl.admin.GetTaskLogsResponse.header:type_name -> flyteidl.admin.GetTaskLogsResponseHeader - 23, // 34: flyteidl.admin.GetTaskLogsResponse.body:type_name -> flyteidl.admin.GetTaskLogsResponseBody - 35, // [35:35] is the sub-list for method output_type - 35, // [35:35] is the sub-list for method input_type - 35, // [35:35] is the sub-list for extension type_name - 35, // [35:35] is the sub-list for extension extendee - 0, // [0:35] is the sub-list for field type_name + 33, // 19: flyteidl.admin.Resource.outputs:type_name -> flyteidl.core.LiteralMap + 35, // 20: flyteidl.admin.Resource.log_links:type_name -> flyteidl.core.TaskLog + 36, // 21: flyteidl.admin.Resource.phase:type_name -> 
flyteidl.core.TaskExecution.Phase + 37, // 22: flyteidl.admin.Resource.custom_info:type_name -> google.protobuf.Struct + 26, // 23: flyteidl.admin.Resource.agent_error:type_name -> flyteidl.admin.AgentError + 15, // 24: flyteidl.admin.DeleteTaskRequest.task_category:type_name -> flyteidl.admin.TaskCategory + 15, // 25: flyteidl.admin.Agent.supported_task_categories:type_name -> flyteidl.admin.TaskCategory + 14, // 26: flyteidl.admin.GetAgentResponse.agent:type_name -> flyteidl.admin.Agent + 14, // 27: flyteidl.admin.ListAgentsResponse.agents:type_name -> flyteidl.admin.Agent + 38, // 28: flyteidl.admin.GetTaskMetricsRequest.start_time:type_name -> google.protobuf.Timestamp + 38, // 29: flyteidl.admin.GetTaskMetricsRequest.end_time:type_name -> google.protobuf.Timestamp + 39, // 30: flyteidl.admin.GetTaskMetricsRequest.step:type_name -> google.protobuf.Duration + 15, // 31: flyteidl.admin.GetTaskMetricsRequest.task_category:type_name -> flyteidl.admin.TaskCategory + 40, // 32: flyteidl.admin.GetTaskMetricsResponse.results:type_name -> flyteidl.core.ExecutionMetricResult + 15, // 33: flyteidl.admin.GetTaskLogsRequest.task_category:type_name -> flyteidl.admin.TaskCategory + 23, // 34: flyteidl.admin.GetTaskLogsResponse.header:type_name -> flyteidl.admin.GetTaskLogsResponseHeader + 24, // 35: flyteidl.admin.GetTaskLogsResponse.body:type_name -> flyteidl.admin.GetTaskLogsResponseBody + 1, // 36: flyteidl.admin.AgentError.kind:type_name -> flyteidl.admin.AgentError.Kind + 41, // 37: flyteidl.admin.AgentError.origin:type_name -> flyteidl.core.ExecutionError.ErrorKind + 38, // [38:38] is the sub-list for method output_type + 38, // [38:38] is the sub-list for method input_type + 38, // [38:38] is the sub-list for extension type_name + 38, // [38:38] is the sub-list for extension extendee + 0, // [0:38] is the sub-list for field type_name } func init() { file_flyteidl_admin_agent_proto_init() } @@ -2400,6 +2546,18 @@ func file_flyteidl_admin_agent_proto_init() { return nil 
} } + file_flyteidl_admin_agent_proto_msgTypes[24].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*AgentError); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } } file_flyteidl_admin_agent_proto_msgTypes[4].OneofWrappers = []interface{}{ (*ExecuteTaskSyncRequest_Header)(nil), @@ -2418,8 +2576,8 @@ func file_flyteidl_admin_agent_proto_init() { File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: file_flyteidl_admin_agent_proto_rawDesc, - NumEnums: 1, - NumMessages: 27, + NumEnums: 2, + NumMessages: 28, NumExtensions: 0, NumServices: 0, }, diff --git a/flyteidl/gen/pb-go/flyteidl/admin/execution.pb.go b/flyteidl/gen/pb-go/flyteidl/admin/execution.pb.go index 1b878ceeb6..6e4a94b85e 100644 --- a/flyteidl/gen/pb-go/flyteidl/admin/execution.pb.go +++ b/flyteidl/gen/pb-go/flyteidl/admin/execution.pb.go @@ -2142,7 +2142,7 @@ var file_flyteidl_admin_execution_proto_rawDesc = []byte{ 0x5f, 0x63, 0x6c, 0x75, 0x73, 0x74, 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x10, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6c, 0x75, 0x73, 0x74, 0x65, 0x72, 0x12, 0x1c, 0x0a, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x22, 0x85, + 0x01, 0x28, 0x09, 0x52, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x22, 0x8b, 0x05, 0x0a, 0x11, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x43, 0x0a, 0x04, 0x6d, 0x6f, 0x64, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x2f, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x61, 0x64, @@ -2176,179 +2176,179 @@ var file_flyteidl_admin_execution_proto_rawDesc = []byte{ 0x64, 0x73, 0x18, 0x12, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 
0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x49, 0x44, 0x52, 0x0b, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x49, 0x64, 0x73, - 0x22, 0x74, 0x0a, 0x0d, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x6f, 0x64, + 0x22, 0x7a, 0x0a, 0x0d, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x6f, 0x64, 0x65, 0x12, 0x0a, 0x0a, 0x06, 0x4d, 0x41, 0x4e, 0x55, 0x41, 0x4c, 0x10, 0x00, 0x12, 0x0d, 0x0a, 0x09, 0x53, 0x43, 0x48, 0x45, 0x44, 0x55, 0x4c, 0x45, 0x44, 0x10, 0x01, 0x12, 0x0a, 0x0a, 0x06, 0x53, 0x59, 0x53, 0x54, 0x45, 0x4d, 0x10, 0x02, 0x12, 0x0c, 0x0a, 0x08, 0x52, 0x45, 0x4c, 0x41, 0x55, 0x4e, 0x43, 0x48, 0x10, 0x03, 0x12, 0x12, 0x0a, 0x0e, 0x43, 0x48, 0x49, 0x4c, 0x44, 0x5f, 0x57, 0x4f, 0x52, 0x4b, 0x46, 0x4c, 0x4f, 0x57, 0x10, 0x04, 0x12, 0x0d, 0x0a, 0x09, 0x52, 0x45, 0x43, 0x4f, 0x56, 0x45, 0x52, 0x45, 0x44, 0x10, 0x05, 0x12, 0x0b, 0x0a, 0x07, 0x54, 0x52, 0x49, - 0x47, 0x47, 0x45, 0x52, 0x10, 0x06, 0x22, 0x56, 0x0a, 0x10, 0x4e, 0x6f, 0x74, 0x69, 0x66, 0x69, - 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x4c, 0x69, 0x73, 0x74, 0x12, 0x42, 0x0a, 0x0d, 0x6e, 0x6f, - 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, - 0x0b, 0x32, 0x1c, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x61, 0x64, 0x6d, - 0x69, 0x6e, 0x2e, 0x4e, 0x6f, 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, - 0x0d, 0x6e, 0x6f, 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x22, 0xd6, - 0x09, 0x0a, 0x0d, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x70, 0x65, 0x63, - 0x12, 0x3a, 0x0a, 0x0b, 0x6c, 0x61, 0x75, 0x6e, 0x63, 0x68, 0x5f, 0x70, 0x6c, 0x61, 0x6e, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, - 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, - 0x52, 0x0a, 0x6c, 0x61, 0x75, 0x6e, 0x63, 0x68, 0x50, 0x6c, 0x61, 0x6e, 
0x12, 0x35, 0x0a, 0x06, - 0x69, 0x6e, 0x70, 0x75, 0x74, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x66, - 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4c, 0x69, 0x74, - 0x65, 0x72, 0x61, 0x6c, 0x4d, 0x61, 0x70, 0x42, 0x02, 0x18, 0x01, 0x52, 0x06, 0x69, 0x6e, 0x70, - 0x75, 0x74, 0x73, 0x12, 0x3d, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, - 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, - 0x2e, 0x61, 0x64, 0x6d, 0x69, 0x6e, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, - 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, - 0x74, 0x61, 0x12, 0x48, 0x0a, 0x0d, 0x6e, 0x6f, 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, - 0x6f, 0x6e, 0x73, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x66, 0x6c, 0x79, 0x74, - 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x61, 0x64, 0x6d, 0x69, 0x6e, 0x2e, 0x4e, 0x6f, 0x74, 0x69, 0x66, - 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x4c, 0x69, 0x73, 0x74, 0x48, 0x00, 0x52, 0x0d, 0x6e, - 0x6f, 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x21, 0x0a, 0x0b, - 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x61, 0x6c, 0x6c, 0x18, 0x06, 0x20, 0x01, 0x28, - 0x08, 0x48, 0x00, 0x52, 0x0a, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x41, 0x6c, 0x6c, 0x12, - 0x2e, 0x0a, 0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x16, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x61, 0x64, 0x6d, 0x69, 0x6e, - 0x2e, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x52, 0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x12, - 0x3d, 0x0a, 0x0b, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x08, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, - 0x61, 0x64, 0x6d, 0x69, 0x6e, 0x2e, 0x41, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, - 0x73, 0x52, 
0x0b, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x49, - 0x0a, 0x10, 0x73, 0x65, 0x63, 0x75, 0x72, 0x69, 0x74, 0x79, 0x5f, 0x63, 0x6f, 0x6e, 0x74, 0x65, - 0x78, 0x74, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, - 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x53, 0x65, 0x63, 0x75, 0x72, 0x69, 0x74, - 0x79, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x52, 0x0f, 0x73, 0x65, 0x63, 0x75, 0x72, 0x69, - 0x74, 0x79, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x12, 0x39, 0x0a, 0x09, 0x61, 0x75, 0x74, - 0x68, 0x5f, 0x72, 0x6f, 0x6c, 0x65, 0x18, 0x10, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x66, - 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x61, 0x64, 0x6d, 0x69, 0x6e, 0x2e, 0x41, 0x75, - 0x74, 0x68, 0x52, 0x6f, 0x6c, 0x65, 0x42, 0x02, 0x18, 0x01, 0x52, 0x08, 0x61, 0x75, 0x74, 0x68, - 0x52, 0x6f, 0x6c, 0x65, 0x12, 0x4d, 0x0a, 0x12, 0x71, 0x75, 0x61, 0x6c, 0x69, 0x74, 0x79, 0x5f, - 0x6f, 0x66, 0x5f, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x18, 0x11, 0x20, 0x01, 0x28, 0x0b, - 0x32, 0x1f, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, - 0x2e, 0x51, 0x75, 0x61, 0x6c, 0x69, 0x74, 0x79, 0x4f, 0x66, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, - 0x65, 0x52, 0x10, 0x71, 0x75, 0x61, 0x6c, 0x69, 0x74, 0x79, 0x4f, 0x66, 0x53, 0x65, 0x72, 0x76, - 0x69, 0x63, 0x65, 0x12, 0x27, 0x0a, 0x0f, 0x6d, 0x61, 0x78, 0x5f, 0x70, 0x61, 0x72, 0x61, 0x6c, - 0x6c, 0x65, 0x6c, 0x69, 0x73, 0x6d, 0x18, 0x12, 0x20, 0x01, 0x28, 0x05, 0x52, 0x0e, 0x6d, 0x61, - 0x78, 0x50, 0x61, 0x72, 0x61, 0x6c, 0x6c, 0x65, 0x6c, 0x69, 0x73, 0x6d, 0x12, 0x58, 0x0a, 0x16, - 0x72, 0x61, 0x77, 0x5f, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x5f, - 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x13, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x23, 0x2e, 0x66, - 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x61, 0x64, 0x6d, 0x69, 0x6e, 0x2e, 0x52, 0x61, - 0x77, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x44, 
0x61, 0x74, 0x61, 0x43, 0x6f, 0x6e, 0x66, 0x69, - 0x67, 0x52, 0x13, 0x72, 0x61, 0x77, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x44, 0x61, 0x74, 0x61, - 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x50, 0x0a, 0x12, 0x63, 0x6c, 0x75, 0x73, 0x74, 0x65, - 0x72, 0x5f, 0x61, 0x73, 0x73, 0x69, 0x67, 0x6e, 0x6d, 0x65, 0x6e, 0x74, 0x18, 0x14, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x61, 0x64, - 0x6d, 0x69, 0x6e, 0x2e, 0x43, 0x6c, 0x75, 0x73, 0x74, 0x65, 0x72, 0x41, 0x73, 0x73, 0x69, 0x67, - 0x6e, 0x6d, 0x65, 0x6e, 0x74, 0x52, 0x11, 0x63, 0x6c, 0x75, 0x73, 0x74, 0x65, 0x72, 0x41, 0x73, - 0x73, 0x69, 0x67, 0x6e, 0x6d, 0x65, 0x6e, 0x74, 0x12, 0x40, 0x0a, 0x0d, 0x69, 0x6e, 0x74, 0x65, - 0x72, 0x72, 0x75, 0x70, 0x74, 0x69, 0x62, 0x6c, 0x65, 0x18, 0x15, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, - 0x66, 0x2e, 0x42, 0x6f, 0x6f, 0x6c, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x0d, 0x69, 0x6e, 0x74, - 0x65, 0x72, 0x72, 0x75, 0x70, 0x74, 0x69, 0x62, 0x6c, 0x65, 0x12, 0x27, 0x0a, 0x0f, 0x6f, 0x76, - 0x65, 0x72, 0x77, 0x72, 0x69, 0x74, 0x65, 0x5f, 0x63, 0x61, 0x63, 0x68, 0x65, 0x18, 0x16, 0x20, - 0x01, 0x28, 0x08, 0x52, 0x0e, 0x6f, 0x76, 0x65, 0x72, 0x77, 0x72, 0x69, 0x74, 0x65, 0x43, 0x61, - 0x63, 0x68, 0x65, 0x12, 0x28, 0x0a, 0x04, 0x65, 0x6e, 0x76, 0x73, 0x18, 0x17, 0x20, 0x01, 0x28, - 0x0b, 0x32, 0x14, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x61, 0x64, 0x6d, - 0x69, 0x6e, 0x2e, 0x45, 0x6e, 0x76, 0x73, 0x52, 0x04, 0x65, 0x6e, 0x76, 0x73, 0x12, 0x16, 0x0a, - 0x04, 0x74, 0x61, 0x67, 0x73, 0x18, 0x18, 0x20, 0x03, 0x28, 0x09, 0x42, 0x02, 0x18, 0x01, 0x52, - 0x04, 0x74, 0x61, 0x67, 0x73, 0x12, 0x5d, 0x0a, 0x17, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, - 0x6f, 0x6e, 0x5f, 0x63, 0x6c, 0x75, 0x73, 0x74, 0x65, 0x72, 0x5f, 0x6c, 0x61, 0x62, 0x65, 0x6c, - 0x18, 0x19, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x25, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 
0x69, 0x64, - 0x6c, 0x2e, 0x61, 0x64, 0x6d, 0x69, 0x6e, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, - 0x6e, 0x43, 0x6c, 0x75, 0x73, 0x74, 0x65, 0x72, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x52, 0x15, 0x65, - 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6c, 0x75, 0x73, 0x74, 0x65, 0x72, 0x4c, - 0x61, 0x62, 0x65, 0x6c, 0x12, 0x61, 0x0a, 0x19, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, - 0x6e, 0x5f, 0x65, 0x6e, 0x76, 0x5f, 0x61, 0x73, 0x73, 0x69, 0x67, 0x6e, 0x6d, 0x65, 0x6e, 0x74, - 0x73, 0x18, 0x1a, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x25, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, - 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, - 0x6e, 0x45, 0x6e, 0x76, 0x41, 0x73, 0x73, 0x69, 0x67, 0x6e, 0x6d, 0x65, 0x6e, 0x74, 0x52, 0x17, - 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x6e, 0x76, 0x41, 0x73, 0x73, 0x69, - 0x67, 0x6e, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x42, 0x18, 0x0a, 0x16, 0x6e, 0x6f, 0x74, 0x69, 0x66, - 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x6f, 0x76, 0x65, 0x72, 0x72, 0x69, 0x64, 0x65, - 0x73, 0x4a, 0x04, 0x08, 0x04, 0x10, 0x05, 0x22, 0x6d, 0x0a, 0x19, 0x45, 0x78, 0x65, 0x63, 0x75, - 0x74, 0x69, 0x6f, 0x6e, 0x54, 0x65, 0x72, 0x6d, 0x69, 0x6e, 0x61, 0x74, 0x65, 0x52, 0x65, 0x71, - 0x75, 0x65, 0x73, 0x74, 0x12, 0x3a, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, - 0x32, 0x2a, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, - 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, - 0x6f, 0x6e, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x52, 0x02, 0x69, 0x64, - 0x12, 0x14, 0x0a, 0x05, 0x63, 0x61, 0x75, 0x73, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x05, 0x63, 0x61, 0x75, 0x73, 0x65, 0x22, 0x1c, 0x0a, 0x1a, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, - 0x69, 0x6f, 0x6e, 0x54, 0x65, 0x72, 0x6d, 0x69, 0x6e, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x70, - 0x6f, 0x6e, 0x73, 0x65, 
0x22, 0x5d, 0x0a, 0x1f, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, - 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x47, 0x65, 0x74, 0x44, 0x61, 0x74, 0x61, - 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x3a, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x2a, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, - 0x6f, 0x72, 0x65, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x45, 0x78, 0x65, 0x63, - 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x52, - 0x02, 0x69, 0x64, 0x22, 0x88, 0x02, 0x0a, 0x20, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, - 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x47, 0x65, 0x74, 0x44, 0x61, 0x74, 0x61, - 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x35, 0x0a, 0x07, 0x6f, 0x75, 0x74, 0x70, - 0x75, 0x74, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x66, 0x6c, 0x79, 0x74, - 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x61, 0x64, 0x6d, 0x69, 0x6e, 0x2e, 0x55, 0x72, 0x6c, 0x42, 0x6c, - 0x6f, 0x62, 0x42, 0x02, 0x18, 0x01, 0x52, 0x07, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x73, 0x12, - 0x33, 0x0a, 0x06, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x17, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x61, 0x64, 0x6d, 0x69, 0x6e, - 0x2e, 0x55, 0x72, 0x6c, 0x42, 0x6c, 0x6f, 0x62, 0x42, 0x02, 0x18, 0x01, 0x52, 0x06, 0x69, 0x6e, - 0x70, 0x75, 0x74, 0x73, 0x12, 0x3a, 0x0a, 0x0b, 0x66, 0x75, 0x6c, 0x6c, 0x5f, 0x69, 0x6e, 0x70, - 0x75, 0x74, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x66, 0x6c, 0x79, 0x74, - 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4c, 0x69, 0x74, 0x65, 0x72, 0x61, - 0x6c, 0x4d, 0x61, 0x70, 0x52, 0x0a, 0x66, 0x75, 0x6c, 0x6c, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x73, - 0x12, 0x3c, 0x0a, 0x0c, 0x66, 0x75, 0x6c, 0x6c, 0x5f, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x73, - 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x66, 
0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, - 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4c, 0x69, 0x74, 0x65, 0x72, 0x61, 0x6c, 0x4d, 0x61, - 0x70, 0x52, 0x0b, 0x66, 0x75, 0x6c, 0x6c, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x73, 0x22, 0x8a, - 0x01, 0x0a, 0x16, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x55, 0x70, 0x64, 0x61, - 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x3a, 0x0a, 0x02, 0x69, 0x64, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2a, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, - 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x45, 0x78, - 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, - 0x72, 0x52, 0x02, 0x69, 0x64, 0x12, 0x34, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x02, - 0x20, 0x01, 0x28, 0x0e, 0x32, 0x1e, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, - 0x61, 0x64, 0x6d, 0x69, 0x6e, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x53, - 0x74, 0x61, 0x74, 0x65, 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x22, 0xae, 0x01, 0x0a, 0x1b, - 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x65, 0x43, 0x68, - 0x61, 0x6e, 0x67, 0x65, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x12, 0x34, 0x0a, 0x05, 0x73, - 0x74, 0x61, 0x74, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x1e, 0x2e, 0x66, 0x6c, 0x79, - 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x61, 0x64, 0x6d, 0x69, 0x6e, 0x2e, 0x45, 0x78, 0x65, 0x63, - 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x65, 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, - 0x65, 0x12, 0x3b, 0x0a, 0x0b, 0x6f, 0x63, 0x63, 0x75, 0x72, 0x72, 0x65, 0x64, 0x5f, 0x61, 0x74, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, - 0x6d, 0x70, 0x52, 0x0a, 0x6f, 0x63, 0x63, 0x75, 0x72, 0x72, 0x65, 0x64, 0x41, 0x74, 0x12, 0x1c, - 
0x0a, 0x09, 0x70, 0x72, 0x69, 0x6e, 0x63, 0x69, 0x70, 0x61, 0x6c, 0x18, 0x03, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x09, 0x70, 0x72, 0x69, 0x6e, 0x63, 0x69, 0x70, 0x61, 0x6c, 0x22, 0x19, 0x0a, 0x17, - 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x52, - 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x76, 0x0a, 0x22, 0x57, 0x6f, 0x72, 0x6b, 0x66, - 0x6c, 0x6f, 0x77, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x47, 0x65, 0x74, 0x4d, - 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x3a, 0x0a, + 0x47, 0x47, 0x45, 0x52, 0x10, 0x06, 0x22, 0x04, 0x08, 0x07, 0x10, 0x07, 0x22, 0x56, 0x0a, 0x10, + 0x4e, 0x6f, 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x4c, 0x69, 0x73, 0x74, + 0x12, 0x42, 0x0a, 0x0d, 0x6e, 0x6f, 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, + 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, + 0x64, 0x6c, 0x2e, 0x61, 0x64, 0x6d, 0x69, 0x6e, 0x2e, 0x4e, 0x6f, 0x74, 0x69, 0x66, 0x69, 0x63, + 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x0d, 0x6e, 0x6f, 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, + 0x69, 0x6f, 0x6e, 0x73, 0x22, 0xd6, 0x09, 0x0a, 0x0d, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, + 0x6f, 0x6e, 0x53, 0x70, 0x65, 0x63, 0x12, 0x3a, 0x0a, 0x0b, 0x6c, 0x61, 0x75, 0x6e, 0x63, 0x68, + 0x5f, 0x70, 0x6c, 0x61, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x66, 0x6c, + 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x49, 0x64, 0x65, 0x6e, + 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x52, 0x0a, 0x6c, 0x61, 0x75, 0x6e, 0x63, 0x68, 0x50, 0x6c, + 0x61, 0x6e, 0x12, 0x35, 0x0a, 0x06, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x73, 0x18, 0x02, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, + 0x72, 0x65, 0x2e, 0x4c, 0x69, 0x74, 0x65, 0x72, 0x61, 0x6c, 0x4d, 0x61, 0x70, 0x42, 0x02, 0x18, + 0x01, 0x52, 0x06, 0x69, 0x6e, 0x70, 
0x75, 0x74, 0x73, 0x12, 0x3d, 0x0a, 0x08, 0x6d, 0x65, 0x74, + 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x66, 0x6c, + 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x61, 0x64, 0x6d, 0x69, 0x6e, 0x2e, 0x45, 0x78, 0x65, + 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, 0x08, + 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x48, 0x0a, 0x0d, 0x6e, 0x6f, 0x74, 0x69, + 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x20, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x61, 0x64, 0x6d, 0x69, 0x6e, + 0x2e, 0x4e, 0x6f, 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x4c, 0x69, 0x73, + 0x74, 0x48, 0x00, 0x52, 0x0d, 0x6e, 0x6f, 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, + 0x6e, 0x73, 0x12, 0x21, 0x0a, 0x0b, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x61, 0x6c, + 0x6c, 0x18, 0x06, 0x20, 0x01, 0x28, 0x08, 0x48, 0x00, 0x52, 0x0a, 0x64, 0x69, 0x73, 0x61, 0x62, + 0x6c, 0x65, 0x41, 0x6c, 0x6c, 0x12, 0x2e, 0x0a, 0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x18, + 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, + 0x2e, 0x61, 0x64, 0x6d, 0x69, 0x6e, 0x2e, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x52, 0x06, 0x6c, + 0x61, 0x62, 0x65, 0x6c, 0x73, 0x12, 0x3d, 0x0a, 0x0b, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, + 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x66, 0x6c, 0x79, + 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x61, 0x64, 0x6d, 0x69, 0x6e, 0x2e, 0x41, 0x6e, 0x6e, 0x6f, + 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x52, 0x0b, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, + 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x49, 0x0a, 0x10, 0x73, 0x65, 0x63, 0x75, 0x72, 0x69, 0x74, 0x79, + 0x5f, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, + 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 
0x72, 0x65, 0x2e, 0x53, + 0x65, 0x63, 0x75, 0x72, 0x69, 0x74, 0x79, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x52, 0x0f, + 0x73, 0x65, 0x63, 0x75, 0x72, 0x69, 0x74, 0x79, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x12, + 0x39, 0x0a, 0x09, 0x61, 0x75, 0x74, 0x68, 0x5f, 0x72, 0x6f, 0x6c, 0x65, 0x18, 0x10, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x61, 0x64, + 0x6d, 0x69, 0x6e, 0x2e, 0x41, 0x75, 0x74, 0x68, 0x52, 0x6f, 0x6c, 0x65, 0x42, 0x02, 0x18, 0x01, + 0x52, 0x08, 0x61, 0x75, 0x74, 0x68, 0x52, 0x6f, 0x6c, 0x65, 0x12, 0x4d, 0x0a, 0x12, 0x71, 0x75, + 0x61, 0x6c, 0x69, 0x74, 0x79, 0x5f, 0x6f, 0x66, 0x5f, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, + 0x18, 0x11, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, + 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x51, 0x75, 0x61, 0x6c, 0x69, 0x74, 0x79, 0x4f, 0x66, + 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x52, 0x10, 0x71, 0x75, 0x61, 0x6c, 0x69, 0x74, 0x79, + 0x4f, 0x66, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x27, 0x0a, 0x0f, 0x6d, 0x61, 0x78, + 0x5f, 0x70, 0x61, 0x72, 0x61, 0x6c, 0x6c, 0x65, 0x6c, 0x69, 0x73, 0x6d, 0x18, 0x12, 0x20, 0x01, + 0x28, 0x05, 0x52, 0x0e, 0x6d, 0x61, 0x78, 0x50, 0x61, 0x72, 0x61, 0x6c, 0x6c, 0x65, 0x6c, 0x69, + 0x73, 0x6d, 0x12, 0x58, 0x0a, 0x16, 0x72, 0x61, 0x77, 0x5f, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, + 0x5f, 0x64, 0x61, 0x74, 0x61, 0x5f, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x13, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x23, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x61, 0x64, + 0x6d, 0x69, 0x6e, 0x2e, 0x52, 0x61, 0x77, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x44, 0x61, 0x74, + 0x61, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x13, 0x72, 0x61, 0x77, 0x4f, 0x75, 0x74, 0x70, + 0x75, 0x74, 0x44, 0x61, 0x74, 0x61, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x50, 0x0a, 0x12, + 0x63, 0x6c, 0x75, 0x73, 0x74, 0x65, 0x72, 0x5f, 0x61, 0x73, 0x73, 0x69, 0x67, 0x6e, 0x6d, 0x65, + 0x6e, 0x74, 
0x18, 0x14, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, + 0x69, 0x64, 0x6c, 0x2e, 0x61, 0x64, 0x6d, 0x69, 0x6e, 0x2e, 0x43, 0x6c, 0x75, 0x73, 0x74, 0x65, + 0x72, 0x41, 0x73, 0x73, 0x69, 0x67, 0x6e, 0x6d, 0x65, 0x6e, 0x74, 0x52, 0x11, 0x63, 0x6c, 0x75, + 0x73, 0x74, 0x65, 0x72, 0x41, 0x73, 0x73, 0x69, 0x67, 0x6e, 0x6d, 0x65, 0x6e, 0x74, 0x12, 0x40, + 0x0a, 0x0d, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x72, 0x75, 0x70, 0x74, 0x69, 0x62, 0x6c, 0x65, 0x18, + 0x15, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x42, 0x6f, 0x6f, 0x6c, 0x56, 0x61, 0x6c, 0x75, + 0x65, 0x52, 0x0d, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x72, 0x75, 0x70, 0x74, 0x69, 0x62, 0x6c, 0x65, + 0x12, 0x27, 0x0a, 0x0f, 0x6f, 0x76, 0x65, 0x72, 0x77, 0x72, 0x69, 0x74, 0x65, 0x5f, 0x63, 0x61, + 0x63, 0x68, 0x65, 0x18, 0x16, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0e, 0x6f, 0x76, 0x65, 0x72, 0x77, + 0x72, 0x69, 0x74, 0x65, 0x43, 0x61, 0x63, 0x68, 0x65, 0x12, 0x28, 0x0a, 0x04, 0x65, 0x6e, 0x76, + 0x73, 0x18, 0x17, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, + 0x64, 0x6c, 0x2e, 0x61, 0x64, 0x6d, 0x69, 0x6e, 0x2e, 0x45, 0x6e, 0x76, 0x73, 0x52, 0x04, 0x65, + 0x6e, 0x76, 0x73, 0x12, 0x16, 0x0a, 0x04, 0x74, 0x61, 0x67, 0x73, 0x18, 0x18, 0x20, 0x03, 0x28, + 0x09, 0x42, 0x02, 0x18, 0x01, 0x52, 0x04, 0x74, 0x61, 0x67, 0x73, 0x12, 0x5d, 0x0a, 0x17, 0x65, + 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x63, 0x6c, 0x75, 0x73, 0x74, 0x65, 0x72, + 0x5f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x18, 0x19, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x25, 0x2e, 0x66, + 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x61, 0x64, 0x6d, 0x69, 0x6e, 0x2e, 0x45, 0x78, + 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6c, 0x75, 0x73, 0x74, 0x65, 0x72, 0x4c, 0x61, + 0x62, 0x65, 0x6c, 0x52, 0x15, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6c, + 0x75, 0x73, 0x74, 0x65, 0x72, 0x4c, 0x61, 0x62, 
0x65, 0x6c, 0x12, 0x61, 0x0a, 0x19, 0x65, 0x78, + 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x65, 0x6e, 0x76, 0x5f, 0x61, 0x73, 0x73, 0x69, + 0x67, 0x6e, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x1a, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x25, 0x2e, + 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x45, 0x78, + 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x6e, 0x76, 0x41, 0x73, 0x73, 0x69, 0x67, 0x6e, + 0x6d, 0x65, 0x6e, 0x74, 0x52, 0x17, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x45, + 0x6e, 0x76, 0x41, 0x73, 0x73, 0x69, 0x67, 0x6e, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x42, 0x18, 0x0a, + 0x16, 0x6e, 0x6f, 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x6f, 0x76, + 0x65, 0x72, 0x72, 0x69, 0x64, 0x65, 0x73, 0x4a, 0x04, 0x08, 0x04, 0x10, 0x05, 0x22, 0x6d, 0x0a, + 0x19, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x54, 0x65, 0x72, 0x6d, 0x69, 0x6e, + 0x61, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x3a, 0x0a, 0x02, 0x69, 0x64, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2a, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, + 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x45, + 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, + 0x65, 0x72, 0x52, 0x02, 0x69, 0x64, 0x12, 0x14, 0x0a, 0x05, 0x63, 0x61, 0x75, 0x73, 0x65, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x63, 0x61, 0x75, 0x73, 0x65, 0x22, 0x1c, 0x0a, 0x1a, + 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x54, 0x65, 0x72, 0x6d, 0x69, 0x6e, 0x61, + 0x74, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x5d, 0x0a, 0x1f, 0x57, 0x6f, + 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x47, + 0x65, 0x74, 0x44, 0x61, 0x74, 0x61, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x3a, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2a, 0x2e, 0x66, 0x6c, 0x79, 
0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x65, 0x6e, 0x74, - 0x69, 0x66, 0x69, 0x65, 0x72, 0x52, 0x02, 0x69, 0x64, 0x12, 0x14, 0x0a, 0x05, 0x64, 0x65, 0x70, - 0x74, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x05, 0x64, 0x65, 0x70, 0x74, 0x68, 0x22, - 0x4e, 0x0a, 0x23, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x45, 0x78, 0x65, 0x63, 0x75, - 0x74, 0x69, 0x6f, 0x6e, 0x47, 0x65, 0x74, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x52, 0x65, - 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x27, 0x0a, 0x04, 0x73, 0x70, 0x61, 0x6e, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, - 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x53, 0x70, 0x61, 0x6e, 0x52, 0x04, 0x73, 0x70, 0x61, 0x6e, 0x2a, - 0x3e, 0x0a, 0x0e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, - 0x65, 0x12, 0x14, 0x0a, 0x10, 0x45, 0x58, 0x45, 0x43, 0x55, 0x54, 0x49, 0x4f, 0x4e, 0x5f, 0x41, - 0x43, 0x54, 0x49, 0x56, 0x45, 0x10, 0x00, 0x12, 0x16, 0x0a, 0x12, 0x45, 0x58, 0x45, 0x43, 0x55, - 0x54, 0x49, 0x4f, 0x4e, 0x5f, 0x41, 0x52, 0x43, 0x48, 0x49, 0x56, 0x45, 0x44, 0x10, 0x01, 0x42, - 0xba, 0x01, 0x0a, 0x12, 0x63, 0x6f, 0x6d, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, - 0x2e, 0x61, 0x64, 0x6d, 0x69, 0x6e, 0x42, 0x0e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, - 0x6e, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x3b, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, - 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x6f, 0x72, 0x67, 0x2f, 0x66, 0x6c, - 0x79, 0x74, 0x65, 0x2f, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2f, 0x67, 0x65, 0x6e, - 0x2f, 0x70, 0x62, 0x2d, 0x67, 0x6f, 0x2f, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2f, - 0x61, 0x64, 0x6d, 0x69, 0x6e, 0xa2, 0x02, 0x03, 0x46, 0x41, 0x58, 0xaa, 0x02, 0x0e, 0x46, 0x6c, - 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 
0x2e, 0x41, 0x64, 0x6d, 0x69, 0x6e, 0xca, 0x02, 0x0e, 0x46, - 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x5c, 0x41, 0x64, 0x6d, 0x69, 0x6e, 0xe2, 0x02, 0x1a, - 0x46, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x5c, 0x41, 0x64, 0x6d, 0x69, 0x6e, 0x5c, 0x47, - 0x50, 0x42, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0xea, 0x02, 0x0f, 0x46, 0x6c, 0x79, - 0x74, 0x65, 0x69, 0x64, 0x6c, 0x3a, 0x3a, 0x41, 0x64, 0x6d, 0x69, 0x6e, 0x62, 0x06, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x33, + 0x69, 0x66, 0x69, 0x65, 0x72, 0x52, 0x02, 0x69, 0x64, 0x22, 0x88, 0x02, 0x0a, 0x20, 0x57, 0x6f, + 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x47, + 0x65, 0x74, 0x44, 0x61, 0x74, 0x61, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x35, + 0x0a, 0x07, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x17, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x61, 0x64, 0x6d, 0x69, 0x6e, + 0x2e, 0x55, 0x72, 0x6c, 0x42, 0x6c, 0x6f, 0x62, 0x42, 0x02, 0x18, 0x01, 0x52, 0x07, 0x6f, 0x75, + 0x74, 0x70, 0x75, 0x74, 0x73, 0x12, 0x33, 0x0a, 0x06, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x73, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, + 0x2e, 0x61, 0x64, 0x6d, 0x69, 0x6e, 0x2e, 0x55, 0x72, 0x6c, 0x42, 0x6c, 0x6f, 0x62, 0x42, 0x02, + 0x18, 0x01, 0x52, 0x06, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x73, 0x12, 0x3a, 0x0a, 0x0b, 0x66, 0x75, + 0x6c, 0x6c, 0x5f, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x19, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, + 0x4c, 0x69, 0x74, 0x65, 0x72, 0x61, 0x6c, 0x4d, 0x61, 0x70, 0x52, 0x0a, 0x66, 0x75, 0x6c, 0x6c, + 0x49, 0x6e, 0x70, 0x75, 0x74, 0x73, 0x12, 0x3c, 0x0a, 0x0c, 0x66, 0x75, 0x6c, 0x6c, 0x5f, 0x6f, + 0x75, 0x74, 0x70, 0x75, 0x74, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x66, + 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 
0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4c, 0x69, 0x74, + 0x65, 0x72, 0x61, 0x6c, 0x4d, 0x61, 0x70, 0x52, 0x0b, 0x66, 0x75, 0x6c, 0x6c, 0x4f, 0x75, 0x74, + 0x70, 0x75, 0x74, 0x73, 0x22, 0x8a, 0x01, 0x0a, 0x16, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, + 0x6f, 0x6e, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, + 0x3a, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2a, 0x2e, 0x66, 0x6c, + 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x57, 0x6f, 0x72, 0x6b, + 0x66, 0x6c, 0x6f, 0x77, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x65, + 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x52, 0x02, 0x69, 0x64, 0x12, 0x34, 0x0a, 0x05, 0x73, + 0x74, 0x61, 0x74, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x1e, 0x2e, 0x66, 0x6c, 0x79, + 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x61, 0x64, 0x6d, 0x69, 0x6e, 0x2e, 0x45, 0x78, 0x65, 0x63, + 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x65, 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, + 0x65, 0x22, 0xae, 0x01, 0x0a, 0x1b, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x53, + 0x74, 0x61, 0x74, 0x65, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, + 0x73, 0x12, 0x34, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, + 0x32, 0x1e, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x61, 0x64, 0x6d, 0x69, + 0x6e, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x65, + 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x12, 0x3b, 0x0a, 0x0b, 0x6f, 0x63, 0x63, 0x75, 0x72, + 0x72, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, + 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, + 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x0a, 0x6f, 0x63, 0x63, 0x75, 0x72, 0x72, + 0x65, 0x64, 0x41, 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x70, 0x72, 0x69, 0x6e, 0x63, 0x69, 
0x70, 0x61, + 0x6c, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x70, 0x72, 0x69, 0x6e, 0x63, 0x69, 0x70, + 0x61, 0x6c, 0x22, 0x19, 0x0a, 0x17, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x55, + 0x70, 0x64, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x76, 0x0a, + 0x22, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, + 0x6f, 0x6e, 0x47, 0x65, 0x74, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x12, 0x3a, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x2a, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, + 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, + 0x6e, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x52, 0x02, 0x69, 0x64, 0x12, + 0x14, 0x0a, 0x05, 0x64, 0x65, 0x70, 0x74, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x05, + 0x64, 0x65, 0x70, 0x74, 0x68, 0x22, 0x4e, 0x0a, 0x23, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, + 0x77, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x47, 0x65, 0x74, 0x4d, 0x65, 0x74, + 0x72, 0x69, 0x63, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x27, 0x0a, 0x04, + 0x73, 0x70, 0x61, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x66, 0x6c, 0x79, + 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x53, 0x70, 0x61, 0x6e, 0x52, + 0x04, 0x73, 0x70, 0x61, 0x6e, 0x2a, 0x3e, 0x0a, 0x0e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, + 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x14, 0x0a, 0x10, 0x45, 0x58, 0x45, 0x43, 0x55, + 0x54, 0x49, 0x4f, 0x4e, 0x5f, 0x41, 0x43, 0x54, 0x49, 0x56, 0x45, 0x10, 0x00, 0x12, 0x16, 0x0a, + 0x12, 0x45, 0x58, 0x45, 0x43, 0x55, 0x54, 0x49, 0x4f, 0x4e, 0x5f, 0x41, 0x52, 0x43, 0x48, 0x49, + 0x56, 0x45, 0x44, 0x10, 0x01, 0x42, 0xba, 0x01, 0x0a, 0x12, 0x63, 0x6f, 0x6d, 0x2e, 0x66, 0x6c, + 0x79, 0x74, 0x65, 0x69, 
0x64, 0x6c, 0x2e, 0x61, 0x64, 0x6d, 0x69, 0x6e, 0x42, 0x0e, 0x45, 0x78, + 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x3b, + 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x66, 0x6c, 0x79, 0x74, 0x65, + 0x6f, 0x72, 0x67, 0x2f, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x2f, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, + 0x64, 0x6c, 0x2f, 0x67, 0x65, 0x6e, 0x2f, 0x70, 0x62, 0x2d, 0x67, 0x6f, 0x2f, 0x66, 0x6c, 0x79, + 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2f, 0x61, 0x64, 0x6d, 0x69, 0x6e, 0xa2, 0x02, 0x03, 0x46, 0x41, + 0x58, 0xaa, 0x02, 0x0e, 0x46, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x41, 0x64, 0x6d, + 0x69, 0x6e, 0xca, 0x02, 0x0e, 0x46, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x5c, 0x41, 0x64, + 0x6d, 0x69, 0x6e, 0xe2, 0x02, 0x1a, 0x46, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x5c, 0x41, + 0x64, 0x6d, 0x69, 0x6e, 0x5c, 0x47, 0x50, 0x42, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, + 0xea, 0x02, 0x0f, 0x46, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x3a, 0x3a, 0x41, 0x64, 0x6d, + 0x69, 0x6e, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( diff --git a/flyteidl/gen/pb-go/flyteidl/admin/node_execution.pb.go b/flyteidl/gen/pb-go/flyteidl/admin/node_execution.pb.go index b4d9cb8c89..7f38db6da5 100644 --- a/flyteidl/gen/pb-go/flyteidl/admin/node_execution.pb.go +++ b/flyteidl/gen/pb-go/flyteidl/admin/node_execution.pb.go @@ -368,6 +368,8 @@ type NodeExecutionMetaData struct { // Boolean flag indicating if the node is an array node. This is intended to uniquely identify // array nodes from other nodes which can have is_parent_node as true. IsArray bool `protobuf:"varint,5,opt,name=is_array,json=isArray,proto3" json:"is_array,omitempty"` + // Whether this node is an eager node. 
+ IsEager bool `protobuf:"varint,6,opt,name=is_eager,json=isEager,proto3" json:"is_eager,omitempty"` } func (x *NodeExecutionMetaData) Reset() { @@ -437,6 +439,13 @@ func (x *NodeExecutionMetaData) GetIsArray() bool { return false } +func (x *NodeExecutionMetaData) GetIsEager() bool { + if x != nil { + return x.IsEager + } + return false +} + // Request structure to retrieve a list of node execution entities. // See :ref:`ref_flyteidl.admin.NodeExecution` for more details type NodeExecutionList struct { @@ -1218,7 +1227,7 @@ var file_flyteidl_admin_node_execution_proto_rawDesc = []byte{ 0x32, 0x25, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x61, 0x64, 0x6d, 0x69, 0x6e, 0x2e, 0x4e, 0x6f, 0x64, 0x65, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x65, 0x74, 0x61, 0x44, 0x61, 0x74, 0x61, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, - 0x61, 0x22, 0xba, 0x01, 0x0a, 0x15, 0x4e, 0x6f, 0x64, 0x65, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, + 0x61, 0x22, 0xd5, 0x01, 0x0a, 0x15, 0x4e, 0x6f, 0x64, 0x65, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x65, 0x74, 0x61, 0x44, 0x61, 0x74, 0x61, 0x12, 0x1f, 0x0a, 0x0b, 0x72, 0x65, 0x74, 0x72, 0x79, 0x5f, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x72, 0x65, 0x74, 0x72, 0x79, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x12, 0x24, 0x0a, 0x0e, @@ -1229,150 +1238,152 @@ var file_flyteidl_admin_node_execution_proto_rawDesc = []byte{ 0x64, 0x65, 0x49, 0x64, 0x12, 0x1d, 0x0a, 0x0a, 0x69, 0x73, 0x5f, 0x64, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x18, 0x04, 0x20, 0x01, 0x28, 0x08, 0x52, 0x09, 0x69, 0x73, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x12, 0x19, 0x0a, 0x08, 0x69, 0x73, 0x5f, 0x61, 0x72, 0x72, 0x61, 0x79, 0x18, - 0x05, 0x20, 0x01, 0x28, 0x08, 0x52, 0x07, 0x69, 0x73, 0x41, 0x72, 0x72, 0x61, 0x79, 0x22, 0x71, - 0x0a, 0x11, 0x4e, 0x6f, 0x64, 0x65, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x4c, - 0x69, 0x73, 0x74, 0x12, 0x46, 0x0a, 0x0f, 0x6e, 0x6f, 
0x64, 0x65, 0x5f, 0x65, 0x78, 0x65, 0x63, - 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x66, - 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x61, 0x64, 0x6d, 0x69, 0x6e, 0x2e, 0x4e, 0x6f, - 0x64, 0x65, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x0e, 0x6e, 0x6f, 0x64, - 0x65, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x14, 0x0a, 0x05, 0x74, - 0x6f, 0x6b, 0x65, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x74, 0x6f, 0x6b, 0x65, - 0x6e, 0x22, 0xf6, 0x05, 0x0a, 0x14, 0x4e, 0x6f, 0x64, 0x65, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, - 0x69, 0x6f, 0x6e, 0x43, 0x6c, 0x6f, 0x73, 0x75, 0x72, 0x65, 0x12, 0x23, 0x0a, 0x0a, 0x6f, 0x75, - 0x74, 0x70, 0x75, 0x74, 0x5f, 0x75, 0x72, 0x69, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x42, 0x02, - 0x18, 0x01, 0x48, 0x00, 0x52, 0x09, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x55, 0x72, 0x69, 0x12, - 0x35, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, - 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x45, - 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, - 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x40, 0x0a, 0x0b, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, - 0x5f, 0x64, 0x61, 0x74, 0x61, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x66, 0x6c, - 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4c, 0x69, 0x74, 0x65, - 0x72, 0x61, 0x6c, 0x4d, 0x61, 0x70, 0x42, 0x02, 0x18, 0x01, 0x48, 0x00, 0x52, 0x0a, 0x6f, 0x75, - 0x74, 0x70, 0x75, 0x74, 0x44, 0x61, 0x74, 0x61, 0x12, 0x38, 0x0a, 0x05, 0x70, 0x68, 0x61, 0x73, - 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x22, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, - 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4e, 0x6f, 0x64, 0x65, 0x45, 0x78, 0x65, 0x63, - 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x50, 0x68, 0x61, 0x73, 0x65, 0x52, 0x05, 0x70, 0x68, 
0x61, - 0x73, 0x65, 0x12, 0x39, 0x0a, 0x0a, 0x73, 0x74, 0x61, 0x72, 0x74, 0x65, 0x64, 0x5f, 0x61, 0x74, - 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, - 0x6d, 0x70, 0x52, 0x09, 0x73, 0x74, 0x61, 0x72, 0x74, 0x65, 0x64, 0x41, 0x74, 0x12, 0x35, 0x0a, - 0x08, 0x64, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x19, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, - 0x66, 0x2e, 0x44, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x08, 0x64, 0x75, 0x72, 0x61, - 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x39, 0x0a, 0x0a, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x5f, - 0x61, 0x74, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, - 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, - 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x41, 0x74, 0x12, - 0x39, 0x0a, 0x0a, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x07, 0x20, + 0x05, 0x20, 0x01, 0x28, 0x08, 0x52, 0x07, 0x69, 0x73, 0x41, 0x72, 0x72, 0x61, 0x79, 0x12, 0x19, + 0x0a, 0x08, 0x69, 0x73, 0x5f, 0x65, 0x61, 0x67, 0x65, 0x72, 0x18, 0x06, 0x20, 0x01, 0x28, 0x08, + 0x52, 0x07, 0x69, 0x73, 0x45, 0x61, 0x67, 0x65, 0x72, 0x22, 0x71, 0x0a, 0x11, 0x4e, 0x6f, 0x64, + 0x65, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x4c, 0x69, 0x73, 0x74, 0x12, 0x46, + 0x0a, 0x0f, 0x6e, 0x6f, 0x64, 0x65, 0x5f, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, + 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, + 0x64, 0x6c, 0x2e, 0x61, 0x64, 0x6d, 0x69, 0x6e, 0x2e, 0x4e, 0x6f, 0x64, 0x65, 0x45, 0x78, 0x65, + 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x0e, 0x6e, 0x6f, 0x64, 0x65, 0x45, 0x78, 0x65, 0x63, + 0x75, 0x74, 0x69, 0x6f, 0x6e, 
0x73, 0x12, 0x14, 0x0a, 0x05, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x22, 0xf6, 0x05, 0x0a, + 0x14, 0x4e, 0x6f, 0x64, 0x65, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6c, + 0x6f, 0x73, 0x75, 0x72, 0x65, 0x12, 0x23, 0x0a, 0x0a, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x5f, + 0x75, 0x72, 0x69, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x42, 0x02, 0x18, 0x01, 0x48, 0x00, 0x52, + 0x09, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x55, 0x72, 0x69, 0x12, 0x35, 0x0a, 0x05, 0x65, 0x72, + 0x72, 0x6f, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x66, 0x6c, 0x79, 0x74, + 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, + 0x69, 0x6f, 0x6e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, + 0x72, 0x12, 0x40, 0x0a, 0x0b, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x5f, 0x64, 0x61, 0x74, 0x61, + 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, + 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4c, 0x69, 0x74, 0x65, 0x72, 0x61, 0x6c, 0x4d, 0x61, + 0x70, 0x42, 0x02, 0x18, 0x01, 0x48, 0x00, 0x52, 0x0a, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x44, + 0x61, 0x74, 0x61, 0x12, 0x38, 0x0a, 0x05, 0x70, 0x68, 0x61, 0x73, 0x65, 0x18, 0x03, 0x20, 0x01, + 0x28, 0x0e, 0x32, 0x22, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, + 0x72, 0x65, 0x2e, 0x4e, 0x6f, 0x64, 0x65, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, + 0x2e, 0x50, 0x68, 0x61, 0x73, 0x65, 0x52, 0x05, 0x70, 0x68, 0x61, 0x73, 0x65, 0x12, 0x39, 0x0a, + 0x0a, 0x73, 0x74, 0x61, 0x72, 0x74, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x73, + 0x74, 0x61, 0x72, 0x74, 0x65, 0x64, 0x41, 0x74, 0x12, 0x35, 0x0a, 
0x08, 0x64, 0x75, 0x72, 0x61, + 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x67, 0x6f, 0x6f, + 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x44, 0x75, 0x72, + 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x08, 0x64, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, + 0x39, 0x0a, 0x0a, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, - 0x09, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x64, 0x41, 0x74, 0x12, 0x5c, 0x0a, 0x16, 0x77, 0x6f, - 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x6e, 0x6f, 0x64, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, - 0x64, 0x61, 0x74, 0x61, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x66, 0x6c, 0x79, - 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x61, 0x64, 0x6d, 0x69, 0x6e, 0x2e, 0x57, 0x6f, 0x72, 0x6b, - 0x66, 0x6c, 0x6f, 0x77, 0x4e, 0x6f, 0x64, 0x65, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, - 0x48, 0x01, 0x52, 0x14, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x4e, 0x6f, 0x64, 0x65, - 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x50, 0x0a, 0x12, 0x74, 0x61, 0x73, 0x6b, - 0x5f, 0x6e, 0x6f, 0x64, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x09, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, - 0x61, 0x64, 0x6d, 0x69, 0x6e, 0x2e, 0x54, 0x61, 0x73, 0x6b, 0x4e, 0x6f, 0x64, 0x65, 0x4d, 0x65, - 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x48, 0x01, 0x52, 0x10, 0x74, 0x61, 0x73, 0x6b, 0x4e, 0x6f, - 0x64, 0x65, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x19, 0x0a, 0x08, 0x64, 0x65, - 0x63, 0x6b, 0x5f, 0x75, 0x72, 0x69, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x64, 0x65, - 0x63, 0x6b, 0x55, 0x72, 0x69, 0x12, 0x2f, 0x0a, 0x14, 0x64, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, - 0x5f, 0x6a, 
0x6f, 0x62, 0x5f, 0x73, 0x70, 0x65, 0x63, 0x5f, 0x75, 0x72, 0x69, 0x18, 0x0c, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x11, 0x64, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x4a, 0x6f, 0x62, 0x53, - 0x70, 0x65, 0x63, 0x55, 0x72, 0x69, 0x42, 0x0f, 0x0a, 0x0d, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, - 0x5f, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x42, 0x11, 0x0a, 0x0f, 0x74, 0x61, 0x72, 0x67, 0x65, - 0x74, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x22, 0x64, 0x0a, 0x14, 0x57, 0x6f, - 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x4e, 0x6f, 0x64, 0x65, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, - 0x74, 0x61, 0x12, 0x4c, 0x0a, 0x0b, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x49, - 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2a, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, - 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, - 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, - 0x69, 0x65, 0x72, 0x52, 0x0b, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, - 0x22, 0xc0, 0x01, 0x0a, 0x10, 0x54, 0x61, 0x73, 0x6b, 0x4e, 0x6f, 0x64, 0x65, 0x4d, 0x65, 0x74, - 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x44, 0x0a, 0x0c, 0x63, 0x61, 0x63, 0x68, 0x65, 0x5f, 0x73, - 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x21, 0x2e, 0x66, 0x6c, - 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x43, 0x61, 0x74, 0x61, - 0x6c, 0x6f, 0x67, 0x43, 0x61, 0x63, 0x68, 0x65, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x0b, - 0x63, 0x61, 0x63, 0x68, 0x65, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x3f, 0x0a, 0x0b, 0x63, - 0x61, 0x74, 0x61, 0x6c, 0x6f, 0x67, 0x5f, 0x6b, 0x65, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, - 0x32, 0x1e, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, - 0x2e, 0x43, 0x61, 0x74, 0x61, 0x6c, 0x6f, 0x67, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, - 0x52, 0x0a, 0x63, 0x61, 0x74, 0x61, 0x6c, 0x6f, 
0x67, 0x4b, 0x65, 0x79, 0x12, 0x25, 0x0a, 0x0e, - 0x63, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x5f, 0x75, 0x72, 0x69, 0x18, 0x04, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, - 0x55, 0x72, 0x69, 0x22, 0xce, 0x01, 0x0a, 0x1b, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x57, + 0x09, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x41, 0x74, 0x12, 0x39, 0x0a, 0x0a, 0x75, 0x70, + 0x64, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, + 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, + 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x75, 0x70, 0x64, 0x61, + 0x74, 0x65, 0x64, 0x41, 0x74, 0x12, 0x5c, 0x0a, 0x16, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, + 0x77, 0x5f, 0x6e, 0x6f, 0x64, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, + 0x08, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, + 0x2e, 0x61, 0x64, 0x6d, 0x69, 0x6e, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x4e, + 0x6f, 0x64, 0x65, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x48, 0x01, 0x52, 0x14, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x4e, 0x6f, 0x64, 0x65, 0x4d, 0x65, 0x74, 0x61, 0x64, - 0x61, 0x74, 0x61, 0x12, 0x29, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x19, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, - 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x52, 0x02, 0x69, 0x64, 0x12, 0x53, - 0x0a, 0x11, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x64, 0x5f, 0x77, 0x6f, 0x72, 0x6b, 0x66, - 0x6c, 0x6f, 0x77, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x26, 0x2e, 0x66, 0x6c, 0x79, 0x74, - 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x69, 0x6c, - 0x65, 0x64, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x43, 0x6c, 0x6f, 0x73, 0x75, 
0x72, - 0x65, 0x52, 0x10, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x64, 0x57, 0x6f, 0x72, 0x6b, 0x66, - 0x6c, 0x6f, 0x77, 0x12, 0x2f, 0x0a, 0x14, 0x64, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x5f, 0x6a, - 0x6f, 0x62, 0x5f, 0x73, 0x70, 0x65, 0x63, 0x5f, 0x75, 0x72, 0x69, 0x18, 0x03, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x11, 0x64, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x4a, 0x6f, 0x62, 0x53, 0x70, 0x65, - 0x63, 0x55, 0x72, 0x69, 0x22, 0x55, 0x0a, 0x1b, 0x4e, 0x6f, 0x64, 0x65, 0x45, 0x78, 0x65, 0x63, - 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x47, 0x65, 0x74, 0x44, 0x61, 0x74, 0x61, 0x52, 0x65, 0x71, 0x75, - 0x65, 0x73, 0x74, 0x12, 0x36, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x26, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, - 0x4e, 0x6f, 0x64, 0x65, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x65, - 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x52, 0x02, 0x69, 0x64, 0x22, 0x96, 0x03, 0x0a, 0x1c, - 0x4e, 0x6f, 0x64, 0x65, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x47, 0x65, 0x74, - 0x44, 0x61, 0x74, 0x61, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x33, 0x0a, 0x06, - 0x69, 0x6e, 0x70, 0x75, 0x74, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x66, - 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x61, 0x64, 0x6d, 0x69, 0x6e, 0x2e, 0x55, 0x72, - 0x6c, 0x42, 0x6c, 0x6f, 0x62, 0x42, 0x02, 0x18, 0x01, 0x52, 0x06, 0x69, 0x6e, 0x70, 0x75, 0x74, - 0x73, 0x12, 0x35, 0x0a, 0x07, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x73, 0x18, 0x02, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x61, 0x64, - 0x6d, 0x69, 0x6e, 0x2e, 0x55, 0x72, 0x6c, 0x42, 0x6c, 0x6f, 0x62, 0x42, 0x02, 0x18, 0x01, 0x52, - 0x07, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x73, 0x12, 0x3a, 0x0a, 0x0b, 0x66, 0x75, 0x6c, 0x6c, - 0x5f, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, - 0x66, 0x6c, 0x79, 0x74, 0x65, 
0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4c, 0x69, - 0x74, 0x65, 0x72, 0x61, 0x6c, 0x4d, 0x61, 0x70, 0x52, 0x0a, 0x66, 0x75, 0x6c, 0x6c, 0x49, 0x6e, - 0x70, 0x75, 0x74, 0x73, 0x12, 0x3c, 0x0a, 0x0c, 0x66, 0x75, 0x6c, 0x6c, 0x5f, 0x6f, 0x75, 0x74, - 0x70, 0x75, 0x74, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x66, 0x6c, 0x79, - 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4c, 0x69, 0x74, 0x65, 0x72, - 0x61, 0x6c, 0x4d, 0x61, 0x70, 0x52, 0x0b, 0x66, 0x75, 0x6c, 0x6c, 0x4f, 0x75, 0x74, 0x70, 0x75, - 0x74, 0x73, 0x12, 0x56, 0x0a, 0x10, 0x64, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x5f, 0x77, 0x6f, - 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x18, 0x10, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2b, 0x2e, 0x66, - 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x61, 0x64, 0x6d, 0x69, 0x6e, 0x2e, 0x44, 0x79, - 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x4e, 0x6f, 0x64, - 0x65, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, 0x0f, 0x64, 0x79, 0x6e, 0x61, 0x6d, - 0x69, 0x63, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x12, 0x38, 0x0a, 0x0a, 0x66, 0x6c, - 0x79, 0x74, 0x65, 0x5f, 0x75, 0x72, 0x6c, 0x73, 0x18, 0x11, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, - 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x61, 0x64, 0x6d, 0x69, 0x6e, 0x2e, - 0x46, 0x6c, 0x79, 0x74, 0x65, 0x55, 0x52, 0x4c, 0x73, 0x52, 0x09, 0x66, 0x6c, 0x79, 0x74, 0x65, - 0x55, 0x72, 0x6c, 0x73, 0x22, 0x57, 0x0a, 0x1d, 0x47, 0x65, 0x74, 0x44, 0x79, 0x6e, 0x61, 0x6d, - 0x69, 0x63, 0x4e, 0x6f, 0x64, 0x65, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x65, - 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x36, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x0b, 0x32, 0x26, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, - 0x65, 0x2e, 0x4e, 0x6f, 0x64, 0x65, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x49, - 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x52, 0x02, 
0x69, 0x64, 0x22, 0x72, 0x0a, - 0x1b, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x4e, 0x6f, 0x64, 0x65, 0x57, 0x6f, 0x72, 0x6b, - 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x53, 0x0a, 0x11, - 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x64, 0x5f, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, - 0x77, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x26, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, - 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x64, - 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x43, 0x6c, 0x6f, 0x73, 0x75, 0x72, 0x65, 0x52, - 0x10, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x64, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, - 0x77, 0x42, 0xbe, 0x01, 0x0a, 0x12, 0x63, 0x6f, 0x6d, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, - 0x64, 0x6c, 0x2e, 0x61, 0x64, 0x6d, 0x69, 0x6e, 0x42, 0x12, 0x4e, 0x6f, 0x64, 0x65, 0x45, 0x78, - 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x3b, - 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x66, 0x6c, 0x79, 0x74, 0x65, - 0x6f, 0x72, 0x67, 0x2f, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x2f, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, - 0x64, 0x6c, 0x2f, 0x67, 0x65, 0x6e, 0x2f, 0x70, 0x62, 0x2d, 0x67, 0x6f, 0x2f, 0x66, 0x6c, 0x79, - 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2f, 0x61, 0x64, 0x6d, 0x69, 0x6e, 0xa2, 0x02, 0x03, 0x46, 0x41, - 0x58, 0xaa, 0x02, 0x0e, 0x46, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x41, 0x64, 0x6d, - 0x69, 0x6e, 0xca, 0x02, 0x0e, 0x46, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x5c, 0x41, 0x64, - 0x6d, 0x69, 0x6e, 0xe2, 0x02, 0x1a, 0x46, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x5c, 0x41, - 0x64, 0x6d, 0x69, 0x6e, 0x5c, 0x47, 0x50, 0x42, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, - 0xea, 0x02, 0x0f, 0x46, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x3a, 0x3a, 0x41, 0x64, 0x6d, - 0x69, 0x6e, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x61, 0x74, 0x61, 0x12, 0x50, 0x0a, 0x12, 
0x74, 0x61, 0x73, 0x6b, 0x5f, 0x6e, 0x6f, 0x64, 0x65, + 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x20, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x61, 0x64, 0x6d, 0x69, 0x6e, + 0x2e, 0x54, 0x61, 0x73, 0x6b, 0x4e, 0x6f, 0x64, 0x65, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, + 0x61, 0x48, 0x01, 0x52, 0x10, 0x74, 0x61, 0x73, 0x6b, 0x4e, 0x6f, 0x64, 0x65, 0x4d, 0x65, 0x74, + 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x19, 0x0a, 0x08, 0x64, 0x65, 0x63, 0x6b, 0x5f, 0x75, 0x72, + 0x69, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x64, 0x65, 0x63, 0x6b, 0x55, 0x72, 0x69, + 0x12, 0x2f, 0x0a, 0x14, 0x64, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x5f, 0x6a, 0x6f, 0x62, 0x5f, + 0x73, 0x70, 0x65, 0x63, 0x5f, 0x75, 0x72, 0x69, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x09, 0x52, 0x11, + 0x64, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x4a, 0x6f, 0x62, 0x53, 0x70, 0x65, 0x63, 0x55, 0x72, + 0x69, 0x42, 0x0f, 0x0a, 0x0d, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x5f, 0x72, 0x65, 0x73, 0x75, + 0x6c, 0x74, 0x42, 0x11, 0x0a, 0x0f, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x5f, 0x6d, 0x65, 0x74, + 0x61, 0x64, 0x61, 0x74, 0x61, 0x22, 0x64, 0x0a, 0x14, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, + 0x77, 0x4e, 0x6f, 0x64, 0x65, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x4c, 0x0a, + 0x0b, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x2a, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, + 0x72, 0x65, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x45, 0x78, 0x65, 0x63, 0x75, + 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x52, 0x0b, + 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x22, 0xc0, 0x01, 0x0a, 0x10, + 0x54, 0x61, 0x73, 0x6b, 0x4e, 0x6f, 0x64, 0x65, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, + 0x12, 0x44, 0x0a, 0x0c, 0x63, 0x61, 0x63, 0x68, 0x65, 0x5f, 0x73, 0x74, 0x61, 
0x74, 0x75, 0x73, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x21, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, + 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x43, 0x61, 0x74, 0x61, 0x6c, 0x6f, 0x67, 0x43, 0x61, + 0x63, 0x68, 0x65, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x0b, 0x63, 0x61, 0x63, 0x68, 0x65, + 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x3f, 0x0a, 0x0b, 0x63, 0x61, 0x74, 0x61, 0x6c, 0x6f, + 0x67, 0x5f, 0x6b, 0x65, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x66, 0x6c, + 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x43, 0x61, 0x74, 0x61, + 0x6c, 0x6f, 0x67, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, 0x0a, 0x63, 0x61, 0x74, + 0x61, 0x6c, 0x6f, 0x67, 0x4b, 0x65, 0x79, 0x12, 0x25, 0x0a, 0x0e, 0x63, 0x68, 0x65, 0x63, 0x6b, + 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x5f, 0x75, 0x72, 0x69, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x0d, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x55, 0x72, 0x69, 0x22, 0xce, + 0x01, 0x0a, 0x1b, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, + 0x6f, 0x77, 0x4e, 0x6f, 0x64, 0x65, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x29, + 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x66, 0x6c, 0x79, + 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x49, 0x64, 0x65, 0x6e, 0x74, + 0x69, 0x66, 0x69, 0x65, 0x72, 0x52, 0x02, 0x69, 0x64, 0x12, 0x53, 0x0a, 0x11, 0x63, 0x6f, 0x6d, + 0x70, 0x69, 0x6c, 0x65, 0x64, 0x5f, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x26, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, + 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x64, 0x57, 0x6f, 0x72, + 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x43, 0x6c, 0x6f, 0x73, 0x75, 0x72, 0x65, 0x52, 0x10, 0x63, 0x6f, + 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x64, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x12, 0x2f, + 0x0a, 0x14, 0x64, 
0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x5f, 0x6a, 0x6f, 0x62, 0x5f, 0x73, 0x70, + 0x65, 0x63, 0x5f, 0x75, 0x72, 0x69, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x11, 0x64, 0x79, + 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x4a, 0x6f, 0x62, 0x53, 0x70, 0x65, 0x63, 0x55, 0x72, 0x69, 0x22, + 0x55, 0x0a, 0x1b, 0x4e, 0x6f, 0x64, 0x65, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, + 0x47, 0x65, 0x74, 0x44, 0x61, 0x74, 0x61, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x36, + 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x26, 0x2e, 0x66, 0x6c, 0x79, + 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4e, 0x6f, 0x64, 0x65, 0x45, + 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, + 0x65, 0x72, 0x52, 0x02, 0x69, 0x64, 0x22, 0x96, 0x03, 0x0a, 0x1c, 0x4e, 0x6f, 0x64, 0x65, 0x45, + 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x47, 0x65, 0x74, 0x44, 0x61, 0x74, 0x61, 0x52, + 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x33, 0x0a, 0x06, 0x69, 0x6e, 0x70, 0x75, 0x74, + 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, + 0x64, 0x6c, 0x2e, 0x61, 0x64, 0x6d, 0x69, 0x6e, 0x2e, 0x55, 0x72, 0x6c, 0x42, 0x6c, 0x6f, 0x62, + 0x42, 0x02, 0x18, 0x01, 0x52, 0x06, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x73, 0x12, 0x35, 0x0a, 0x07, + 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, + 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x61, 0x64, 0x6d, 0x69, 0x6e, 0x2e, 0x55, + 0x72, 0x6c, 0x42, 0x6c, 0x6f, 0x62, 0x42, 0x02, 0x18, 0x01, 0x52, 0x07, 0x6f, 0x75, 0x74, 0x70, + 0x75, 0x74, 0x73, 0x12, 0x3a, 0x0a, 0x0b, 0x66, 0x75, 0x6c, 0x6c, 0x5f, 0x69, 0x6e, 0x70, 0x75, + 0x74, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, + 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4c, 0x69, 0x74, 0x65, 0x72, 0x61, 0x6c, + 0x4d, 0x61, 0x70, 0x52, 0x0a, 0x66, 0x75, 0x6c, 0x6c, 
0x49, 0x6e, 0x70, 0x75, 0x74, 0x73, 0x12, + 0x3c, 0x0a, 0x0c, 0x66, 0x75, 0x6c, 0x6c, 0x5f, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x73, 0x18, + 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, + 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4c, 0x69, 0x74, 0x65, 0x72, 0x61, 0x6c, 0x4d, 0x61, 0x70, + 0x52, 0x0b, 0x66, 0x75, 0x6c, 0x6c, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x73, 0x12, 0x56, 0x0a, + 0x10, 0x64, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x5f, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, + 0x77, 0x18, 0x10, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2b, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, + 0x64, 0x6c, 0x2e, 0x61, 0x64, 0x6d, 0x69, 0x6e, 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, + 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x4e, 0x6f, 0x64, 0x65, 0x4d, 0x65, 0x74, 0x61, + 0x64, 0x61, 0x74, 0x61, 0x52, 0x0f, 0x64, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x57, 0x6f, 0x72, + 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x12, 0x38, 0x0a, 0x0a, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x5f, 0x75, + 0x72, 0x6c, 0x73, 0x18, 0x11, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x66, 0x6c, 0x79, 0x74, + 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x61, 0x64, 0x6d, 0x69, 0x6e, 0x2e, 0x46, 0x6c, 0x79, 0x74, 0x65, + 0x55, 0x52, 0x4c, 0x73, 0x52, 0x09, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x55, 0x72, 0x6c, 0x73, 0x22, + 0x57, 0x0a, 0x1d, 0x47, 0x65, 0x74, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x4e, 0x6f, 0x64, + 0x65, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x12, 0x36, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x26, 0x2e, 0x66, + 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4e, 0x6f, 0x64, + 0x65, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, + 0x66, 0x69, 0x65, 0x72, 0x52, 0x02, 0x69, 0x64, 0x22, 0x72, 0x0a, 0x1b, 0x44, 0x79, 0x6e, 0x61, + 0x6d, 0x69, 0x63, 0x4e, 0x6f, 0x64, 0x65, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 
0x52, + 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x53, 0x0a, 0x11, 0x63, 0x6f, 0x6d, 0x70, 0x69, + 0x6c, 0x65, 0x64, 0x5f, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x26, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, + 0x72, 0x65, 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x64, 0x57, 0x6f, 0x72, 0x6b, 0x66, + 0x6c, 0x6f, 0x77, 0x43, 0x6c, 0x6f, 0x73, 0x75, 0x72, 0x65, 0x52, 0x10, 0x63, 0x6f, 0x6d, 0x70, + 0x69, 0x6c, 0x65, 0x64, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x42, 0xbe, 0x01, 0x0a, + 0x12, 0x63, 0x6f, 0x6d, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x61, 0x64, + 0x6d, 0x69, 0x6e, 0x42, 0x12, 0x4e, 0x6f, 0x64, 0x65, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, + 0x6f, 0x6e, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x3b, 0x67, 0x69, 0x74, 0x68, 0x75, + 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x6f, 0x72, 0x67, 0x2f, 0x66, + 0x6c, 0x79, 0x74, 0x65, 0x2f, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2f, 0x67, 0x65, + 0x6e, 0x2f, 0x70, 0x62, 0x2d, 0x67, 0x6f, 0x2f, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, + 0x2f, 0x61, 0x64, 0x6d, 0x69, 0x6e, 0xa2, 0x02, 0x03, 0x46, 0x41, 0x58, 0xaa, 0x02, 0x0e, 0x46, + 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x41, 0x64, 0x6d, 0x69, 0x6e, 0xca, 0x02, 0x0e, + 0x46, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x5c, 0x41, 0x64, 0x6d, 0x69, 0x6e, 0xe2, 0x02, + 0x1a, 0x46, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x5c, 0x41, 0x64, 0x6d, 0x69, 0x6e, 0x5c, + 0x47, 0x50, 0x42, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0xea, 0x02, 0x0f, 0x46, 0x6c, + 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x3a, 0x3a, 0x41, 0x64, 0x6d, 0x69, 0x6e, 0x62, 0x06, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( diff --git a/flyteidl/gen/pb-go/flyteidl/core/literals.pb.go b/flyteidl/gen/pb-go/flyteidl/core/literals.pb.go index 2225e74077..879f666136 100644 --- 
a/flyteidl/gen/pb-go/flyteidl/core/literals.pb.go +++ b/flyteidl/gen/pb-go/flyteidl/core/literals.pb.go @@ -1220,6 +1220,7 @@ type BindingData struct { // *BindingData_Collection // *BindingData_Promise // *BindingData_Map + // *BindingData_OffloadedMetadata Value isBindingData_Value `protobuf_oneof:"value"` Union *UnionInfo `protobuf:"bytes,5,opt,name=union,proto3" json:"union,omitempty"` } @@ -1291,6 +1292,13 @@ func (x *BindingData) GetMap() *BindingDataMap { return nil } +func (x *BindingData) GetOffloadedMetadata() *LiteralOffloadedMetadata { + if x, ok := x.GetValue().(*BindingData_OffloadedMetadata); ok { + return x.OffloadedMetadata + } + return nil +} + func (x *BindingData) GetUnion() *UnionInfo { if x != nil { return x.Union @@ -1323,6 +1331,13 @@ type BindingData_Map struct { Map *BindingDataMap `protobuf:"bytes,4,opt,name=map,proto3,oneof"` } +type BindingData_OffloadedMetadata struct { + // Offloaded literal metadata + // When you deserialize the offloaded metadata, it would be of Literal and its type would be defined by LiteralType stored in offloaded_metadata. + // Used for nodes that don't have promises from upstream nodes such as ArrayNode subNodes. + OffloadedMetadata *LiteralOffloadedMetadata `protobuf:"bytes,6,opt,name=offloaded_metadata,json=offloadedMetadata,proto3,oneof"` +} + func (*BindingData_Scalar) isBindingData_Value() {} func (*BindingData_Collection) isBindingData_Value() {} @@ -1331,6 +1346,8 @@ func (*BindingData_Promise) isBindingData_Value() {} func (*BindingData_Map) isBindingData_Value() {} +func (*BindingData_OffloadedMetadata) isBindingData_Value() {} + // An input/output binding of a variable to either static value or a node output. 
type Binding struct { state protoimpl.MessageState @@ -1668,7 +1685,7 @@ var file_flyteidl_core_literals_proto_rawDesc = []byte{ 0x0a, 0x0a, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x54, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4c, 0x69, 0x74, 0x65, 0x72, 0x61, 0x6c, 0x54, 0x79, 0x70, 0x65, 0x52, 0x0a, - 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x54, 0x79, 0x70, 0x65, 0x22, 0xae, 0x02, 0x0a, 0x0b, 0x42, + 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x54, 0x79, 0x70, 0x65, 0x22, 0x88, 0x03, 0x0a, 0x0b, 0x42, 0x69, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x44, 0x61, 0x74, 0x61, 0x12, 0x2f, 0x0a, 0x06, 0x73, 0x63, 0x61, 0x6c, 0x61, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x53, 0x63, 0x61, 0x6c, 0x61, @@ -1684,33 +1701,39 @@ var file_flyteidl_core_literals_proto_rawDesc = []byte{ 0x31, 0x0a, 0x03, 0x6d, 0x61, 0x70, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x42, 0x69, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x44, 0x61, 0x74, 0x61, 0x4d, 0x61, 0x70, 0x48, 0x00, 0x52, 0x03, 0x6d, - 0x61, 0x70, 0x12, 0x2e, 0x0a, 0x05, 0x75, 0x6e, 0x69, 0x6f, 0x6e, 0x18, 0x05, 0x20, 0x01, 0x28, - 0x0b, 0x32, 0x18, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, - 0x65, 0x2e, 0x55, 0x6e, 0x69, 0x6f, 0x6e, 0x49, 0x6e, 0x66, 0x6f, 0x52, 0x05, 0x75, 0x6e, 0x69, - 0x6f, 0x6e, 0x42, 0x07, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x51, 0x0a, 0x07, 0x42, - 0x69, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x12, 0x10, 0x0a, 0x03, 0x76, 0x61, 0x72, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x03, 0x76, 0x61, 0x72, 0x12, 0x34, 0x0a, 0x07, 0x62, 0x69, 0x6e, 0x64, - 0x69, 0x6e, 0x67, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x66, 0x6c, 0x79, 0x74, - 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 
0x72, 0x65, 0x2e, 0x42, 0x69, 0x6e, 0x64, 0x69, 0x6e, - 0x67, 0x44, 0x61, 0x74, 0x61, 0x52, 0x07, 0x62, 0x69, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x22, 0x36, - 0x0a, 0x0c, 0x4b, 0x65, 0x79, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x50, 0x61, 0x69, 0x72, 0x12, 0x10, - 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, - 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x29, 0x0a, 0x0d, 0x52, 0x65, 0x74, 0x72, 0x79, 0x53, - 0x74, 0x72, 0x61, 0x74, 0x65, 0x67, 0x79, 0x12, 0x18, 0x0a, 0x07, 0x72, 0x65, 0x74, 0x72, 0x69, - 0x65, 0x73, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x07, 0x72, 0x65, 0x74, 0x72, 0x69, 0x65, - 0x73, 0x42, 0xb3, 0x01, 0x0a, 0x11, 0x63, 0x6f, 0x6d, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, - 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x42, 0x0d, 0x4c, 0x69, 0x74, 0x65, 0x72, 0x61, 0x6c, - 0x73, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x3a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, - 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x6f, 0x72, 0x67, 0x2f, 0x66, 0x6c, - 0x79, 0x74, 0x65, 0x2f, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2f, 0x67, 0x65, 0x6e, - 0x2f, 0x70, 0x62, 0x2d, 0x67, 0x6f, 0x2f, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2f, - 0x63, 0x6f, 0x72, 0x65, 0xa2, 0x02, 0x03, 0x46, 0x43, 0x58, 0xaa, 0x02, 0x0d, 0x46, 0x6c, 0x79, - 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x43, 0x6f, 0x72, 0x65, 0xca, 0x02, 0x0d, 0x46, 0x6c, 0x79, - 0x74, 0x65, 0x69, 0x64, 0x6c, 0x5c, 0x43, 0x6f, 0x72, 0x65, 0xe2, 0x02, 0x19, 0x46, 0x6c, 0x79, - 0x74, 0x65, 0x69, 0x64, 0x6c, 0x5c, 0x43, 0x6f, 0x72, 0x65, 0x5c, 0x47, 0x50, 0x42, 0x4d, 0x65, - 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0xea, 0x02, 0x0e, 0x46, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, - 0x6c, 0x3a, 0x3a, 0x43, 0x6f, 0x72, 0x65, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x61, 0x70, 0x12, 0x58, 0x0a, 0x12, 0x6f, 0x66, 0x66, 0x6c, 0x6f, 0x61, 0x64, 0x65, 
0x64, 0x5f, + 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x27, + 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4c, + 0x69, 0x74, 0x65, 0x72, 0x61, 0x6c, 0x4f, 0x66, 0x66, 0x6c, 0x6f, 0x61, 0x64, 0x65, 0x64, 0x4d, + 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x48, 0x00, 0x52, 0x11, 0x6f, 0x66, 0x66, 0x6c, 0x6f, + 0x61, 0x64, 0x65, 0x64, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x2e, 0x0a, 0x05, + 0x75, 0x6e, 0x69, 0x6f, 0x6e, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x66, 0x6c, + 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x55, 0x6e, 0x69, 0x6f, + 0x6e, 0x49, 0x6e, 0x66, 0x6f, 0x52, 0x05, 0x75, 0x6e, 0x69, 0x6f, 0x6e, 0x42, 0x07, 0x0a, 0x05, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x51, 0x0a, 0x07, 0x42, 0x69, 0x6e, 0x64, 0x69, 0x6e, 0x67, + 0x12, 0x10, 0x0a, 0x03, 0x76, 0x61, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x76, + 0x61, 0x72, 0x12, 0x34, 0x0a, 0x07, 0x62, 0x69, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x18, 0x02, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, + 0x6f, 0x72, 0x65, 0x2e, 0x42, 0x69, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x44, 0x61, 0x74, 0x61, 0x52, + 0x07, 0x62, 0x69, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x22, 0x36, 0x0a, 0x0c, 0x4b, 0x65, 0x79, 0x56, + 0x61, 0x6c, 0x75, 0x65, 0x50, 0x61, 0x69, 0x72, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x22, 0x29, 0x0a, 0x0d, 0x52, 0x65, 0x74, 0x72, 0x79, 0x53, 0x74, 0x72, 0x61, 0x74, 0x65, 0x67, + 0x79, 0x12, 0x18, 0x0a, 0x07, 0x72, 0x65, 0x74, 0x72, 0x69, 0x65, 0x73, 0x18, 0x05, 0x20, 0x01, + 0x28, 0x0d, 0x52, 0x07, 0x72, 0x65, 0x74, 0x72, 0x69, 0x65, 0x73, 0x42, 0xb3, 0x01, 0x0a, 0x11, + 0x63, 0x6f, 0x6d, 0x2e, 
0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, + 0x65, 0x42, 0x0d, 0x4c, 0x69, 0x74, 0x65, 0x72, 0x61, 0x6c, 0x73, 0x50, 0x72, 0x6f, 0x74, 0x6f, + 0x50, 0x01, 0x5a, 0x3a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x66, + 0x6c, 0x79, 0x74, 0x65, 0x6f, 0x72, 0x67, 0x2f, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x2f, 0x66, 0x6c, + 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2f, 0x67, 0x65, 0x6e, 0x2f, 0x70, 0x62, 0x2d, 0x67, 0x6f, + 0x2f, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2f, 0x63, 0x6f, 0x72, 0x65, 0xa2, 0x02, + 0x03, 0x46, 0x43, 0x58, 0xaa, 0x02, 0x0d, 0x46, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, + 0x43, 0x6f, 0x72, 0x65, 0xca, 0x02, 0x0d, 0x46, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x5c, + 0x43, 0x6f, 0x72, 0x65, 0xe2, 0x02, 0x19, 0x46, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x5c, + 0x43, 0x6f, 0x72, 0x65, 0x5c, 0x47, 0x50, 0x42, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, + 0xea, 0x02, 0x0e, 0x46, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x3a, 0x3a, 0x43, 0x6f, 0x72, + 0x65, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( @@ -1795,15 +1818,16 @@ var file_flyteidl_core_literals_proto_depIdxs = []int32{ 14, // 30: flyteidl.core.BindingData.collection:type_name -> flyteidl.core.BindingDataCollection 32, // 31: flyteidl.core.BindingData.promise:type_name -> flyteidl.core.OutputReference 15, // 32: flyteidl.core.BindingData.map:type_name -> flyteidl.core.BindingDataMap - 16, // 33: flyteidl.core.BindingData.union:type_name -> flyteidl.core.UnionInfo - 17, // 34: flyteidl.core.Binding.binding:type_name -> flyteidl.core.BindingData - 10, // 35: flyteidl.core.LiteralMap.LiteralsEntry.value:type_name -> flyteidl.core.Literal - 17, // 36: flyteidl.core.BindingDataMap.BindingsEntry.value:type_name -> flyteidl.core.BindingData - 37, // [37:37] is the sub-list for method output_type - 37, // [37:37] is the sub-list for method input_type - 37, // [37:37] is the sub-list for extension type_name - 37, // 
[37:37] is the sub-list for extension extendee - 0, // [0:37] is the sub-list for field type_name + 11, // 33: flyteidl.core.BindingData.offloaded_metadata:type_name -> flyteidl.core.LiteralOffloadedMetadata + 16, // 34: flyteidl.core.BindingData.union:type_name -> flyteidl.core.UnionInfo + 17, // 35: flyteidl.core.Binding.binding:type_name -> flyteidl.core.BindingData + 10, // 36: flyteidl.core.LiteralMap.LiteralsEntry.value:type_name -> flyteidl.core.Literal + 17, // 37: flyteidl.core.BindingDataMap.BindingsEntry.value:type_name -> flyteidl.core.BindingData + 38, // [38:38] is the sub-list for method output_type + 38, // [38:38] is the sub-list for method input_type + 38, // [38:38] is the sub-list for extension type_name + 38, // [38:38] is the sub-list for extension extendee + 0, // [0:38] is the sub-list for field type_name } func init() { file_flyteidl_core_literals_proto_init() } @@ -2096,6 +2120,7 @@ func file_flyteidl_core_literals_proto_init() { (*BindingData_Collection)(nil), (*BindingData_Promise)(nil), (*BindingData_Map)(nil), + (*BindingData_OffloadedMetadata)(nil), } type x struct{} out := protoimpl.TypeBuilder{ diff --git a/flyteidl/gen/pb-go/flyteidl/core/tasks.pb.go b/flyteidl/gen/pb-go/flyteidl/core/tasks.pb.go index 122ddce559..ceb9c71f1f 100644 --- a/flyteidl/gen/pb-go/flyteidl/core/tasks.pb.go +++ b/flyteidl/gen/pb-go/flyteidl/core/tasks.pb.go @@ -714,6 +714,9 @@ type TaskMetadata struct { PodTemplateName string `protobuf:"bytes,12,opt,name=pod_template_name,json=podTemplateName,proto3" json:"pod_template_name,omitempty"` // cache_ignore_input_vars is the input variables that should not be included when calculating hash for cache. CacheIgnoreInputVars []string `protobuf:"bytes,13,rep,name=cache_ignore_input_vars,json=cacheIgnoreInputVars,proto3" json:"cache_ignore_input_vars,omitempty"` + // is_eager indicates whether the task is eager or not. + // This would be used by CreateTask endpoint. 
+ IsEager bool `protobuf:"varint,14,opt,name=is_eager,json=isEager,proto3" json:"is_eager,omitempty"` } func (x *TaskMetadata) Reset() { @@ -839,6 +842,13 @@ func (x *TaskMetadata) GetCacheIgnoreInputVars() []string { return nil } +func (x *TaskMetadata) GetIsEager() bool { + if x != nil { + return x.IsEager + } + return false +} + type isTaskMetadata_InterruptibleValue interface { isTaskMetadata_InterruptibleValue() } @@ -1692,7 +1702,7 @@ var file_flyteidl_core_tasks_proto_rawDesc = []byte{ 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x66, 0x6c, 0x61, 0x76, 0x6f, 0x72, 0x22, 0x27, 0x0a, 0x0b, 0x52, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x54, 0x79, 0x70, 0x65, 0x12, 0x09, 0x0a, 0x05, 0x4f, 0x54, 0x48, 0x45, 0x52, 0x10, 0x00, 0x12, 0x0d, 0x0a, 0x09, 0x46, 0x4c, 0x59, 0x54, 0x45, - 0x5f, 0x53, 0x44, 0x4b, 0x10, 0x01, 0x22, 0xac, 0x05, 0x0a, 0x0c, 0x54, 0x61, 0x73, 0x6b, 0x4d, + 0x5f, 0x53, 0x44, 0x4b, 0x10, 0x01, 0x22, 0xc7, 0x05, 0x0a, 0x0c, 0x54, 0x61, 0x73, 0x6b, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x22, 0x0a, 0x0c, 0x64, 0x69, 0x73, 0x63, 0x6f, 0x76, 0x65, 0x72, 0x61, 0x62, 0x6c, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0c, 0x64, 0x69, 0x73, 0x63, 0x6f, 0x76, 0x65, 0x72, 0x61, 0x62, 0x6c, 0x65, 0x12, 0x38, 0x0a, 0x07, 0x72, @@ -1730,183 +1740,184 @@ var file_flyteidl_core_tasks_proto_rawDesc = []byte{ 0x0a, 0x17, 0x63, 0x61, 0x63, 0x68, 0x65, 0x5f, 0x69, 0x67, 0x6e, 0x6f, 0x72, 0x65, 0x5f, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x5f, 0x76, 0x61, 0x72, 0x73, 0x18, 0x0d, 0x20, 0x03, 0x28, 0x09, 0x52, 0x14, 0x63, 0x61, 0x63, 0x68, 0x65, 0x49, 0x67, 0x6e, 0x6f, 0x72, 0x65, 0x49, 0x6e, 0x70, 0x75, - 0x74, 0x56, 0x61, 0x72, 0x73, 0x1a, 0x37, 0x0a, 0x09, 0x54, 0x61, 0x67, 0x73, 0x45, 0x6e, 0x74, - 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 
0x02, 0x38, 0x01, 0x42, 0x15, - 0x0a, 0x13, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x72, 0x75, 0x70, 0x74, 0x69, 0x62, 0x6c, 0x65, 0x5f, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0xd6, 0x05, 0x0a, 0x0c, 0x54, 0x61, 0x73, 0x6b, 0x54, 0x65, - 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x12, 0x29, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, - 0x72, 0x65, 0x2e, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x52, 0x02, 0x69, - 0x64, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x37, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, - 0x61, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, - 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x54, 0x61, 0x73, 0x6b, 0x4d, 0x65, 0x74, 0x61, - 0x64, 0x61, 0x74, 0x61, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x3b, - 0x0a, 0x09, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x66, 0x61, 0x63, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, - 0x0b, 0x32, 0x1d, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, - 0x65, 0x2e, 0x54, 0x79, 0x70, 0x65, 0x64, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x66, 0x61, 0x63, 0x65, - 0x52, 0x09, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x66, 0x61, 0x63, 0x65, 0x12, 0x2f, 0x0a, 0x06, 0x63, - 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x67, 0x6f, - 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x53, 0x74, - 0x72, 0x75, 0x63, 0x74, 0x52, 0x06, 0x63, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x12, 0x38, 0x0a, 0x09, - 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x18, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, - 0x43, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x48, 0x00, 0x52, 0x09, 0x63, 0x6f, 0x6e, - 0x74, 
0x61, 0x69, 0x6e, 0x65, 0x72, 0x12, 0x30, 0x0a, 0x07, 0x6b, 0x38, 0x73, 0x5f, 0x70, 0x6f, - 0x64, 0x18, 0x11, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, - 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4b, 0x38, 0x73, 0x50, 0x6f, 0x64, 0x48, 0x00, - 0x52, 0x06, 0x6b, 0x38, 0x73, 0x50, 0x6f, 0x64, 0x12, 0x26, 0x0a, 0x03, 0x73, 0x71, 0x6c, 0x18, - 0x12, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, - 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x53, 0x71, 0x6c, 0x48, 0x00, 0x52, 0x03, 0x73, 0x71, 0x6c, - 0x12, 0x2a, 0x0a, 0x11, 0x74, 0x61, 0x73, 0x6b, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x76, 0x65, - 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x07, 0x20, 0x01, 0x28, 0x05, 0x52, 0x0f, 0x74, 0x61, 0x73, - 0x6b, 0x54, 0x79, 0x70, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x49, 0x0a, 0x10, - 0x73, 0x65, 0x63, 0x75, 0x72, 0x69, 0x74, 0x79, 0x5f, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, - 0x18, 0x08, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, - 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x53, 0x65, 0x63, 0x75, 0x72, 0x69, 0x74, 0x79, 0x43, - 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x52, 0x0f, 0x73, 0x65, 0x63, 0x75, 0x72, 0x69, 0x74, 0x79, - 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x12, 0x4f, 0x0a, 0x12, 0x65, 0x78, 0x74, 0x65, 0x6e, - 0x64, 0x65, 0x64, 0x5f, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x18, 0x09, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, - 0x6f, 0x72, 0x65, 0x2e, 0x45, 0x78, 0x74, 0x65, 0x6e, 0x64, 0x65, 0x64, 0x52, 0x65, 0x73, 0x6f, - 0x75, 0x72, 0x63, 0x65, 0x73, 0x52, 0x11, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x64, 0x65, 0x64, 0x52, - 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x12, 0x3f, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, - 0x69, 0x67, 0x18, 0x10, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, - 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 
0x65, 0x2e, 0x54, 0x61, 0x73, 0x6b, 0x54, 0x65, 0x6d, - 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x45, 0x6e, 0x74, 0x72, - 0x79, 0x52, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x1a, 0x39, 0x0a, 0x0b, 0x43, 0x6f, 0x6e, - 0x66, 0x69, 0x67, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x3a, 0x02, 0x38, 0x01, 0x42, 0x08, 0x0a, 0x06, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x22, 0x36, - 0x0a, 0x0d, 0x43, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x50, 0x6f, 0x72, 0x74, 0x12, - 0x25, 0x0a, 0x0e, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x5f, 0x70, 0x6f, 0x72, - 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x0d, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, - 0x65, 0x72, 0x50, 0x6f, 0x72, 0x74, 0x22, 0xfc, 0x03, 0x0a, 0x09, 0x43, 0x6f, 0x6e, 0x74, 0x61, - 0x69, 0x6e, 0x65, 0x72, 0x12, 0x14, 0x0a, 0x05, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x05, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x63, 0x6f, - 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x18, 0x02, 0x20, 0x03, 0x28, 0x09, 0x52, 0x07, 0x63, 0x6f, 0x6d, - 0x6d, 0x61, 0x6e, 0x64, 0x12, 0x12, 0x0a, 0x04, 0x61, 0x72, 0x67, 0x73, 0x18, 0x03, 0x20, 0x03, - 0x28, 0x09, 0x52, 0x04, 0x61, 0x72, 0x67, 0x73, 0x12, 0x36, 0x0a, 0x09, 0x72, 0x65, 0x73, 0x6f, - 0x75, 0x72, 0x63, 0x65, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x66, 0x6c, - 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x52, 0x65, 0x73, 0x6f, - 0x75, 0x72, 0x63, 0x65, 0x73, 0x52, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, - 0x12, 0x2d, 0x0a, 0x03, 0x65, 0x6e, 0x76, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1b, 0x2e, - 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 
0x2e, 0x4b, 0x65, - 0x79, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x50, 0x61, 0x69, 0x72, 0x52, 0x03, 0x65, 0x6e, 0x76, 0x12, - 0x37, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, - 0x1b, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, - 0x4b, 0x65, 0x79, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x50, 0x61, 0x69, 0x72, 0x42, 0x02, 0x18, 0x01, - 0x52, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x32, 0x0a, 0x05, 0x70, 0x6f, 0x72, 0x74, - 0x73, 0x18, 0x07, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, - 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x43, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, - 0x72, 0x50, 0x6f, 0x72, 0x74, 0x52, 0x05, 0x70, 0x6f, 0x72, 0x74, 0x73, 0x12, 0x41, 0x0a, 0x0b, - 0x64, 0x61, 0x74, 0x61, 0x5f, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x09, 0x20, 0x01, 0x28, - 0x0b, 0x32, 0x20, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, - 0x65, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x4c, 0x6f, 0x61, 0x64, 0x69, 0x6e, 0x67, 0x43, 0x6f, 0x6e, - 0x66, 0x69, 0x67, 0x52, 0x0a, 0x64, 0x61, 0x74, 0x61, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, - 0x49, 0x0a, 0x0c, 0x61, 0x72, 0x63, 0x68, 0x69, 0x74, 0x65, 0x63, 0x74, 0x75, 0x72, 0x65, 0x18, - 0x0a, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x25, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, - 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x43, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x2e, - 0x41, 0x72, 0x63, 0x68, 0x69, 0x74, 0x65, 0x63, 0x74, 0x75, 0x72, 0x65, 0x52, 0x0c, 0x61, 0x72, - 0x63, 0x68, 0x69, 0x74, 0x65, 0x63, 0x74, 0x75, 0x72, 0x65, 0x22, 0x49, 0x0a, 0x0c, 0x41, 0x72, - 0x63, 0x68, 0x69, 0x74, 0x65, 0x63, 0x74, 0x75, 0x72, 0x65, 0x12, 0x0b, 0x0a, 0x07, 0x55, 0x4e, - 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x00, 0x12, 0x09, 0x0a, 0x05, 0x41, 0x4d, 0x44, 0x36, 0x34, - 0x10, 0x01, 0x12, 0x09, 0x0a, 0x05, 0x41, 0x52, 0x4d, 0x36, 0x34, 0x10, 0x02, 0x12, 0x0a, 0x0a, - 0x06, 0x41, 0x52, 
0x4d, 0x5f, 0x56, 0x36, 0x10, 0x03, 0x12, 0x0a, 0x0a, 0x06, 0x41, 0x52, 0x4d, - 0x5f, 0x56, 0x37, 0x10, 0x04, 0x22, 0xb5, 0x02, 0x0a, 0x0a, 0x49, 0x4f, 0x53, 0x74, 0x72, 0x61, - 0x74, 0x65, 0x67, 0x79, 0x12, 0x4b, 0x0a, 0x0d, 0x64, 0x6f, 0x77, 0x6e, 0x6c, 0x6f, 0x61, 0x64, - 0x5f, 0x6d, 0x6f, 0x64, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x26, 0x2e, 0x66, 0x6c, - 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x49, 0x4f, 0x53, 0x74, - 0x72, 0x61, 0x74, 0x65, 0x67, 0x79, 0x2e, 0x44, 0x6f, 0x77, 0x6e, 0x6c, 0x6f, 0x61, 0x64, 0x4d, - 0x6f, 0x64, 0x65, 0x52, 0x0c, 0x64, 0x6f, 0x77, 0x6e, 0x6c, 0x6f, 0x61, 0x64, 0x4d, 0x6f, 0x64, - 0x65, 0x12, 0x45, 0x0a, 0x0b, 0x75, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x5f, 0x6d, 0x6f, 0x64, 0x65, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x24, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, + 0x74, 0x56, 0x61, 0x72, 0x73, 0x12, 0x19, 0x0a, 0x08, 0x69, 0x73, 0x5f, 0x65, 0x61, 0x67, 0x65, + 0x72, 0x18, 0x0e, 0x20, 0x01, 0x28, 0x08, 0x52, 0x07, 0x69, 0x73, 0x45, 0x61, 0x67, 0x65, 0x72, + 0x1a, 0x37, 0x0a, 0x09, 0x54, 0x61, 0x67, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, + 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, + 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x42, 0x15, 0x0a, 0x13, 0x69, 0x6e, 0x74, + 0x65, 0x72, 0x72, 0x75, 0x70, 0x74, 0x69, 0x62, 0x6c, 0x65, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x22, 0xd6, 0x05, 0x0a, 0x0c, 0x54, 0x61, 0x73, 0x6b, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, + 0x65, 0x12, 0x29, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, + 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x49, 0x64, + 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x52, 0x02, 0x69, 0x64, 0x12, 0x12, 0x0a, 0x04, + 0x74, 0x79, 0x70, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 
0x09, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, + 0x12, 0x37, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x03, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, + 0x72, 0x65, 0x2e, 0x54, 0x61, 0x73, 0x6b, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, + 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x3b, 0x0a, 0x09, 0x69, 0x6e, 0x74, + 0x65, 0x72, 0x66, 0x61, 0x63, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x66, + 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x54, 0x79, 0x70, + 0x65, 0x64, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x66, 0x61, 0x63, 0x65, 0x52, 0x09, 0x69, 0x6e, 0x74, + 0x65, 0x72, 0x66, 0x61, 0x63, 0x65, 0x12, 0x2f, 0x0a, 0x06, 0x63, 0x75, 0x73, 0x74, 0x6f, 0x6d, + 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x53, 0x74, 0x72, 0x75, 0x63, 0x74, 0x52, + 0x06, 0x63, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x12, 0x38, 0x0a, 0x09, 0x63, 0x6f, 0x6e, 0x74, 0x61, + 0x69, 0x6e, 0x65, 0x72, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x66, 0x6c, 0x79, + 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x43, 0x6f, 0x6e, 0x74, 0x61, + 0x69, 0x6e, 0x65, 0x72, 0x48, 0x00, 0x52, 0x09, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, + 0x72, 0x12, 0x30, 0x0a, 0x07, 0x6b, 0x38, 0x73, 0x5f, 0x70, 0x6f, 0x64, 0x18, 0x11, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, + 0x72, 0x65, 0x2e, 0x4b, 0x38, 0x73, 0x50, 0x6f, 0x64, 0x48, 0x00, 0x52, 0x06, 0x6b, 0x38, 0x73, + 0x50, 0x6f, 0x64, 0x12, 0x26, 0x0a, 0x03, 0x73, 0x71, 0x6c, 0x18, 0x12, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x12, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, + 0x2e, 0x53, 0x71, 0x6c, 0x48, 0x00, 0x52, 0x03, 0x73, 0x71, 0x6c, 0x12, 0x2a, 0x0a, 0x11, 
0x74, + 0x61, 0x73, 0x6b, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, + 0x18, 0x07, 0x20, 0x01, 0x28, 0x05, 0x52, 0x0f, 0x74, 0x61, 0x73, 0x6b, 0x54, 0x79, 0x70, 0x65, + 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x49, 0x0a, 0x10, 0x73, 0x65, 0x63, 0x75, 0x72, + 0x69, 0x74, 0x79, 0x5f, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x18, 0x08, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x1e, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, + 0x65, 0x2e, 0x53, 0x65, 0x63, 0x75, 0x72, 0x69, 0x74, 0x79, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, + 0x74, 0x52, 0x0f, 0x73, 0x65, 0x63, 0x75, 0x72, 0x69, 0x74, 0x79, 0x43, 0x6f, 0x6e, 0x74, 0x65, + 0x78, 0x74, 0x12, 0x4f, 0x0a, 0x12, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x64, 0x65, 0x64, 0x5f, 0x72, + 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x20, + 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x45, + 0x78, 0x74, 0x65, 0x6e, 0x64, 0x65, 0x64, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, + 0x52, 0x11, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x64, 0x65, 0x64, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, + 0x63, 0x65, 0x73, 0x12, 0x3f, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x10, 0x20, + 0x03, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, + 0x6f, 0x72, 0x65, 0x2e, 0x54, 0x61, 0x73, 0x6b, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, + 0x2e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x06, 0x63, 0x6f, + 0x6e, 0x66, 0x69, 0x67, 0x1a, 0x39, 0x0a, 0x0b, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x45, 0x6e, + 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x42, + 0x08, 0x0a, 0x06, 0x74, 0x61, 
0x72, 0x67, 0x65, 0x74, 0x22, 0x36, 0x0a, 0x0d, 0x43, 0x6f, 0x6e, + 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x50, 0x6f, 0x72, 0x74, 0x12, 0x25, 0x0a, 0x0e, 0x63, 0x6f, + 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x5f, 0x70, 0x6f, 0x72, 0x74, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x0d, 0x52, 0x0d, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x50, 0x6f, 0x72, + 0x74, 0x22, 0xfc, 0x03, 0x0a, 0x09, 0x43, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x12, + 0x14, 0x0a, 0x05, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, + 0x69, 0x6d, 0x61, 0x67, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x63, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, + 0x18, 0x02, 0x20, 0x03, 0x28, 0x09, 0x52, 0x07, 0x63, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x12, + 0x12, 0x0a, 0x04, 0x61, 0x72, 0x67, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x09, 0x52, 0x04, 0x61, + 0x72, 0x67, 0x73, 0x12, 0x36, 0x0a, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, + 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, + 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, + 0x52, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x12, 0x2d, 0x0a, 0x03, 0x65, + 0x6e, 0x76, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, + 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4b, 0x65, 0x79, 0x56, 0x61, 0x6c, 0x75, + 0x65, 0x50, 0x61, 0x69, 0x72, 0x52, 0x03, 0x65, 0x6e, 0x76, 0x12, 0x37, 0x0a, 0x06, 0x63, 0x6f, + 0x6e, 0x66, 0x69, 0x67, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x66, 0x6c, 0x79, + 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4b, 0x65, 0x79, 0x56, 0x61, + 0x6c, 0x75, 0x65, 0x50, 0x61, 0x69, 0x72, 0x42, 0x02, 0x18, 0x01, 0x52, 0x06, 0x63, 0x6f, 0x6e, + 0x66, 0x69, 0x67, 0x12, 0x32, 0x0a, 0x05, 0x70, 0x6f, 0x72, 0x74, 0x73, 0x18, 0x07, 0x20, 0x03, + 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 
0x64, 0x6c, 0x2e, 0x63, 0x6f, + 0x72, 0x65, 0x2e, 0x43, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x50, 0x6f, 0x72, 0x74, + 0x52, 0x05, 0x70, 0x6f, 0x72, 0x74, 0x73, 0x12, 0x41, 0x0a, 0x0b, 0x64, 0x61, 0x74, 0x61, 0x5f, + 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x66, + 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x44, 0x61, 0x74, + 0x61, 0x4c, 0x6f, 0x61, 0x64, 0x69, 0x6e, 0x67, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x0a, + 0x64, 0x61, 0x74, 0x61, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x49, 0x0a, 0x0c, 0x61, 0x72, + 0x63, 0x68, 0x69, 0x74, 0x65, 0x63, 0x74, 0x75, 0x72, 0x65, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0e, + 0x32, 0x25, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, + 0x2e, 0x43, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x2e, 0x41, 0x72, 0x63, 0x68, 0x69, + 0x74, 0x65, 0x63, 0x74, 0x75, 0x72, 0x65, 0x52, 0x0c, 0x61, 0x72, 0x63, 0x68, 0x69, 0x74, 0x65, + 0x63, 0x74, 0x75, 0x72, 0x65, 0x22, 0x49, 0x0a, 0x0c, 0x41, 0x72, 0x63, 0x68, 0x69, 0x74, 0x65, + 0x63, 0x74, 0x75, 0x72, 0x65, 0x12, 0x0b, 0x0a, 0x07, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, + 0x10, 0x00, 0x12, 0x09, 0x0a, 0x05, 0x41, 0x4d, 0x44, 0x36, 0x34, 0x10, 0x01, 0x12, 0x09, 0x0a, + 0x05, 0x41, 0x52, 0x4d, 0x36, 0x34, 0x10, 0x02, 0x12, 0x0a, 0x0a, 0x06, 0x41, 0x52, 0x4d, 0x5f, + 0x56, 0x36, 0x10, 0x03, 0x12, 0x0a, 0x0a, 0x06, 0x41, 0x52, 0x4d, 0x5f, 0x56, 0x37, 0x10, 0x04, + 0x22, 0xb5, 0x02, 0x0a, 0x0a, 0x49, 0x4f, 0x53, 0x74, 0x72, 0x61, 0x74, 0x65, 0x67, 0x79, 0x12, + 0x4b, 0x0a, 0x0d, 0x64, 0x6f, 0x77, 0x6e, 0x6c, 0x6f, 0x61, 0x64, 0x5f, 0x6d, 0x6f, 0x64, 0x65, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x26, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x49, 0x4f, 0x53, 0x74, 0x72, 0x61, 0x74, 0x65, 0x67, - 0x79, 0x2e, 0x55, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x4d, 0x6f, 0x64, 0x65, 0x52, 0x0a, 0x75, 0x70, - 0x6c, 
0x6f, 0x61, 0x64, 0x4d, 0x6f, 0x64, 0x65, 0x22, 0x4c, 0x0a, 0x0c, 0x44, 0x6f, 0x77, 0x6e, - 0x6c, 0x6f, 0x61, 0x64, 0x4d, 0x6f, 0x64, 0x65, 0x12, 0x12, 0x0a, 0x0e, 0x44, 0x4f, 0x57, 0x4e, - 0x4c, 0x4f, 0x41, 0x44, 0x5f, 0x45, 0x41, 0x47, 0x45, 0x52, 0x10, 0x00, 0x12, 0x13, 0x0a, 0x0f, - 0x44, 0x4f, 0x57, 0x4e, 0x4c, 0x4f, 0x41, 0x44, 0x5f, 0x53, 0x54, 0x52, 0x45, 0x41, 0x4d, 0x10, - 0x01, 0x12, 0x13, 0x0a, 0x0f, 0x44, 0x4f, 0x5f, 0x4e, 0x4f, 0x54, 0x5f, 0x44, 0x4f, 0x57, 0x4e, - 0x4c, 0x4f, 0x41, 0x44, 0x10, 0x02, 0x22, 0x45, 0x0a, 0x0a, 0x55, 0x70, 0x6c, 0x6f, 0x61, 0x64, - 0x4d, 0x6f, 0x64, 0x65, 0x12, 0x12, 0x0a, 0x0e, 0x55, 0x50, 0x4c, 0x4f, 0x41, 0x44, 0x5f, 0x4f, - 0x4e, 0x5f, 0x45, 0x58, 0x49, 0x54, 0x10, 0x00, 0x12, 0x10, 0x0a, 0x0c, 0x55, 0x50, 0x4c, 0x4f, - 0x41, 0x44, 0x5f, 0x45, 0x41, 0x47, 0x45, 0x52, 0x10, 0x01, 0x12, 0x11, 0x0a, 0x0d, 0x44, 0x4f, - 0x5f, 0x4e, 0x4f, 0x54, 0x5f, 0x55, 0x50, 0x4c, 0x4f, 0x41, 0x44, 0x10, 0x02, 0x22, 0xa7, 0x02, - 0x0a, 0x11, 0x44, 0x61, 0x74, 0x61, 0x4c, 0x6f, 0x61, 0x64, 0x69, 0x6e, 0x67, 0x43, 0x6f, 0x6e, - 0x66, 0x69, 0x67, 0x12, 0x18, 0x0a, 0x07, 0x65, 0x6e, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x08, 0x52, 0x07, 0x65, 0x6e, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x12, 0x1d, 0x0a, - 0x0a, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x09, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x50, 0x61, 0x74, 0x68, 0x12, 0x1f, 0x0a, 0x0b, - 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x18, 0x03, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x0a, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x50, 0x61, 0x74, 0x68, 0x12, 0x49, 0x0a, - 0x06, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x31, 0x2e, - 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x44, 0x61, - 0x74, 0x61, 0x4c, 0x6f, 0x61, 0x64, 0x69, 0x6e, 0x67, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, - 0x4c, 0x69, 0x74, 0x65, 0x72, 0x61, 0x6c, 
0x4d, 0x61, 0x70, 0x46, 0x6f, 0x72, 0x6d, 0x61, 0x74, - 0x52, 0x06, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x12, 0x3a, 0x0a, 0x0b, 0x69, 0x6f, 0x5f, 0x73, - 0x74, 0x72, 0x61, 0x74, 0x65, 0x67, 0x79, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, - 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x49, 0x4f, - 0x53, 0x74, 0x72, 0x61, 0x74, 0x65, 0x67, 0x79, 0x52, 0x0a, 0x69, 0x6f, 0x53, 0x74, 0x72, 0x61, - 0x74, 0x65, 0x67, 0x79, 0x22, 0x31, 0x0a, 0x10, 0x4c, 0x69, 0x74, 0x65, 0x72, 0x61, 0x6c, 0x4d, - 0x61, 0x70, 0x46, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x12, 0x08, 0x0a, 0x04, 0x4a, 0x53, 0x4f, 0x4e, - 0x10, 0x00, 0x12, 0x08, 0x0a, 0x04, 0x59, 0x41, 0x4d, 0x4c, 0x10, 0x01, 0x12, 0x09, 0x0a, 0x05, - 0x50, 0x52, 0x4f, 0x54, 0x4f, 0x10, 0x02, 0x22, 0xbd, 0x01, 0x0a, 0x06, 0x4b, 0x38, 0x73, 0x50, - 0x6f, 0x64, 0x12, 0x3c, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x01, + 0x79, 0x2e, 0x44, 0x6f, 0x77, 0x6e, 0x6c, 0x6f, 0x61, 0x64, 0x4d, 0x6f, 0x64, 0x65, 0x52, 0x0c, + 0x64, 0x6f, 0x77, 0x6e, 0x6c, 0x6f, 0x61, 0x64, 0x4d, 0x6f, 0x64, 0x65, 0x12, 0x45, 0x0a, 0x0b, + 0x75, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x5f, 0x6d, 0x6f, 0x64, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x0e, 0x32, 0x24, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, + 0x65, 0x2e, 0x49, 0x4f, 0x53, 0x74, 0x72, 0x61, 0x74, 0x65, 0x67, 0x79, 0x2e, 0x55, 0x70, 0x6c, + 0x6f, 0x61, 0x64, 0x4d, 0x6f, 0x64, 0x65, 0x52, 0x0a, 0x75, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x4d, + 0x6f, 0x64, 0x65, 0x22, 0x4c, 0x0a, 0x0c, 0x44, 0x6f, 0x77, 0x6e, 0x6c, 0x6f, 0x61, 0x64, 0x4d, + 0x6f, 0x64, 0x65, 0x12, 0x12, 0x0a, 0x0e, 0x44, 0x4f, 0x57, 0x4e, 0x4c, 0x4f, 0x41, 0x44, 0x5f, + 0x45, 0x41, 0x47, 0x45, 0x52, 0x10, 0x00, 0x12, 0x13, 0x0a, 0x0f, 0x44, 0x4f, 0x57, 0x4e, 0x4c, + 0x4f, 0x41, 0x44, 0x5f, 0x53, 0x54, 0x52, 0x45, 0x41, 0x4d, 0x10, 0x01, 0x12, 0x13, 0x0a, 0x0f, + 0x44, 0x4f, 0x5f, 0x4e, 0x4f, 0x54, 0x5f, 0x44, 0x4f, 0x57, 0x4e, 0x4c, 0x4f, 
0x41, 0x44, 0x10, + 0x02, 0x22, 0x45, 0x0a, 0x0a, 0x55, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x4d, 0x6f, 0x64, 0x65, 0x12, + 0x12, 0x0a, 0x0e, 0x55, 0x50, 0x4c, 0x4f, 0x41, 0x44, 0x5f, 0x4f, 0x4e, 0x5f, 0x45, 0x58, 0x49, + 0x54, 0x10, 0x00, 0x12, 0x10, 0x0a, 0x0c, 0x55, 0x50, 0x4c, 0x4f, 0x41, 0x44, 0x5f, 0x45, 0x41, + 0x47, 0x45, 0x52, 0x10, 0x01, 0x12, 0x11, 0x0a, 0x0d, 0x44, 0x4f, 0x5f, 0x4e, 0x4f, 0x54, 0x5f, + 0x55, 0x50, 0x4c, 0x4f, 0x41, 0x44, 0x10, 0x02, 0x22, 0xa7, 0x02, 0x0a, 0x11, 0x44, 0x61, 0x74, + 0x61, 0x4c, 0x6f, 0x61, 0x64, 0x69, 0x6e, 0x67, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x18, + 0x0a, 0x07, 0x65, 0x6e, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, + 0x07, 0x65, 0x6e, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x12, 0x1d, 0x0a, 0x0a, 0x69, 0x6e, 0x70, 0x75, + 0x74, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x69, 0x6e, + 0x70, 0x75, 0x74, 0x50, 0x61, 0x74, 0x68, 0x12, 0x1f, 0x0a, 0x0b, 0x6f, 0x75, 0x74, 0x70, 0x75, + 0x74, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x6f, 0x75, + 0x74, 0x70, 0x75, 0x74, 0x50, 0x61, 0x74, 0x68, 0x12, 0x49, 0x0a, 0x06, 0x66, 0x6f, 0x72, 0x6d, + 0x61, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x31, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, + 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x4c, 0x6f, 0x61, + 0x64, 0x69, 0x6e, 0x67, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x4c, 0x69, 0x74, 0x65, 0x72, + 0x61, 0x6c, 0x4d, 0x61, 0x70, 0x46, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x52, 0x06, 0x66, 0x6f, 0x72, + 0x6d, 0x61, 0x74, 0x12, 0x3a, 0x0a, 0x0b, 0x69, 0x6f, 0x5f, 0x73, 0x74, 0x72, 0x61, 0x74, 0x65, + 0x67, 0x79, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, + 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x49, 0x4f, 0x53, 0x74, 0x72, 0x61, 0x74, + 0x65, 0x67, 0x79, 0x52, 0x0a, 0x69, 0x6f, 0x53, 0x74, 0x72, 0x61, 0x74, 0x65, 0x67, 0x79, 0x22, + 0x31, 0x0a, 0x10, 
0x4c, 0x69, 0x74, 0x65, 0x72, 0x61, 0x6c, 0x4d, 0x61, 0x70, 0x46, 0x6f, 0x72, + 0x6d, 0x61, 0x74, 0x12, 0x08, 0x0a, 0x04, 0x4a, 0x53, 0x4f, 0x4e, 0x10, 0x00, 0x12, 0x08, 0x0a, + 0x04, 0x59, 0x41, 0x4d, 0x4c, 0x10, 0x01, 0x12, 0x09, 0x0a, 0x05, 0x50, 0x52, 0x4f, 0x54, 0x4f, + 0x10, 0x02, 0x22, 0xbd, 0x01, 0x0a, 0x06, 0x4b, 0x38, 0x73, 0x50, 0x6f, 0x64, 0x12, 0x3c, 0x0a, + 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x20, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, + 0x4b, 0x38, 0x73, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, + 0x61, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x32, 0x0a, 0x08, 0x70, + 0x6f, 0x64, 0x5f, 0x73, 0x70, 0x65, 0x63, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, + 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, + 0x53, 0x74, 0x72, 0x75, 0x63, 0x74, 0x52, 0x07, 0x70, 0x6f, 0x64, 0x53, 0x70, 0x65, 0x63, 0x12, + 0x41, 0x0a, 0x0b, 0x64, 0x61, 0x74, 0x61, 0x5f, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, - 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4b, 0x38, 0x73, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x4d, 0x65, - 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, - 0x12, 0x32, 0x0a, 0x08, 0x70, 0x6f, 0x64, 0x5f, 0x73, 0x70, 0x65, 0x63, 0x18, 0x02, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x53, 0x74, 0x72, 0x75, 0x63, 0x74, 0x52, 0x07, 0x70, 0x6f, 0x64, - 0x53, 0x70, 0x65, 0x63, 0x12, 0x41, 0x0a, 0x0b, 0x64, 0x61, 0x74, 0x61, 0x5f, 0x63, 0x6f, 0x6e, - 0x66, 0x69, 0x67, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x66, 0x6c, 0x79, 0x74, - 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 
0x44, 0x61, 0x74, 0x61, 0x4c, 0x6f, - 0x61, 0x64, 0x69, 0x6e, 0x67, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x0a, 0x64, 0x61, 0x74, - 0x61, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x22, 0xa9, 0x02, 0x0a, 0x11, 0x4b, 0x38, 0x73, 0x4f, - 0x62, 0x6a, 0x65, 0x63, 0x74, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x44, 0x0a, - 0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2c, 0x2e, - 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4b, 0x38, - 0x73, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, - 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x06, 0x6c, 0x61, 0x62, - 0x65, 0x6c, 0x73, 0x12, 0x53, 0x0a, 0x0b, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, - 0x6e, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x31, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, + 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x4c, 0x6f, 0x61, 0x64, 0x69, 0x6e, 0x67, + 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x0a, 0x64, 0x61, 0x74, 0x61, 0x43, 0x6f, 0x6e, 0x66, + 0x69, 0x67, 0x22, 0xa9, 0x02, 0x0a, 0x11, 0x4b, 0x38, 0x73, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, + 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x44, 0x0a, 0x06, 0x6c, 0x61, 0x62, 0x65, + 0x6c, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2c, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4b, 0x38, 0x73, 0x4f, 0x62, 0x6a, 0x65, - 0x63, 0x74, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x41, 0x6e, 0x6e, 0x6f, 0x74, - 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0b, 0x61, 0x6e, 0x6e, - 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x1a, 0x39, 0x0a, 0x0b, 0x4c, 0x61, 0x62, 0x65, - 0x6c, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, - 
0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, - 0x02, 0x38, 0x01, 0x1a, 0x3e, 0x0a, 0x10, 0x41, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, - 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, - 0x02, 0x38, 0x01, 0x22, 0x92, 0x01, 0x0a, 0x03, 0x53, 0x71, 0x6c, 0x12, 0x1c, 0x0a, 0x09, 0x73, - 0x74, 0x61, 0x74, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, - 0x73, 0x74, 0x61, 0x74, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x12, 0x34, 0x0a, 0x07, 0x64, 0x69, 0x61, - 0x6c, 0x65, 0x63, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x1a, 0x2e, 0x66, 0x6c, 0x79, - 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x53, 0x71, 0x6c, 0x2e, 0x44, - 0x69, 0x61, 0x6c, 0x65, 0x63, 0x74, 0x52, 0x07, 0x64, 0x69, 0x61, 0x6c, 0x65, 0x63, 0x74, 0x22, - 0x37, 0x0a, 0x07, 0x44, 0x69, 0x61, 0x6c, 0x65, 0x63, 0x74, 0x12, 0x0d, 0x0a, 0x09, 0x55, 0x4e, - 0x44, 0x45, 0x46, 0x49, 0x4e, 0x45, 0x44, 0x10, 0x00, 0x12, 0x08, 0x0a, 0x04, 0x41, 0x4e, 0x53, - 0x49, 0x10, 0x01, 0x12, 0x08, 0x0a, 0x04, 0x48, 0x49, 0x56, 0x45, 0x10, 0x02, 0x12, 0x09, 0x0a, - 0x05, 0x4f, 0x54, 0x48, 0x45, 0x52, 0x10, 0x03, 0x42, 0xb0, 0x01, 0x0a, 0x11, 0x63, 0x6f, 0x6d, - 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x42, 0x0a, - 0x54, 0x61, 0x73, 0x6b, 0x73, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x3a, 0x67, 0x69, - 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x6f, 0x72, - 0x67, 0x2f, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x2f, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, - 0x2f, 0x67, 0x65, 0x6e, 0x2f, 0x70, 0x62, 0x2d, 0x67, 0x6f, 0x2f, 0x66, 0x6c, 0x79, 0x74, 0x65, - 0x69, 0x64, 0x6c, 0x2f, 0x63, 0x6f, 
0x72, 0x65, 0xa2, 0x02, 0x03, 0x46, 0x43, 0x58, 0xaa, 0x02, - 0x0d, 0x46, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x43, 0x6f, 0x72, 0x65, 0xca, 0x02, - 0x0d, 0x46, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x5c, 0x43, 0x6f, 0x72, 0x65, 0xe2, 0x02, - 0x19, 0x46, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x5c, 0x43, 0x6f, 0x72, 0x65, 0x5c, 0x47, - 0x50, 0x42, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0xea, 0x02, 0x0e, 0x46, 0x6c, 0x79, - 0x74, 0x65, 0x69, 0x64, 0x6c, 0x3a, 0x3a, 0x43, 0x6f, 0x72, 0x65, 0x62, 0x06, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x33, + 0x63, 0x74, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x4c, 0x61, 0x62, 0x65, 0x6c, + 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x12, 0x53, + 0x0a, 0x0b, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x02, 0x20, + 0x03, 0x28, 0x0b, 0x32, 0x31, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, + 0x6f, 0x72, 0x65, 0x2e, 0x4b, 0x38, 0x73, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x4d, 0x65, 0x74, + 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x41, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, + 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0b, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, + 0x6f, 0x6e, 0x73, 0x1a, 0x39, 0x0a, 0x0b, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x45, 0x6e, 0x74, + 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x3e, + 0x0a, 0x10, 0x41, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x45, 0x6e, 0x74, + 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 
0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x92, + 0x01, 0x0a, 0x03, 0x53, 0x71, 0x6c, 0x12, 0x1c, 0x0a, 0x09, 0x73, 0x74, 0x61, 0x74, 0x65, 0x6d, + 0x65, 0x6e, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x73, 0x74, 0x61, 0x74, 0x65, + 0x6d, 0x65, 0x6e, 0x74, 0x12, 0x34, 0x0a, 0x07, 0x64, 0x69, 0x61, 0x6c, 0x65, 0x63, 0x74, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x1a, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, + 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x53, 0x71, 0x6c, 0x2e, 0x44, 0x69, 0x61, 0x6c, 0x65, 0x63, + 0x74, 0x52, 0x07, 0x64, 0x69, 0x61, 0x6c, 0x65, 0x63, 0x74, 0x22, 0x37, 0x0a, 0x07, 0x44, 0x69, + 0x61, 0x6c, 0x65, 0x63, 0x74, 0x12, 0x0d, 0x0a, 0x09, 0x55, 0x4e, 0x44, 0x45, 0x46, 0x49, 0x4e, + 0x45, 0x44, 0x10, 0x00, 0x12, 0x08, 0x0a, 0x04, 0x41, 0x4e, 0x53, 0x49, 0x10, 0x01, 0x12, 0x08, + 0x0a, 0x04, 0x48, 0x49, 0x56, 0x45, 0x10, 0x02, 0x12, 0x09, 0x0a, 0x05, 0x4f, 0x54, 0x48, 0x45, + 0x52, 0x10, 0x03, 0x42, 0xb0, 0x01, 0x0a, 0x11, 0x63, 0x6f, 0x6d, 0x2e, 0x66, 0x6c, 0x79, 0x74, + 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x42, 0x0a, 0x54, 0x61, 0x73, 0x6b, 0x73, + 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x3a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, + 0x63, 0x6f, 0x6d, 0x2f, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x6f, 0x72, 0x67, 0x2f, 0x66, 0x6c, 0x79, + 0x74, 0x65, 0x2f, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2f, 0x67, 0x65, 0x6e, 0x2f, + 0x70, 0x62, 0x2d, 0x67, 0x6f, 0x2f, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2f, 0x63, + 0x6f, 0x72, 0x65, 0xa2, 0x02, 0x03, 0x46, 0x43, 0x58, 0xaa, 0x02, 0x0d, 0x46, 0x6c, 0x79, 0x74, + 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x43, 0x6f, 0x72, 0x65, 0xca, 0x02, 0x0d, 0x46, 0x6c, 0x79, 0x74, + 0x65, 0x69, 0x64, 0x6c, 0x5c, 0x43, 0x6f, 0x72, 0x65, 0xe2, 0x02, 0x19, 0x46, 0x6c, 0x79, 0x74, + 0x65, 0x69, 0x64, 0x6c, 0x5c, 0x43, 0x6f, 0x72, 0x65, 0x5c, 0x47, 0x50, 0x42, 0x4d, 0x65, 0x74, + 0x61, 0x64, 0x61, 0x74, 0x61, 0xea, 0x02, 0x0e, 0x46, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 
0x6c, + 0x3a, 0x3a, 0x43, 0x6f, 0x72, 0x65, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( diff --git a/flyteidl/gen/pb-go/flyteidl/core/workflow.pb.go b/flyteidl/gen/pb-go/flyteidl/core/workflow.pb.go index 14ac613ea6..26392969fe 100644 --- a/flyteidl/gen/pb-go/flyteidl/core/workflow.pb.go +++ b/flyteidl/gen/pb-go/flyteidl/core/workflow.pb.go @@ -10,7 +10,7 @@ import ( protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" durationpb "google.golang.org/protobuf/types/known/durationpb" - _ "google.golang.org/protobuf/types/known/wrapperspb" + wrapperspb "google.golang.org/protobuf/types/known/wrapperspb" reflect "reflect" sync "sync" ) @@ -72,6 +72,60 @@ func (ArrayNode_ExecutionMode) EnumDescriptor() ([]byte, []int) { return file_flyteidl_core_workflow_proto_rawDescGZIP(), []int{9, 0} } +type ArrayNode_DataMode int32 + +const ( + // Indicates the ArrayNode's input is a list of input values that map to subNode executions. + // The file path set for the subNode will be the ArrayNode's input file, but the in-memory + // value utilized in propeller will be the individual value for each subNode execution. + // SubNode executions need to be able to read in and parse the individual value to execute correctly. + ArrayNode_SINGLE_INPUT_FILE ArrayNode_DataMode = 0 + // Indicates the ArrayNode's input is a list of input values that map to subNode executions. + // Propeller will create input files for each ArrayNode subNode by parsing the inputs and + // setting the InputBindings on each subNodeSpec. Both the file path and in-memory input values will + // be the individual value for each subNode execution. + ArrayNode_INDIVIDUAL_INPUT_FILES ArrayNode_DataMode = 1 +) + +// Enum value maps for ArrayNode_DataMode. 
+var ( + ArrayNode_DataMode_name = map[int32]string{ + 0: "SINGLE_INPUT_FILE", + 1: "INDIVIDUAL_INPUT_FILES", + } + ArrayNode_DataMode_value = map[string]int32{ + "SINGLE_INPUT_FILE": 0, + "INDIVIDUAL_INPUT_FILES": 1, + } +) + +func (x ArrayNode_DataMode) Enum() *ArrayNode_DataMode { + p := new(ArrayNode_DataMode) + *p = x + return p +} + +func (x ArrayNode_DataMode) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (ArrayNode_DataMode) Descriptor() protoreflect.EnumDescriptor { + return file_flyteidl_core_workflow_proto_enumTypes[1].Descriptor() +} + +func (ArrayNode_DataMode) Type() protoreflect.EnumType { + return &file_flyteidl_core_workflow_proto_enumTypes[1] +} + +func (x ArrayNode_DataMode) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use ArrayNode_DataMode.Descriptor instead. +func (ArrayNode_DataMode) EnumDescriptor() ([]byte, []int) { + return file_flyteidl_core_workflow_proto_rawDescGZIP(), []int{9, 1} +} + // Failure Handling Strategy type WorkflowMetadata_OnFailurePolicy int32 @@ -110,11 +164,11 @@ func (x WorkflowMetadata_OnFailurePolicy) String() string { } func (WorkflowMetadata_OnFailurePolicy) Descriptor() protoreflect.EnumDescriptor { - return file_flyteidl_core_workflow_proto_enumTypes[1].Descriptor() + return file_flyteidl_core_workflow_proto_enumTypes[2].Descriptor() } func (WorkflowMetadata_OnFailurePolicy) Type() protoreflect.EnumType { - return &file_flyteidl_core_workflow_proto_enumTypes[1] + return &file_flyteidl_core_workflow_proto_enumTypes[2] } func (x WorkflowMetadata_OnFailurePolicy) Number() protoreflect.EnumNumber { @@ -785,6 +839,10 @@ type ArrayNode struct { SuccessCriteria isArrayNode_SuccessCriteria `protobuf_oneof:"success_criteria"` // execution_mode determines the execution path for ArrayNode. 
ExecutionMode ArrayNode_ExecutionMode `protobuf:"varint,5,opt,name=execution_mode,json=executionMode,proto3,enum=flyteidl.core.ArrayNode_ExecutionMode" json:"execution_mode,omitempty"` + // Indicates whether the sub node's original interface was altered + IsOriginalSubNodeInterface *wrapperspb.BoolValue `protobuf:"bytes,6,opt,name=is_original_sub_node_interface,json=isOriginalSubNodeInterface,proto3" json:"is_original_sub_node_interface,omitempty"` + // data_mode determines how input data is passed to the sub-nodes + DataMode ArrayNode_DataMode `protobuf:"varint,7,opt,name=data_mode,json=dataMode,proto3,enum=flyteidl.core.ArrayNode_DataMode" json:"data_mode,omitempty"` } func (x *ArrayNode) Reset() { @@ -868,6 +926,20 @@ func (x *ArrayNode) GetExecutionMode() ArrayNode_ExecutionMode { return ArrayNode_MINIMAL_STATE } +func (x *ArrayNode) GetIsOriginalSubNodeInterface() *wrapperspb.BoolValue { + if x != nil { + return x.IsOriginalSubNodeInterface + } + return nil +} + +func (x *ArrayNode) GetDataMode() ArrayNode_DataMode { + if x != nil { + return x.DataMode + } + return ArrayNode_SINGLE_INPUT_FILE +} + type isArrayNode_ParallelismOption interface { isArrayNode_ParallelismOption() } @@ -1801,7 +1873,7 @@ var file_flyteidl_core_workflow_proto_rawDesc = []byte{ 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x53, 0x6c, 0x65, 0x65, 0x70, 0x43, 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x48, 0x00, 0x52, 0x05, 0x73, 0x6c, 0x65, 0x65, 0x70, 0x42, 0x0b, - 0x0a, 0x09, 0x63, 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0xda, 0x02, 0x0a, 0x09, + 0x0a, 0x09, 0x63, 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0xb9, 0x04, 0x0a, 0x09, 0x41, 0x72, 0x72, 0x61, 0x79, 0x4e, 0x6f, 0x64, 0x65, 0x12, 0x27, 0x0a, 0x04, 0x6e, 0x6f, 0x64, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 
0x2e, 0x4e, 0x6f, 0x64, 0x65, 0x52, 0x04, 0x6e, 0x6f, @@ -1817,166 +1889,180 @@ var file_flyteidl_core_workflow_proto_rawDesc = []byte{ 0x01, 0x28, 0x0e, 0x32, 0x26, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x41, 0x72, 0x72, 0x61, 0x79, 0x4e, 0x6f, 0x64, 0x65, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x6f, 0x64, 0x65, 0x52, 0x0d, 0x65, 0x78, 0x65, - 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x6f, 0x64, 0x65, 0x22, 0x32, 0x0a, 0x0d, 0x45, 0x78, + 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x6f, 0x64, 0x65, 0x12, 0x5e, 0x0a, 0x1e, 0x69, 0x73, + 0x5f, 0x6f, 0x72, 0x69, 0x67, 0x69, 0x6e, 0x61, 0x6c, 0x5f, 0x73, 0x75, 0x62, 0x5f, 0x6e, 0x6f, + 0x64, 0x65, 0x5f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x66, 0x61, 0x63, 0x65, 0x18, 0x06, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x42, 0x6f, 0x6f, 0x6c, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x1a, + 0x69, 0x73, 0x4f, 0x72, 0x69, 0x67, 0x69, 0x6e, 0x61, 0x6c, 0x53, 0x75, 0x62, 0x4e, 0x6f, 0x64, + 0x65, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x66, 0x61, 0x63, 0x65, 0x12, 0x3e, 0x0a, 0x09, 0x64, 0x61, + 0x74, 0x61, 0x5f, 0x6d, 0x6f, 0x64, 0x65, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x21, 0x2e, + 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x41, 0x72, + 0x72, 0x61, 0x79, 0x4e, 0x6f, 0x64, 0x65, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x4d, 0x6f, 0x64, 0x65, + 0x52, 0x08, 0x64, 0x61, 0x74, 0x61, 0x4d, 0x6f, 0x64, 0x65, 0x22, 0x32, 0x0a, 0x0d, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x6f, 0x64, 0x65, 0x12, 0x11, 0x0a, 0x0d, 0x4d, 0x49, 0x4e, 0x49, 0x4d, 0x41, 0x4c, 0x5f, 0x53, 0x54, 0x41, 0x54, 0x45, 0x10, 0x00, 0x12, 0x0e, - 0x0a, 0x0a, 0x46, 0x55, 0x4c, 0x4c, 0x5f, 0x53, 0x54, 0x41, 0x54, 0x45, 0x10, 0x01, 0x42, 0x14, - 0x0a, 0x12, 0x70, 0x61, 0x72, 0x61, 0x6c, 0x6c, 0x65, 0x6c, 0x69, 0x73, 0x6d, 0x5f, 0x6f, 0x70, - 0x74, 
0x69, 0x6f, 0x6e, 0x42, 0x12, 0x0a, 0x10, 0x73, 0x75, 0x63, 0x63, 0x65, 0x73, 0x73, 0x5f, - 0x63, 0x72, 0x69, 0x74, 0x65, 0x72, 0x69, 0x61, 0x22, 0x8c, 0x03, 0x0a, 0x0c, 0x4e, 0x6f, 0x64, - 0x65, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, - 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x33, 0x0a, - 0x07, 0x74, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, - 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, - 0x2e, 0x44, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x07, 0x74, 0x69, 0x6d, 0x65, 0x6f, - 0x75, 0x74, 0x12, 0x36, 0x0a, 0x07, 0x72, 0x65, 0x74, 0x72, 0x69, 0x65, 0x73, 0x18, 0x05, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, - 0x6f, 0x72, 0x65, 0x2e, 0x52, 0x65, 0x74, 0x72, 0x79, 0x53, 0x74, 0x72, 0x61, 0x74, 0x65, 0x67, - 0x79, 0x52, 0x07, 0x72, 0x65, 0x74, 0x72, 0x69, 0x65, 0x73, 0x12, 0x26, 0x0a, 0x0d, 0x69, 0x6e, - 0x74, 0x65, 0x72, 0x72, 0x75, 0x70, 0x74, 0x69, 0x62, 0x6c, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, - 0x08, 0x48, 0x00, 0x52, 0x0d, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x72, 0x75, 0x70, 0x74, 0x69, 0x62, - 0x6c, 0x65, 0x12, 0x1e, 0x0a, 0x09, 0x63, 0x61, 0x63, 0x68, 0x65, 0x61, 0x62, 0x6c, 0x65, 0x18, - 0x07, 0x20, 0x01, 0x28, 0x08, 0x48, 0x01, 0x52, 0x09, 0x63, 0x61, 0x63, 0x68, 0x65, 0x61, 0x62, - 0x6c, 0x65, 0x12, 0x25, 0x0a, 0x0d, 0x63, 0x61, 0x63, 0x68, 0x65, 0x5f, 0x76, 0x65, 0x72, 0x73, - 0x69, 0x6f, 0x6e, 0x18, 0x08, 0x20, 0x01, 0x28, 0x09, 0x48, 0x02, 0x52, 0x0c, 0x63, 0x61, 0x63, - 0x68, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x2f, 0x0a, 0x12, 0x63, 0x61, 0x63, - 0x68, 0x65, 0x5f, 0x73, 0x65, 0x72, 0x69, 0x61, 0x6c, 0x69, 0x7a, 0x61, 0x62, 0x6c, 0x65, 0x18, - 0x09, 0x20, 0x01, 0x28, 0x08, 0x48, 0x03, 0x52, 0x11, 0x63, 0x61, 0x63, 0x68, 0x65, 0x53, 0x65, - 0x72, 0x69, 0x61, 0x6c, 0x69, 0x7a, 0x61, 
0x62, 0x6c, 0x65, 0x42, 0x15, 0x0a, 0x13, 0x69, 0x6e, - 0x74, 0x65, 0x72, 0x72, 0x75, 0x70, 0x74, 0x69, 0x62, 0x6c, 0x65, 0x5f, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x42, 0x11, 0x0a, 0x0f, 0x63, 0x61, 0x63, 0x68, 0x65, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x42, 0x15, 0x0a, 0x13, 0x63, 0x61, 0x63, 0x68, 0x65, 0x5f, 0x76, 0x65, - 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x42, 0x1a, 0x0a, 0x18, 0x63, - 0x61, 0x63, 0x68, 0x65, 0x5f, 0x73, 0x65, 0x72, 0x69, 0x61, 0x6c, 0x69, 0x7a, 0x61, 0x62, 0x6c, - 0x65, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x2f, 0x0a, 0x05, 0x41, 0x6c, 0x69, 0x61, 0x73, - 0x12, 0x10, 0x0a, 0x03, 0x76, 0x61, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x76, - 0x61, 0x72, 0x12, 0x14, 0x0a, 0x05, 0x61, 0x6c, 0x69, 0x61, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x05, 0x61, 0x6c, 0x69, 0x61, 0x73, 0x22, 0x9f, 0x04, 0x0a, 0x04, 0x4e, 0x6f, 0x64, - 0x65, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, - 0x64, 0x12, 0x37, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, - 0x6f, 0x72, 0x65, 0x2e, 0x4e, 0x6f, 0x64, 0x65, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, - 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x2e, 0x0a, 0x06, 0x69, 0x6e, - 0x70, 0x75, 0x74, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x66, 0x6c, 0x79, - 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x42, 0x69, 0x6e, 0x64, 0x69, - 0x6e, 0x67, 0x52, 0x06, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x73, 0x12, 0x2a, 0x0a, 0x11, 0x75, 0x70, - 0x73, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x5f, 0x6e, 0x6f, 0x64, 0x65, 0x5f, 0x69, 0x64, 0x73, 0x18, - 0x04, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0f, 0x75, 0x70, 0x73, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x4e, - 0x6f, 0x64, 0x65, 0x49, 0x64, 0x73, 0x12, 0x3b, 0x0a, 0x0e, 0x6f, 0x75, 0x74, 
0x70, 0x75, 0x74, - 0x5f, 0x61, 0x6c, 0x69, 0x61, 0x73, 0x65, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x14, - 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x41, - 0x6c, 0x69, 0x61, 0x73, 0x52, 0x0d, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x41, 0x6c, 0x69, 0x61, - 0x73, 0x65, 0x73, 0x12, 0x36, 0x0a, 0x09, 0x74, 0x61, 0x73, 0x6b, 0x5f, 0x6e, 0x6f, 0x64, 0x65, - 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, - 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x54, 0x61, 0x73, 0x6b, 0x4e, 0x6f, 0x64, 0x65, 0x48, - 0x00, 0x52, 0x08, 0x74, 0x61, 0x73, 0x6b, 0x4e, 0x6f, 0x64, 0x65, 0x12, 0x42, 0x0a, 0x0d, 0x77, - 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x6e, 0x6f, 0x64, 0x65, 0x18, 0x07, 0x20, 0x01, + 0x0a, 0x0a, 0x46, 0x55, 0x4c, 0x4c, 0x5f, 0x53, 0x54, 0x41, 0x54, 0x45, 0x10, 0x01, 0x22, 0x3d, + 0x0a, 0x08, 0x44, 0x61, 0x74, 0x61, 0x4d, 0x6f, 0x64, 0x65, 0x12, 0x15, 0x0a, 0x11, 0x53, 0x49, + 0x4e, 0x47, 0x4c, 0x45, 0x5f, 0x49, 0x4e, 0x50, 0x55, 0x54, 0x5f, 0x46, 0x49, 0x4c, 0x45, 0x10, + 0x00, 0x12, 0x1a, 0x0a, 0x16, 0x49, 0x4e, 0x44, 0x49, 0x56, 0x49, 0x44, 0x55, 0x41, 0x4c, 0x5f, + 0x49, 0x4e, 0x50, 0x55, 0x54, 0x5f, 0x46, 0x49, 0x4c, 0x45, 0x53, 0x10, 0x01, 0x42, 0x14, 0x0a, + 0x12, 0x70, 0x61, 0x72, 0x61, 0x6c, 0x6c, 0x65, 0x6c, 0x69, 0x73, 0x6d, 0x5f, 0x6f, 0x70, 0x74, + 0x69, 0x6f, 0x6e, 0x42, 0x12, 0x0a, 0x10, 0x73, 0x75, 0x63, 0x63, 0x65, 0x73, 0x73, 0x5f, 0x63, + 0x72, 0x69, 0x74, 0x65, 0x72, 0x69, 0x61, 0x22, 0x8c, 0x03, 0x0a, 0x0c, 0x4e, 0x6f, 0x64, 0x65, + 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x33, 0x0a, 0x07, + 0x74, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, + 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, + 0x44, 0x75, 0x72, 
0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x07, 0x74, 0x69, 0x6d, 0x65, 0x6f, 0x75, + 0x74, 0x12, 0x36, 0x0a, 0x07, 0x72, 0x65, 0x74, 0x72, 0x69, 0x65, 0x73, 0x18, 0x05, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, + 0x72, 0x65, 0x2e, 0x52, 0x65, 0x74, 0x72, 0x79, 0x53, 0x74, 0x72, 0x61, 0x74, 0x65, 0x67, 0x79, + 0x52, 0x07, 0x72, 0x65, 0x74, 0x72, 0x69, 0x65, 0x73, 0x12, 0x26, 0x0a, 0x0d, 0x69, 0x6e, 0x74, + 0x65, 0x72, 0x72, 0x75, 0x70, 0x74, 0x69, 0x62, 0x6c, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x08, + 0x48, 0x00, 0x52, 0x0d, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x72, 0x75, 0x70, 0x74, 0x69, 0x62, 0x6c, + 0x65, 0x12, 0x1e, 0x0a, 0x09, 0x63, 0x61, 0x63, 0x68, 0x65, 0x61, 0x62, 0x6c, 0x65, 0x18, 0x07, + 0x20, 0x01, 0x28, 0x08, 0x48, 0x01, 0x52, 0x09, 0x63, 0x61, 0x63, 0x68, 0x65, 0x61, 0x62, 0x6c, + 0x65, 0x12, 0x25, 0x0a, 0x0d, 0x63, 0x61, 0x63, 0x68, 0x65, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, + 0x6f, 0x6e, 0x18, 0x08, 0x20, 0x01, 0x28, 0x09, 0x48, 0x02, 0x52, 0x0c, 0x63, 0x61, 0x63, 0x68, + 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x2f, 0x0a, 0x12, 0x63, 0x61, 0x63, 0x68, + 0x65, 0x5f, 0x73, 0x65, 0x72, 0x69, 0x61, 0x6c, 0x69, 0x7a, 0x61, 0x62, 0x6c, 0x65, 0x18, 0x09, + 0x20, 0x01, 0x28, 0x08, 0x48, 0x03, 0x52, 0x11, 0x63, 0x61, 0x63, 0x68, 0x65, 0x53, 0x65, 0x72, + 0x69, 0x61, 0x6c, 0x69, 0x7a, 0x61, 0x62, 0x6c, 0x65, 0x42, 0x15, 0x0a, 0x13, 0x69, 0x6e, 0x74, + 0x65, 0x72, 0x72, 0x75, 0x70, 0x74, 0x69, 0x62, 0x6c, 0x65, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x42, 0x11, 0x0a, 0x0f, 0x63, 0x61, 0x63, 0x68, 0x65, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x42, 0x15, 0x0a, 0x13, 0x63, 0x61, 0x63, 0x68, 0x65, 0x5f, 0x76, 0x65, 0x72, + 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x42, 0x1a, 0x0a, 0x18, 0x63, 0x61, + 0x63, 0x68, 0x65, 0x5f, 0x73, 0x65, 0x72, 0x69, 0x61, 0x6c, 0x69, 0x7a, 0x61, 0x62, 0x6c, 0x65, + 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x2f, 0x0a, 
0x05, 0x41, 0x6c, 0x69, 0x61, 0x73, 0x12, + 0x10, 0x0a, 0x03, 0x76, 0x61, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x76, 0x61, + 0x72, 0x12, 0x14, 0x0a, 0x05, 0x61, 0x6c, 0x69, 0x61, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x05, 0x61, 0x6c, 0x69, 0x61, 0x73, 0x22, 0x9f, 0x04, 0x0a, 0x04, 0x4e, 0x6f, 0x64, 0x65, + 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, + 0x12, 0x37, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, - 0x72, 0x65, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x4e, 0x6f, 0x64, 0x65, 0x48, - 0x00, 0x52, 0x0c, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x4e, 0x6f, 0x64, 0x65, 0x12, - 0x3c, 0x0a, 0x0b, 0x62, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x5f, 0x6e, 0x6f, 0x64, 0x65, 0x18, 0x08, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, - 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x42, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x4e, 0x6f, 0x64, 0x65, 0x48, - 0x00, 0x52, 0x0a, 0x62, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x4e, 0x6f, 0x64, 0x65, 0x12, 0x36, 0x0a, - 0x09, 0x67, 0x61, 0x74, 0x65, 0x5f, 0x6e, 0x6f, 0x64, 0x65, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0b, - 0x32, 0x17, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, - 0x2e, 0x47, 0x61, 0x74, 0x65, 0x4e, 0x6f, 0x64, 0x65, 0x48, 0x00, 0x52, 0x08, 0x67, 0x61, 0x74, - 0x65, 0x4e, 0x6f, 0x64, 0x65, 0x12, 0x39, 0x0a, 0x0a, 0x61, 0x72, 0x72, 0x61, 0x79, 0x5f, 0x6e, - 0x6f, 0x64, 0x65, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x66, 0x6c, 0x79, 0x74, - 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x41, 0x72, 0x72, 0x61, 0x79, 0x4e, - 0x6f, 0x64, 0x65, 0x48, 0x00, 0x52, 0x09, 0x61, 0x72, 0x72, 0x61, 0x79, 0x4e, 0x6f, 0x64, 0x65, - 0x42, 0x08, 0x0a, 0x06, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x22, 0xfc, 0x02, 0x0a, 0x10, 0x57, 
- 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, - 0x4d, 0x0a, 0x12, 0x71, 0x75, 0x61, 0x6c, 0x69, 0x74, 0x79, 0x5f, 0x6f, 0x66, 0x5f, 0x73, 0x65, - 0x72, 0x76, 0x69, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x66, 0x6c, - 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x51, 0x75, 0x61, 0x6c, - 0x69, 0x74, 0x79, 0x4f, 0x66, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x52, 0x10, 0x71, 0x75, - 0x61, 0x6c, 0x69, 0x74, 0x79, 0x4f, 0x66, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x4e, - 0x0a, 0x0a, 0x6f, 0x6e, 0x5f, 0x66, 0x61, 0x69, 0x6c, 0x75, 0x72, 0x65, 0x18, 0x02, 0x20, 0x01, - 0x28, 0x0e, 0x32, 0x2f, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, - 0x72, 0x65, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x4d, 0x65, 0x74, 0x61, 0x64, - 0x61, 0x74, 0x61, 0x2e, 0x4f, 0x6e, 0x46, 0x61, 0x69, 0x6c, 0x75, 0x72, 0x65, 0x50, 0x6f, 0x6c, - 0x69, 0x63, 0x79, 0x52, 0x09, 0x6f, 0x6e, 0x46, 0x61, 0x69, 0x6c, 0x75, 0x72, 0x65, 0x12, 0x3d, - 0x0a, 0x04, 0x74, 0x61, 0x67, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x29, 0x2e, 0x66, - 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x57, 0x6f, 0x72, - 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x54, 0x61, - 0x67, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x04, 0x74, 0x61, 0x67, 0x73, 0x1a, 0x37, 0x0a, - 0x09, 0x54, 0x61, 0x67, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, - 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x51, 0x0a, 0x0f, 0x4f, 0x6e, 0x46, 0x61, 0x69, 0x6c, - 0x75, 0x72, 0x65, 0x50, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x12, 0x14, 0x0a, 0x10, 0x46, 0x41, 0x49, - 0x4c, 0x5f, 0x49, 0x4d, 0x4d, 0x45, 
0x44, 0x49, 0x41, 0x54, 0x45, 0x4c, 0x59, 0x10, 0x00, 0x12, - 0x28, 0x0a, 0x24, 0x46, 0x41, 0x49, 0x4c, 0x5f, 0x41, 0x46, 0x54, 0x45, 0x52, 0x5f, 0x45, 0x58, - 0x45, 0x43, 0x55, 0x54, 0x41, 0x42, 0x4c, 0x45, 0x5f, 0x4e, 0x4f, 0x44, 0x45, 0x53, 0x5f, 0x43, - 0x4f, 0x4d, 0x50, 0x4c, 0x45, 0x54, 0x45, 0x10, 0x01, 0x22, 0x40, 0x0a, 0x18, 0x57, 0x6f, 0x72, - 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x44, 0x65, 0x66, - 0x61, 0x75, 0x6c, 0x74, 0x73, 0x12, 0x24, 0x0a, 0x0d, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x72, 0x75, - 0x70, 0x74, 0x69, 0x62, 0x6c, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0d, 0x69, 0x6e, - 0x74, 0x65, 0x72, 0x72, 0x75, 0x70, 0x74, 0x69, 0x62, 0x6c, 0x65, 0x22, 0xa2, 0x03, 0x0a, 0x10, - 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, - 0x12, 0x29, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x66, - 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x49, 0x64, 0x65, - 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x52, 0x02, 0x69, 0x64, 0x12, 0x3b, 0x0a, 0x08, 0x6d, - 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, - 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x57, 0x6f, - 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, 0x08, - 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x3b, 0x0a, 0x09, 0x69, 0x6e, 0x74, 0x65, - 0x72, 0x66, 0x61, 0x63, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x66, 0x6c, - 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x54, 0x79, 0x70, 0x65, - 0x64, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x66, 0x61, 0x63, 0x65, 0x52, 0x09, 0x69, 0x6e, 0x74, 0x65, - 0x72, 0x66, 0x61, 0x63, 0x65, 0x12, 0x29, 0x0a, 0x05, 0x6e, 0x6f, 0x64, 0x65, 0x73, 0x18, 0x04, - 0x20, 0x03, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 
0x69, 0x64, 0x6c, 0x2e, - 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4e, 0x6f, 0x64, 0x65, 0x52, 0x05, 0x6e, 0x6f, 0x64, 0x65, 0x73, - 0x12, 0x30, 0x0a, 0x07, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, - 0x0b, 0x32, 0x16, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, - 0x65, 0x2e, 0x42, 0x69, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x52, 0x07, 0x6f, 0x75, 0x74, 0x70, 0x75, - 0x74, 0x73, 0x12, 0x36, 0x0a, 0x0c, 0x66, 0x61, 0x69, 0x6c, 0x75, 0x72, 0x65, 0x5f, 0x6e, 0x6f, - 0x64, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, - 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4e, 0x6f, 0x64, 0x65, 0x52, 0x0b, 0x66, - 0x61, 0x69, 0x6c, 0x75, 0x72, 0x65, 0x4e, 0x6f, 0x64, 0x65, 0x12, 0x54, 0x0a, 0x11, 0x6d, 0x65, - 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x5f, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x73, 0x18, - 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, - 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x4d, 0x65, - 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x44, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x73, 0x52, 0x10, - 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x44, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x73, - 0x22, 0xc5, 0x01, 0x0a, 0x11, 0x54, 0x61, 0x73, 0x6b, 0x4e, 0x6f, 0x64, 0x65, 0x4f, 0x76, 0x65, - 0x72, 0x72, 0x69, 0x64, 0x65, 0x73, 0x12, 0x36, 0x0a, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, - 0x63, 0x65, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x66, 0x6c, 0x79, 0x74, - 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, - 0x63, 0x65, 0x73, 0x52, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x12, 0x4f, - 0x0a, 0x12, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x64, 0x65, 0x64, 0x5f, 0x72, 0x65, 0x73, 0x6f, 0x75, - 0x72, 0x63, 0x65, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x66, 0x6c, 0x79, - 0x74, 0x65, 
0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x45, 0x78, 0x74, 0x65, 0x6e, - 0x64, 0x65, 0x64, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x52, 0x11, 0x65, 0x78, - 0x74, 0x65, 0x6e, 0x64, 0x65, 0x64, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x12, - 0x27, 0x0a, 0x0f, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x5f, 0x69, 0x6d, 0x61, - 0x67, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, - 0x6e, 0x65, 0x72, 0x49, 0x6d, 0x61, 0x67, 0x65, 0x22, 0xba, 0x01, 0x0a, 0x12, 0x4c, 0x61, 0x75, - 0x6e, 0x63, 0x68, 0x50, 0x6c, 0x61, 0x6e, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x12, + 0x72, 0x65, 0x2e, 0x4e, 0x6f, 0x64, 0x65, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, + 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x2e, 0x0a, 0x06, 0x69, 0x6e, 0x70, + 0x75, 0x74, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x66, 0x6c, 0x79, 0x74, + 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x42, 0x69, 0x6e, 0x64, 0x69, 0x6e, + 0x67, 0x52, 0x06, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x73, 0x12, 0x2a, 0x0a, 0x11, 0x75, 0x70, 0x73, + 0x74, 0x72, 0x65, 0x61, 0x6d, 0x5f, 0x6e, 0x6f, 0x64, 0x65, 0x5f, 0x69, 0x64, 0x73, 0x18, 0x04, + 0x20, 0x03, 0x28, 0x09, 0x52, 0x0f, 0x75, 0x70, 0x73, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x4e, 0x6f, + 0x64, 0x65, 0x49, 0x64, 0x73, 0x12, 0x3b, 0x0a, 0x0e, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x5f, + 0x61, 0x6c, 0x69, 0x61, 0x73, 0x65, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x14, 0x2e, + 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x41, 0x6c, + 0x69, 0x61, 0x73, 0x52, 0x0d, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x41, 0x6c, 0x69, 0x61, 0x73, + 0x65, 0x73, 0x12, 0x36, 0x0a, 0x09, 0x74, 0x61, 0x73, 0x6b, 0x5f, 0x6e, 0x6f, 0x64, 0x65, 0x18, + 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, + 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x54, 0x61, 
0x73, 0x6b, 0x4e, 0x6f, 0x64, 0x65, 0x48, 0x00, + 0x52, 0x08, 0x74, 0x61, 0x73, 0x6b, 0x4e, 0x6f, 0x64, 0x65, 0x12, 0x42, 0x0a, 0x0d, 0x77, 0x6f, + 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x6e, 0x6f, 0x64, 0x65, 0x18, 0x07, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x1b, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, + 0x65, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x4e, 0x6f, 0x64, 0x65, 0x48, 0x00, + 0x52, 0x0c, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x4e, 0x6f, 0x64, 0x65, 0x12, 0x3c, + 0x0a, 0x0b, 0x62, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x5f, 0x6e, 0x6f, 0x64, 0x65, 0x18, 0x08, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, + 0x6f, 0x72, 0x65, 0x2e, 0x42, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x4e, 0x6f, 0x64, 0x65, 0x48, 0x00, + 0x52, 0x0a, 0x62, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x4e, 0x6f, 0x64, 0x65, 0x12, 0x36, 0x0a, 0x09, + 0x67, 0x61, 0x74, 0x65, 0x5f, 0x6e, 0x6f, 0x64, 0x65, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x17, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, + 0x47, 0x61, 0x74, 0x65, 0x4e, 0x6f, 0x64, 0x65, 0x48, 0x00, 0x52, 0x08, 0x67, 0x61, 0x74, 0x65, + 0x4e, 0x6f, 0x64, 0x65, 0x12, 0x39, 0x0a, 0x0a, 0x61, 0x72, 0x72, 0x61, 0x79, 0x5f, 0x6e, 0x6f, + 0x64, 0x65, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, + 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x41, 0x72, 0x72, 0x61, 0x79, 0x4e, 0x6f, + 0x64, 0x65, 0x48, 0x00, 0x52, 0x09, 0x61, 0x72, 0x72, 0x61, 0x79, 0x4e, 0x6f, 0x64, 0x65, 0x42, + 0x08, 0x0a, 0x06, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x22, 0xfc, 0x02, 0x0a, 0x10, 0x57, 0x6f, + 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x4d, + 0x0a, 0x12, 0x71, 0x75, 0x61, 0x6c, 0x69, 0x74, 0x79, 0x5f, 0x6f, 0x66, 0x5f, 0x73, 0x65, 0x72, + 0x76, 0x69, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x66, 
0x6c, 0x79, + 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x51, 0x75, 0x61, 0x6c, 0x69, + 0x74, 0x79, 0x4f, 0x66, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x52, 0x10, 0x71, 0x75, 0x61, + 0x6c, 0x69, 0x74, 0x79, 0x4f, 0x66, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x4e, 0x0a, + 0x0a, 0x6f, 0x6e, 0x5f, 0x66, 0x61, 0x69, 0x6c, 0x75, 0x72, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x0e, 0x32, 0x2f, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, + 0x65, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, + 0x74, 0x61, 0x2e, 0x4f, 0x6e, 0x46, 0x61, 0x69, 0x6c, 0x75, 0x72, 0x65, 0x50, 0x6f, 0x6c, 0x69, + 0x63, 0x79, 0x52, 0x09, 0x6f, 0x6e, 0x46, 0x61, 0x69, 0x6c, 0x75, 0x72, 0x65, 0x12, 0x3d, 0x0a, + 0x04, 0x74, 0x61, 0x67, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x29, 0x2e, 0x66, 0x6c, + 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x57, 0x6f, 0x72, 0x6b, + 0x66, 0x6c, 0x6f, 0x77, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x54, 0x61, 0x67, + 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x04, 0x74, 0x61, 0x67, 0x73, 0x1a, 0x37, 0x0a, 0x09, + 0x54, 0x61, 0x67, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x51, 0x0a, 0x0f, 0x4f, 0x6e, 0x46, 0x61, 0x69, 0x6c, 0x75, + 0x72, 0x65, 0x50, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x12, 0x14, 0x0a, 0x10, 0x46, 0x41, 0x49, 0x4c, + 0x5f, 0x49, 0x4d, 0x4d, 0x45, 0x44, 0x49, 0x41, 0x54, 0x45, 0x4c, 0x59, 0x10, 0x00, 0x12, 0x28, + 0x0a, 0x24, 0x46, 0x41, 0x49, 0x4c, 0x5f, 0x41, 0x46, 0x54, 0x45, 0x52, 0x5f, 0x45, 0x58, 0x45, + 0x43, 0x55, 0x54, 0x41, 0x42, 0x4c, 0x45, 0x5f, 0x4e, 0x4f, 0x44, 0x45, 0x53, 0x5f, 0x43, 0x4f, + 0x4d, 0x50, 0x4c, 0x45, 
0x54, 0x45, 0x10, 0x01, 0x22, 0x40, 0x0a, 0x18, 0x57, 0x6f, 0x72, 0x6b, + 0x66, 0x6c, 0x6f, 0x77, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x44, 0x65, 0x66, 0x61, + 0x75, 0x6c, 0x74, 0x73, 0x12, 0x24, 0x0a, 0x0d, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x72, 0x75, 0x70, + 0x74, 0x69, 0x62, 0x6c, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0d, 0x69, 0x6e, 0x74, + 0x65, 0x72, 0x72, 0x75, 0x70, 0x74, 0x69, 0x62, 0x6c, 0x65, 0x22, 0xa2, 0x03, 0x0a, 0x10, 0x57, + 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x12, 0x29, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x49, 0x64, 0x65, 0x6e, - 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x52, 0x02, 0x69, 0x64, 0x12, 0x3b, 0x0a, 0x09, 0x69, 0x6e, - 0x74, 0x65, 0x72, 0x66, 0x61, 0x63, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, - 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x54, 0x79, - 0x70, 0x65, 0x64, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x66, 0x61, 0x63, 0x65, 0x52, 0x09, 0x69, 0x6e, - 0x74, 0x65, 0x72, 0x66, 0x61, 0x63, 0x65, 0x12, 0x3c, 0x0a, 0x0c, 0x66, 0x69, 0x78, 0x65, 0x64, - 0x5f, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, - 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4c, 0x69, - 0x74, 0x65, 0x72, 0x61, 0x6c, 0x4d, 0x61, 0x70, 0x52, 0x0b, 0x66, 0x69, 0x78, 0x65, 0x64, 0x49, - 0x6e, 0x70, 0x75, 0x74, 0x73, 0x42, 0xb3, 0x01, 0x0a, 0x11, 0x63, 0x6f, 0x6d, 0x2e, 0x66, 0x6c, - 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x42, 0x0d, 0x57, 0x6f, 0x72, - 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x3a, 0x67, 0x69, - 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x6f, 0x72, - 0x67, 0x2f, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x2f, 0x66, 0x6c, 0x79, 
0x74, 0x65, 0x69, 0x64, 0x6c, - 0x2f, 0x67, 0x65, 0x6e, 0x2f, 0x70, 0x62, 0x2d, 0x67, 0x6f, 0x2f, 0x66, 0x6c, 0x79, 0x74, 0x65, - 0x69, 0x64, 0x6c, 0x2f, 0x63, 0x6f, 0x72, 0x65, 0xa2, 0x02, 0x03, 0x46, 0x43, 0x58, 0xaa, 0x02, - 0x0d, 0x46, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x43, 0x6f, 0x72, 0x65, 0xca, 0x02, - 0x0d, 0x46, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x5c, 0x43, 0x6f, 0x72, 0x65, 0xe2, 0x02, - 0x19, 0x46, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x5c, 0x43, 0x6f, 0x72, 0x65, 0x5c, 0x47, - 0x50, 0x42, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0xea, 0x02, 0x0e, 0x46, 0x6c, 0x79, - 0x74, 0x65, 0x69, 0x64, 0x6c, 0x3a, 0x3a, 0x43, 0x6f, 0x72, 0x65, 0x62, 0x06, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x33, + 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x52, 0x02, 0x69, 0x64, 0x12, 0x3b, 0x0a, 0x08, 0x6d, 0x65, + 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x66, + 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x57, 0x6f, 0x72, + 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, 0x08, 0x6d, + 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x3b, 0x0a, 0x09, 0x69, 0x6e, 0x74, 0x65, 0x72, + 0x66, 0x61, 0x63, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x66, 0x6c, 0x79, + 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x54, 0x79, 0x70, 0x65, 0x64, + 0x49, 0x6e, 0x74, 0x65, 0x72, 0x66, 0x61, 0x63, 0x65, 0x52, 0x09, 0x69, 0x6e, 0x74, 0x65, 0x72, + 0x66, 0x61, 0x63, 0x65, 0x12, 0x29, 0x0a, 0x05, 0x6e, 0x6f, 0x64, 0x65, 0x73, 0x18, 0x04, 0x20, + 0x03, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, + 0x6f, 0x72, 0x65, 0x2e, 0x4e, 0x6f, 0x64, 0x65, 0x52, 0x05, 0x6e, 0x6f, 0x64, 0x65, 0x73, 0x12, + 0x30, 0x0a, 0x07, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, + 0x32, 0x16, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 
0x72, 0x65, + 0x2e, 0x42, 0x69, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x52, 0x07, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, + 0x73, 0x12, 0x36, 0x0a, 0x0c, 0x66, 0x61, 0x69, 0x6c, 0x75, 0x72, 0x65, 0x5f, 0x6e, 0x6f, 0x64, + 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, + 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4e, 0x6f, 0x64, 0x65, 0x52, 0x0b, 0x66, 0x61, + 0x69, 0x6c, 0x75, 0x72, 0x65, 0x4e, 0x6f, 0x64, 0x65, 0x12, 0x54, 0x0a, 0x11, 0x6d, 0x65, 0x74, + 0x61, 0x64, 0x61, 0x74, 0x61, 0x5f, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x73, 0x18, 0x07, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, + 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x4d, 0x65, 0x74, + 0x61, 0x64, 0x61, 0x74, 0x61, 0x44, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x73, 0x52, 0x10, 0x6d, + 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x44, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x73, 0x22, + 0xc5, 0x01, 0x0a, 0x11, 0x54, 0x61, 0x73, 0x6b, 0x4e, 0x6f, 0x64, 0x65, 0x4f, 0x76, 0x65, 0x72, + 0x72, 0x69, 0x64, 0x65, 0x73, 0x12, 0x36, 0x0a, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, + 0x65, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, + 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, + 0x65, 0x73, 0x52, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x12, 0x4f, 0x0a, + 0x12, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x64, 0x65, 0x64, 0x5f, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, + 0x63, 0x65, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x66, 0x6c, 0x79, 0x74, + 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x45, 0x78, 0x74, 0x65, 0x6e, 0x64, + 0x65, 0x64, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x52, 0x11, 0x65, 0x78, 0x74, + 0x65, 0x6e, 0x64, 0x65, 0x64, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x12, 0x27, + 0x0a, 0x0f, 0x63, 0x6f, 
0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x5f, 0x69, 0x6d, 0x61, 0x67, + 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, + 0x65, 0x72, 0x49, 0x6d, 0x61, 0x67, 0x65, 0x22, 0xba, 0x01, 0x0a, 0x12, 0x4c, 0x61, 0x75, 0x6e, + 0x63, 0x68, 0x50, 0x6c, 0x61, 0x6e, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x12, 0x29, + 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x66, 0x6c, 0x79, + 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x49, 0x64, 0x65, 0x6e, 0x74, + 0x69, 0x66, 0x69, 0x65, 0x72, 0x52, 0x02, 0x69, 0x64, 0x12, 0x3b, 0x0a, 0x09, 0x69, 0x6e, 0x74, + 0x65, 0x72, 0x66, 0x61, 0x63, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x66, + 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x54, 0x79, 0x70, + 0x65, 0x64, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x66, 0x61, 0x63, 0x65, 0x52, 0x09, 0x69, 0x6e, 0x74, + 0x65, 0x72, 0x66, 0x61, 0x63, 0x65, 0x12, 0x3c, 0x0a, 0x0c, 0x66, 0x69, 0x78, 0x65, 0x64, 0x5f, + 0x69, 0x6e, 0x70, 0x75, 0x74, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x66, + 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4c, 0x69, 0x74, + 0x65, 0x72, 0x61, 0x6c, 0x4d, 0x61, 0x70, 0x52, 0x0b, 0x66, 0x69, 0x78, 0x65, 0x64, 0x49, 0x6e, + 0x70, 0x75, 0x74, 0x73, 0x42, 0xb3, 0x01, 0x0a, 0x11, 0x63, 0x6f, 0x6d, 0x2e, 0x66, 0x6c, 0x79, + 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x42, 0x0d, 0x57, 0x6f, 0x72, 0x6b, + 0x66, 0x6c, 0x6f, 0x77, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x3a, 0x67, 0x69, 0x74, + 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x6f, 0x72, 0x67, + 0x2f, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x2f, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2f, + 0x67, 0x65, 0x6e, 0x2f, 0x70, 0x62, 0x2d, 0x67, 0x6f, 0x2f, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, + 0x64, 0x6c, 0x2f, 0x63, 0x6f, 0x72, 0x65, 0xa2, 0x02, 0x03, 
0x46, 0x43, 0x58, 0xaa, 0x02, 0x0d, + 0x46, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x43, 0x6f, 0x72, 0x65, 0xca, 0x02, 0x0d, + 0x46, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x5c, 0x43, 0x6f, 0x72, 0x65, 0xe2, 0x02, 0x19, + 0x46, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x5c, 0x43, 0x6f, 0x72, 0x65, 0x5c, 0x47, 0x50, + 0x42, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0xea, 0x02, 0x0e, 0x46, 0x6c, 0x79, 0x74, + 0x65, 0x69, 0x64, 0x6c, 0x3a, 0x3a, 0x43, 0x6f, 0x72, 0x65, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x33, } var ( @@ -1991,92 +2077,96 @@ func file_flyteidl_core_workflow_proto_rawDescGZIP() []byte { return file_flyteidl_core_workflow_proto_rawDescData } -var file_flyteidl_core_workflow_proto_enumTypes = make([]protoimpl.EnumInfo, 2) +var file_flyteidl_core_workflow_proto_enumTypes = make([]protoimpl.EnumInfo, 3) var file_flyteidl_core_workflow_proto_msgTypes = make([]protoimpl.MessageInfo, 19) var file_flyteidl_core_workflow_proto_goTypes = []interface{}{ (ArrayNode_ExecutionMode)(0), // 0: flyteidl.core.ArrayNode.ExecutionMode - (WorkflowMetadata_OnFailurePolicy)(0), // 1: flyteidl.core.WorkflowMetadata.OnFailurePolicy - (*IfBlock)(nil), // 2: flyteidl.core.IfBlock - (*IfElseBlock)(nil), // 3: flyteidl.core.IfElseBlock - (*BranchNode)(nil), // 4: flyteidl.core.BranchNode - (*TaskNode)(nil), // 5: flyteidl.core.TaskNode - (*WorkflowNode)(nil), // 6: flyteidl.core.WorkflowNode - (*ApproveCondition)(nil), // 7: flyteidl.core.ApproveCondition - (*SignalCondition)(nil), // 8: flyteidl.core.SignalCondition - (*SleepCondition)(nil), // 9: flyteidl.core.SleepCondition - (*GateNode)(nil), // 10: flyteidl.core.GateNode - (*ArrayNode)(nil), // 11: flyteidl.core.ArrayNode - (*NodeMetadata)(nil), // 12: flyteidl.core.NodeMetadata - (*Alias)(nil), // 13: flyteidl.core.Alias - (*Node)(nil), // 14: flyteidl.core.Node - (*WorkflowMetadata)(nil), // 15: flyteidl.core.WorkflowMetadata - (*WorkflowMetadataDefaults)(nil), // 16: 
flyteidl.core.WorkflowMetadataDefaults - (*WorkflowTemplate)(nil), // 17: flyteidl.core.WorkflowTemplate - (*TaskNodeOverrides)(nil), // 18: flyteidl.core.TaskNodeOverrides - (*LaunchPlanTemplate)(nil), // 19: flyteidl.core.LaunchPlanTemplate - nil, // 20: flyteidl.core.WorkflowMetadata.TagsEntry - (*BooleanExpression)(nil), // 21: flyteidl.core.BooleanExpression - (*Error)(nil), // 22: flyteidl.core.Error - (*Identifier)(nil), // 23: flyteidl.core.Identifier - (*LiteralType)(nil), // 24: flyteidl.core.LiteralType - (*durationpb.Duration)(nil), // 25: google.protobuf.Duration - (*RetryStrategy)(nil), // 26: flyteidl.core.RetryStrategy - (*Binding)(nil), // 27: flyteidl.core.Binding - (*QualityOfService)(nil), // 28: flyteidl.core.QualityOfService - (*TypedInterface)(nil), // 29: flyteidl.core.TypedInterface - (*Resources)(nil), // 30: flyteidl.core.Resources - (*ExtendedResources)(nil), // 31: flyteidl.core.ExtendedResources - (*LiteralMap)(nil), // 32: flyteidl.core.LiteralMap + (ArrayNode_DataMode)(0), // 1: flyteidl.core.ArrayNode.DataMode + (WorkflowMetadata_OnFailurePolicy)(0), // 2: flyteidl.core.WorkflowMetadata.OnFailurePolicy + (*IfBlock)(nil), // 3: flyteidl.core.IfBlock + (*IfElseBlock)(nil), // 4: flyteidl.core.IfElseBlock + (*BranchNode)(nil), // 5: flyteidl.core.BranchNode + (*TaskNode)(nil), // 6: flyteidl.core.TaskNode + (*WorkflowNode)(nil), // 7: flyteidl.core.WorkflowNode + (*ApproveCondition)(nil), // 8: flyteidl.core.ApproveCondition + (*SignalCondition)(nil), // 9: flyteidl.core.SignalCondition + (*SleepCondition)(nil), // 10: flyteidl.core.SleepCondition + (*GateNode)(nil), // 11: flyteidl.core.GateNode + (*ArrayNode)(nil), // 12: flyteidl.core.ArrayNode + (*NodeMetadata)(nil), // 13: flyteidl.core.NodeMetadata + (*Alias)(nil), // 14: flyteidl.core.Alias + (*Node)(nil), // 15: flyteidl.core.Node + (*WorkflowMetadata)(nil), // 16: flyteidl.core.WorkflowMetadata + (*WorkflowMetadataDefaults)(nil), // 17: flyteidl.core.WorkflowMetadataDefaults + 
(*WorkflowTemplate)(nil), // 18: flyteidl.core.WorkflowTemplate + (*TaskNodeOverrides)(nil), // 19: flyteidl.core.TaskNodeOverrides + (*LaunchPlanTemplate)(nil), // 20: flyteidl.core.LaunchPlanTemplate + nil, // 21: flyteidl.core.WorkflowMetadata.TagsEntry + (*BooleanExpression)(nil), // 22: flyteidl.core.BooleanExpression + (*Error)(nil), // 23: flyteidl.core.Error + (*Identifier)(nil), // 24: flyteidl.core.Identifier + (*LiteralType)(nil), // 25: flyteidl.core.LiteralType + (*durationpb.Duration)(nil), // 26: google.protobuf.Duration + (*wrapperspb.BoolValue)(nil), // 27: google.protobuf.BoolValue + (*RetryStrategy)(nil), // 28: flyteidl.core.RetryStrategy + (*Binding)(nil), // 29: flyteidl.core.Binding + (*QualityOfService)(nil), // 30: flyteidl.core.QualityOfService + (*TypedInterface)(nil), // 31: flyteidl.core.TypedInterface + (*Resources)(nil), // 32: flyteidl.core.Resources + (*ExtendedResources)(nil), // 33: flyteidl.core.ExtendedResources + (*LiteralMap)(nil), // 34: flyteidl.core.LiteralMap } var file_flyteidl_core_workflow_proto_depIdxs = []int32{ - 21, // 0: flyteidl.core.IfBlock.condition:type_name -> flyteidl.core.BooleanExpression - 14, // 1: flyteidl.core.IfBlock.then_node:type_name -> flyteidl.core.Node - 2, // 2: flyteidl.core.IfElseBlock.case:type_name -> flyteidl.core.IfBlock - 2, // 3: flyteidl.core.IfElseBlock.other:type_name -> flyteidl.core.IfBlock - 14, // 4: flyteidl.core.IfElseBlock.else_node:type_name -> flyteidl.core.Node - 22, // 5: flyteidl.core.IfElseBlock.error:type_name -> flyteidl.core.Error - 3, // 6: flyteidl.core.BranchNode.if_else:type_name -> flyteidl.core.IfElseBlock - 23, // 7: flyteidl.core.TaskNode.reference_id:type_name -> flyteidl.core.Identifier - 18, // 8: flyteidl.core.TaskNode.overrides:type_name -> flyteidl.core.TaskNodeOverrides - 23, // 9: flyteidl.core.WorkflowNode.launchplan_ref:type_name -> flyteidl.core.Identifier - 23, // 10: flyteidl.core.WorkflowNode.sub_workflow_ref:type_name -> flyteidl.core.Identifier 
- 24, // 11: flyteidl.core.SignalCondition.type:type_name -> flyteidl.core.LiteralType - 25, // 12: flyteidl.core.SleepCondition.duration:type_name -> google.protobuf.Duration - 7, // 13: flyteidl.core.GateNode.approve:type_name -> flyteidl.core.ApproveCondition - 8, // 14: flyteidl.core.GateNode.signal:type_name -> flyteidl.core.SignalCondition - 9, // 15: flyteidl.core.GateNode.sleep:type_name -> flyteidl.core.SleepCondition - 14, // 16: flyteidl.core.ArrayNode.node:type_name -> flyteidl.core.Node + 22, // 0: flyteidl.core.IfBlock.condition:type_name -> flyteidl.core.BooleanExpression + 15, // 1: flyteidl.core.IfBlock.then_node:type_name -> flyteidl.core.Node + 3, // 2: flyteidl.core.IfElseBlock.case:type_name -> flyteidl.core.IfBlock + 3, // 3: flyteidl.core.IfElseBlock.other:type_name -> flyteidl.core.IfBlock + 15, // 4: flyteidl.core.IfElseBlock.else_node:type_name -> flyteidl.core.Node + 23, // 5: flyteidl.core.IfElseBlock.error:type_name -> flyteidl.core.Error + 4, // 6: flyteidl.core.BranchNode.if_else:type_name -> flyteidl.core.IfElseBlock + 24, // 7: flyteidl.core.TaskNode.reference_id:type_name -> flyteidl.core.Identifier + 19, // 8: flyteidl.core.TaskNode.overrides:type_name -> flyteidl.core.TaskNodeOverrides + 24, // 9: flyteidl.core.WorkflowNode.launchplan_ref:type_name -> flyteidl.core.Identifier + 24, // 10: flyteidl.core.WorkflowNode.sub_workflow_ref:type_name -> flyteidl.core.Identifier + 25, // 11: flyteidl.core.SignalCondition.type:type_name -> flyteidl.core.LiteralType + 26, // 12: flyteidl.core.SleepCondition.duration:type_name -> google.protobuf.Duration + 8, // 13: flyteidl.core.GateNode.approve:type_name -> flyteidl.core.ApproveCondition + 9, // 14: flyteidl.core.GateNode.signal:type_name -> flyteidl.core.SignalCondition + 10, // 15: flyteidl.core.GateNode.sleep:type_name -> flyteidl.core.SleepCondition + 15, // 16: flyteidl.core.ArrayNode.node:type_name -> flyteidl.core.Node 0, // 17: flyteidl.core.ArrayNode.execution_mode:type_name -> 
flyteidl.core.ArrayNode.ExecutionMode - 25, // 18: flyteidl.core.NodeMetadata.timeout:type_name -> google.protobuf.Duration - 26, // 19: flyteidl.core.NodeMetadata.retries:type_name -> flyteidl.core.RetryStrategy - 12, // 20: flyteidl.core.Node.metadata:type_name -> flyteidl.core.NodeMetadata - 27, // 21: flyteidl.core.Node.inputs:type_name -> flyteidl.core.Binding - 13, // 22: flyteidl.core.Node.output_aliases:type_name -> flyteidl.core.Alias - 5, // 23: flyteidl.core.Node.task_node:type_name -> flyteidl.core.TaskNode - 6, // 24: flyteidl.core.Node.workflow_node:type_name -> flyteidl.core.WorkflowNode - 4, // 25: flyteidl.core.Node.branch_node:type_name -> flyteidl.core.BranchNode - 10, // 26: flyteidl.core.Node.gate_node:type_name -> flyteidl.core.GateNode - 11, // 27: flyteidl.core.Node.array_node:type_name -> flyteidl.core.ArrayNode - 28, // 28: flyteidl.core.WorkflowMetadata.quality_of_service:type_name -> flyteidl.core.QualityOfService - 1, // 29: flyteidl.core.WorkflowMetadata.on_failure:type_name -> flyteidl.core.WorkflowMetadata.OnFailurePolicy - 20, // 30: flyteidl.core.WorkflowMetadata.tags:type_name -> flyteidl.core.WorkflowMetadata.TagsEntry - 23, // 31: flyteidl.core.WorkflowTemplate.id:type_name -> flyteidl.core.Identifier - 15, // 32: flyteidl.core.WorkflowTemplate.metadata:type_name -> flyteidl.core.WorkflowMetadata - 29, // 33: flyteidl.core.WorkflowTemplate.interface:type_name -> flyteidl.core.TypedInterface - 14, // 34: flyteidl.core.WorkflowTemplate.nodes:type_name -> flyteidl.core.Node - 27, // 35: flyteidl.core.WorkflowTemplate.outputs:type_name -> flyteidl.core.Binding - 14, // 36: flyteidl.core.WorkflowTemplate.failure_node:type_name -> flyteidl.core.Node - 16, // 37: flyteidl.core.WorkflowTemplate.metadata_defaults:type_name -> flyteidl.core.WorkflowMetadataDefaults - 30, // 38: flyteidl.core.TaskNodeOverrides.resources:type_name -> flyteidl.core.Resources - 31, // 39: flyteidl.core.TaskNodeOverrides.extended_resources:type_name -> 
flyteidl.core.ExtendedResources - 23, // 40: flyteidl.core.LaunchPlanTemplate.id:type_name -> flyteidl.core.Identifier - 29, // 41: flyteidl.core.LaunchPlanTemplate.interface:type_name -> flyteidl.core.TypedInterface - 32, // 42: flyteidl.core.LaunchPlanTemplate.fixed_inputs:type_name -> flyteidl.core.LiteralMap - 43, // [43:43] is the sub-list for method output_type - 43, // [43:43] is the sub-list for method input_type - 43, // [43:43] is the sub-list for extension type_name - 43, // [43:43] is the sub-list for extension extendee - 0, // [0:43] is the sub-list for field type_name + 27, // 18: flyteidl.core.ArrayNode.is_original_sub_node_interface:type_name -> google.protobuf.BoolValue + 1, // 19: flyteidl.core.ArrayNode.data_mode:type_name -> flyteidl.core.ArrayNode.DataMode + 26, // 20: flyteidl.core.NodeMetadata.timeout:type_name -> google.protobuf.Duration + 28, // 21: flyteidl.core.NodeMetadata.retries:type_name -> flyteidl.core.RetryStrategy + 13, // 22: flyteidl.core.Node.metadata:type_name -> flyteidl.core.NodeMetadata + 29, // 23: flyteidl.core.Node.inputs:type_name -> flyteidl.core.Binding + 14, // 24: flyteidl.core.Node.output_aliases:type_name -> flyteidl.core.Alias + 6, // 25: flyteidl.core.Node.task_node:type_name -> flyteidl.core.TaskNode + 7, // 26: flyteidl.core.Node.workflow_node:type_name -> flyteidl.core.WorkflowNode + 5, // 27: flyteidl.core.Node.branch_node:type_name -> flyteidl.core.BranchNode + 11, // 28: flyteidl.core.Node.gate_node:type_name -> flyteidl.core.GateNode + 12, // 29: flyteidl.core.Node.array_node:type_name -> flyteidl.core.ArrayNode + 30, // 30: flyteidl.core.WorkflowMetadata.quality_of_service:type_name -> flyteidl.core.QualityOfService + 2, // 31: flyteidl.core.WorkflowMetadata.on_failure:type_name -> flyteidl.core.WorkflowMetadata.OnFailurePolicy + 21, // 32: flyteidl.core.WorkflowMetadata.tags:type_name -> flyteidl.core.WorkflowMetadata.TagsEntry + 24, // 33: flyteidl.core.WorkflowTemplate.id:type_name -> 
flyteidl.core.Identifier + 16, // 34: flyteidl.core.WorkflowTemplate.metadata:type_name -> flyteidl.core.WorkflowMetadata + 31, // 35: flyteidl.core.WorkflowTemplate.interface:type_name -> flyteidl.core.TypedInterface + 15, // 36: flyteidl.core.WorkflowTemplate.nodes:type_name -> flyteidl.core.Node + 29, // 37: flyteidl.core.WorkflowTemplate.outputs:type_name -> flyteidl.core.Binding + 15, // 38: flyteidl.core.WorkflowTemplate.failure_node:type_name -> flyteidl.core.Node + 17, // 39: flyteidl.core.WorkflowTemplate.metadata_defaults:type_name -> flyteidl.core.WorkflowMetadataDefaults + 32, // 40: flyteidl.core.TaskNodeOverrides.resources:type_name -> flyteidl.core.Resources + 33, // 41: flyteidl.core.TaskNodeOverrides.extended_resources:type_name -> flyteidl.core.ExtendedResources + 24, // 42: flyteidl.core.LaunchPlanTemplate.id:type_name -> flyteidl.core.Identifier + 31, // 43: flyteidl.core.LaunchPlanTemplate.interface:type_name -> flyteidl.core.TypedInterface + 34, // 44: flyteidl.core.LaunchPlanTemplate.fixed_inputs:type_name -> flyteidl.core.LiteralMap + 45, // [45:45] is the sub-list for method output_type + 45, // [45:45] is the sub-list for method input_type + 45, // [45:45] is the sub-list for extension type_name + 45, // [45:45] is the sub-list for extension extendee + 0, // [0:45] is the sub-list for field type_name } func init() { file_flyteidl_core_workflow_proto_init() } @@ -2349,7 +2439,7 @@ func file_flyteidl_core_workflow_proto_init() { File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: file_flyteidl_core_workflow_proto_rawDesc, - NumEnums: 2, + NumEnums: 3, NumMessages: 19, NumExtensions: 0, NumServices: 0, diff --git a/flyteidl/gen/pb-go/flyteidl/event/cloudevents.pb.go b/flyteidl/gen/pb-go/flyteidl/event/cloudevents.pb.go index 23f6783440..b40cea7562 100644 --- a/flyteidl/gen/pb-go/flyteidl/event/cloudevents.pb.go +++ b/flyteidl/gen/pb-go/flyteidl/event/cloudevents.pb.go @@ -40,6 +40,8 @@ type 
CloudEventWorkflowExecution struct { // Here for provenance information. // Launch plan IDs are easier to get than workflow IDs so we'll use these for now. LaunchPlanId *core.Identifier `protobuf:"bytes,6,opt,name=launch_plan_id,json=launchPlanId,proto3" json:"launch_plan_id,omitempty"` + // We can't have the ExecutionMetadata object directly because of import cycle + Labels map[string]string `protobuf:"bytes,7,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` } func (x *CloudEventWorkflowExecution) Reset() { @@ -116,6 +118,13 @@ func (x *CloudEventWorkflowExecution) GetLaunchPlanId() *core.Identifier { return nil } +func (x *CloudEventWorkflowExecution) GetLabels() map[string]string { + if x != nil { + return x.Labels + } + return nil +} + type CloudEventNodeExecution struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -134,6 +143,8 @@ type CloudEventNodeExecution struct { // Here for provenance information. // Launch plan IDs are easier to get than workflow IDs so we'll use these for now. 
LaunchPlanId *core.Identifier `protobuf:"bytes,6,opt,name=launch_plan_id,json=launchPlanId,proto3" json:"launch_plan_id,omitempty"` + // We can't have the ExecutionMetadata object directly because of import cycle + Labels map[string]string `protobuf:"bytes,7,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` } func (x *CloudEventNodeExecution) Reset() { @@ -210,12 +221,21 @@ func (x *CloudEventNodeExecution) GetLaunchPlanId() *core.Identifier { return nil } +func (x *CloudEventNodeExecution) GetLabels() map[string]string { + if x != nil { + return x.Labels + } + return nil +} + type CloudEventTaskExecution struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields RawEvent *TaskExecutionEvent `protobuf:"bytes,1,opt,name=raw_event,json=rawEvent,proto3" json:"raw_event,omitempty"` + // We can't have the ExecutionMetadata object directly because of import cycle + Labels map[string]string `protobuf:"bytes,2,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` } func (x *CloudEventTaskExecution) Reset() { @@ -257,6 +277,13 @@ func (x *CloudEventTaskExecution) GetRawEvent() *TaskExecutionEvent { return nil } +func (x *CloudEventTaskExecution) GetLabels() map[string]string { + if x != nil { + return x.Labels + } + return nil +} + // This event is to be sent by Admin after it creates an execution. 
type CloudEventExecutionStart struct { state protoimpl.MessageState @@ -366,7 +393,7 @@ var file_flyteidl_event_cloudevents_proto_rawDesc = []byte{ 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2f, 0x63, 0x6f, 0x72, 0x65, 0x2f, 0x69, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x74, 0x69, - 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0xa6, 0x03, + 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0xb2, 0x04, 0x0a, 0x1b, 0x43, 0x6c, 0x6f, 0x75, 0x64, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x43, 0x0a, 0x09, 0x72, 0x61, 0x77, 0x5f, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, @@ -393,73 +420,99 @@ var file_flyteidl_event_cloudevents_proto_rawDesc = []byte{ 0x70, 0x6c, 0x61, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x52, 0x0c, 0x6c, 0x61, 0x75, 0x6e, 0x63, 0x68, - 0x50, 0x6c, 0x61, 0x6e, 0x49, 0x64, 0x22, 0x8b, 0x03, 0x0a, 0x17, 0x43, 0x6c, 0x6f, 0x75, 0x64, - 0x45, 0x76, 0x65, 0x6e, 0x74, 0x4e, 0x6f, 0x64, 0x65, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, - 0x6f, 0x6e, 0x12, 0x3f, 0x0a, 0x09, 0x72, 0x61, 0x77, 0x5f, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, - 0x2e, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x2e, 0x4e, 0x6f, 0x64, 0x65, 0x45, 0x78, 0x65, 0x63, 0x75, - 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, 0x08, 0x72, 0x61, 0x77, 0x45, 0x76, - 0x65, 0x6e, 0x74, 0x12, 0x48, 0x0a, 0x0c, 0x74, 0x61, 0x73, 0x6b, 0x5f, 0x65, 0x78, 0x65, 0x63, - 0x5f, 0x69, 0x64, 
0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x26, 0x2e, 0x66, 0x6c, 0x79, 0x74, - 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x54, 0x61, 0x73, 0x6b, 0x45, 0x78, - 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, - 0x72, 0x52, 0x0a, 0x74, 0x61, 0x73, 0x6b, 0x45, 0x78, 0x65, 0x63, 0x49, 0x64, 0x12, 0x48, 0x0a, - 0x10, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x5f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x66, 0x61, 0x63, - 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, - 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x54, 0x79, 0x70, 0x65, 0x64, 0x49, 0x6e, 0x74, - 0x65, 0x72, 0x66, 0x61, 0x63, 0x65, 0x52, 0x0f, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x49, 0x6e, - 0x74, 0x65, 0x72, 0x66, 0x61, 0x63, 0x65, 0x12, 0x3c, 0x0a, 0x0c, 0x61, 0x72, 0x74, 0x69, 0x66, - 0x61, 0x63, 0x74, 0x5f, 0x69, 0x64, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x19, 0x2e, - 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x41, 0x72, - 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x49, 0x44, 0x52, 0x0b, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, - 0x63, 0x74, 0x49, 0x64, 0x73, 0x12, 0x1c, 0x0a, 0x09, 0x70, 0x72, 0x69, 0x6e, 0x63, 0x69, 0x70, - 0x61, 0x6c, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x70, 0x72, 0x69, 0x6e, 0x63, 0x69, - 0x70, 0x61, 0x6c, 0x12, 0x3f, 0x0a, 0x0e, 0x6c, 0x61, 0x75, 0x6e, 0x63, 0x68, 0x5f, 0x70, 0x6c, - 0x61, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x66, 0x6c, - 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x49, 0x64, 0x65, 0x6e, - 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x52, 0x0c, 0x6c, 0x61, 0x75, 0x6e, 0x63, 0x68, 0x50, 0x6c, - 0x61, 0x6e, 0x49, 0x64, 0x22, 0x5a, 0x0a, 0x17, 0x43, 0x6c, 0x6f, 0x75, 0x64, 0x45, 0x76, 0x65, - 0x6e, 0x74, 0x54, 0x61, 0x73, 0x6b, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x12, - 0x3f, 0x0a, 0x09, 0x72, 0x61, 0x77, 0x5f, 0x65, 0x76, 
0x65, 0x6e, 0x74, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x65, 0x76, - 0x65, 0x6e, 0x74, 0x2e, 0x54, 0x61, 0x73, 0x6b, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, - 0x6e, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, 0x08, 0x72, 0x61, 0x77, 0x45, 0x76, 0x65, 0x6e, 0x74, - 0x22, 0xef, 0x02, 0x0a, 0x18, 0x43, 0x6c, 0x6f, 0x75, 0x64, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x45, - 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x72, 0x74, 0x12, 0x4d, 0x0a, - 0x0c, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x2a, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, - 0x6f, 0x72, 0x65, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x45, 0x78, 0x65, 0x63, - 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x52, - 0x0b, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x12, 0x3f, 0x0a, 0x0e, - 0x6c, 0x61, 0x75, 0x6e, 0x63, 0x68, 0x5f, 0x70, 0x6c, 0x61, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x02, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, - 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x52, - 0x0c, 0x6c, 0x61, 0x75, 0x6e, 0x63, 0x68, 0x50, 0x6c, 0x61, 0x6e, 0x49, 0x64, 0x12, 0x3a, 0x0a, - 0x0b, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x69, 0x64, 0x18, 0x03, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, - 0x72, 0x65, 0x2e, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x52, 0x0a, 0x77, - 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x49, 0x64, 0x12, 0x3c, 0x0a, 0x0c, 0x61, 0x72, 0x74, - 0x69, 0x66, 0x61, 0x63, 0x74, 0x5f, 0x69, 0x64, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, + 0x50, 0x6c, 0x61, 0x6e, 0x49, 0x64, 0x12, 0x4f, 0x0a, 0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, 
0x73, + 0x18, 0x07, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x37, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, + 0x6c, 0x2e, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x2e, 0x43, 0x6c, 0x6f, 0x75, 0x64, 0x45, 0x76, 0x65, + 0x6e, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, + 0x69, 0x6f, 0x6e, 0x2e, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, + 0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x1a, 0x39, 0x0a, 0x0b, 0x4c, 0x61, 0x62, 0x65, 0x6c, + 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, + 0x38, 0x01, 0x22, 0x93, 0x04, 0x0a, 0x17, 0x43, 0x6c, 0x6f, 0x75, 0x64, 0x45, 0x76, 0x65, 0x6e, + 0x74, 0x4e, 0x6f, 0x64, 0x65, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x3f, + 0x0a, 0x09, 0x72, 0x61, 0x77, 0x5f, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x22, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x65, 0x76, 0x65, + 0x6e, 0x74, 0x2e, 0x4e, 0x6f, 0x64, 0x65, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, + 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, 0x08, 0x72, 0x61, 0x77, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x12, + 0x48, 0x0a, 0x0c, 0x74, 0x61, 0x73, 0x6b, 0x5f, 0x65, 0x78, 0x65, 0x63, 0x5f, 0x69, 0x64, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x26, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, + 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x54, 0x61, 0x73, 0x6b, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, + 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x52, 0x0a, 0x74, + 0x61, 0x73, 0x6b, 0x45, 0x78, 0x65, 0x63, 0x49, 0x64, 0x12, 0x48, 0x0a, 0x10, 0x6f, 0x75, 0x74, + 0x70, 0x75, 0x74, 0x5f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x66, 0x61, 0x63, 0x65, 0x18, 0x03, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x1d, 
0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, + 0x6f, 0x72, 0x65, 0x2e, 0x54, 0x79, 0x70, 0x65, 0x64, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x66, 0x61, + 0x63, 0x65, 0x52, 0x0f, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x66, + 0x61, 0x63, 0x65, 0x12, 0x3c, 0x0a, 0x0c, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x5f, + 0x69, 0x64, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x66, 0x6c, 0x79, 0x74, + 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, + 0x63, 0x74, 0x49, 0x44, 0x52, 0x0b, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x49, 0x64, + 0x73, 0x12, 0x1c, 0x0a, 0x09, 0x70, 0x72, 0x69, 0x6e, 0x63, 0x69, 0x70, 0x61, 0x6c, 0x18, 0x05, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x70, 0x72, 0x69, 0x6e, 0x63, 0x69, 0x70, 0x61, 0x6c, 0x12, + 0x3f, 0x0a, 0x0e, 0x6c, 0x61, 0x75, 0x6e, 0x63, 0x68, 0x5f, 0x70, 0x6c, 0x61, 0x6e, 0x5f, 0x69, + 0x64, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, + 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, + 0x65, 0x72, 0x52, 0x0c, 0x6c, 0x61, 0x75, 0x6e, 0x63, 0x68, 0x50, 0x6c, 0x61, 0x6e, 0x49, 0x64, + 0x12, 0x4b, 0x0a, 0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x18, 0x07, 0x20, 0x03, 0x28, 0x0b, + 0x32, 0x33, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x65, 0x76, 0x65, 0x6e, + 0x74, 0x2e, 0x43, 0x6c, 0x6f, 0x75, 0x64, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x4e, 0x6f, 0x64, 0x65, + 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73, + 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x1a, 0x39, 0x0a, + 0x0b, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, + 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, + 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 
0x28, 0x09, 0x52, 0x05, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0xe2, 0x01, 0x0a, 0x17, 0x43, 0x6c, 0x6f, + 0x75, 0x64, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x61, 0x73, 0x6b, 0x45, 0x78, 0x65, 0x63, 0x75, + 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x3f, 0x0a, 0x09, 0x72, 0x61, 0x77, 0x5f, 0x65, 0x76, 0x65, 0x6e, + 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, + 0x64, 0x6c, 0x2e, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x2e, 0x54, 0x61, 0x73, 0x6b, 0x45, 0x78, 0x65, + 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, 0x08, 0x72, 0x61, 0x77, + 0x45, 0x76, 0x65, 0x6e, 0x74, 0x12, 0x4b, 0x0a, 0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x18, + 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x33, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, + 0x2e, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x2e, 0x43, 0x6c, 0x6f, 0x75, 0x64, 0x45, 0x76, 0x65, 0x6e, + 0x74, 0x54, 0x61, 0x73, 0x6b, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x4c, + 0x61, 0x62, 0x65, 0x6c, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x06, 0x6c, 0x61, 0x62, 0x65, + 0x6c, 0x73, 0x1a, 0x39, 0x0a, 0x0b, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x45, 0x6e, 0x74, 0x72, + 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, + 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0xef, 0x02, + 0x0a, 0x18, 0x43, 0x6c, 0x6f, 0x75, 0x64, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x45, 0x78, 0x65, 0x63, + 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x72, 0x74, 0x12, 0x4d, 0x0a, 0x0c, 0x65, 0x78, + 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x2a, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, + 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, + 0x6f, 
0x6e, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x52, 0x0b, 0x65, 0x78, + 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x12, 0x3f, 0x0a, 0x0e, 0x6c, 0x61, 0x75, + 0x6e, 0x63, 0x68, 0x5f, 0x70, 0x6c, 0x61, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x19, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, + 0x65, 0x2e, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x52, 0x0c, 0x6c, 0x61, + 0x75, 0x6e, 0x63, 0x68, 0x50, 0x6c, 0x61, 0x6e, 0x49, 0x64, 0x12, 0x3a, 0x0a, 0x0b, 0x77, 0x6f, + 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x69, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, - 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x49, 0x44, 0x52, 0x0b, 0x61, 0x72, 0x74, 0x69, - 0x66, 0x61, 0x63, 0x74, 0x49, 0x64, 0x73, 0x12, 0x2b, 0x0a, 0x11, 0x61, 0x72, 0x74, 0x69, 0x66, - 0x61, 0x63, 0x74, 0x5f, 0x74, 0x72, 0x61, 0x63, 0x6b, 0x65, 0x72, 0x73, 0x18, 0x05, 0x20, 0x03, - 0x28, 0x09, 0x52, 0x10, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x54, 0x72, 0x61, 0x63, - 0x6b, 0x65, 0x72, 0x73, 0x12, 0x1c, 0x0a, 0x09, 0x70, 0x72, 0x69, 0x6e, 0x63, 0x69, 0x70, 0x61, - 0x6c, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x70, 0x72, 0x69, 0x6e, 0x63, 0x69, 0x70, - 0x61, 0x6c, 0x42, 0xbc, 0x01, 0x0a, 0x12, 0x63, 0x6f, 0x6d, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, - 0x69, 0x64, 0x6c, 0x2e, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x42, 0x10, 0x43, 0x6c, 0x6f, 0x75, 0x64, - 0x65, 0x76, 0x65, 0x6e, 0x74, 0x73, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x3b, 0x67, - 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x6f, - 0x72, 0x67, 0x2f, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x2f, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, - 0x6c, 0x2f, 0x67, 0x65, 0x6e, 0x2f, 0x70, 0x62, 0x2d, 0x67, 0x6f, 0x2f, 0x66, 0x6c, 0x79, 0x74, - 0x65, 0x69, 0x64, 0x6c, 0x2f, 0x65, 0x76, 0x65, 
0x6e, 0x74, 0xa2, 0x02, 0x03, 0x46, 0x45, 0x58, - 0xaa, 0x02, 0x0e, 0x46, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x45, 0x76, 0x65, 0x6e, - 0x74, 0xca, 0x02, 0x0e, 0x46, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x5c, 0x45, 0x76, 0x65, - 0x6e, 0x74, 0xe2, 0x02, 0x1a, 0x46, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x5c, 0x45, 0x76, - 0x65, 0x6e, 0x74, 0x5c, 0x47, 0x50, 0x42, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0xea, - 0x02, 0x0f, 0x46, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x3a, 0x3a, 0x45, 0x76, 0x65, 0x6e, - 0x74, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x52, 0x0a, 0x77, 0x6f, 0x72, 0x6b, + 0x66, 0x6c, 0x6f, 0x77, 0x49, 0x64, 0x12, 0x3c, 0x0a, 0x0c, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, + 0x63, 0x74, 0x5f, 0x69, 0x64, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x66, + 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x41, 0x72, 0x74, + 0x69, 0x66, 0x61, 0x63, 0x74, 0x49, 0x44, 0x52, 0x0b, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, + 0x74, 0x49, 0x64, 0x73, 0x12, 0x2b, 0x0a, 0x11, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, + 0x5f, 0x74, 0x72, 0x61, 0x63, 0x6b, 0x65, 0x72, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x09, 0x52, + 0x10, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x54, 0x72, 0x61, 0x63, 0x6b, 0x65, 0x72, + 0x73, 0x12, 0x1c, 0x0a, 0x09, 0x70, 0x72, 0x69, 0x6e, 0x63, 0x69, 0x70, 0x61, 0x6c, 0x18, 0x06, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x70, 0x72, 0x69, 0x6e, 0x63, 0x69, 0x70, 0x61, 0x6c, 0x42, + 0xbc, 0x01, 0x0a, 0x12, 0x63, 0x6f, 0x6d, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, + 0x2e, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x42, 0x10, 0x43, 0x6c, 0x6f, 0x75, 0x64, 0x65, 0x76, 0x65, + 0x6e, 0x74, 0x73, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x3b, 0x67, 0x69, 0x74, 0x68, + 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x6f, 0x72, 0x67, 0x2f, + 0x66, 0x6c, 0x79, 0x74, 0x65, 
0x2f, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2f, 0x67, + 0x65, 0x6e, 0x2f, 0x70, 0x62, 0x2d, 0x67, 0x6f, 0x2f, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, + 0x6c, 0x2f, 0x65, 0x76, 0x65, 0x6e, 0x74, 0xa2, 0x02, 0x03, 0x46, 0x45, 0x58, 0xaa, 0x02, 0x0e, + 0x46, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x45, 0x76, 0x65, 0x6e, 0x74, 0xca, 0x02, + 0x0e, 0x46, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x5c, 0x45, 0x76, 0x65, 0x6e, 0x74, 0xe2, + 0x02, 0x1a, 0x46, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x5c, 0x45, 0x76, 0x65, 0x6e, 0x74, + 0x5c, 0x47, 0x50, 0x42, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0xea, 0x02, 0x0f, 0x46, + 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x3a, 0x3a, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x62, 0x06, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( @@ -474,42 +527,48 @@ func file_flyteidl_event_cloudevents_proto_rawDescGZIP() []byte { return file_flyteidl_event_cloudevents_proto_rawDescData } -var file_flyteidl_event_cloudevents_proto_msgTypes = make([]protoimpl.MessageInfo, 4) +var file_flyteidl_event_cloudevents_proto_msgTypes = make([]protoimpl.MessageInfo, 7) var file_flyteidl_event_cloudevents_proto_goTypes = []interface{}{ (*CloudEventWorkflowExecution)(nil), // 0: flyteidl.event.CloudEventWorkflowExecution (*CloudEventNodeExecution)(nil), // 1: flyteidl.event.CloudEventNodeExecution (*CloudEventTaskExecution)(nil), // 2: flyteidl.event.CloudEventTaskExecution (*CloudEventExecutionStart)(nil), // 3: flyteidl.event.CloudEventExecutionStart - (*WorkflowExecutionEvent)(nil), // 4: flyteidl.event.WorkflowExecutionEvent - (*core.TypedInterface)(nil), // 5: flyteidl.core.TypedInterface - (*core.ArtifactID)(nil), // 6: flyteidl.core.ArtifactID - (*core.WorkflowExecutionIdentifier)(nil), // 7: flyteidl.core.WorkflowExecutionIdentifier - (*core.Identifier)(nil), // 8: flyteidl.core.Identifier - (*NodeExecutionEvent)(nil), // 9: flyteidl.event.NodeExecutionEvent - (*core.TaskExecutionIdentifier)(nil), // 10: 
flyteidl.core.TaskExecutionIdentifier - (*TaskExecutionEvent)(nil), // 11: flyteidl.event.TaskExecutionEvent + nil, // 4: flyteidl.event.CloudEventWorkflowExecution.LabelsEntry + nil, // 5: flyteidl.event.CloudEventNodeExecution.LabelsEntry + nil, // 6: flyteidl.event.CloudEventTaskExecution.LabelsEntry + (*WorkflowExecutionEvent)(nil), // 7: flyteidl.event.WorkflowExecutionEvent + (*core.TypedInterface)(nil), // 8: flyteidl.core.TypedInterface + (*core.ArtifactID)(nil), // 9: flyteidl.core.ArtifactID + (*core.WorkflowExecutionIdentifier)(nil), // 10: flyteidl.core.WorkflowExecutionIdentifier + (*core.Identifier)(nil), // 11: flyteidl.core.Identifier + (*NodeExecutionEvent)(nil), // 12: flyteidl.event.NodeExecutionEvent + (*core.TaskExecutionIdentifier)(nil), // 13: flyteidl.core.TaskExecutionIdentifier + (*TaskExecutionEvent)(nil), // 14: flyteidl.event.TaskExecutionEvent } var file_flyteidl_event_cloudevents_proto_depIdxs = []int32{ - 4, // 0: flyteidl.event.CloudEventWorkflowExecution.raw_event:type_name -> flyteidl.event.WorkflowExecutionEvent - 5, // 1: flyteidl.event.CloudEventWorkflowExecution.output_interface:type_name -> flyteidl.core.TypedInterface - 6, // 2: flyteidl.event.CloudEventWorkflowExecution.artifact_ids:type_name -> flyteidl.core.ArtifactID - 7, // 3: flyteidl.event.CloudEventWorkflowExecution.reference_execution:type_name -> flyteidl.core.WorkflowExecutionIdentifier - 8, // 4: flyteidl.event.CloudEventWorkflowExecution.launch_plan_id:type_name -> flyteidl.core.Identifier - 9, // 5: flyteidl.event.CloudEventNodeExecution.raw_event:type_name -> flyteidl.event.NodeExecutionEvent - 10, // 6: flyteidl.event.CloudEventNodeExecution.task_exec_id:type_name -> flyteidl.core.TaskExecutionIdentifier - 5, // 7: flyteidl.event.CloudEventNodeExecution.output_interface:type_name -> flyteidl.core.TypedInterface - 6, // 8: flyteidl.event.CloudEventNodeExecution.artifact_ids:type_name -> flyteidl.core.ArtifactID - 8, // 9: 
flyteidl.event.CloudEventNodeExecution.launch_plan_id:type_name -> flyteidl.core.Identifier - 11, // 10: flyteidl.event.CloudEventTaskExecution.raw_event:type_name -> flyteidl.event.TaskExecutionEvent - 7, // 11: flyteidl.event.CloudEventExecutionStart.execution_id:type_name -> flyteidl.core.WorkflowExecutionIdentifier - 8, // 12: flyteidl.event.CloudEventExecutionStart.launch_plan_id:type_name -> flyteidl.core.Identifier - 8, // 13: flyteidl.event.CloudEventExecutionStart.workflow_id:type_name -> flyteidl.core.Identifier - 6, // 14: flyteidl.event.CloudEventExecutionStart.artifact_ids:type_name -> flyteidl.core.ArtifactID - 15, // [15:15] is the sub-list for method output_type - 15, // [15:15] is the sub-list for method input_type - 15, // [15:15] is the sub-list for extension type_name - 15, // [15:15] is the sub-list for extension extendee - 0, // [0:15] is the sub-list for field type_name + 7, // 0: flyteidl.event.CloudEventWorkflowExecution.raw_event:type_name -> flyteidl.event.WorkflowExecutionEvent + 8, // 1: flyteidl.event.CloudEventWorkflowExecution.output_interface:type_name -> flyteidl.core.TypedInterface + 9, // 2: flyteidl.event.CloudEventWorkflowExecution.artifact_ids:type_name -> flyteidl.core.ArtifactID + 10, // 3: flyteidl.event.CloudEventWorkflowExecution.reference_execution:type_name -> flyteidl.core.WorkflowExecutionIdentifier + 11, // 4: flyteidl.event.CloudEventWorkflowExecution.launch_plan_id:type_name -> flyteidl.core.Identifier + 4, // 5: flyteidl.event.CloudEventWorkflowExecution.labels:type_name -> flyteidl.event.CloudEventWorkflowExecution.LabelsEntry + 12, // 6: flyteidl.event.CloudEventNodeExecution.raw_event:type_name -> flyteidl.event.NodeExecutionEvent + 13, // 7: flyteidl.event.CloudEventNodeExecution.task_exec_id:type_name -> flyteidl.core.TaskExecutionIdentifier + 8, // 8: flyteidl.event.CloudEventNodeExecution.output_interface:type_name -> flyteidl.core.TypedInterface + 9, // 9: 
flyteidl.event.CloudEventNodeExecution.artifact_ids:type_name -> flyteidl.core.ArtifactID + 11, // 10: flyteidl.event.CloudEventNodeExecution.launch_plan_id:type_name -> flyteidl.core.Identifier + 5, // 11: flyteidl.event.CloudEventNodeExecution.labels:type_name -> flyteidl.event.CloudEventNodeExecution.LabelsEntry + 14, // 12: flyteidl.event.CloudEventTaskExecution.raw_event:type_name -> flyteidl.event.TaskExecutionEvent + 6, // 13: flyteidl.event.CloudEventTaskExecution.labels:type_name -> flyteidl.event.CloudEventTaskExecution.LabelsEntry + 10, // 14: flyteidl.event.CloudEventExecutionStart.execution_id:type_name -> flyteidl.core.WorkflowExecutionIdentifier + 11, // 15: flyteidl.event.CloudEventExecutionStart.launch_plan_id:type_name -> flyteidl.core.Identifier + 11, // 16: flyteidl.event.CloudEventExecutionStart.workflow_id:type_name -> flyteidl.core.Identifier + 9, // 17: flyteidl.event.CloudEventExecutionStart.artifact_ids:type_name -> flyteidl.core.ArtifactID + 18, // [18:18] is the sub-list for method output_type + 18, // [18:18] is the sub-list for method input_type + 18, // [18:18] is the sub-list for extension type_name + 18, // [18:18] is the sub-list for extension extendee + 0, // [0:18] is the sub-list for field type_name } func init() { file_flyteidl_event_cloudevents_proto_init() } @@ -574,7 +633,7 @@ func file_flyteidl_event_cloudevents_proto_init() { GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: file_flyteidl_event_cloudevents_proto_rawDesc, NumEnums: 0, - NumMessages: 4, + NumMessages: 7, NumExtensions: 0, NumServices: 0, }, diff --git a/flyteidl/gen/pb-go/flyteidl/event/event.pb.go b/flyteidl/gen/pb-go/flyteidl/event/event.pb.go index 963ed02ff6..096cc7fadb 100644 --- a/flyteidl/gen/pb-go/flyteidl/event/event.pb.go +++ b/flyteidl/gen/pb-go/flyteidl/event/event.pb.go @@ -276,6 +276,8 @@ type NodeExecutionEvent struct { // if the relevant execution entity is was registered, or dynamic. 
This field indicates that the target_entity ID, // as well as task IDs in any corresponding Task Executions, should not be used to looked up the task in Admin's db. IsInDynamicChain bool `protobuf:"varint,24,opt,name=is_in_dynamic_chain,json=isInDynamicChain,proto3" json:"is_in_dynamic_chain,omitempty"` + // Whether this node launched an eager task. + IsEager bool `protobuf:"varint,25,opt,name=is_eager,json=isEager,proto3" json:"is_eager,omitempty"` } func (x *NodeExecutionEvent) Reset() { @@ -499,6 +501,13 @@ func (x *NodeExecutionEvent) GetIsInDynamicChain() bool { return false } +func (x *NodeExecutionEvent) GetIsEager() bool { + if x != nil { + return x.IsEager + } + return false +} + type isNodeExecutionEvent_InputValue interface { isNodeExecutionEvent_InputValue() } @@ -1228,6 +1237,16 @@ type ExternalResourceInfo struct { CacheStatus core.CatalogCacheStatus `protobuf:"varint,5,opt,name=cache_status,json=cacheStatus,proto3,enum=flyteidl.core.CatalogCacheStatus" json:"cache_status,omitempty"` // log information for the external resource execution Logs []*core.TaskLog `protobuf:"bytes,6,rep,name=logs,proto3" json:"logs,omitempty"` + // Additional metadata to do with this event's node target based on the node type. We are + // explicitly not including the task_node_metadata here because it is not clear if it is needed. + // If we decide to include in the future, we should deprecate the cache_status field. 
+ // + // Types that are assignable to TargetMetadata: + // + // *ExternalResourceInfo_WorkflowNodeMetadata + TargetMetadata isExternalResourceInfo_TargetMetadata `protobuf_oneof:"target_metadata"` + // Extensible field for custom, plugin-specific info + CustomInfo *structpb.Struct `protobuf:"bytes,8,opt,name=custom_info,json=customInfo,proto3" json:"custom_info,omitempty"` } func (x *ExternalResourceInfo) Reset() { @@ -1304,6 +1323,37 @@ func (x *ExternalResourceInfo) GetLogs() []*core.TaskLog { return nil } +func (m *ExternalResourceInfo) GetTargetMetadata() isExternalResourceInfo_TargetMetadata { + if m != nil { + return m.TargetMetadata + } + return nil +} + +func (x *ExternalResourceInfo) GetWorkflowNodeMetadata() *WorkflowNodeMetadata { + if x, ok := x.GetTargetMetadata().(*ExternalResourceInfo_WorkflowNodeMetadata); ok { + return x.WorkflowNodeMetadata + } + return nil +} + +func (x *ExternalResourceInfo) GetCustomInfo() *structpb.Struct { + if x != nil { + return x.CustomInfo + } + return nil +} + +type isExternalResourceInfo_TargetMetadata interface { + isExternalResourceInfo_TargetMetadata() +} + +type ExternalResourceInfo_WorkflowNodeMetadata struct { + WorkflowNodeMetadata *WorkflowNodeMetadata `protobuf:"bytes,7,opt,name=workflow_node_metadata,json=workflowNodeMetadata,proto3,oneof"` +} + +func (*ExternalResourceInfo_WorkflowNodeMetadata) isExternalResourceInfo_TargetMetadata() {} + // This message holds task execution metadata specific to resource allocation used to manage concurrent // executions for a project namespace. 
type ResourcePoolInfo struct { @@ -1497,7 +1547,7 @@ var file_flyteidl_event_event_proto_rawDesc = []byte{ 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4c, 0x69, 0x74, 0x65, 0x72, 0x61, 0x6c, 0x4d, 0x61, 0x70, 0x48, 0x00, 0x52, 0x0a, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x44, 0x61, 0x74, 0x61, 0x42, 0x0f, 0x0a, 0x0d, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x5f, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, - 0x22, 0x99, 0x0a, 0x0a, 0x12, 0x4e, 0x6f, 0x64, 0x65, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, + 0x22, 0xb4, 0x0a, 0x0a, 0x12, 0x4e, 0x6f, 0x64, 0x65, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x12, 0x36, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x26, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4e, 0x6f, 0x64, 0x65, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, @@ -1575,196 +1625,208 @@ var file_flyteidl_event_event_proto_rawDesc = []byte{ 0x72, 0x67, 0x65, 0x74, 0x45, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x12, 0x2d, 0x0a, 0x13, 0x69, 0x73, 0x5f, 0x69, 0x6e, 0x5f, 0x64, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x5f, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x18, 0x18, 0x20, 0x01, 0x28, 0x08, 0x52, 0x10, 0x69, 0x73, 0x49, 0x6e, 0x44, 0x79, 0x6e, - 0x61, 0x6d, 0x69, 0x63, 0x43, 0x68, 0x61, 0x69, 0x6e, 0x42, 0x0d, 0x0a, 0x0b, 0x69, 0x6e, 0x70, - 0x75, 0x74, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x42, 0x0f, 0x0a, 0x0d, 0x6f, 0x75, 0x74, 0x70, - 0x75, 0x74, 0x5f, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x42, 0x11, 0x0a, 0x0f, 0x74, 0x61, 0x72, - 0x67, 0x65, 0x74, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x22, 0x65, 0x0a, 0x14, - 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x4e, 0x6f, 0x64, 0x65, 0x4d, 0x65, 0x74, 0x61, - 0x64, 0x61, 0x74, 0x61, 0x12, 0x4d, 0x0a, 0x0c, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, - 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2a, 0x2e, 0x66, 0x6c, 0x79, - 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 
0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x66, - 0x6c, 0x6f, 0x77, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x65, 0x6e, - 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x52, 0x0b, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, - 0x6e, 0x49, 0x64, 0x22, 0xf1, 0x02, 0x0a, 0x10, 0x54, 0x61, 0x73, 0x6b, 0x4e, 0x6f, 0x64, 0x65, - 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x44, 0x0a, 0x0c, 0x63, 0x61, 0x63, 0x68, - 0x65, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x21, + 0x61, 0x6d, 0x69, 0x63, 0x43, 0x68, 0x61, 0x69, 0x6e, 0x12, 0x19, 0x0a, 0x08, 0x69, 0x73, 0x5f, + 0x65, 0x61, 0x67, 0x65, 0x72, 0x18, 0x19, 0x20, 0x01, 0x28, 0x08, 0x52, 0x07, 0x69, 0x73, 0x45, + 0x61, 0x67, 0x65, 0x72, 0x42, 0x0d, 0x0a, 0x0b, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x5f, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x42, 0x0f, 0x0a, 0x0d, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x5f, 0x72, 0x65, + 0x73, 0x75, 0x6c, 0x74, 0x42, 0x11, 0x0a, 0x0f, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x5f, 0x6d, + 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x22, 0x65, 0x0a, 0x14, 0x57, 0x6f, 0x72, 0x6b, 0x66, + 0x6c, 0x6f, 0x77, 0x4e, 0x6f, 0x64, 0x65, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, + 0x4d, 0x0a, 0x0c, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2a, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, + 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x45, 0x78, + 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, + 0x72, 0x52, 0x0b, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x22, 0xf1, + 0x02, 0x0a, 0x10, 0x54, 0x61, 0x73, 0x6b, 0x4e, 0x6f, 0x64, 0x65, 0x4d, 0x65, 0x74, 0x61, 0x64, + 0x61, 0x74, 0x61, 0x12, 0x44, 0x0a, 0x0c, 0x63, 0x61, 0x63, 0x68, 0x65, 0x5f, 0x73, 0x74, 0x61, + 0x74, 0x75, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x21, 0x2e, 0x66, 0x6c, 0x79, 0x74, + 
0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x43, 0x61, 0x74, 0x61, 0x6c, 0x6f, + 0x67, 0x43, 0x61, 0x63, 0x68, 0x65, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x0b, 0x63, 0x61, + 0x63, 0x68, 0x65, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x3f, 0x0a, 0x0b, 0x63, 0x61, 0x74, + 0x61, 0x6c, 0x6f, 0x67, 0x5f, 0x6b, 0x65, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x43, - 0x61, 0x74, 0x61, 0x6c, 0x6f, 0x67, 0x43, 0x61, 0x63, 0x68, 0x65, 0x53, 0x74, 0x61, 0x74, 0x75, - 0x73, 0x52, 0x0b, 0x63, 0x61, 0x63, 0x68, 0x65, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x3f, - 0x0a, 0x0b, 0x63, 0x61, 0x74, 0x61, 0x6c, 0x6f, 0x67, 0x5f, 0x6b, 0x65, 0x79, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, - 0x6f, 0x72, 0x65, 0x2e, 0x43, 0x61, 0x74, 0x61, 0x6c, 0x6f, 0x67, 0x4d, 0x65, 0x74, 0x61, 0x64, - 0x61, 0x74, 0x61, 0x52, 0x0a, 0x63, 0x61, 0x74, 0x61, 0x6c, 0x6f, 0x67, 0x4b, 0x65, 0x79, 0x12, - 0x57, 0x0a, 0x12, 0x72, 0x65, 0x73, 0x65, 0x72, 0x76, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, - 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x28, 0x2e, 0x66, 0x6c, - 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x43, 0x61, 0x74, 0x61, - 0x6c, 0x6f, 0x67, 0x52, 0x65, 0x73, 0x65, 0x72, 0x76, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x53, - 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x11, 0x72, 0x65, 0x73, 0x65, 0x72, 0x76, 0x61, 0x74, 0x69, - 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x25, 0x0a, 0x0e, 0x63, 0x68, 0x65, 0x63, - 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x5f, 0x75, 0x72, 0x69, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x0d, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x55, 0x72, 0x69, 0x12, - 0x56, 0x0a, 0x10, 0x64, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x5f, 0x77, 0x6f, 0x72, 0x6b, 0x66, - 0x6c, 0x6f, 0x77, 0x18, 0x10, 0x20, 0x01, 
0x28, 0x0b, 0x32, 0x2b, 0x2e, 0x66, 0x6c, 0x79, 0x74, - 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d, - 0x69, 0x63, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x4e, 0x6f, 0x64, 0x65, 0x4d, 0x65, - 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, 0x0f, 0x64, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x57, - 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x22, 0xce, 0x01, 0x0a, 0x1b, 0x44, 0x79, 0x6e, 0x61, - 0x6d, 0x69, 0x63, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x4e, 0x6f, 0x64, 0x65, 0x4d, - 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x29, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, - 0x6f, 0x72, 0x65, 0x2e, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x52, 0x02, - 0x69, 0x64, 0x12, 0x53, 0x0a, 0x11, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x64, 0x5f, 0x77, - 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x26, 0x2e, - 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x43, 0x6f, - 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x64, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x43, 0x6c, - 0x6f, 0x73, 0x75, 0x72, 0x65, 0x52, 0x10, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x64, 0x57, - 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x12, 0x2f, 0x0a, 0x14, 0x64, 0x79, 0x6e, 0x61, 0x6d, - 0x69, 0x63, 0x5f, 0x6a, 0x6f, 0x62, 0x5f, 0x73, 0x70, 0x65, 0x63, 0x5f, 0x75, 0x72, 0x69, 0x18, - 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x11, 0x64, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x4a, 0x6f, - 0x62, 0x53, 0x70, 0x65, 0x63, 0x55, 0x72, 0x69, 0x22, 0x55, 0x0a, 0x1b, 0x50, 0x61, 0x72, 0x65, - 0x6e, 0x74, 0x54, 0x61, 0x73, 0x6b, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x4d, - 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x36, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, + 0x61, 0x74, 0x61, 0x6c, 0x6f, 0x67, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 
0x61, 0x52, 0x0a, + 0x63, 0x61, 0x74, 0x61, 0x6c, 0x6f, 0x67, 0x4b, 0x65, 0x79, 0x12, 0x57, 0x0a, 0x12, 0x72, 0x65, + 0x73, 0x65, 0x72, 0x76, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, + 0x18, 0x03, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x28, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, + 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x43, 0x61, 0x74, 0x61, 0x6c, 0x6f, 0x67, 0x52, 0x65, + 0x73, 0x65, 0x72, 0x76, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, + 0x52, 0x11, 0x72, 0x65, 0x73, 0x65, 0x72, 0x76, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x61, + 0x74, 0x75, 0x73, 0x12, 0x25, 0x0a, 0x0e, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, + 0x74, 0x5f, 0x75, 0x72, 0x69, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x63, 0x68, 0x65, + 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x55, 0x72, 0x69, 0x12, 0x56, 0x0a, 0x10, 0x64, 0x79, + 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x5f, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x18, 0x10, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2b, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, + 0x65, 0x76, 0x65, 0x6e, 0x74, 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x57, 0x6f, 0x72, + 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x4e, 0x6f, 0x64, 0x65, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, + 0x61, 0x52, 0x0f, 0x64, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, + 0x6f, 0x77, 0x22, 0xce, 0x01, 0x0a, 0x1b, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x57, 0x6f, + 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x4e, 0x6f, 0x64, 0x65, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, + 0x74, 0x61, 0x12, 0x29, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, + 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x49, + 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x52, 0x02, 0x69, 0x64, 0x12, 0x53, 0x0a, + 0x11, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x64, 0x5f, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, + 0x6f, 0x77, 0x18, 
0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x26, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, + 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, + 0x64, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x43, 0x6c, 0x6f, 0x73, 0x75, 0x72, 0x65, + 0x52, 0x10, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x64, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, + 0x6f, 0x77, 0x12, 0x2f, 0x0a, 0x14, 0x64, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x5f, 0x6a, 0x6f, + 0x62, 0x5f, 0x73, 0x70, 0x65, 0x63, 0x5f, 0x75, 0x72, 0x69, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x11, 0x64, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x4a, 0x6f, 0x62, 0x53, 0x70, 0x65, 0x63, + 0x55, 0x72, 0x69, 0x22, 0x55, 0x0a, 0x1b, 0x50, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x54, 0x61, 0x73, + 0x6b, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, + 0x74, 0x61, 0x12, 0x36, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x26, + 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x54, + 0x61, 0x73, 0x6b, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x65, 0x6e, + 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x52, 0x02, 0x69, 0x64, 0x22, 0x36, 0x0a, 0x1b, 0x50, 0x61, + 0x72, 0x65, 0x6e, 0x74, 0x4e, 0x6f, 0x64, 0x65, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, + 0x6e, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x17, 0x0a, 0x07, 0x6e, 0x6f, 0x64, + 0x65, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x6e, 0x6f, 0x64, 0x65, + 0x49, 0x64, 0x22, 0x62, 0x0a, 0x0b, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x61, 0x73, 0x6f, + 0x6e, 0x12, 0x16, 0x0a, 0x06, 0x72, 0x65, 0x61, 0x73, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x06, 0x72, 0x65, 0x61, 0x73, 0x6f, 0x6e, 0x12, 0x3b, 0x0a, 0x0b, 0x6f, 0x63, 0x63, + 0x75, 0x72, 0x72, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, + 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 
0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, + 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x0a, 0x6f, 0x63, 0x63, 0x75, + 0x72, 0x72, 0x65, 0x64, 0x41, 0x74, 0x22, 0x97, 0x08, 0x0a, 0x12, 0x54, 0x61, 0x73, 0x6b, 0x45, + 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x12, 0x32, 0x0a, + 0x07, 0x74, 0x61, 0x73, 0x6b, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, + 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x49, + 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x52, 0x06, 0x74, 0x61, 0x73, 0x6b, 0x49, + 0x64, 0x12, 0x5f, 0x0a, 0x18, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x6e, 0x6f, 0x64, 0x65, + 0x5f, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x26, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, - 0x6f, 0x72, 0x65, 0x2e, 0x54, 0x61, 0x73, 0x6b, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, - 0x6e, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x52, 0x02, 0x69, 0x64, 0x22, - 0x36, 0x0a, 0x1b, 0x50, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x4e, 0x6f, 0x64, 0x65, 0x45, 0x78, 0x65, - 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x17, - 0x0a, 0x07, 0x6e, 0x6f, 0x64, 0x65, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x06, 0x6e, 0x6f, 0x64, 0x65, 0x49, 0x64, 0x22, 0x62, 0x0a, 0x0b, 0x45, 0x76, 0x65, 0x6e, 0x74, - 0x52, 0x65, 0x61, 0x73, 0x6f, 0x6e, 0x12, 0x16, 0x0a, 0x06, 0x72, 0x65, 0x61, 0x73, 0x6f, 0x6e, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x72, 0x65, 0x61, 0x73, 0x6f, 0x6e, 0x12, 0x3b, - 0x0a, 0x0b, 0x6f, 0x63, 0x63, 0x75, 0x72, 0x72, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x02, 0x20, + 0x6f, 0x72, 0x65, 0x2e, 0x4e, 0x6f, 0x64, 0x65, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, + 0x6e, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x52, 0x15, 0x70, 0x61, 0x72, 
+ 0x65, 0x6e, 0x74, 0x4e, 0x6f, 0x64, 0x65, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, + 0x49, 0x64, 0x12, 0x23, 0x0a, 0x0d, 0x72, 0x65, 0x74, 0x72, 0x79, 0x5f, 0x61, 0x74, 0x74, 0x65, + 0x6d, 0x70, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x0c, 0x72, 0x65, 0x74, 0x72, 0x79, + 0x41, 0x74, 0x74, 0x65, 0x6d, 0x70, 0x74, 0x12, 0x38, 0x0a, 0x05, 0x70, 0x68, 0x61, 0x73, 0x65, + 0x18, 0x04, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x22, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, + 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x54, 0x61, 0x73, 0x6b, 0x45, 0x78, 0x65, 0x63, 0x75, + 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x50, 0x68, 0x61, 0x73, 0x65, 0x52, 0x05, 0x70, 0x68, 0x61, 0x73, + 0x65, 0x12, 0x1f, 0x0a, 0x0b, 0x70, 0x72, 0x6f, 0x64, 0x75, 0x63, 0x65, 0x72, 0x5f, 0x69, 0x64, + 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x70, 0x72, 0x6f, 0x64, 0x75, 0x63, 0x65, 0x72, + 0x49, 0x64, 0x12, 0x2a, 0x0a, 0x04, 0x6c, 0x6f, 0x67, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, + 0x32, 0x16, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, + 0x2e, 0x54, 0x61, 0x73, 0x6b, 0x4c, 0x6f, 0x67, 0x52, 0x04, 0x6c, 0x6f, 0x67, 0x73, 0x12, 0x3b, + 0x0a, 0x0b, 0x6f, 0x63, 0x63, 0x75, 0x72, 0x72, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, - 0x0a, 0x6f, 0x63, 0x63, 0x75, 0x72, 0x72, 0x65, 0x64, 0x41, 0x74, 0x22, 0x97, 0x08, 0x0a, 0x12, - 0x54, 0x61, 0x73, 0x6b, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x76, 0x65, - 0x6e, 0x74, 0x12, 0x32, 0x0a, 0x07, 0x74, 0x61, 0x73, 0x6b, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, + 0x0a, 0x6f, 0x63, 0x63, 0x75, 0x72, 0x72, 0x65, 0x64, 0x41, 0x74, 0x12, 0x1d, 0x0a, 0x09, 0x69, + 0x6e, 0x70, 0x75, 0x74, 0x5f, 0x75, 0x72, 0x69, 0x18, 0x08, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, + 0x52, 0x08, 0x69, 0x6e, 0x70, 0x75, 0x74, 
0x55, 0x72, 0x69, 0x12, 0x3a, 0x0a, 0x0a, 0x69, 0x6e, + 0x70, 0x75, 0x74, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x18, 0x13, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, + 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4c, + 0x69, 0x74, 0x65, 0x72, 0x61, 0x6c, 0x4d, 0x61, 0x70, 0x48, 0x00, 0x52, 0x09, 0x69, 0x6e, 0x70, + 0x75, 0x74, 0x44, 0x61, 0x74, 0x61, 0x12, 0x1f, 0x0a, 0x0a, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, + 0x5f, 0x75, 0x72, 0x69, 0x18, 0x09, 0x20, 0x01, 0x28, 0x09, 0x48, 0x01, 0x52, 0x09, 0x6f, 0x75, + 0x74, 0x70, 0x75, 0x74, 0x55, 0x72, 0x69, 0x12, 0x35, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, + 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, + 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, + 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x01, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x3c, + 0x0a, 0x0b, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x18, 0x11, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, - 0x6f, 0x72, 0x65, 0x2e, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x52, 0x06, - 0x74, 0x61, 0x73, 0x6b, 0x49, 0x64, 0x12, 0x5f, 0x0a, 0x18, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, - 0x5f, 0x6e, 0x6f, 0x64, 0x65, 0x5f, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x5f, - 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x26, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, - 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4e, 0x6f, 0x64, 0x65, 0x45, 0x78, 0x65, - 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, - 0x52, 0x15, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x4e, 0x6f, 0x64, 0x65, 0x45, 0x78, 0x65, 0x63, - 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x12, 0x23, 0x0a, 0x0d, 0x72, 0x65, 0x74, 0x72, 0x79, - 0x5f, 0x61, 0x74, 0x74, 0x65, 0x6d, 0x70, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0d, 
0x52, 0x0c, - 0x72, 0x65, 0x74, 0x72, 0x79, 0x41, 0x74, 0x74, 0x65, 0x6d, 0x70, 0x74, 0x12, 0x38, 0x0a, 0x05, - 0x70, 0x68, 0x61, 0x73, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x22, 0x2e, 0x66, 0x6c, - 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x54, 0x61, 0x73, 0x6b, - 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x50, 0x68, 0x61, 0x73, 0x65, 0x52, - 0x05, 0x70, 0x68, 0x61, 0x73, 0x65, 0x12, 0x1f, 0x0a, 0x0b, 0x70, 0x72, 0x6f, 0x64, 0x75, 0x63, - 0x65, 0x72, 0x5f, 0x69, 0x64, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x70, 0x72, 0x6f, - 0x64, 0x75, 0x63, 0x65, 0x72, 0x49, 0x64, 0x12, 0x2a, 0x0a, 0x04, 0x6c, 0x6f, 0x67, 0x73, 0x18, - 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, - 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x54, 0x61, 0x73, 0x6b, 0x4c, 0x6f, 0x67, 0x52, 0x04, 0x6c, - 0x6f, 0x67, 0x73, 0x12, 0x3b, 0x0a, 0x0b, 0x6f, 0x63, 0x63, 0x75, 0x72, 0x72, 0x65, 0x64, 0x5f, - 0x61, 0x74, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, + 0x6f, 0x72, 0x65, 0x2e, 0x4c, 0x69, 0x74, 0x65, 0x72, 0x61, 0x6c, 0x4d, 0x61, 0x70, 0x48, 0x01, + 0x52, 0x0a, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x44, 0x61, 0x74, 0x61, 0x12, 0x38, 0x0a, 0x0b, + 0x63, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x5f, 0x69, 0x6e, 0x66, 0x6f, 0x18, 0x0b, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x17, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x62, 0x75, 0x66, 0x2e, 0x53, 0x74, 0x72, 0x75, 0x63, 0x74, 0x52, 0x0a, 0x63, 0x75, 0x73, 0x74, + 0x6f, 0x6d, 0x49, 0x6e, 0x66, 0x6f, 0x12, 0x23, 0x0a, 0x0d, 0x70, 0x68, 0x61, 0x73, 0x65, 0x5f, + 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x0c, 0x70, + 0x68, 0x61, 0x73, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x1a, 0x0a, 0x06, 0x72, + 0x65, 0x61, 0x73, 0x6f, 0x6e, 0x18, 0x0d, 0x20, 0x01, 0x28, 0x09, 0x42, 0x02, 0x18, 0x01, 0x52, + 0x06, 0x72, 0x65, 0x61, 
0x73, 0x6f, 0x6e, 0x12, 0x35, 0x0a, 0x07, 0x72, 0x65, 0x61, 0x73, 0x6f, + 0x6e, 0x73, 0x18, 0x15, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, + 0x69, 0x64, 0x6c, 0x2e, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x2e, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, + 0x65, 0x61, 0x73, 0x6f, 0x6e, 0x52, 0x07, 0x72, 0x65, 0x61, 0x73, 0x6f, 0x6e, 0x73, 0x12, 0x1b, + 0x0a, 0x09, 0x74, 0x61, 0x73, 0x6b, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x0e, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x08, 0x74, 0x61, 0x73, 0x6b, 0x54, 0x79, 0x70, 0x65, 0x12, 0x41, 0x0a, 0x08, 0x6d, + 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x10, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x25, 0x2e, + 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x2e, 0x54, + 0x61, 0x73, 0x6b, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x65, 0x74, 0x61, + 0x64, 0x61, 0x74, 0x61, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x23, + 0x0a, 0x0d, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, + 0x12, 0x20, 0x01, 0x28, 0x05, 0x52, 0x0c, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x56, 0x65, 0x72, 0x73, + 0x69, 0x6f, 0x6e, 0x12, 0x3b, 0x0a, 0x0b, 0x72, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x64, 0x5f, + 0x61, 0x74, 0x18, 0x14, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, - 0x74, 0x61, 0x6d, 0x70, 0x52, 0x0a, 0x6f, 0x63, 0x63, 0x75, 0x72, 0x72, 0x65, 0x64, 0x41, 0x74, - 0x12, 0x1d, 0x0a, 0x09, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x5f, 0x75, 0x72, 0x69, 0x18, 0x08, 0x20, - 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x08, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x55, 0x72, 0x69, 0x12, - 0x3a, 0x0a, 0x0a, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x18, 0x13, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, - 0x6f, 0x72, 0x65, 0x2e, 0x4c, 0x69, 0x74, 0x65, 0x72, 0x61, 0x6c, 
0x4d, 0x61, 0x70, 0x48, 0x00, - 0x52, 0x09, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x44, 0x61, 0x74, 0x61, 0x12, 0x1f, 0x0a, 0x0a, 0x6f, - 0x75, 0x74, 0x70, 0x75, 0x74, 0x5f, 0x75, 0x72, 0x69, 0x18, 0x09, 0x20, 0x01, 0x28, 0x09, 0x48, - 0x01, 0x52, 0x09, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x55, 0x72, 0x69, 0x12, 0x35, 0x0a, 0x05, - 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x66, 0x6c, - 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x45, 0x78, 0x65, 0x63, - 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x01, 0x52, 0x05, 0x65, 0x72, - 0x72, 0x6f, 0x72, 0x12, 0x3c, 0x0a, 0x0b, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x5f, 0x64, 0x61, - 0x74, 0x61, 0x18, 0x11, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, - 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4c, 0x69, 0x74, 0x65, 0x72, 0x61, 0x6c, - 0x4d, 0x61, 0x70, 0x48, 0x01, 0x52, 0x0a, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x44, 0x61, 0x74, - 0x61, 0x12, 0x38, 0x0a, 0x0b, 0x63, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x5f, 0x69, 0x6e, 0x66, 0x6f, - 0x18, 0x0b, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x53, 0x74, 0x72, 0x75, 0x63, 0x74, 0x52, - 0x0a, 0x63, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x49, 0x6e, 0x66, 0x6f, 0x12, 0x23, 0x0a, 0x0d, 0x70, - 0x68, 0x61, 0x73, 0x65, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x0c, 0x20, 0x01, - 0x28, 0x0d, 0x52, 0x0c, 0x70, 0x68, 0x61, 0x73, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, - 0x12, 0x1a, 0x0a, 0x06, 0x72, 0x65, 0x61, 0x73, 0x6f, 0x6e, 0x18, 0x0d, 0x20, 0x01, 0x28, 0x09, - 0x42, 0x02, 0x18, 0x01, 0x52, 0x06, 0x72, 0x65, 0x61, 0x73, 0x6f, 0x6e, 0x12, 0x35, 0x0a, 0x07, - 0x72, 0x65, 0x61, 0x73, 0x6f, 0x6e, 0x73, 0x18, 0x15, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1b, 0x2e, - 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x2e, 0x45, - 0x76, 
0x65, 0x6e, 0x74, 0x52, 0x65, 0x61, 0x73, 0x6f, 0x6e, 0x52, 0x07, 0x72, 0x65, 0x61, 0x73, - 0x6f, 0x6e, 0x73, 0x12, 0x1b, 0x0a, 0x09, 0x74, 0x61, 0x73, 0x6b, 0x5f, 0x74, 0x79, 0x70, 0x65, - 0x18, 0x0e, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x74, 0x61, 0x73, 0x6b, 0x54, 0x79, 0x70, 0x65, - 0x12, 0x41, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x10, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x25, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x65, 0x76, - 0x65, 0x6e, 0x74, 0x2e, 0x54, 0x61, 0x73, 0x6b, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, - 0x6e, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, - 0x61, 0x74, 0x61, 0x12, 0x23, 0x0a, 0x0d, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x5f, 0x76, 0x65, 0x72, - 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x12, 0x20, 0x01, 0x28, 0x05, 0x52, 0x0c, 0x65, 0x76, 0x65, 0x6e, - 0x74, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x3b, 0x0a, 0x0b, 0x72, 0x65, 0x70, 0x6f, - 0x72, 0x74, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x14, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, - 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, - 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x0a, 0x72, 0x65, 0x70, 0x6f, 0x72, - 0x74, 0x65, 0x64, 0x41, 0x74, 0x42, 0x0d, 0x0a, 0x0b, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x5f, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x42, 0x0f, 0x0a, 0x0d, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x5f, 0x72, - 0x65, 0x73, 0x75, 0x6c, 0x74, 0x22, 0x9e, 0x02, 0x0a, 0x14, 0x45, 0x78, 0x74, 0x65, 0x72, 0x6e, - 0x61, 0x6c, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x12, 0x1f, - 0x0a, 0x0b, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x0a, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x49, 0x64, 0x12, - 0x14, 0x0a, 0x05, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x05, - 0x69, 0x6e, 0x64, 0x65, 0x78, 0x12, 0x23, 
0x0a, 0x0d, 0x72, 0x65, 0x74, 0x72, 0x79, 0x5f, 0x61, - 0x74, 0x74, 0x65, 0x6d, 0x70, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x0c, 0x72, 0x65, - 0x74, 0x72, 0x79, 0x41, 0x74, 0x74, 0x65, 0x6d, 0x70, 0x74, 0x12, 0x38, 0x0a, 0x05, 0x70, 0x68, - 0x61, 0x73, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x22, 0x2e, 0x66, 0x6c, 0x79, 0x74, - 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x54, 0x61, 0x73, 0x6b, 0x45, 0x78, - 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x50, 0x68, 0x61, 0x73, 0x65, 0x52, 0x05, 0x70, - 0x68, 0x61, 0x73, 0x65, 0x12, 0x44, 0x0a, 0x0c, 0x63, 0x61, 0x63, 0x68, 0x65, 0x5f, 0x73, 0x74, - 0x61, 0x74, 0x75, 0x73, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x21, 0x2e, 0x66, 0x6c, 0x79, - 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x43, 0x61, 0x74, 0x61, 0x6c, - 0x6f, 0x67, 0x43, 0x61, 0x63, 0x68, 0x65, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x0b, 0x63, - 0x61, 0x63, 0x68, 0x65, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x2a, 0x0a, 0x04, 0x6c, 0x6f, - 0x67, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, - 0x69, 0x64, 0x6c, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x54, 0x61, 0x73, 0x6b, 0x4c, 0x6f, 0x67, - 0x52, 0x04, 0x6c, 0x6f, 0x67, 0x73, 0x22, 0x5b, 0x0a, 0x10, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, - 0x63, 0x65, 0x50, 0x6f, 0x6f, 0x6c, 0x49, 0x6e, 0x66, 0x6f, 0x12, 0x29, 0x0a, 0x10, 0x61, 0x6c, - 0x6c, 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x0f, 0x61, 0x6c, 0x6c, 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, - 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x12, 0x1c, 0x0a, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, - 0x63, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, - 0x61, 0x63, 0x65, 0x22, 0x9d, 0x03, 0x0a, 0x15, 0x54, 0x61, 0x73, 0x6b, 0x45, 0x78, 0x65, 0x63, - 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 
0x12, 0x25, 0x0a, - 0x0e, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x64, - 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x53, 0x0a, 0x12, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, - 0x5f, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, - 0x32, 0x24, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x65, 0x76, 0x65, 0x6e, - 0x74, 0x2e, 0x45, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, - 0x63, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x52, 0x11, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, - 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x12, 0x4e, 0x0a, 0x12, 0x72, 0x65, 0x73, - 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x70, 0x6f, 0x6f, 0x6c, 0x5f, 0x69, 0x6e, 0x66, 0x6f, 0x18, - 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, - 0x2e, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x50, - 0x6f, 0x6f, 0x6c, 0x49, 0x6e, 0x66, 0x6f, 0x52, 0x10, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, - 0x65, 0x50, 0x6f, 0x6f, 0x6c, 0x49, 0x6e, 0x66, 0x6f, 0x12, 0x2b, 0x0a, 0x11, 0x70, 0x6c, 0x75, - 0x67, 0x69, 0x6e, 0x5f, 0x69, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x18, 0x04, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x10, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x49, 0x64, 0x65, 0x6e, - 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x12, 0x5a, 0x0a, 0x0e, 0x69, 0x6e, 0x73, 0x74, 0x61, 0x6e, - 0x63, 0x65, 0x5f, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x18, 0x10, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x33, - 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x2e, - 0x54, 0x61, 0x73, 0x6b, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x65, 0x74, - 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x49, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x43, 0x6c, - 0x61, 0x73, 0x73, 
0x52, 0x0d, 0x69, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x43, 0x6c, 0x61, - 0x73, 0x73, 0x22, 0x2f, 0x0a, 0x0d, 0x49, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x43, 0x6c, - 0x61, 0x73, 0x73, 0x12, 0x0b, 0x0a, 0x07, 0x44, 0x45, 0x46, 0x41, 0x55, 0x4c, 0x54, 0x10, 0x00, - 0x12, 0x11, 0x0a, 0x0d, 0x49, 0x4e, 0x54, 0x45, 0x52, 0x52, 0x55, 0x50, 0x54, 0x49, 0x42, 0x4c, - 0x45, 0x10, 0x01, 0x42, 0xb6, 0x01, 0x0a, 0x12, 0x63, 0x6f, 0x6d, 0x2e, 0x66, 0x6c, 0x79, 0x74, - 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x42, 0x0a, 0x45, 0x76, 0x65, 0x6e, - 0x74, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x3b, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, - 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x6f, 0x72, 0x67, 0x2f, 0x66, 0x6c, - 0x79, 0x74, 0x65, 0x2f, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2f, 0x67, 0x65, 0x6e, - 0x2f, 0x70, 0x62, 0x2d, 0x67, 0x6f, 0x2f, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2f, - 0x65, 0x76, 0x65, 0x6e, 0x74, 0xa2, 0x02, 0x03, 0x46, 0x45, 0x58, 0xaa, 0x02, 0x0e, 0x46, 0x6c, - 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x45, 0x76, 0x65, 0x6e, 0x74, 0xca, 0x02, 0x0e, 0x46, - 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x5c, 0x45, 0x76, 0x65, 0x6e, 0x74, 0xe2, 0x02, 0x1a, - 0x46, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x5c, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x5c, 0x47, - 0x50, 0x42, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0xea, 0x02, 0x0f, 0x46, 0x6c, 0x79, - 0x74, 0x65, 0x69, 0x64, 0x6c, 0x3a, 0x3a, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x62, 0x06, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x33, + 0x74, 0x61, 0x6d, 0x70, 0x52, 0x0a, 0x72, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x64, 0x41, 0x74, + 0x42, 0x0d, 0x0a, 0x0b, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x42, + 0x0f, 0x0a, 0x0d, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x5f, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, + 0x22, 0xc9, 0x03, 0x0a, 0x14, 0x45, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x52, 0x65, 0x73, + 0x6f, 0x75, 0x72, 0x63, 0x65, 
0x49, 0x6e, 0x66, 0x6f, 0x12, 0x1f, 0x0a, 0x0b, 0x65, 0x78, 0x74, + 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, + 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x49, 0x64, 0x12, 0x14, 0x0a, 0x05, 0x69, 0x6e, + 0x64, 0x65, 0x78, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x05, 0x69, 0x6e, 0x64, 0x65, 0x78, + 0x12, 0x23, 0x0a, 0x0d, 0x72, 0x65, 0x74, 0x72, 0x79, 0x5f, 0x61, 0x74, 0x74, 0x65, 0x6d, 0x70, + 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x0c, 0x72, 0x65, 0x74, 0x72, 0x79, 0x41, 0x74, + 0x74, 0x65, 0x6d, 0x70, 0x74, 0x12, 0x38, 0x0a, 0x05, 0x70, 0x68, 0x61, 0x73, 0x65, 0x18, 0x04, + 0x20, 0x01, 0x28, 0x0e, 0x32, 0x22, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, + 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x54, 0x61, 0x73, 0x6b, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, + 0x6f, 0x6e, 0x2e, 0x50, 0x68, 0x61, 0x73, 0x65, 0x52, 0x05, 0x70, 0x68, 0x61, 0x73, 0x65, 0x12, + 0x44, 0x0a, 0x0c, 0x63, 0x61, 0x63, 0x68, 0x65, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, + 0x05, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x21, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, + 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x43, 0x61, 0x74, 0x61, 0x6c, 0x6f, 0x67, 0x43, 0x61, 0x63, + 0x68, 0x65, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x0b, 0x63, 0x61, 0x63, 0x68, 0x65, 0x53, + 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x2a, 0x0a, 0x04, 0x6c, 0x6f, 0x67, 0x73, 0x18, 0x06, 0x20, + 0x03, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x63, + 0x6f, 0x72, 0x65, 0x2e, 0x54, 0x61, 0x73, 0x6b, 0x4c, 0x6f, 0x67, 0x52, 0x04, 0x6c, 0x6f, 0x67, + 0x73, 0x12, 0x5c, 0x0a, 0x16, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x6e, 0x6f, + 0x64, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x07, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x24, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x65, 0x76, 0x65, + 0x6e, 0x74, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 
0x4e, 0x6f, 0x64, 0x65, 0x4d, + 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x48, 0x00, 0x52, 0x14, 0x77, 0x6f, 0x72, 0x6b, 0x66, + 0x6c, 0x6f, 0x77, 0x4e, 0x6f, 0x64, 0x65, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, + 0x38, 0x0a, 0x0b, 0x63, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x5f, 0x69, 0x6e, 0x66, 0x6f, 0x18, 0x08, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x53, 0x74, 0x72, 0x75, 0x63, 0x74, 0x52, 0x0a, 0x63, + 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x49, 0x6e, 0x66, 0x6f, 0x42, 0x11, 0x0a, 0x0f, 0x74, 0x61, 0x72, + 0x67, 0x65, 0x74, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x22, 0x5b, 0x0a, 0x10, + 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x50, 0x6f, 0x6f, 0x6c, 0x49, 0x6e, 0x66, 0x6f, + 0x12, 0x29, 0x0a, 0x10, 0x61, 0x6c, 0x6c, 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x74, + 0x6f, 0x6b, 0x65, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0f, 0x61, 0x6c, 0x6c, 0x6f, + 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x12, 0x1c, 0x0a, 0x09, 0x6e, + 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, + 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x22, 0x9d, 0x03, 0x0a, 0x15, 0x54, 0x61, + 0x73, 0x6b, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x65, 0x74, 0x61, 0x64, + 0x61, 0x74, 0x61, 0x12, 0x25, 0x0a, 0x0e, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x64, + 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x67, 0x65, 0x6e, + 0x65, 0x72, 0x61, 0x74, 0x65, 0x64, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x53, 0x0a, 0x12, 0x65, 0x78, + 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x5f, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, + 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, + 0x6c, 0x2e, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x2e, 0x45, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, + 0x52, 
0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x52, 0x11, 0x65, 0x78, + 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x12, + 0x4e, 0x0a, 0x12, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x70, 0x6f, 0x6f, 0x6c, + 0x5f, 0x69, 0x6e, 0x66, 0x6f, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x66, 0x6c, + 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x2e, 0x52, 0x65, 0x73, + 0x6f, 0x75, 0x72, 0x63, 0x65, 0x50, 0x6f, 0x6f, 0x6c, 0x49, 0x6e, 0x66, 0x6f, 0x52, 0x10, 0x72, + 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x50, 0x6f, 0x6f, 0x6c, 0x49, 0x6e, 0x66, 0x6f, 0x12, + 0x2b, 0x0a, 0x11, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x5f, 0x69, 0x64, 0x65, 0x6e, 0x74, 0x69, + 0x66, 0x69, 0x65, 0x72, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x10, 0x70, 0x6c, 0x75, 0x67, + 0x69, 0x6e, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x12, 0x5a, 0x0a, 0x0e, + 0x69, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x5f, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x18, 0x10, + 0x20, 0x01, 0x28, 0x0e, 0x32, 0x33, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, + 0x65, 0x76, 0x65, 0x6e, 0x74, 0x2e, 0x54, 0x61, 0x73, 0x6b, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, + 0x69, 0x6f, 0x6e, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x49, 0x6e, 0x73, 0x74, + 0x61, 0x6e, 0x63, 0x65, 0x43, 0x6c, 0x61, 0x73, 0x73, 0x52, 0x0d, 0x69, 0x6e, 0x73, 0x74, 0x61, + 0x6e, 0x63, 0x65, 0x43, 0x6c, 0x61, 0x73, 0x73, 0x22, 0x2f, 0x0a, 0x0d, 0x49, 0x6e, 0x73, 0x74, + 0x61, 0x6e, 0x63, 0x65, 0x43, 0x6c, 0x61, 0x73, 0x73, 0x12, 0x0b, 0x0a, 0x07, 0x44, 0x45, 0x46, + 0x41, 0x55, 0x4c, 0x54, 0x10, 0x00, 0x12, 0x11, 0x0a, 0x0d, 0x49, 0x4e, 0x54, 0x45, 0x52, 0x52, + 0x55, 0x50, 0x54, 0x49, 0x42, 0x4c, 0x45, 0x10, 0x01, 0x42, 0xb6, 0x01, 0x0a, 0x12, 0x63, 0x6f, + 0x6d, 0x2e, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x65, 0x76, 0x65, 0x6e, 0x74, + 0x42, 0x0a, 0x45, 0x76, 0x65, 0x6e, 0x74, 
0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x3b, + 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x66, 0x6c, 0x79, 0x74, 0x65, + 0x6f, 0x72, 0x67, 0x2f, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x2f, 0x66, 0x6c, 0x79, 0x74, 0x65, 0x69, + 0x64, 0x6c, 0x2f, 0x67, 0x65, 0x6e, 0x2f, 0x70, 0x62, 0x2d, 0x67, 0x6f, 0x2f, 0x66, 0x6c, 0x79, + 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2f, 0x65, 0x76, 0x65, 0x6e, 0x74, 0xa2, 0x02, 0x03, 0x46, 0x45, + 0x58, 0xaa, 0x02, 0x0e, 0x46, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x2e, 0x45, 0x76, 0x65, + 0x6e, 0x74, 0xca, 0x02, 0x0e, 0x46, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x5c, 0x45, 0x76, + 0x65, 0x6e, 0x74, 0xe2, 0x02, 0x1a, 0x46, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x5c, 0x45, + 0x76, 0x65, 0x6e, 0x74, 0x5c, 0x47, 0x50, 0x42, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, + 0xea, 0x02, 0x0f, 0x46, 0x6c, 0x79, 0x74, 0x65, 0x69, 0x64, 0x6c, 0x3a, 0x3a, 0x45, 0x76, 0x65, + 0x6e, 0x74, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( @@ -1854,14 +1916,16 @@ var file_flyteidl_event_event_proto_depIdxs = []int32{ 26, // 38: flyteidl.event.ExternalResourceInfo.phase:type_name -> flyteidl.core.TaskExecution.Phase 21, // 39: flyteidl.event.ExternalResourceInfo.cache_status:type_name -> flyteidl.core.CatalogCacheStatus 27, // 40: flyteidl.event.ExternalResourceInfo.logs:type_name -> flyteidl.core.TaskLog - 10, // 41: flyteidl.event.TaskExecutionMetadata.external_resources:type_name -> flyteidl.event.ExternalResourceInfo - 11, // 42: flyteidl.event.TaskExecutionMetadata.resource_pool_info:type_name -> flyteidl.event.ResourcePoolInfo - 0, // 43: flyteidl.event.TaskExecutionMetadata.instance_class:type_name -> flyteidl.event.TaskExecutionMetadata.InstanceClass - 44, // [44:44] is the sub-list for method output_type - 44, // [44:44] is the sub-list for method input_type - 44, // [44:44] is the sub-list for extension type_name - 44, // [44:44] is the sub-list for extension extendee - 0, // [0:44] is the sub-list for 
field type_name + 3, // 41: flyteidl.event.ExternalResourceInfo.workflow_node_metadata:type_name -> flyteidl.event.WorkflowNodeMetadata + 28, // 42: flyteidl.event.ExternalResourceInfo.custom_info:type_name -> google.protobuf.Struct + 10, // 43: flyteidl.event.TaskExecutionMetadata.external_resources:type_name -> flyteidl.event.ExternalResourceInfo + 11, // 44: flyteidl.event.TaskExecutionMetadata.resource_pool_info:type_name -> flyteidl.event.ResourcePoolInfo + 0, // 45: flyteidl.event.TaskExecutionMetadata.instance_class:type_name -> flyteidl.event.TaskExecutionMetadata.InstanceClass + 46, // [46:46] is the sub-list for method output_type + 46, // [46:46] is the sub-list for method input_type + 46, // [46:46] is the sub-list for extension type_name + 46, // [46:46] is the sub-list for extension extendee + 0, // [0:46] is the sub-list for field type_name } func init() { file_flyteidl_event_event_proto_init() } @@ -2036,6 +2100,9 @@ func file_flyteidl_event_event_proto_init() { (*TaskExecutionEvent_Error)(nil), (*TaskExecutionEvent_OutputData)(nil), } + file_flyteidl_event_event_proto_msgTypes[9].OneofWrappers = []interface{}{ + (*ExternalResourceInfo_WorkflowNodeMetadata)(nil), + } type x struct{} out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ diff --git a/flyteidl/gen/pb-go/gateway/flyteidl/service/admin.swagger.json b/flyteidl/gen/pb-go/gateway/flyteidl/service/admin.swagger.json index c4f6f3ef7f..bb7df6af74 100644 --- a/flyteidl/gen/pb-go/gateway/flyteidl/service/admin.swagger.json +++ b/flyteidl/gen/pb-go/gateway/flyteidl/service/admin.swagger.json @@ -4249,6 +4249,15 @@ }, "title": "Sets custom attributes for a project, domain and workflow combination.\nFor more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration`" }, + "ArrayNodeDataMode": { + "type": "string", + "enum": [ + "SINGLE_INPUT_FILE", + "INDIVIDUAL_INPUT_FILES" + ], + "default": "SINGLE_INPUT_FILE", + "description": " - SINGLE_INPUT_FILE: 
Indicates the ArrayNode's input is a list of input values that map to subNode executions.\nThe file path set for the subNode will be the ArrayNode's input file, but the in-memory\nvalue utilized in propeller will be the individual value for each subNode execution.\nSubNode executions need to be able to read in and parse the individual value to execute correctly.\n - INDIVIDUAL_INPUT_FILES: Indicates the ArrayNode's input is a list of input values that map to subNode executions.\nPropeller will create input files for each ArrayNode subNode by parsing the inputs and\nsetting the InputBindings on each subNodeSpec. Both the file path and in-memory input values will\nbe the individual value for each subNode execution." + }, "BlobTypeBlobDimensionality": { "type": "string", "enum": [ @@ -5705,6 +5714,10 @@ "is_array": { "type": "boolean", "description": "Boolean flag indicating if the node is an array node. This is intended to uniquely identify\narray nodes from other nodes which can have is_parent_node as true." + }, + "is_eager": { + "type": "boolean", + "description": "Whether this node is an eager node." } }, "title": "Represents additional attributes related to a Node Execution" @@ -6560,6 +6573,14 @@ "execution_mode": { "$ref": "#/definitions/coreArrayNodeExecutionMode", "description": "execution_mode determines the execution path for ArrayNode." + }, + "is_original_sub_node_interface": { + "type": "boolean", + "title": "Indicates whether the sub node's original interface was altered" + }, + "data_mode": { + "$ref": "#/definitions/ArrayNodeDataMode", + "title": "data_mode determines how input data is passed to the sub-nodes" } }, "description": "ArrayNode is a Flyte node type that simplifies the execution of a sub-node over a list of input\nvalues. An ArrayNode can be executed with configurable parallelism (separate from the parent\nworkflow) and can be configured to succeed when a certain number of sub-nodes succeed." 
@@ -6704,6 +6725,10 @@ "$ref": "#/definitions/coreBindingDataMap", "description": "A map of bindings. The key is always a string." }, + "offloaded_metadata": { + "$ref": "#/definitions/coreLiteralOffloadedMetadata", + "description": "Offloaded literal metadata\nWhen you deserialize the offloaded metadata, it would be of Literal and its type would be defined by LiteralType stored in offloaded_metadata.\nUsed for nodes that don't have promises from upstream nodes such as ArrayNode subNodes." + }, "union": { "$ref": "#/definitions/coreUnionInfo" } @@ -8269,6 +8294,10 @@ "type": "string" }, "description": "cache_ignore_input_vars is the input variables that should not be included when calculating hash for cache." + }, + "is_eager": { + "type": "boolean", + "description": "is_eager indicates whether the task is eager or not.\nThis would be used by CreateTask endpoint." } }, "title": "Task Metadata" @@ -8664,6 +8693,13 @@ "$ref": "#/definitions/coreTaskLog" }, "title": "log information for the external resource execution" + }, + "workflow_node_metadata": { + "$ref": "#/definitions/flyteidleventWorkflowNodeMetadata" + }, + "custom_info": { + "type": "object", + "title": "Extensible field for custom, plugin-specific info" } }, "description": "This message contains metadata about external resources produced or used by a specific task execution." @@ -8764,6 +8800,10 @@ "is_in_dynamic_chain": { "type": "boolean", "description": "Tasks and subworkflows (but not launch plans) that are run within a dynamic task are effectively independent of\nthe tasks that are registered in Admin's db. Confusingly, they are often identical, but sometimes they are not\neven registered at all. Similar to the target_entity field, at the time Admin receives this event, it has no idea\nif the relevant execution entity is was registered, or dynamic. 
This field indicates that the target_entity ID,\nas well as task IDs in any corresponding Task Executions, should not be used to looked up the task in Admin's db." + }, + "is_eager": { + "type": "boolean", + "description": "Whether this node launched an eager task." } } }, diff --git a/flyteidl/gen/pb-go/gateway/flyteidl/service/agent.swagger.json b/flyteidl/gen/pb-go/gateway/flyteidl/service/agent.swagger.json index 070b6a8c60..4fbea61e15 100644 --- a/flyteidl/gen/pb-go/gateway/flyteidl/service/agent.swagger.json +++ b/flyteidl/gen/pb-go/gateway/flyteidl/service/agent.swagger.json @@ -466,6 +466,16 @@ "description": "- JSON: JSON / YAML for the metadata (which contains inlined primitive values). The representation is inline with the standard json specification as specified - https://www.json.org/json-en.html\n - PROTO: Proto is a serialized binary of `core.LiteralMap` defined in flyteidl/core", "title": "LiteralMapFormat decides the encoding format in which the input metadata should be made available to the containers.\nIf the user has access to the protocol buffer definitions, it is recommended to use the PROTO format.\nJSON and YAML do not need any protobuf definitions to read it\nAll remote references in core.LiteralMap are replaced with local filesystem references (the data is downloaded to local filesystem)" }, + "ExecutionErrorErrorKind": { + "type": "string", + "enum": [ + "UNKNOWN", + "USER", + "SYSTEM" + ], + "default": "UNKNOWN", + "title": "Error type: System or User" + }, "IOStrategyDownloadMode": { "type": "string", "enum": [ @@ -620,6 +630,33 @@ }, "description": "A message containing the agent metadata." }, + "adminAgentError": { + "type": "object", + "properties": { + "code": { + "type": "string", + "description": "A simplified code for errors, so that we can provide a glossary of all possible errors." + }, + "kind": { + "$ref": "#/definitions/adminAgentErrorKind", + "description": "An abstract error kind for this error. 
Defaults to Non_Recoverable if not specified." + }, + "origin": { + "$ref": "#/definitions/ExecutionErrorErrorKind", + "description": "Defines the origin of the error (system, user, unknown)." + } + }, + "description": "Error message to propagate detailed errors from agent executions to the execution\nengine." + }, + "adminAgentErrorKind": { + "type": "string", + "enum": [ + "NON_RECOVERABLE", + "RECOVERABLE" + ], + "default": "NON_RECOVERABLE", + "description": "Defines a generic error type that dictates the behavior of the retry strategy." + }, "adminCreateRequestHeader": { "type": "object", "properties": { @@ -817,6 +854,10 @@ "custom_info": { "type": "object", "description": "Custom data specific to the agent." + }, + "agent_error": { + "$ref": "#/definitions/adminAgentError", + "title": "The error raised during execution" } } }, @@ -1814,6 +1855,10 @@ "type": "string" }, "description": "cache_ignore_input_vars is the input variables that should not be included when calculating hash for cache." + }, + "is_eager": { + "type": "boolean", + "description": "is_eager indicates whether the task is eager or not.\nThis would be used by CreateTask endpoint." } }, "title": "Task Metadata" diff --git a/flyteidl/gen/pb-go/gateway/flyteidl/service/external_plugin_service.swagger.json b/flyteidl/gen/pb-go/gateway/flyteidl/service/external_plugin_service.swagger.json index e690cc556c..1880700dc2 100644 --- a/flyteidl/gen/pb-go/gateway/flyteidl/service/external_plugin_service.swagger.json +++ b/flyteidl/gen/pb-go/gateway/flyteidl/service/external_plugin_service.swagger.json @@ -1063,6 +1063,10 @@ "type": "string" }, "description": "cache_ignore_input_vars is the input variables that should not be included when calculating hash for cache." + }, + "is_eager": { + "type": "boolean", + "description": "is_eager indicates whether the task is eager or not.\nThis would be used by CreateTask endpoint." 
} }, "title": "Task Metadata" diff --git a/flyteidl/gen/pb-js/flyteidl.d.ts b/flyteidl/gen/pb-js/flyteidl.d.ts index 73b5a73eaa..1a5b3356c4 100644 --- a/flyteidl/gen/pb-js/flyteidl.d.ts +++ b/flyteidl/gen/pb-js/flyteidl.d.ts @@ -3797,6 +3797,9 @@ export namespace flyteidl { /** BindingData map */ map?: (flyteidl.core.IBindingDataMap|null); + /** BindingData offloadedMetadata */ + offloadedMetadata?: (flyteidl.core.ILiteralOffloadedMetadata|null); + /** BindingData union */ union?: (flyteidl.core.IUnionInfo|null); } @@ -3822,11 +3825,14 @@ export namespace flyteidl { /** BindingData map. */ public map?: (flyteidl.core.IBindingDataMap|null); + /** BindingData offloadedMetadata. */ + public offloadedMetadata?: (flyteidl.core.ILiteralOffloadedMetadata|null); + /** BindingData union. */ public union?: (flyteidl.core.IUnionInfo|null); /** BindingData value. */ - public value?: ("scalar"|"collection"|"promise"|"map"); + public value?: ("scalar"|"collection"|"promise"|"map"|"offloadedMetadata"); /** * Creates a new BindingData instance using the specified properties. @@ -4586,6 +4592,12 @@ export namespace flyteidl { /** ArrayNode executionMode */ executionMode?: (flyteidl.core.ArrayNode.ExecutionMode|null); + + /** ArrayNode isOriginalSubNodeInterface */ + isOriginalSubNodeInterface?: (google.protobuf.IBoolValue|null); + + /** ArrayNode dataMode */ + dataMode?: (flyteidl.core.ArrayNode.DataMode|null); } /** Represents an ArrayNode. */ @@ -4612,6 +4624,12 @@ export namespace flyteidl { /** ArrayNode executionMode. */ public executionMode: flyteidl.core.ArrayNode.ExecutionMode; + /** ArrayNode isOriginalSubNodeInterface. */ + public isOriginalSubNodeInterface?: (google.protobuf.IBoolValue|null); + + /** ArrayNode dataMode. */ + public dataMode: flyteidl.core.ArrayNode.DataMode; + /** ArrayNode parallelismOption. */ public parallelismOption?: "parallelism"; @@ -4658,6 +4676,12 @@ export namespace flyteidl { MINIMAL_STATE = 0, FULL_STATE = 1 } + + /** DataMode enum. 
*/ + enum DataMode { + SINGLE_INPUT_FILE = 0, + INDIVIDUAL_INPUT_FILES = 1 + } } /** Properties of a NodeMetadata. */ @@ -6401,6 +6425,9 @@ export namespace flyteidl { /** TaskMetadata cacheIgnoreInputVars */ cacheIgnoreInputVars?: (string[]|null); + + /** TaskMetadata isEager */ + isEager?: (boolean|null); } /** Represents a TaskMetadata. */ @@ -6448,6 +6475,9 @@ export namespace flyteidl { /** TaskMetadata cacheIgnoreInputVars. */ public cacheIgnoreInputVars: string[]; + /** TaskMetadata isEager. */ + public isEager: boolean; + /** TaskMetadata interruptibleValue. */ public interruptibleValue?: "interruptible"; @@ -8070,6 +8100,9 @@ export namespace flyteidl { /** CloudEventWorkflowExecution launchPlanId */ launchPlanId?: (flyteidl.core.IIdentifier|null); + + /** CloudEventWorkflowExecution labels */ + labels?: ({ [k: string]: string }|null); } /** Represents a CloudEventWorkflowExecution. */ @@ -8099,6 +8132,9 @@ export namespace flyteidl { /** CloudEventWorkflowExecution launchPlanId. */ public launchPlanId?: (flyteidl.core.IIdentifier|null); + /** CloudEventWorkflowExecution labels. */ + public labels: { [k: string]: string }; + /** * Creates a new CloudEventWorkflowExecution instance using the specified properties. * @param [properties] Properties to set @@ -8152,6 +8188,9 @@ export namespace flyteidl { /** CloudEventNodeExecution launchPlanId */ launchPlanId?: (flyteidl.core.IIdentifier|null); + + /** CloudEventNodeExecution labels */ + labels?: ({ [k: string]: string }|null); } /** Represents a CloudEventNodeExecution. */ @@ -8181,6 +8220,9 @@ export namespace flyteidl { /** CloudEventNodeExecution launchPlanId. */ public launchPlanId?: (flyteidl.core.IIdentifier|null); + /** CloudEventNodeExecution labels. */ + public labels: { [k: string]: string }; + /** * Creates a new CloudEventNodeExecution instance using the specified properties. 
* @param [properties] Properties to set @@ -8219,6 +8261,9 @@ export namespace flyteidl { /** CloudEventTaskExecution rawEvent */ rawEvent?: (flyteidl.event.ITaskExecutionEvent|null); + + /** CloudEventTaskExecution labels */ + labels?: ({ [k: string]: string }|null); } /** Represents a CloudEventTaskExecution. */ @@ -8233,6 +8278,9 @@ export namespace flyteidl { /** CloudEventTaskExecution rawEvent. */ public rawEvent?: (flyteidl.event.ITaskExecutionEvent|null); + /** CloudEventTaskExecution labels. */ + public labels: { [k: string]: string }; + /** * Creates a new CloudEventTaskExecution instance using the specified properties. * @param [properties] Properties to set @@ -8513,6 +8561,9 @@ export namespace flyteidl { /** NodeExecutionEvent isInDynamicChain */ isInDynamicChain?: (boolean|null); + + /** NodeExecutionEvent isEager */ + isEager?: (boolean|null); } /** Represents a NodeExecutionEvent. */ @@ -8596,6 +8647,9 @@ export namespace flyteidl { /** NodeExecutionEvent isInDynamicChain. */ public isInDynamicChain: boolean; + /** NodeExecutionEvent isEager. */ + public isEager: boolean; + /** NodeExecutionEvent inputValue. */ public inputValue?: ("inputUri"|"inputData"); @@ -9184,6 +9238,12 @@ export namespace flyteidl { /** ExternalResourceInfo logs */ logs?: (flyteidl.core.ITaskLog[]|null); + + /** ExternalResourceInfo workflowNodeMetadata */ + workflowNodeMetadata?: (flyteidl.event.IWorkflowNodeMetadata|null); + + /** ExternalResourceInfo customInfo */ + customInfo?: (google.protobuf.IStruct|null); } /** Represents an ExternalResourceInfo. */ @@ -9213,6 +9273,15 @@ export namespace flyteidl { /** ExternalResourceInfo logs. */ public logs: flyteidl.core.ITaskLog[]; + /** ExternalResourceInfo workflowNodeMetadata. */ + public workflowNodeMetadata?: (flyteidl.event.IWorkflowNodeMetadata|null); + + /** ExternalResourceInfo customInfo. */ + public customInfo?: (google.protobuf.IStruct|null); + + /** ExternalResourceInfo targetMetadata. 
*/ + public targetMetadata?: "workflowNodeMetadata"; + /** * Creates a new ExternalResourceInfo instance using the specified properties. * @param [properties] Properties to set @@ -10016,6 +10085,9 @@ export namespace flyteidl { /** Resource customInfo */ customInfo?: (google.protobuf.IStruct|null); + + /** Resource agentError */ + agentError?: (flyteidl.admin.IAgentError|null); } /** Represents a Resource. */ @@ -10045,6 +10117,9 @@ export namespace flyteidl { /** Resource customInfo. */ public customInfo?: (google.protobuf.IStruct|null); + /** Resource agentError. */ + public agentError?: (flyteidl.admin.IAgentError|null); + /** * Creates a new Resource instance using the specified properties. * @param [properties] Properties to set @@ -10899,6 +10974,79 @@ export namespace flyteidl { public static verify(message: { [k: string]: any }): (string|null); } + /** Properties of an AgentError. */ + interface IAgentError { + + /** AgentError code */ + code?: (string|null); + + /** AgentError kind */ + kind?: (flyteidl.admin.AgentError.Kind|null); + + /** AgentError origin */ + origin?: (flyteidl.core.ExecutionError.ErrorKind|null); + } + + /** Represents an AgentError. */ + class AgentError implements IAgentError { + + /** + * Constructs a new AgentError. + * @param [properties] Properties to set + */ + constructor(properties?: flyteidl.admin.IAgentError); + + /** AgentError code. */ + public code: string; + + /** AgentError kind. */ + public kind: flyteidl.admin.AgentError.Kind; + + /** AgentError origin. */ + public origin: flyteidl.core.ExecutionError.ErrorKind; + + /** + * Creates a new AgentError instance using the specified properties. + * @param [properties] Properties to set + * @returns AgentError instance + */ + public static create(properties?: flyteidl.admin.IAgentError): flyteidl.admin.AgentError; + + /** + * Encodes the specified AgentError message. Does not implicitly {@link flyteidl.admin.AgentError.verify|verify} messages. 
+ * @param message AgentError message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: flyteidl.admin.IAgentError, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an AgentError message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns AgentError + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): flyteidl.admin.AgentError; + + /** + * Verifies an AgentError message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + } + + namespace AgentError { + + /** Kind enum. */ + enum Kind { + NON_RECOVERABLE = 0, + RECOVERABLE = 1 + } + } + /** Properties of a ClusterAssignment. */ interface IClusterAssignment { @@ -16958,6 +17106,9 @@ export namespace flyteidl { /** NodeExecutionMetaData isArray */ isArray?: (boolean|null); + + /** NodeExecutionMetaData isEager */ + isEager?: (boolean|null); } /** Represents a NodeExecutionMetaData. */ @@ -16984,6 +17135,9 @@ export namespace flyteidl { /** NodeExecutionMetaData isArray. */ public isArray: boolean; + /** NodeExecutionMetaData isEager. */ + public isEager: boolean; + /** * Creates a new NodeExecutionMetaData instance using the specified properties. 
* @param [properties] Properties to set diff --git a/flyteidl/gen/pb-js/flyteidl.js b/flyteidl/gen/pb-js/flyteidl.js index 970a69229c..b23d7f5e01 100644 --- a/flyteidl/gen/pb-js/flyteidl.js +++ b/flyteidl/gen/pb-js/flyteidl.js @@ -9102,6 +9102,7 @@ * @property {flyteidl.core.IBindingDataCollection|null} [collection] BindingData collection * @property {flyteidl.core.IOutputReference|null} [promise] BindingData promise * @property {flyteidl.core.IBindingDataMap|null} [map] BindingData map + * @property {flyteidl.core.ILiteralOffloadedMetadata|null} [offloadedMetadata] BindingData offloadedMetadata * @property {flyteidl.core.IUnionInfo|null} [union] BindingData union */ @@ -9152,6 +9153,14 @@ */ BindingData.prototype.map = null; + /** + * BindingData offloadedMetadata. + * @member {flyteidl.core.ILiteralOffloadedMetadata|null|undefined} offloadedMetadata + * @memberof flyteidl.core.BindingData + * @instance + */ + BindingData.prototype.offloadedMetadata = null; + /** * BindingData union. * @member {flyteidl.core.IUnionInfo|null|undefined} union @@ -9165,12 +9174,12 @@ /** * BindingData value. 
- * @member {"scalar"|"collection"|"promise"|"map"|undefined} value + * @member {"scalar"|"collection"|"promise"|"map"|"offloadedMetadata"|undefined} value * @memberof flyteidl.core.BindingData * @instance */ Object.defineProperty(BindingData.prototype, "value", { - get: $util.oneOfGetter($oneOfFields = ["scalar", "collection", "promise", "map"]), + get: $util.oneOfGetter($oneOfFields = ["scalar", "collection", "promise", "map", "offloadedMetadata"]), set: $util.oneOfSetter($oneOfFields) }); @@ -9208,6 +9217,8 @@ $root.flyteidl.core.BindingDataMap.encode(message.map, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); if (message.union != null && message.hasOwnProperty("union")) $root.flyteidl.core.UnionInfo.encode(message.union, writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim(); + if (message.offloadedMetadata != null && message.hasOwnProperty("offloadedMetadata")) + $root.flyteidl.core.LiteralOffloadedMetadata.encode(message.offloadedMetadata, writer.uint32(/* id 6, wireType 2 =*/50).fork()).ldelim(); return writer; }; @@ -9241,6 +9252,9 @@ case 4: message.map = $root.flyteidl.core.BindingDataMap.decode(reader, reader.uint32()); break; + case 6: + message.offloadedMetadata = $root.flyteidl.core.LiteralOffloadedMetadata.decode(reader, reader.uint32()); + break; case 5: message.union = $root.flyteidl.core.UnionInfo.decode(reader, reader.uint32()); break; @@ -9302,6 +9316,16 @@ return "map." + error; } } + if (message.offloadedMetadata != null && message.hasOwnProperty("offloadedMetadata")) { + if (properties.value === 1) + return "value: multiple values"; + properties.value = 1; + { + var error = $root.flyteidl.core.LiteralOffloadedMetadata.verify(message.offloadedMetadata); + if (error) + return "offloadedMetadata." 
+ error; + } + } if (message.union != null && message.hasOwnProperty("union")) { var error = $root.flyteidl.core.UnionInfo.verify(message.union); if (error) @@ -10982,6 +11006,8 @@ * @property {number|null} [minSuccesses] ArrayNode minSuccesses * @property {number|null} [minSuccessRatio] ArrayNode minSuccessRatio * @property {flyteidl.core.ArrayNode.ExecutionMode|null} [executionMode] ArrayNode executionMode + * @property {google.protobuf.IBoolValue|null} [isOriginalSubNodeInterface] ArrayNode isOriginalSubNodeInterface + * @property {flyteidl.core.ArrayNode.DataMode|null} [dataMode] ArrayNode dataMode */ /** @@ -11039,6 +11065,22 @@ */ ArrayNode.prototype.executionMode = 0; + /** + * ArrayNode isOriginalSubNodeInterface. + * @member {google.protobuf.IBoolValue|null|undefined} isOriginalSubNodeInterface + * @memberof flyteidl.core.ArrayNode + * @instance + */ + ArrayNode.prototype.isOriginalSubNodeInterface = null; + + /** + * ArrayNode dataMode. + * @member {flyteidl.core.ArrayNode.DataMode} dataMode + * @memberof flyteidl.core.ArrayNode + * @instance + */ + ArrayNode.prototype.dataMode = 0; + // OneOf field names bound to virtual getters and setters var $oneOfFields; @@ -11098,6 +11140,10 @@ writer.uint32(/* id 4, wireType 5 =*/37).float(message.minSuccessRatio); if (message.executionMode != null && message.hasOwnProperty("executionMode")) writer.uint32(/* id 5, wireType 0 =*/40).int32(message.executionMode); + if (message.isOriginalSubNodeInterface != null && message.hasOwnProperty("isOriginalSubNodeInterface")) + $root.google.protobuf.BoolValue.encode(message.isOriginalSubNodeInterface, writer.uint32(/* id 6, wireType 2 =*/50).fork()).ldelim(); + if (message.dataMode != null && message.hasOwnProperty("dataMode")) + writer.uint32(/* id 7, wireType 0 =*/56).int32(message.dataMode); return writer; }; @@ -11134,6 +11180,12 @@ case 5: message.executionMode = reader.int32(); break; + case 6: + message.isOriginalSubNodeInterface = 
$root.google.protobuf.BoolValue.decode(reader, reader.uint32()); + break; + case 7: + message.dataMode = reader.int32(); + break; default: reader.skipType(tag & 7); break; @@ -11184,6 +11236,19 @@ case 1: break; } + if (message.isOriginalSubNodeInterface != null && message.hasOwnProperty("isOriginalSubNodeInterface")) { + var error = $root.google.protobuf.BoolValue.verify(message.isOriginalSubNodeInterface); + if (error) + return "isOriginalSubNodeInterface." + error; + } + if (message.dataMode != null && message.hasOwnProperty("dataMode")) + switch (message.dataMode) { + default: + return "dataMode: enum value expected"; + case 0: + case 1: + break; + } return null; }; @@ -11201,6 +11266,20 @@ return values; })(); + /** + * DataMode enum. + * @name flyteidl.core.ArrayNode.DataMode + * @enum {string} + * @property {number} SINGLE_INPUT_FILE=0 SINGLE_INPUT_FILE value + * @property {number} INDIVIDUAL_INPUT_FILES=1 INDIVIDUAL_INPUT_FILES value + */ + ArrayNode.DataMode = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "SINGLE_INPUT_FILE"] = 0; + values[valuesById[1] = "INDIVIDUAL_INPUT_FILES"] = 1; + return values; + })(); + return ArrayNode; })(); @@ -15302,6 +15381,7 @@ * @property {Object.|null} [tags] TaskMetadata tags * @property {string|null} [podTemplateName] TaskMetadata podTemplateName * @property {Array.|null} [cacheIgnoreInputVars] TaskMetadata cacheIgnoreInputVars + * @property {boolean|null} [isEager] TaskMetadata isEager */ /** @@ -15417,6 +15497,14 @@ */ TaskMetadata.prototype.cacheIgnoreInputVars = $util.emptyArray; + /** + * TaskMetadata isEager. 
+ * @member {boolean} isEager + * @memberof flyteidl.core.TaskMetadata + * @instance + */ + TaskMetadata.prototype.isEager = false; + // OneOf field names bound to virtual getters and setters var $oneOfFields; @@ -15481,6 +15569,8 @@ if (message.cacheIgnoreInputVars != null && message.cacheIgnoreInputVars.length) for (var i = 0; i < message.cacheIgnoreInputVars.length; ++i) writer.uint32(/* id 13, wireType 2 =*/106).string(message.cacheIgnoreInputVars[i]); + if (message.isEager != null && message.hasOwnProperty("isEager")) + writer.uint32(/* id 14, wireType 0 =*/112).bool(message.isEager); return writer; }; @@ -15545,6 +15635,9 @@ message.cacheIgnoreInputVars = []; message.cacheIgnoreInputVars.push(reader.string()); break; + case 14: + message.isEager = reader.bool(); + break; default: reader.skipType(tag & 7); break; @@ -15618,6 +15711,9 @@ if (!$util.isString(message.cacheIgnoreInputVars[i])) return "cacheIgnoreInputVars: string[] expected"; } + if (message.isEager != null && message.hasOwnProperty("isEager")) + if (typeof message.isEager !== "boolean") + return "isEager: boolean expected"; return null; }; @@ -19543,6 +19639,7 @@ * @property {flyteidl.core.IWorkflowExecutionIdentifier|null} [referenceExecution] CloudEventWorkflowExecution referenceExecution * @property {string|null} [principal] CloudEventWorkflowExecution principal * @property {flyteidl.core.IIdentifier|null} [launchPlanId] CloudEventWorkflowExecution launchPlanId + * @property {Object.|null} [labels] CloudEventWorkflowExecution labels */ /** @@ -19555,6 +19652,7 @@ */ function CloudEventWorkflowExecution(properties) { this.artifactIds = []; + this.labels = {}; if (properties) for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) if (properties[keys[i]] != null) @@ -19609,6 +19707,14 @@ */ CloudEventWorkflowExecution.prototype.launchPlanId = null; + /** + * CloudEventWorkflowExecution labels. 
+ * @member {Object.} labels + * @memberof flyteidl.event.CloudEventWorkflowExecution + * @instance + */ + CloudEventWorkflowExecution.prototype.labels = $util.emptyObject; + /** * Creates a new CloudEventWorkflowExecution instance using the specified properties. * @function create @@ -19646,6 +19752,9 @@ writer.uint32(/* id 5, wireType 2 =*/42).string(message.principal); if (message.launchPlanId != null && message.hasOwnProperty("launchPlanId")) $root.flyteidl.core.Identifier.encode(message.launchPlanId, writer.uint32(/* id 6, wireType 2 =*/50).fork()).ldelim(); + if (message.labels != null && message.hasOwnProperty("labels")) + for (var keys = Object.keys(message.labels), i = 0; i < keys.length; ++i) + writer.uint32(/* id 7, wireType 2 =*/58).fork().uint32(/* id 1, wireType 2 =*/10).string(keys[i]).uint32(/* id 2, wireType 2 =*/18).string(message.labels[keys[i]]).ldelim(); return writer; }; @@ -19663,7 +19772,7 @@ CloudEventWorkflowExecution.decode = function decode(reader, length) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.flyteidl.event.CloudEventWorkflowExecution(); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.flyteidl.event.CloudEventWorkflowExecution(), key; while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { @@ -19687,6 +19796,14 @@ case 6: message.launchPlanId = $root.flyteidl.core.Identifier.decode(reader, reader.uint32()); break; + case 7: + reader.skip().pos++; + if (message.labels === $util.emptyObject) + message.labels = {}; + key = reader.string(); + reader.pos++; + message.labels[key] = reader.string(); + break; default: reader.skipType(tag & 7); break; @@ -19738,6 +19855,14 @@ if (error) return "launchPlanId." 
+ error; } + if (message.labels != null && message.hasOwnProperty("labels")) { + if (!$util.isObject(message.labels)) + return "labels: object expected"; + var key = Object.keys(message.labels); + for (var i = 0; i < key.length; ++i) + if (!$util.isString(message.labels[key[i]])) + return "labels: string{k:string} expected"; + } return null; }; @@ -19756,6 +19881,7 @@ * @property {Array.|null} [artifactIds] CloudEventNodeExecution artifactIds * @property {string|null} [principal] CloudEventNodeExecution principal * @property {flyteidl.core.IIdentifier|null} [launchPlanId] CloudEventNodeExecution launchPlanId + * @property {Object.|null} [labels] CloudEventNodeExecution labels */ /** @@ -19768,6 +19894,7 @@ */ function CloudEventNodeExecution(properties) { this.artifactIds = []; + this.labels = {}; if (properties) for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) if (properties[keys[i]] != null) @@ -19822,6 +19949,14 @@ */ CloudEventNodeExecution.prototype.launchPlanId = null; + /** + * CloudEventNodeExecution labels. + * @member {Object.} labels + * @memberof flyteidl.event.CloudEventNodeExecution + * @instance + */ + CloudEventNodeExecution.prototype.labels = $util.emptyObject; + /** * Creates a new CloudEventNodeExecution instance using the specified properties. 
* @function create @@ -19859,6 +19994,9 @@ writer.uint32(/* id 5, wireType 2 =*/42).string(message.principal); if (message.launchPlanId != null && message.hasOwnProperty("launchPlanId")) $root.flyteidl.core.Identifier.encode(message.launchPlanId, writer.uint32(/* id 6, wireType 2 =*/50).fork()).ldelim(); + if (message.labels != null && message.hasOwnProperty("labels")) + for (var keys = Object.keys(message.labels), i = 0; i < keys.length; ++i) + writer.uint32(/* id 7, wireType 2 =*/58).fork().uint32(/* id 1, wireType 2 =*/10).string(keys[i]).uint32(/* id 2, wireType 2 =*/18).string(message.labels[keys[i]]).ldelim(); return writer; }; @@ -19876,7 +20014,7 @@ CloudEventNodeExecution.decode = function decode(reader, length) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.flyteidl.event.CloudEventNodeExecution(); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.flyteidl.event.CloudEventNodeExecution(), key; while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { @@ -19900,6 +20038,14 @@ case 6: message.launchPlanId = $root.flyteidl.core.Identifier.decode(reader, reader.uint32()); break; + case 7: + reader.skip().pos++; + if (message.labels === $util.emptyObject) + message.labels = {}; + key = reader.string(); + reader.pos++; + message.labels[key] = reader.string(); + break; default: reader.skipType(tag & 7); break; @@ -19951,6 +20097,14 @@ if (error) return "launchPlanId." 
+ error; } + if (message.labels != null && message.hasOwnProperty("labels")) { + if (!$util.isObject(message.labels)) + return "labels: object expected"; + var key = Object.keys(message.labels); + for (var i = 0; i < key.length; ++i) + if (!$util.isString(message.labels[key[i]])) + return "labels: string{k:string} expected"; + } return null; }; @@ -19964,6 +20118,7 @@ * @memberof flyteidl.event * @interface ICloudEventTaskExecution * @property {flyteidl.event.ITaskExecutionEvent|null} [rawEvent] CloudEventTaskExecution rawEvent + * @property {Object.|null} [labels] CloudEventTaskExecution labels */ /** @@ -19975,6 +20130,7 @@ * @param {flyteidl.event.ICloudEventTaskExecution=} [properties] Properties to set */ function CloudEventTaskExecution(properties) { + this.labels = {}; if (properties) for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) if (properties[keys[i]] != null) @@ -19989,6 +20145,14 @@ */ CloudEventTaskExecution.prototype.rawEvent = null; + /** + * CloudEventTaskExecution labels. + * @member {Object.} labels + * @memberof flyteidl.event.CloudEventTaskExecution + * @instance + */ + CloudEventTaskExecution.prototype.labels = $util.emptyObject; + /** * Creates a new CloudEventTaskExecution instance using the specified properties. 
* @function create @@ -20015,6 +20179,9 @@ writer = $Writer.create(); if (message.rawEvent != null && message.hasOwnProperty("rawEvent")) $root.flyteidl.event.TaskExecutionEvent.encode(message.rawEvent, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + if (message.labels != null && message.hasOwnProperty("labels")) + for (var keys = Object.keys(message.labels), i = 0; i < keys.length; ++i) + writer.uint32(/* id 2, wireType 2 =*/18).fork().uint32(/* id 1, wireType 2 =*/10).string(keys[i]).uint32(/* id 2, wireType 2 =*/18).string(message.labels[keys[i]]).ldelim(); return writer; }; @@ -20032,13 +20199,21 @@ CloudEventTaskExecution.decode = function decode(reader, length) { if (!(reader instanceof $Reader)) reader = $Reader.create(reader); - var end = length === undefined ? reader.len : reader.pos + length, message = new $root.flyteidl.event.CloudEventTaskExecution(); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.flyteidl.event.CloudEventTaskExecution(), key; while (reader.pos < end) { var tag = reader.uint32(); switch (tag >>> 3) { case 1: message.rawEvent = $root.flyteidl.event.TaskExecutionEvent.decode(reader, reader.uint32()); break; + case 2: + reader.skip().pos++; + if (message.labels === $util.emptyObject) + message.labels = {}; + key = reader.string(); + reader.pos++; + message.labels[key] = reader.string(); + break; default: reader.skipType(tag & 7); break; @@ -20063,6 +20238,14 @@ if (error) return "rawEvent." 
+ error; } + if (message.labels != null && message.hasOwnProperty("labels")) { + if (!$util.isObject(message.labels)) + return "labels: object expected"; + var key = Object.keys(message.labels); + for (var i = 0; i < key.length; ++i) + if (!$util.isString(message.labels[key[i]])) + return "labels: string{k:string} expected"; + } return null; }; @@ -20578,6 +20761,7 @@ * @property {boolean|null} [isArray] NodeExecutionEvent isArray * @property {flyteidl.core.IIdentifier|null} [targetEntity] NodeExecutionEvent targetEntity * @property {boolean|null} [isInDynamicChain] NodeExecutionEvent isInDynamicChain + * @property {boolean|null} [isEager] NodeExecutionEvent isEager */ /** @@ -20787,6 +20971,14 @@ */ NodeExecutionEvent.prototype.isInDynamicChain = false; + /** + * NodeExecutionEvent isEager. + * @member {boolean} isEager + * @memberof flyteidl.event.NodeExecutionEvent + * @instance + */ + NodeExecutionEvent.prototype.isEager = false; + // OneOf field names bound to virtual getters and setters var $oneOfFields; @@ -20895,6 +21087,8 @@ $root.flyteidl.core.Identifier.encode(message.targetEntity, writer.uint32(/* id 23, wireType 2 =*/186).fork()).ldelim(); if (message.isInDynamicChain != null && message.hasOwnProperty("isInDynamicChain")) writer.uint32(/* id 24, wireType 0 =*/192).bool(message.isInDynamicChain); + if (message.isEager != null && message.hasOwnProperty("isEager")) + writer.uint32(/* id 25, wireType 0 =*/200).bool(message.isEager); return writer; }; @@ -20988,6 +21182,9 @@ case 24: message.isInDynamicChain = reader.bool(); break; + case 25: + message.isEager = reader.bool(); + break; default: reader.skipType(tag & 7); break; @@ -21143,6 +21340,9 @@ if (message.isInDynamicChain != null && message.hasOwnProperty("isInDynamicChain")) if (typeof message.isInDynamicChain !== "boolean") return "isInDynamicChain: boolean expected"; + if (message.isEager != null && message.hasOwnProperty("isEager")) + if (typeof message.isEager !== "boolean") + return "isEager: 
boolean expected"; return null; }; @@ -22500,6 +22700,8 @@ * @property {flyteidl.core.TaskExecution.Phase|null} [phase] ExternalResourceInfo phase * @property {flyteidl.core.CatalogCacheStatus|null} [cacheStatus] ExternalResourceInfo cacheStatus * @property {Array.|null} [logs] ExternalResourceInfo logs + * @property {flyteidl.event.IWorkflowNodeMetadata|null} [workflowNodeMetadata] ExternalResourceInfo workflowNodeMetadata + * @property {google.protobuf.IStruct|null} [customInfo] ExternalResourceInfo customInfo */ /** @@ -22566,6 +22768,36 @@ */ ExternalResourceInfo.prototype.logs = $util.emptyArray; + /** + * ExternalResourceInfo workflowNodeMetadata. + * @member {flyteidl.event.IWorkflowNodeMetadata|null|undefined} workflowNodeMetadata + * @memberof flyteidl.event.ExternalResourceInfo + * @instance + */ + ExternalResourceInfo.prototype.workflowNodeMetadata = null; + + /** + * ExternalResourceInfo customInfo. + * @member {google.protobuf.IStruct|null|undefined} customInfo + * @memberof flyteidl.event.ExternalResourceInfo + * @instance + */ + ExternalResourceInfo.prototype.customInfo = null; + + // OneOf field names bound to virtual getters and setters + var $oneOfFields; + + /** + * ExternalResourceInfo targetMetadata. + * @member {"workflowNodeMetadata"|undefined} targetMetadata + * @memberof flyteidl.event.ExternalResourceInfo + * @instance + */ + Object.defineProperty(ExternalResourceInfo.prototype, "targetMetadata", { + get: $util.oneOfGetter($oneOfFields = ["workflowNodeMetadata"]), + set: $util.oneOfSetter($oneOfFields) + }); + /** * Creates a new ExternalResourceInfo instance using the specified properties. 
* @function create @@ -22603,6 +22835,10 @@ if (message.logs != null && message.logs.length) for (var i = 0; i < message.logs.length; ++i) $root.flyteidl.core.TaskLog.encode(message.logs[i], writer.uint32(/* id 6, wireType 2 =*/50).fork()).ldelim(); + if (message.workflowNodeMetadata != null && message.hasOwnProperty("workflowNodeMetadata")) + $root.flyteidl.event.WorkflowNodeMetadata.encode(message.workflowNodeMetadata, writer.uint32(/* id 7, wireType 2 =*/58).fork()).ldelim(); + if (message.customInfo != null && message.hasOwnProperty("customInfo")) + $root.google.protobuf.Struct.encode(message.customInfo, writer.uint32(/* id 8, wireType 2 =*/66).fork()).ldelim(); return writer; }; @@ -22644,6 +22880,12 @@ message.logs = []; message.logs.push($root.flyteidl.core.TaskLog.decode(reader, reader.uint32())); break; + case 7: + message.workflowNodeMetadata = $root.flyteidl.event.WorkflowNodeMetadata.decode(reader, reader.uint32()); + break; + case 8: + message.customInfo = $root.google.protobuf.Struct.decode(reader, reader.uint32()); + break; default: reader.skipType(tag & 7); break; @@ -22663,6 +22905,7 @@ ExternalResourceInfo.verify = function verify(message) { if (typeof message !== "object" || message === null) return "object expected"; + var properties = {}; if (message.externalId != null && message.hasOwnProperty("externalId")) if (!$util.isString(message.externalId)) return "externalId: string expected"; @@ -22709,6 +22952,19 @@ return "logs." + error; } } + if (message.workflowNodeMetadata != null && message.hasOwnProperty("workflowNodeMetadata")) { + properties.targetMetadata = 1; + { + var error = $root.flyteidl.event.WorkflowNodeMetadata.verify(message.workflowNodeMetadata); + if (error) + return "workflowNodeMetadata." + error; + } + } + if (message.customInfo != null && message.hasOwnProperty("customInfo")) { + var error = $root.google.protobuf.Struct.verify(message.customInfo); + if (error) + return "customInfo." 
+ error; + } return null; }; @@ -24545,6 +24801,7 @@ * @property {Array.|null} [logLinks] Resource logLinks * @property {flyteidl.core.TaskExecution.Phase|null} [phase] Resource phase * @property {google.protobuf.IStruct|null} [customInfo] Resource customInfo + * @property {flyteidl.admin.IAgentError|null} [agentError] Resource agentError */ /** @@ -24611,6 +24868,14 @@ */ Resource.prototype.customInfo = null; + /** + * Resource agentError. + * @member {flyteidl.admin.IAgentError|null|undefined} agentError + * @memberof flyteidl.admin.Resource + * @instance + */ + Resource.prototype.agentError = null; + /** * Creates a new Resource instance using the specified properties. * @function create @@ -24648,6 +24913,8 @@ writer.uint32(/* id 5, wireType 0 =*/40).int32(message.phase); if (message.customInfo != null && message.hasOwnProperty("customInfo")) $root.google.protobuf.Struct.encode(message.customInfo, writer.uint32(/* id 6, wireType 2 =*/50).fork()).ldelim(); + if (message.agentError != null && message.hasOwnProperty("agentError")) + $root.flyteidl.admin.AgentError.encode(message.agentError, writer.uint32(/* id 7, wireType 2 =*/58).fork()).ldelim(); return writer; }; @@ -24689,6 +24956,9 @@ case 6: message.customInfo = $root.google.protobuf.Struct.decode(reader, reader.uint32()); break; + case 7: + message.agentError = $root.flyteidl.admin.AgentError.decode(reader, reader.uint32()); + break; default: reader.skipType(tag & 7); break; @@ -24755,6 +25025,11 @@ if (error) return "customInfo." + error; } + if (message.agentError != null && message.hasOwnProperty("agentError")) { + var error = $root.flyteidl.admin.AgentError.verify(message.agentError); + if (error) + return "agentError." + error; + } return null; }; @@ -26651,6 +26926,175 @@ return GetTaskLogsResponse; })(); + admin.AgentError = (function() { + + /** + * Properties of an AgentError. 
+ * @memberof flyteidl.admin + * @interface IAgentError + * @property {string|null} [code] AgentError code + * @property {flyteidl.admin.AgentError.Kind|null} [kind] AgentError kind + * @property {flyteidl.core.ExecutionError.ErrorKind|null} [origin] AgentError origin + */ + + /** + * Constructs a new AgentError. + * @memberof flyteidl.admin + * @classdesc Represents an AgentError. + * @implements IAgentError + * @constructor + * @param {flyteidl.admin.IAgentError=} [properties] Properties to set + */ + function AgentError(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * AgentError code. + * @member {string} code + * @memberof flyteidl.admin.AgentError + * @instance + */ + AgentError.prototype.code = ""; + + /** + * AgentError kind. + * @member {flyteidl.admin.AgentError.Kind} kind + * @memberof flyteidl.admin.AgentError + * @instance + */ + AgentError.prototype.kind = 0; + + /** + * AgentError origin. + * @member {flyteidl.core.ExecutionError.ErrorKind} origin + * @memberof flyteidl.admin.AgentError + * @instance + */ + AgentError.prototype.origin = 0; + + /** + * Creates a new AgentError instance using the specified properties. + * @function create + * @memberof flyteidl.admin.AgentError + * @static + * @param {flyteidl.admin.IAgentError=} [properties] Properties to set + * @returns {flyteidl.admin.AgentError} AgentError instance + */ + AgentError.create = function create(properties) { + return new AgentError(properties); + }; + + /** + * Encodes the specified AgentError message. Does not implicitly {@link flyteidl.admin.AgentError.verify|verify} messages. 
+ * @function encode + * @memberof flyteidl.admin.AgentError + * @static + * @param {flyteidl.admin.IAgentError} message AgentError message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + AgentError.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.code != null && message.hasOwnProperty("code")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.code); + if (message.kind != null && message.hasOwnProperty("kind")) + writer.uint32(/* id 3, wireType 0 =*/24).int32(message.kind); + if (message.origin != null && message.hasOwnProperty("origin")) + writer.uint32(/* id 4, wireType 0 =*/32).int32(message.origin); + return writer; + }; + + /** + * Decodes an AgentError message from the specified reader or buffer. + * @function decode + * @memberof flyteidl.admin.AgentError + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {flyteidl.admin.AgentError} AgentError + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + AgentError.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.flyteidl.admin.AgentError(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.code = reader.string(); + break; + case 3: + message.kind = reader.int32(); + break; + case 4: + message.origin = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Verifies an AgentError message. 
+ * @function verify + * @memberof flyteidl.admin.AgentError + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + AgentError.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.code != null && message.hasOwnProperty("code")) + if (!$util.isString(message.code)) + return "code: string expected"; + if (message.kind != null && message.hasOwnProperty("kind")) + switch (message.kind) { + default: + return "kind: enum value expected"; + case 0: + case 1: + break; + } + if (message.origin != null && message.hasOwnProperty("origin")) + switch (message.origin) { + default: + return "origin: enum value expected"; + case 0: + case 1: + case 2: + break; + } + return null; + }; + + /** + * Kind enum. + * @name flyteidl.admin.AgentError.Kind + * @enum {string} + * @property {number} NON_RECOVERABLE=0 NON_RECOVERABLE value + * @property {number} RECOVERABLE=1 RECOVERABLE value + */ + AgentError.Kind = (function() { + var valuesById = {}, values = Object.create(valuesById); + values[valuesById[0] = "NON_RECOVERABLE"] = 0; + values[valuesById[1] = "RECOVERABLE"] = 1; + return values; + })(); + + return AgentError; + })(); + admin.ClusterAssignment = (function() { /** @@ -41057,6 +41501,7 @@ * @property {string|null} [specNodeId] NodeExecutionMetaData specNodeId * @property {boolean|null} [isDynamic] NodeExecutionMetaData isDynamic * @property {boolean|null} [isArray] NodeExecutionMetaData isArray + * @property {boolean|null} [isEager] NodeExecutionMetaData isEager */ /** @@ -41114,6 +41559,14 @@ */ NodeExecutionMetaData.prototype.isArray = false; + /** + * NodeExecutionMetaData isEager. 
+ * @member {boolean} isEager + * @memberof flyteidl.admin.NodeExecutionMetaData + * @instance + */ + NodeExecutionMetaData.prototype.isEager = false; + /** * Creates a new NodeExecutionMetaData instance using the specified properties. * @function create @@ -41148,6 +41601,8 @@ writer.uint32(/* id 4, wireType 0 =*/32).bool(message.isDynamic); if (message.isArray != null && message.hasOwnProperty("isArray")) writer.uint32(/* id 5, wireType 0 =*/40).bool(message.isArray); + if (message.isEager != null && message.hasOwnProperty("isEager")) + writer.uint32(/* id 6, wireType 0 =*/48).bool(message.isEager); return writer; }; @@ -41184,6 +41639,9 @@ case 5: message.isArray = reader.bool(); break; + case 6: + message.isEager = reader.bool(); + break; default: reader.skipType(tag & 7); break; @@ -41218,6 +41676,9 @@ if (message.isArray != null && message.hasOwnProperty("isArray")) if (typeof message.isArray !== "boolean") return "isArray: boolean expected"; + if (message.isEager != null && message.hasOwnProperty("isEager")) + if (typeof message.isEager !== "boolean") + return "isEager: boolean expected"; return null; }; diff --git a/flyteidl/gen/pb_python/flyteidl/admin/agent_pb2.py b/flyteidl/gen/pb_python/flyteidl/admin/agent_pb2.py index 03e181a3e1..924a7e94e6 100644 --- a/flyteidl/gen/pb_python/flyteidl/admin/agent_pb2.py +++ b/flyteidl/gen/pb_python/flyteidl/admin/agent_pb2.py @@ -23,7 +23,7 @@ from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 -DESCRIPTOR = 
_descriptor_pool.Default().AddSerializedFile(b'\n\x1a\x66lyteidl/admin/agent.proto\x12\x0e\x66lyteidl.admin\x1a\x1c\x66lyteidl/core/literals.proto\x1a\x19\x66lyteidl/core/tasks.proto\x1a\x1c\x66lyteidl/core/workflow.proto\x1a\x1e\x66lyteidl/core/identifier.proto\x1a\x1d\x66lyteidl/core/execution.proto\x1a\x1b\x66lyteidl/core/metrics.proto\x1a\x1c\x66lyteidl/core/security.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1cgoogle/protobuf/struct.proto\"\x9e\x07\n\x15TaskExecutionMetadata\x12R\n\x11task_execution_id\x18\x01 \x01(\x0b\x32&.flyteidl.core.TaskExecutionIdentifierR\x0ftaskExecutionId\x12\x1c\n\tnamespace\x18\x02 \x01(\tR\tnamespace\x12I\n\x06labels\x18\x03 \x03(\x0b\x32\x31.flyteidl.admin.TaskExecutionMetadata.LabelsEntryR\x06labels\x12X\n\x0b\x61nnotations\x18\x04 \x03(\x0b\x32\x36.flyteidl.admin.TaskExecutionMetadata.AnnotationsEntryR\x0b\x61nnotations\x12.\n\x13k8s_service_account\x18\x05 \x01(\tR\x11k8sServiceAccount\x12t\n\x15\x65nvironment_variables\x18\x06 \x03(\x0b\x32?.flyteidl.admin.TaskExecutionMetadata.EnvironmentVariablesEntryR\x14\x65nvironmentVariables\x12!\n\x0cmax_attempts\x18\x07 \x01(\x05R\x0bmaxAttempts\x12$\n\rinterruptible\x18\x08 \x01(\x08R\rinterruptible\x12\x46\n\x1finterruptible_failure_threshold\x18\t \x01(\x05R\x1dinterruptibleFailureThreshold\x12>\n\toverrides\x18\n \x01(\x0b\x32 .flyteidl.core.TaskNodeOverridesR\toverrides\x12\x33\n\x08identity\x18\x0b \x01(\x0b\x32\x17.flyteidl.core.IdentityR\x08identity\x1a\x39\n\x0bLabelsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\x1a>\n\x10\x41nnotationsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\x1aG\n\x19\x45nvironmentVariablesEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\"\x83\x02\n\x11\x43reateTaskRequest\x12\x31\n\x06inputs\x18\x01 
\x01(\x0b\x32\x19.flyteidl.core.LiteralMapR\x06inputs\x12\x37\n\x08template\x18\x02 \x01(\x0b\x32\x1b.flyteidl.core.TaskTemplateR\x08template\x12#\n\routput_prefix\x18\x03 \x01(\tR\x0coutputPrefix\x12]\n\x17task_execution_metadata\x18\x04 \x01(\x0b\x32%.flyteidl.admin.TaskExecutionMetadataR\x15taskExecutionMetadata\"9\n\x12\x43reateTaskResponse\x12#\n\rresource_meta\x18\x01 \x01(\x0cR\x0cresourceMeta\"\x87\x02\n\x13\x43reateRequestHeader\x12\x37\n\x08template\x18\x01 \x01(\x0b\x32\x1b.flyteidl.core.TaskTemplateR\x08template\x12#\n\routput_prefix\x18\x02 \x01(\tR\x0coutputPrefix\x12]\n\x17task_execution_metadata\x18\x03 \x01(\x0b\x32%.flyteidl.admin.TaskExecutionMetadataR\x15taskExecutionMetadata\x12\x33\n\x16max_dataset_size_bytes\x18\x04 \x01(\x03R\x13maxDatasetSizeBytes\"\x94\x01\n\x16\x45xecuteTaskSyncRequest\x12=\n\x06header\x18\x01 \x01(\x0b\x32#.flyteidl.admin.CreateRequestHeaderH\x00R\x06header\x12\x33\n\x06inputs\x18\x02 \x01(\x0b\x32\x19.flyteidl.core.LiteralMapH\x00R\x06inputsB\x06\n\x04part\"U\n\x1d\x45xecuteTaskSyncResponseHeader\x12\x34\n\x08resource\x18\x01 \x01(\x0b\x32\x18.flyteidl.admin.ResourceR\x08resource\"\xa0\x01\n\x17\x45xecuteTaskSyncResponse\x12G\n\x06header\x18\x01 \x01(\x0b\x32-.flyteidl.admin.ExecuteTaskSyncResponseHeaderH\x00R\x06header\x12\x35\n\x07outputs\x18\x02 \x01(\x0b\x32\x19.flyteidl.core.LiteralMapH\x00R\x07outputsB\x05\n\x03res\"\x99\x01\n\x0eGetTaskRequest\x12\x1f\n\ttask_type\x18\x01 \x01(\tB\x02\x18\x01R\x08taskType\x12#\n\rresource_meta\x18\x02 \x01(\x0cR\x0cresourceMeta\x12\x41\n\rtask_category\x18\x03 \x01(\x0b\x32\x1c.flyteidl.admin.TaskCategoryR\x0ctaskCategory\"G\n\x0fGetTaskResponse\x12\x34\n\x08resource\x18\x01 \x01(\x0b\x32\x18.flyteidl.admin.ResourceR\x08resource\"\xb3\x02\n\x08Resource\x12/\n\x05state\x18\x01 \x01(\x0e\x32\x15.flyteidl.admin.StateB\x02\x18\x01R\x05state\x12\x33\n\x07outputs\x18\x02 \x01(\x0b\x32\x19.flyteidl.core.LiteralMapR\x07outputs\x12\x18\n\x07message\x18\x03 
\x01(\tR\x07message\x12\x33\n\tlog_links\x18\x04 \x03(\x0b\x32\x16.flyteidl.core.TaskLogR\x08logLinks\x12\x38\n\x05phase\x18\x05 \x01(\x0e\x32\".flyteidl.core.TaskExecution.PhaseR\x05phase\x12\x38\n\x0b\x63ustom_info\x18\x06 \x01(\x0b\x32\x17.google.protobuf.StructR\ncustomInfo\"\x9c\x01\n\x11\x44\x65leteTaskRequest\x12\x1f\n\ttask_type\x18\x01 \x01(\tB\x02\x18\x01R\x08taskType\x12#\n\rresource_meta\x18\x02 \x01(\x0cR\x0cresourceMeta\x12\x41\n\rtask_category\x18\x03 \x01(\x0b\x32\x1c.flyteidl.admin.TaskCategoryR\x0ctaskCategory\"\x14\n\x12\x44\x65leteTaskResponse\"\xc4\x01\n\x05\x41gent\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12\x34\n\x14supported_task_types\x18\x02 \x03(\tB\x02\x18\x01R\x12supportedTaskTypes\x12\x17\n\x07is_sync\x18\x03 \x01(\x08R\x06isSync\x12X\n\x19supported_task_categories\x18\x04 \x03(\x0b\x32\x1c.flyteidl.admin.TaskCategoryR\x17supportedTaskCategories\"<\n\x0cTaskCategory\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12\x18\n\x07version\x18\x02 \x01(\x05R\x07version\"%\n\x0fGetAgentRequest\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\"?\n\x10GetAgentResponse\x12+\n\x05\x61gent\x18\x01 \x01(\x0b\x32\x15.flyteidl.admin.AgentR\x05\x61gent\"\x13\n\x11ListAgentsRequest\"C\n\x12ListAgentsResponse\x12-\n\x06\x61gents\x18\x01 \x03(\x0b\x32\x15.flyteidl.admin.AgentR\x06\x61gents\"\xdb\x02\n\x15GetTaskMetricsRequest\x12\x1f\n\ttask_type\x18\x01 \x01(\tB\x02\x18\x01R\x08taskType\x12#\n\rresource_meta\x18\x02 \x01(\x0cR\x0cresourceMeta\x12\x18\n\x07queries\x18\x03 \x03(\tR\x07queries\x12\x39\n\nstart_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tstartTime\x12\x35\n\x08\x65nd_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\x07\x65ndTime\x12-\n\x04step\x18\x06 \x01(\x0b\x32\x19.google.protobuf.DurationR\x04step\x12\x41\n\rtask_category\x18\x07 \x01(\x0b\x32\x1c.flyteidl.admin.TaskCategoryR\x0ctaskCategory\"X\n\x16GetTaskMetricsResponse\x12>\n\x07results\x18\x01 
\x03(\x0b\x32$.flyteidl.core.ExecutionMetricResultR\x07results\"\xc9\x01\n\x12GetTaskLogsRequest\x12\x1f\n\ttask_type\x18\x01 \x01(\tB\x02\x18\x01R\x08taskType\x12#\n\rresource_meta\x18\x02 \x01(\x0cR\x0cresourceMeta\x12\x14\n\x05lines\x18\x03 \x01(\x04R\x05lines\x12\x14\n\x05token\x18\x04 \x01(\tR\x05token\x12\x41\n\rtask_category\x18\x05 \x01(\x0b\x32\x1c.flyteidl.admin.TaskCategoryR\x0ctaskCategory\"1\n\x19GetTaskLogsResponseHeader\x12\x14\n\x05token\x18\x01 \x01(\tR\x05token\"3\n\x17GetTaskLogsResponseBody\x12\x18\n\x07results\x18\x01 \x03(\tR\x07results\"\xa1\x01\n\x13GetTaskLogsResponse\x12\x43\n\x06header\x18\x01 \x01(\x0b\x32).flyteidl.admin.GetTaskLogsResponseHeaderH\x00R\x06header\x12=\n\x04\x62ody\x18\x02 \x01(\x0b\x32\'.flyteidl.admin.GetTaskLogsResponseBodyH\x00R\x04\x62odyB\x06\n\x04part*b\n\x05State\x12\x15\n\x11RETRYABLE_FAILURE\x10\x00\x12\x15\n\x11PERMANENT_FAILURE\x10\x01\x12\x0b\n\x07PENDING\x10\x02\x12\x0b\n\x07RUNNING\x10\x03\x12\r\n\tSUCCEEDED\x10\x04\x1a\x02\x18\x01\x42\xb6\x01\n\x12\x63om.flyteidl.adminB\nAgentProtoP\x01Z;github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin\xa2\x02\x03\x46\x41X\xaa\x02\x0e\x46lyteidl.Admin\xca\x02\x0e\x46lyteidl\\Admin\xe2\x02\x1a\x46lyteidl\\Admin\\GPBMetadata\xea\x02\x0f\x46lyteidl::Adminb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1a\x66lyteidl/admin/agent.proto\x12\x0e\x66lyteidl.admin\x1a\x1c\x66lyteidl/core/literals.proto\x1a\x19\x66lyteidl/core/tasks.proto\x1a\x1c\x66lyteidl/core/workflow.proto\x1a\x1e\x66lyteidl/core/identifier.proto\x1a\x1d\x66lyteidl/core/execution.proto\x1a\x1b\x66lyteidl/core/metrics.proto\x1a\x1c\x66lyteidl/core/security.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1cgoogle/protobuf/struct.proto\"\x9e\x07\n\x15TaskExecutionMetadata\x12R\n\x11task_execution_id\x18\x01 \x01(\x0b\x32&.flyteidl.core.TaskExecutionIdentifierR\x0ftaskExecutionId\x12\x1c\n\tnamespace\x18\x02 
\x01(\tR\tnamespace\x12I\n\x06labels\x18\x03 \x03(\x0b\x32\x31.flyteidl.admin.TaskExecutionMetadata.LabelsEntryR\x06labels\x12X\n\x0b\x61nnotations\x18\x04 \x03(\x0b\x32\x36.flyteidl.admin.TaskExecutionMetadata.AnnotationsEntryR\x0b\x61nnotations\x12.\n\x13k8s_service_account\x18\x05 \x01(\tR\x11k8sServiceAccount\x12t\n\x15\x65nvironment_variables\x18\x06 \x03(\x0b\x32?.flyteidl.admin.TaskExecutionMetadata.EnvironmentVariablesEntryR\x14\x65nvironmentVariables\x12!\n\x0cmax_attempts\x18\x07 \x01(\x05R\x0bmaxAttempts\x12$\n\rinterruptible\x18\x08 \x01(\x08R\rinterruptible\x12\x46\n\x1finterruptible_failure_threshold\x18\t \x01(\x05R\x1dinterruptibleFailureThreshold\x12>\n\toverrides\x18\n \x01(\x0b\x32 .flyteidl.core.TaskNodeOverridesR\toverrides\x12\x33\n\x08identity\x18\x0b \x01(\x0b\x32\x17.flyteidl.core.IdentityR\x08identity\x1a\x39\n\x0bLabelsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\x1a>\n\x10\x41nnotationsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\x1aG\n\x19\x45nvironmentVariablesEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\"\x83\x02\n\x11\x43reateTaskRequest\x12\x31\n\x06inputs\x18\x01 \x01(\x0b\x32\x19.flyteidl.core.LiteralMapR\x06inputs\x12\x37\n\x08template\x18\x02 \x01(\x0b\x32\x1b.flyteidl.core.TaskTemplateR\x08template\x12#\n\routput_prefix\x18\x03 \x01(\tR\x0coutputPrefix\x12]\n\x17task_execution_metadata\x18\x04 \x01(\x0b\x32%.flyteidl.admin.TaskExecutionMetadataR\x15taskExecutionMetadata\"9\n\x12\x43reateTaskResponse\x12#\n\rresource_meta\x18\x01 \x01(\x0cR\x0cresourceMeta\"\x87\x02\n\x13\x43reateRequestHeader\x12\x37\n\x08template\x18\x01 \x01(\x0b\x32\x1b.flyteidl.core.TaskTemplateR\x08template\x12#\n\routput_prefix\x18\x02 \x01(\tR\x0coutputPrefix\x12]\n\x17task_execution_metadata\x18\x03 
\x01(\x0b\x32%.flyteidl.admin.TaskExecutionMetadataR\x15taskExecutionMetadata\x12\x33\n\x16max_dataset_size_bytes\x18\x04 \x01(\x03R\x13maxDatasetSizeBytes\"\x94\x01\n\x16\x45xecuteTaskSyncRequest\x12=\n\x06header\x18\x01 \x01(\x0b\x32#.flyteidl.admin.CreateRequestHeaderH\x00R\x06header\x12\x33\n\x06inputs\x18\x02 \x01(\x0b\x32\x19.flyteidl.core.LiteralMapH\x00R\x06inputsB\x06\n\x04part\"U\n\x1d\x45xecuteTaskSyncResponseHeader\x12\x34\n\x08resource\x18\x01 \x01(\x0b\x32\x18.flyteidl.admin.ResourceR\x08resource\"\xa0\x01\n\x17\x45xecuteTaskSyncResponse\x12G\n\x06header\x18\x01 \x01(\x0b\x32-.flyteidl.admin.ExecuteTaskSyncResponseHeaderH\x00R\x06header\x12\x35\n\x07outputs\x18\x02 \x01(\x0b\x32\x19.flyteidl.core.LiteralMapH\x00R\x07outputsB\x05\n\x03res\"\x99\x01\n\x0eGetTaskRequest\x12\x1f\n\ttask_type\x18\x01 \x01(\tB\x02\x18\x01R\x08taskType\x12#\n\rresource_meta\x18\x02 \x01(\x0cR\x0cresourceMeta\x12\x41\n\rtask_category\x18\x03 \x01(\x0b\x32\x1c.flyteidl.admin.TaskCategoryR\x0ctaskCategory\"G\n\x0fGetTaskResponse\x12\x34\n\x08resource\x18\x01 \x01(\x0b\x32\x18.flyteidl.admin.ResourceR\x08resource\"\xf0\x02\n\x08Resource\x12/\n\x05state\x18\x01 \x01(\x0e\x32\x15.flyteidl.admin.StateB\x02\x18\x01R\x05state\x12\x33\n\x07outputs\x18\x02 \x01(\x0b\x32\x19.flyteidl.core.LiteralMapR\x07outputs\x12\x18\n\x07message\x18\x03 \x01(\tR\x07message\x12\x33\n\tlog_links\x18\x04 \x03(\x0b\x32\x16.flyteidl.core.TaskLogR\x08logLinks\x12\x38\n\x05phase\x18\x05 \x01(\x0e\x32\".flyteidl.core.TaskExecution.PhaseR\x05phase\x12\x38\n\x0b\x63ustom_info\x18\x06 \x01(\x0b\x32\x17.google.protobuf.StructR\ncustomInfo\x12;\n\x0b\x61gent_error\x18\x07 \x01(\x0b\x32\x1a.flyteidl.admin.AgentErrorR\nagentError\"\x9c\x01\n\x11\x44\x65leteTaskRequest\x12\x1f\n\ttask_type\x18\x01 \x01(\tB\x02\x18\x01R\x08taskType\x12#\n\rresource_meta\x18\x02 \x01(\x0cR\x0cresourceMeta\x12\x41\n\rtask_category\x18\x03 
\x01(\x0b\x32\x1c.flyteidl.admin.TaskCategoryR\x0ctaskCategory\"\x14\n\x12\x44\x65leteTaskResponse\"\xc4\x01\n\x05\x41gent\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12\x34\n\x14supported_task_types\x18\x02 \x03(\tB\x02\x18\x01R\x12supportedTaskTypes\x12\x17\n\x07is_sync\x18\x03 \x01(\x08R\x06isSync\x12X\n\x19supported_task_categories\x18\x04 \x03(\x0b\x32\x1c.flyteidl.admin.TaskCategoryR\x17supportedTaskCategories\"<\n\x0cTaskCategory\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12\x18\n\x07version\x18\x02 \x01(\x05R\x07version\"%\n\x0fGetAgentRequest\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\"?\n\x10GetAgentResponse\x12+\n\x05\x61gent\x18\x01 \x01(\x0b\x32\x15.flyteidl.admin.AgentR\x05\x61gent\"\x13\n\x11ListAgentsRequest\"C\n\x12ListAgentsResponse\x12-\n\x06\x61gents\x18\x01 \x03(\x0b\x32\x15.flyteidl.admin.AgentR\x06\x61gents\"\xdb\x02\n\x15GetTaskMetricsRequest\x12\x1f\n\ttask_type\x18\x01 \x01(\tB\x02\x18\x01R\x08taskType\x12#\n\rresource_meta\x18\x02 \x01(\x0cR\x0cresourceMeta\x12\x18\n\x07queries\x18\x03 \x03(\tR\x07queries\x12\x39\n\nstart_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tstartTime\x12\x35\n\x08\x65nd_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\x07\x65ndTime\x12-\n\x04step\x18\x06 \x01(\x0b\x32\x19.google.protobuf.DurationR\x04step\x12\x41\n\rtask_category\x18\x07 \x01(\x0b\x32\x1c.flyteidl.admin.TaskCategoryR\x0ctaskCategory\"X\n\x16GetTaskMetricsResponse\x12>\n\x07results\x18\x01 \x03(\x0b\x32$.flyteidl.core.ExecutionMetricResultR\x07results\"\xc9\x01\n\x12GetTaskLogsRequest\x12\x1f\n\ttask_type\x18\x01 \x01(\tB\x02\x18\x01R\x08taskType\x12#\n\rresource_meta\x18\x02 \x01(\x0cR\x0cresourceMeta\x12\x14\n\x05lines\x18\x03 \x01(\x04R\x05lines\x12\x14\n\x05token\x18\x04 \x01(\tR\x05token\x12\x41\n\rtask_category\x18\x05 \x01(\x0b\x32\x1c.flyteidl.admin.TaskCategoryR\x0ctaskCategory\"1\n\x19GetTaskLogsResponseHeader\x12\x14\n\x05token\x18\x01 \x01(\tR\x05token\"3\n\x17GetTaskLogsResponseBody\x12\x18\n\x07results\x18\x01 
\x03(\tR\x07results\"\xa1\x01\n\x13GetTaskLogsResponse\x12\x43\n\x06header\x18\x01 \x01(\x0b\x32).flyteidl.admin.GetTaskLogsResponseHeaderH\x00R\x06header\x12=\n\x04\x62ody\x18\x02 \x01(\x0b\x32\'.flyteidl.admin.GetTaskLogsResponseBodyH\x00R\x04\x62odyB\x06\n\x04part\"\xc4\x01\n\nAgentError\x12\x12\n\x04\x63ode\x18\x01 \x01(\tR\x04\x63ode\x12\x33\n\x04kind\x18\x03 \x01(\x0e\x32\x1f.flyteidl.admin.AgentError.KindR\x04kind\x12?\n\x06origin\x18\x04 \x01(\x0e\x32\'.flyteidl.core.ExecutionError.ErrorKindR\x06origin\",\n\x04Kind\x12\x13\n\x0fNON_RECOVERABLE\x10\x00\x12\x0f\n\x0bRECOVERABLE\x10\x01*b\n\x05State\x12\x15\n\x11RETRYABLE_FAILURE\x10\x00\x12\x15\n\x11PERMANENT_FAILURE\x10\x01\x12\x0b\n\x07PENDING\x10\x02\x12\x0b\n\x07RUNNING\x10\x03\x12\r\n\tSUCCEEDED\x10\x04\x1a\x02\x18\x01\x42\xb6\x01\n\x12\x63om.flyteidl.adminB\nAgentProtoP\x01Z;github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin\xa2\x02\x03\x46\x41X\xaa\x02\x0e\x46lyteidl.Admin\xca\x02\x0e\x46lyteidl\\Admin\xe2\x02\x1a\x46lyteidl\\Admin\\GPBMetadata\xea\x02\x0f\x46lyteidl::Adminb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -52,8 +52,8 @@ _GETTASKMETRICSREQUEST.fields_by_name['task_type']._serialized_options = b'\030\001' _GETTASKLOGSREQUEST.fields_by_name['task_type']._options = None _GETTASKLOGSREQUEST.fields_by_name['task_type']._serialized_options = b'\030\001' - _globals['_STATE']._serialized_start=4354 - _globals['_STATE']._serialized_end=4452 + _globals['_STATE']._serialized_start=4614 + _globals['_STATE']._serialized_end=4712 _globals['_TASKEXECUTIONMETADATA']._serialized_start=351 _globals['_TASKEXECUTIONMETADATA']._serialized_end=1277 _globals['_TASKEXECUTIONMETADATA_LABELSENTRY']._serialized_start=1083 @@ -79,33 +79,37 @@ _globals['_GETTASKRESPONSE']._serialized_start=2423 _globals['_GETTASKRESPONSE']._serialized_end=2494 _globals['_RESOURCE']._serialized_start=2497 - _globals['_RESOURCE']._serialized_end=2804 - 
_globals['_DELETETASKREQUEST']._serialized_start=2807 - _globals['_DELETETASKREQUEST']._serialized_end=2963 - _globals['_DELETETASKRESPONSE']._serialized_start=2965 - _globals['_DELETETASKRESPONSE']._serialized_end=2985 - _globals['_AGENT']._serialized_start=2988 - _globals['_AGENT']._serialized_end=3184 - _globals['_TASKCATEGORY']._serialized_start=3186 - _globals['_TASKCATEGORY']._serialized_end=3246 - _globals['_GETAGENTREQUEST']._serialized_start=3248 - _globals['_GETAGENTREQUEST']._serialized_end=3285 - _globals['_GETAGENTRESPONSE']._serialized_start=3287 - _globals['_GETAGENTRESPONSE']._serialized_end=3350 - _globals['_LISTAGENTSREQUEST']._serialized_start=3352 - _globals['_LISTAGENTSREQUEST']._serialized_end=3371 - _globals['_LISTAGENTSRESPONSE']._serialized_start=3373 - _globals['_LISTAGENTSRESPONSE']._serialized_end=3440 - _globals['_GETTASKMETRICSREQUEST']._serialized_start=3443 - _globals['_GETTASKMETRICSREQUEST']._serialized_end=3790 - _globals['_GETTASKMETRICSRESPONSE']._serialized_start=3792 - _globals['_GETTASKMETRICSRESPONSE']._serialized_end=3880 - _globals['_GETTASKLOGSREQUEST']._serialized_start=3883 - _globals['_GETTASKLOGSREQUEST']._serialized_end=4084 - _globals['_GETTASKLOGSRESPONSEHEADER']._serialized_start=4086 - _globals['_GETTASKLOGSRESPONSEHEADER']._serialized_end=4135 - _globals['_GETTASKLOGSRESPONSEBODY']._serialized_start=4137 - _globals['_GETTASKLOGSRESPONSEBODY']._serialized_end=4188 - _globals['_GETTASKLOGSRESPONSE']._serialized_start=4191 - _globals['_GETTASKLOGSRESPONSE']._serialized_end=4352 + _globals['_RESOURCE']._serialized_end=2865 + _globals['_DELETETASKREQUEST']._serialized_start=2868 + _globals['_DELETETASKREQUEST']._serialized_end=3024 + _globals['_DELETETASKRESPONSE']._serialized_start=3026 + _globals['_DELETETASKRESPONSE']._serialized_end=3046 + _globals['_AGENT']._serialized_start=3049 + _globals['_AGENT']._serialized_end=3245 + _globals['_TASKCATEGORY']._serialized_start=3247 + 
_globals['_TASKCATEGORY']._serialized_end=3307 + _globals['_GETAGENTREQUEST']._serialized_start=3309 + _globals['_GETAGENTREQUEST']._serialized_end=3346 + _globals['_GETAGENTRESPONSE']._serialized_start=3348 + _globals['_GETAGENTRESPONSE']._serialized_end=3411 + _globals['_LISTAGENTSREQUEST']._serialized_start=3413 + _globals['_LISTAGENTSREQUEST']._serialized_end=3432 + _globals['_LISTAGENTSRESPONSE']._serialized_start=3434 + _globals['_LISTAGENTSRESPONSE']._serialized_end=3501 + _globals['_GETTASKMETRICSREQUEST']._serialized_start=3504 + _globals['_GETTASKMETRICSREQUEST']._serialized_end=3851 + _globals['_GETTASKMETRICSRESPONSE']._serialized_start=3853 + _globals['_GETTASKMETRICSRESPONSE']._serialized_end=3941 + _globals['_GETTASKLOGSREQUEST']._serialized_start=3944 + _globals['_GETTASKLOGSREQUEST']._serialized_end=4145 + _globals['_GETTASKLOGSRESPONSEHEADER']._serialized_start=4147 + _globals['_GETTASKLOGSRESPONSEHEADER']._serialized_end=4196 + _globals['_GETTASKLOGSRESPONSEBODY']._serialized_start=4198 + _globals['_GETTASKLOGSRESPONSEBODY']._serialized_end=4249 + _globals['_GETTASKLOGSRESPONSE']._serialized_start=4252 + _globals['_GETTASKLOGSRESPONSE']._serialized_end=4413 + _globals['_AGENTERROR']._serialized_start=4416 + _globals['_AGENTERROR']._serialized_end=4612 + _globals['_AGENTERROR_KIND']._serialized_start=4568 + _globals['_AGENTERROR_KIND']._serialized_end=4612 # @@protoc_insertion_point(module_scope) diff --git a/flyteidl/gen/pb_python/flyteidl/admin/agent_pb2.pyi b/flyteidl/gen/pb_python/flyteidl/admin/agent_pb2.pyi index 956b5d5a4d..d4243c2738 100644 --- a/flyteidl/gen/pb_python/flyteidl/admin/agent_pb2.pyi +++ b/flyteidl/gen/pb_python/flyteidl/admin/agent_pb2.pyi @@ -145,20 +145,22 @@ class GetTaskResponse(_message.Message): def __init__(self, resource: _Optional[_Union[Resource, _Mapping]] = ...) -> None: ... 
class Resource(_message.Message): - __slots__ = ["state", "outputs", "message", "log_links", "phase", "custom_info"] + __slots__ = ["state", "outputs", "message", "log_links", "phase", "custom_info", "agent_error"] STATE_FIELD_NUMBER: _ClassVar[int] OUTPUTS_FIELD_NUMBER: _ClassVar[int] MESSAGE_FIELD_NUMBER: _ClassVar[int] LOG_LINKS_FIELD_NUMBER: _ClassVar[int] PHASE_FIELD_NUMBER: _ClassVar[int] CUSTOM_INFO_FIELD_NUMBER: _ClassVar[int] + AGENT_ERROR_FIELD_NUMBER: _ClassVar[int] state: State outputs: _literals_pb2.LiteralMap message: str log_links: _containers.RepeatedCompositeFieldContainer[_execution_pb2.TaskLog] phase: _execution_pb2.TaskExecution.Phase custom_info: _struct_pb2.Struct - def __init__(self, state: _Optional[_Union[State, str]] = ..., outputs: _Optional[_Union[_literals_pb2.LiteralMap, _Mapping]] = ..., message: _Optional[str] = ..., log_links: _Optional[_Iterable[_Union[_execution_pb2.TaskLog, _Mapping]]] = ..., phase: _Optional[_Union[_execution_pb2.TaskExecution.Phase, str]] = ..., custom_info: _Optional[_Union[_struct_pb2.Struct, _Mapping]] = ...) -> None: ... + agent_error: AgentError + def __init__(self, state: _Optional[_Union[State, str]] = ..., outputs: _Optional[_Union[_literals_pb2.LiteralMap, _Mapping]] = ..., message: _Optional[str] = ..., log_links: _Optional[_Iterable[_Union[_execution_pb2.TaskLog, _Mapping]]] = ..., phase: _Optional[_Union[_execution_pb2.TaskExecution.Phase, str]] = ..., custom_info: _Optional[_Union[_struct_pb2.Struct, _Mapping]] = ..., agent_error: _Optional[_Union[AgentError, _Mapping]] = ...) -> None: ... class DeleteTaskRequest(_message.Message): __slots__ = ["task_type", "resource_meta", "task_category"] @@ -273,3 +275,19 @@ class GetTaskLogsResponse(_message.Message): header: GetTaskLogsResponseHeader body: GetTaskLogsResponseBody def __init__(self, header: _Optional[_Union[GetTaskLogsResponseHeader, _Mapping]] = ..., body: _Optional[_Union[GetTaskLogsResponseBody, _Mapping]] = ...) -> None: ... 
+ +class AgentError(_message.Message): + __slots__ = ["code", "kind", "origin"] + class Kind(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): + __slots__ = [] + NON_RECOVERABLE: _ClassVar[AgentError.Kind] + RECOVERABLE: _ClassVar[AgentError.Kind] + NON_RECOVERABLE: AgentError.Kind + RECOVERABLE: AgentError.Kind + CODE_FIELD_NUMBER: _ClassVar[int] + KIND_FIELD_NUMBER: _ClassVar[int] + ORIGIN_FIELD_NUMBER: _ClassVar[int] + code: str + kind: AgentError.Kind + origin: _execution_pb2.ExecutionError.ErrorKind + def __init__(self, code: _Optional[str] = ..., kind: _Optional[_Union[AgentError.Kind, str]] = ..., origin: _Optional[_Union[_execution_pb2.ExecutionError.ErrorKind, str]] = ...) -> None: ... diff --git a/flyteidl/gen/pb_python/flyteidl/admin/execution_pb2.py b/flyteidl/gen/pb_python/flyteidl/admin/execution_pb2.py index ff650d4c55..ddc0b22799 100644 --- a/flyteidl/gen/pb_python/flyteidl/admin/execution_pb2.py +++ b/flyteidl/gen/pb_python/flyteidl/admin/execution_pb2.py @@ -26,7 +26,7 @@ from flyteidl.admin import matchable_resource_pb2 as flyteidl_dot_admin_dot_matchable__resource__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1e\x66lyteidl/admin/execution.proto\x12\x0e\x66lyteidl.admin\x1a\'flyteidl/admin/cluster_assignment.proto\x1a\x1b\x66lyteidl/admin/common.proto\x1a\x1c\x66lyteidl/core/literals.proto\x1a\x1d\x66lyteidl/core/execution.proto\x1a\"flyteidl/core/execution_envs.proto\x1a\x1f\x66lyteidl/core/artifact_id.proto\x1a\x1e\x66lyteidl/core/identifier.proto\x1a\x1b\x66lyteidl/core/metrics.proto\x1a\x1c\x66lyteidl/core/security.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\x1a\'flyteidl/admin/matchable_resource.proto\"\xd6\x01\n\x16\x45xecutionCreateRequest\x12\x18\n\x07project\x18\x01 \x01(\tR\x07project\x12\x16\n\x06\x64omain\x18\x02 \x01(\tR\x06\x64omain\x12\x12\n\x04name\x18\x03 \x01(\tR\x04name\x12\x31\n\x04spec\x18\x04 
\x01(\x0b\x32\x1d.flyteidl.admin.ExecutionSpecR\x04spec\x12\x31\n\x06inputs\x18\x05 \x01(\x0b\x32\x19.flyteidl.core.LiteralMapR\x06inputs\x12\x10\n\x03org\x18\x06 \x01(\tR\x03org\"\x99\x01\n\x18\x45xecutionRelaunchRequest\x12:\n\x02id\x18\x01 \x01(\x0b\x32*.flyteidl.core.WorkflowExecutionIdentifierR\x02id\x12\x12\n\x04name\x18\x03 \x01(\tR\x04name\x12\'\n\x0foverwrite_cache\x18\x04 \x01(\x08R\x0eoverwriteCacheJ\x04\x08\x02\x10\x03\"\xa8\x01\n\x17\x45xecutionRecoverRequest\x12:\n\x02id\x18\x01 \x01(\x0b\x32*.flyteidl.core.WorkflowExecutionIdentifierR\x02id\x12\x12\n\x04name\x18\x02 \x01(\tR\x04name\x12=\n\x08metadata\x18\x03 \x01(\x0b\x32!.flyteidl.admin.ExecutionMetadataR\x08metadata\"U\n\x17\x45xecutionCreateResponse\x12:\n\x02id\x18\x01 \x01(\x0b\x32*.flyteidl.core.WorkflowExecutionIdentifierR\x02id\"Y\n\x1bWorkflowExecutionGetRequest\x12:\n\x02id\x18\x01 \x01(\x0b\x32*.flyteidl.core.WorkflowExecutionIdentifierR\x02id\"\xb6\x01\n\tExecution\x12:\n\x02id\x18\x01 \x01(\x0b\x32*.flyteidl.core.WorkflowExecutionIdentifierR\x02id\x12\x31\n\x04spec\x18\x02 \x01(\x0b\x32\x1d.flyteidl.admin.ExecutionSpecR\x04spec\x12:\n\x07\x63losure\x18\x03 \x01(\x0b\x32 .flyteidl.admin.ExecutionClosureR\x07\x63losure\"`\n\rExecutionList\x12\x39\n\nexecutions\x18\x01 \x03(\x0b\x32\x19.flyteidl.admin.ExecutionR\nexecutions\x12\x14\n\x05token\x18\x02 \x01(\tR\x05token\"e\n\x0eLiteralMapBlob\x12\x37\n\x06values\x18\x01 \x01(\x0b\x32\x19.flyteidl.core.LiteralMapB\x02\x18\x01H\x00R\x06values\x12\x12\n\x03uri\x18\x02 \x01(\tH\x00R\x03uriB\x06\n\x04\x64\x61ta\"C\n\rAbortMetadata\x12\x14\n\x05\x63\x61use\x18\x01 \x01(\tR\x05\x63\x61use\x12\x1c\n\tprincipal\x18\x02 \x01(\tR\tprincipal\"\x98\x07\n\x10\x45xecutionClosure\x12>\n\x07outputs\x18\x01 \x01(\x0b\x32\x1e.flyteidl.admin.LiteralMapBlobB\x02\x18\x01H\x00R\x07outputs\x12\x35\n\x05\x65rror\x18\x02 \x01(\x0b\x32\x1d.flyteidl.core.ExecutionErrorH\x00R\x05\x65rror\x12%\n\x0b\x61\x62ort_cause\x18\n 
\x01(\tB\x02\x18\x01H\x00R\nabortCause\x12\x46\n\x0e\x61\x62ort_metadata\x18\x0c \x01(\x0b\x32\x1d.flyteidl.admin.AbortMetadataH\x00R\rabortMetadata\x12@\n\x0boutput_data\x18\r \x01(\x0b\x32\x19.flyteidl.core.LiteralMapB\x02\x18\x01H\x00R\noutputData\x12\x46\n\x0f\x63omputed_inputs\x18\x03 \x01(\x0b\x32\x19.flyteidl.core.LiteralMapB\x02\x18\x01R\x0e\x63omputedInputs\x12<\n\x05phase\x18\x04 \x01(\x0e\x32&.flyteidl.core.WorkflowExecution.PhaseR\x05phase\x12\x39\n\nstarted_at\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tstartedAt\x12\x35\n\x08\x64uration\x18\x06 \x01(\x0b\x32\x19.google.protobuf.DurationR\x08\x64uration\x12\x39\n\ncreated_at\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tcreatedAt\x12\x39\n\nupdated_at\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tupdatedAt\x12\x42\n\rnotifications\x18\t \x03(\x0b\x32\x1c.flyteidl.admin.NotificationR\rnotifications\x12:\n\x0bworkflow_id\x18\x0b \x01(\x0b\x32\x19.flyteidl.core.IdentifierR\nworkflowId\x12]\n\x14state_change_details\x18\x0e \x01(\x0b\x32+.flyteidl.admin.ExecutionStateChangeDetailsR\x12stateChangeDetailsB\x0f\n\routput_result\"[\n\x0eSystemMetadata\x12+\n\x11\x65xecution_cluster\x18\x01 \x01(\tR\x10\x65xecutionCluster\x12\x1c\n\tnamespace\x18\x02 \x01(\tR\tnamespace\"\x85\x05\n\x11\x45xecutionMetadata\x12\x43\n\x04mode\x18\x01 \x01(\x0e\x32/.flyteidl.admin.ExecutionMetadata.ExecutionModeR\x04mode\x12\x1c\n\tprincipal\x18\x02 \x01(\tR\tprincipal\x12\x18\n\x07nesting\x18\x03 \x01(\rR\x07nesting\x12=\n\x0cscheduled_at\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\x0bscheduledAt\x12Z\n\x15parent_node_execution\x18\x05 \x01(\x0b\x32&.flyteidl.core.NodeExecutionIdentifierR\x13parentNodeExecution\x12[\n\x13reference_execution\x18\x10 \x01(\x0b\x32*.flyteidl.core.WorkflowExecutionIdentifierR\x12referenceExecution\x12G\n\x0fsystem_metadata\x18\x11 \x01(\x0b\x32\x1e.flyteidl.admin.SystemMetadataR\x0esystemMetadata\x12<\n\x0c\x61rtifact_ids\x18\x12 
\x03(\x0b\x32\x19.flyteidl.core.ArtifactIDR\x0b\x61rtifactIds\"t\n\rExecutionMode\x12\n\n\x06MANUAL\x10\x00\x12\r\n\tSCHEDULED\x10\x01\x12\n\n\x06SYSTEM\x10\x02\x12\x0c\n\x08RELAUNCH\x10\x03\x12\x12\n\x0e\x43HILD_WORKFLOW\x10\x04\x12\r\n\tRECOVERED\x10\x05\x12\x0b\n\x07TRIGGER\x10\x06\"V\n\x10NotificationList\x12\x42\n\rnotifications\x18\x01 \x03(\x0b\x32\x1c.flyteidl.admin.NotificationR\rnotifications\"\xd6\t\n\rExecutionSpec\x12:\n\x0blaunch_plan\x18\x01 \x01(\x0b\x32\x19.flyteidl.core.IdentifierR\nlaunchPlan\x12\x35\n\x06inputs\x18\x02 \x01(\x0b\x32\x19.flyteidl.core.LiteralMapB\x02\x18\x01R\x06inputs\x12=\n\x08metadata\x18\x03 \x01(\x0b\x32!.flyteidl.admin.ExecutionMetadataR\x08metadata\x12H\n\rnotifications\x18\x05 \x01(\x0b\x32 .flyteidl.admin.NotificationListH\x00R\rnotifications\x12!\n\x0b\x64isable_all\x18\x06 \x01(\x08H\x00R\ndisableAll\x12.\n\x06labels\x18\x07 \x01(\x0b\x32\x16.flyteidl.admin.LabelsR\x06labels\x12=\n\x0b\x61nnotations\x18\x08 \x01(\x0b\x32\x1b.flyteidl.admin.AnnotationsR\x0b\x61nnotations\x12I\n\x10security_context\x18\n \x01(\x0b\x32\x1e.flyteidl.core.SecurityContextR\x0fsecurityContext\x12\x39\n\tauth_role\x18\x10 \x01(\x0b\x32\x18.flyteidl.admin.AuthRoleB\x02\x18\x01R\x08\x61uthRole\x12M\n\x12quality_of_service\x18\x11 \x01(\x0b\x32\x1f.flyteidl.core.QualityOfServiceR\x10qualityOfService\x12\'\n\x0fmax_parallelism\x18\x12 \x01(\x05R\x0emaxParallelism\x12X\n\x16raw_output_data_config\x18\x13 \x01(\x0b\x32#.flyteidl.admin.RawOutputDataConfigR\x13rawOutputDataConfig\x12P\n\x12\x63luster_assignment\x18\x14 \x01(\x0b\x32!.flyteidl.admin.ClusterAssignmentR\x11\x63lusterAssignment\x12@\n\rinterruptible\x18\x15 \x01(\x0b\x32\x1a.google.protobuf.BoolValueR\rinterruptible\x12\'\n\x0foverwrite_cache\x18\x16 \x01(\x08R\x0eoverwriteCache\x12(\n\x04\x65nvs\x18\x17 \x01(\x0b\x32\x14.flyteidl.admin.EnvsR\x04\x65nvs\x12\x16\n\x04tags\x18\x18 \x03(\tB\x02\x18\x01R\x04tags\x12]\n\x17\x65xecution_cluster_label\x18\x19 
\x01(\x0b\x32%.flyteidl.admin.ExecutionClusterLabelR\x15\x65xecutionClusterLabel\x12\x61\n\x19\x65xecution_env_assignments\x18\x1a \x03(\x0b\x32%.flyteidl.core.ExecutionEnvAssignmentR\x17\x65xecutionEnvAssignmentsB\x18\n\x16notification_overridesJ\x04\x08\x04\x10\x05\"m\n\x19\x45xecutionTerminateRequest\x12:\n\x02id\x18\x01 \x01(\x0b\x32*.flyteidl.core.WorkflowExecutionIdentifierR\x02id\x12\x14\n\x05\x63\x61use\x18\x02 \x01(\tR\x05\x63\x61use\"\x1c\n\x1a\x45xecutionTerminateResponse\"]\n\x1fWorkflowExecutionGetDataRequest\x12:\n\x02id\x18\x01 \x01(\x0b\x32*.flyteidl.core.WorkflowExecutionIdentifierR\x02id\"\x88\x02\n WorkflowExecutionGetDataResponse\x12\x35\n\x07outputs\x18\x01 \x01(\x0b\x32\x17.flyteidl.admin.UrlBlobB\x02\x18\x01R\x07outputs\x12\x33\n\x06inputs\x18\x02 \x01(\x0b\x32\x17.flyteidl.admin.UrlBlobB\x02\x18\x01R\x06inputs\x12:\n\x0b\x66ull_inputs\x18\x03 \x01(\x0b\x32\x19.flyteidl.core.LiteralMapR\nfullInputs\x12<\n\x0c\x66ull_outputs\x18\x04 \x01(\x0b\x32\x19.flyteidl.core.LiteralMapR\x0b\x66ullOutputs\"\x8a\x01\n\x16\x45xecutionUpdateRequest\x12:\n\x02id\x18\x01 \x01(\x0b\x32*.flyteidl.core.WorkflowExecutionIdentifierR\x02id\x12\x34\n\x05state\x18\x02 \x01(\x0e\x32\x1e.flyteidl.admin.ExecutionStateR\x05state\"\xae\x01\n\x1b\x45xecutionStateChangeDetails\x12\x34\n\x05state\x18\x01 \x01(\x0e\x32\x1e.flyteidl.admin.ExecutionStateR\x05state\x12;\n\x0boccurred_at\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\noccurredAt\x12\x1c\n\tprincipal\x18\x03 \x01(\tR\tprincipal\"\x19\n\x17\x45xecutionUpdateResponse\"v\n\"WorkflowExecutionGetMetricsRequest\x12:\n\x02id\x18\x01 \x01(\x0b\x32*.flyteidl.core.WorkflowExecutionIdentifierR\x02id\x12\x14\n\x05\x64\x65pth\x18\x02 \x01(\x05R\x05\x64\x65pth\"N\n#WorkflowExecutionGetMetricsResponse\x12\'\n\x04span\x18\x01 
\x01(\x0b\x32\x13.flyteidl.core.SpanR\x04span*>\n\x0e\x45xecutionState\x12\x14\n\x10\x45XECUTION_ACTIVE\x10\x00\x12\x16\n\x12\x45XECUTION_ARCHIVED\x10\x01\x42\xba\x01\n\x12\x63om.flyteidl.adminB\x0e\x45xecutionProtoP\x01Z;github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin\xa2\x02\x03\x46\x41X\xaa\x02\x0e\x46lyteidl.Admin\xca\x02\x0e\x46lyteidl\\Admin\xe2\x02\x1a\x46lyteidl\\Admin\\GPBMetadata\xea\x02\x0f\x46lyteidl::Adminb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1e\x66lyteidl/admin/execution.proto\x12\x0e\x66lyteidl.admin\x1a\'flyteidl/admin/cluster_assignment.proto\x1a\x1b\x66lyteidl/admin/common.proto\x1a\x1c\x66lyteidl/core/literals.proto\x1a\x1d\x66lyteidl/core/execution.proto\x1a\"flyteidl/core/execution_envs.proto\x1a\x1f\x66lyteidl/core/artifact_id.proto\x1a\x1e\x66lyteidl/core/identifier.proto\x1a\x1b\x66lyteidl/core/metrics.proto\x1a\x1c\x66lyteidl/core/security.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\x1a\'flyteidl/admin/matchable_resource.proto\"\xd6\x01\n\x16\x45xecutionCreateRequest\x12\x18\n\x07project\x18\x01 \x01(\tR\x07project\x12\x16\n\x06\x64omain\x18\x02 \x01(\tR\x06\x64omain\x12\x12\n\x04name\x18\x03 \x01(\tR\x04name\x12\x31\n\x04spec\x18\x04 \x01(\x0b\x32\x1d.flyteidl.admin.ExecutionSpecR\x04spec\x12\x31\n\x06inputs\x18\x05 \x01(\x0b\x32\x19.flyteidl.core.LiteralMapR\x06inputs\x12\x10\n\x03org\x18\x06 \x01(\tR\x03org\"\x99\x01\n\x18\x45xecutionRelaunchRequest\x12:\n\x02id\x18\x01 \x01(\x0b\x32*.flyteidl.core.WorkflowExecutionIdentifierR\x02id\x12\x12\n\x04name\x18\x03 \x01(\tR\x04name\x12\'\n\x0foverwrite_cache\x18\x04 \x01(\x08R\x0eoverwriteCacheJ\x04\x08\x02\x10\x03\"\xa8\x01\n\x17\x45xecutionRecoverRequest\x12:\n\x02id\x18\x01 \x01(\x0b\x32*.flyteidl.core.WorkflowExecutionIdentifierR\x02id\x12\x12\n\x04name\x18\x02 \x01(\tR\x04name\x12=\n\x08metadata\x18\x03 
\x01(\x0b\x32!.flyteidl.admin.ExecutionMetadataR\x08metadata\"U\n\x17\x45xecutionCreateResponse\x12:\n\x02id\x18\x01 \x01(\x0b\x32*.flyteidl.core.WorkflowExecutionIdentifierR\x02id\"Y\n\x1bWorkflowExecutionGetRequest\x12:\n\x02id\x18\x01 \x01(\x0b\x32*.flyteidl.core.WorkflowExecutionIdentifierR\x02id\"\xb6\x01\n\tExecution\x12:\n\x02id\x18\x01 \x01(\x0b\x32*.flyteidl.core.WorkflowExecutionIdentifierR\x02id\x12\x31\n\x04spec\x18\x02 \x01(\x0b\x32\x1d.flyteidl.admin.ExecutionSpecR\x04spec\x12:\n\x07\x63losure\x18\x03 \x01(\x0b\x32 .flyteidl.admin.ExecutionClosureR\x07\x63losure\"`\n\rExecutionList\x12\x39\n\nexecutions\x18\x01 \x03(\x0b\x32\x19.flyteidl.admin.ExecutionR\nexecutions\x12\x14\n\x05token\x18\x02 \x01(\tR\x05token\"e\n\x0eLiteralMapBlob\x12\x37\n\x06values\x18\x01 \x01(\x0b\x32\x19.flyteidl.core.LiteralMapB\x02\x18\x01H\x00R\x06values\x12\x12\n\x03uri\x18\x02 \x01(\tH\x00R\x03uriB\x06\n\x04\x64\x61ta\"C\n\rAbortMetadata\x12\x14\n\x05\x63\x61use\x18\x01 \x01(\tR\x05\x63\x61use\x12\x1c\n\tprincipal\x18\x02 \x01(\tR\tprincipal\"\x98\x07\n\x10\x45xecutionClosure\x12>\n\x07outputs\x18\x01 \x01(\x0b\x32\x1e.flyteidl.admin.LiteralMapBlobB\x02\x18\x01H\x00R\x07outputs\x12\x35\n\x05\x65rror\x18\x02 \x01(\x0b\x32\x1d.flyteidl.core.ExecutionErrorH\x00R\x05\x65rror\x12%\n\x0b\x61\x62ort_cause\x18\n \x01(\tB\x02\x18\x01H\x00R\nabortCause\x12\x46\n\x0e\x61\x62ort_metadata\x18\x0c \x01(\x0b\x32\x1d.flyteidl.admin.AbortMetadataH\x00R\rabortMetadata\x12@\n\x0boutput_data\x18\r \x01(\x0b\x32\x19.flyteidl.core.LiteralMapB\x02\x18\x01H\x00R\noutputData\x12\x46\n\x0f\x63omputed_inputs\x18\x03 \x01(\x0b\x32\x19.flyteidl.core.LiteralMapB\x02\x18\x01R\x0e\x63omputedInputs\x12<\n\x05phase\x18\x04 \x01(\x0e\x32&.flyteidl.core.WorkflowExecution.PhaseR\x05phase\x12\x39\n\nstarted_at\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tstartedAt\x12\x35\n\x08\x64uration\x18\x06 \x01(\x0b\x32\x19.google.protobuf.DurationR\x08\x64uration\x12\x39\n\ncreated_at\x18\x07 
\x01(\x0b\x32\x1a.google.protobuf.TimestampR\tcreatedAt\x12\x39\n\nupdated_at\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tupdatedAt\x12\x42\n\rnotifications\x18\t \x03(\x0b\x32\x1c.flyteidl.admin.NotificationR\rnotifications\x12:\n\x0bworkflow_id\x18\x0b \x01(\x0b\x32\x19.flyteidl.core.IdentifierR\nworkflowId\x12]\n\x14state_change_details\x18\x0e \x01(\x0b\x32+.flyteidl.admin.ExecutionStateChangeDetailsR\x12stateChangeDetailsB\x0f\n\routput_result\"[\n\x0eSystemMetadata\x12+\n\x11\x65xecution_cluster\x18\x01 \x01(\tR\x10\x65xecutionCluster\x12\x1c\n\tnamespace\x18\x02 \x01(\tR\tnamespace\"\x8b\x05\n\x11\x45xecutionMetadata\x12\x43\n\x04mode\x18\x01 \x01(\x0e\x32/.flyteidl.admin.ExecutionMetadata.ExecutionModeR\x04mode\x12\x1c\n\tprincipal\x18\x02 \x01(\tR\tprincipal\x12\x18\n\x07nesting\x18\x03 \x01(\rR\x07nesting\x12=\n\x0cscheduled_at\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\x0bscheduledAt\x12Z\n\x15parent_node_execution\x18\x05 \x01(\x0b\x32&.flyteidl.core.NodeExecutionIdentifierR\x13parentNodeExecution\x12[\n\x13reference_execution\x18\x10 \x01(\x0b\x32*.flyteidl.core.WorkflowExecutionIdentifierR\x12referenceExecution\x12G\n\x0fsystem_metadata\x18\x11 \x01(\x0b\x32\x1e.flyteidl.admin.SystemMetadataR\x0esystemMetadata\x12<\n\x0c\x61rtifact_ids\x18\x12 \x03(\x0b\x32\x19.flyteidl.core.ArtifactIDR\x0b\x61rtifactIds\"z\n\rExecutionMode\x12\n\n\x06MANUAL\x10\x00\x12\r\n\tSCHEDULED\x10\x01\x12\n\n\x06SYSTEM\x10\x02\x12\x0c\n\x08RELAUNCH\x10\x03\x12\x12\n\x0e\x43HILD_WORKFLOW\x10\x04\x12\r\n\tRECOVERED\x10\x05\x12\x0b\n\x07TRIGGER\x10\x06\"\x04\x08\x07\x10\x07\"V\n\x10NotificationList\x12\x42\n\rnotifications\x18\x01 \x03(\x0b\x32\x1c.flyteidl.admin.NotificationR\rnotifications\"\xd6\t\n\rExecutionSpec\x12:\n\x0blaunch_plan\x18\x01 \x01(\x0b\x32\x19.flyteidl.core.IdentifierR\nlaunchPlan\x12\x35\n\x06inputs\x18\x02 \x01(\x0b\x32\x19.flyteidl.core.LiteralMapB\x02\x18\x01R\x06inputs\x12=\n\x08metadata\x18\x03 
\x01(\x0b\x32!.flyteidl.admin.ExecutionMetadataR\x08metadata\x12H\n\rnotifications\x18\x05 \x01(\x0b\x32 .flyteidl.admin.NotificationListH\x00R\rnotifications\x12!\n\x0b\x64isable_all\x18\x06 \x01(\x08H\x00R\ndisableAll\x12.\n\x06labels\x18\x07 \x01(\x0b\x32\x16.flyteidl.admin.LabelsR\x06labels\x12=\n\x0b\x61nnotations\x18\x08 \x01(\x0b\x32\x1b.flyteidl.admin.AnnotationsR\x0b\x61nnotations\x12I\n\x10security_context\x18\n \x01(\x0b\x32\x1e.flyteidl.core.SecurityContextR\x0fsecurityContext\x12\x39\n\tauth_role\x18\x10 \x01(\x0b\x32\x18.flyteidl.admin.AuthRoleB\x02\x18\x01R\x08\x61uthRole\x12M\n\x12quality_of_service\x18\x11 \x01(\x0b\x32\x1f.flyteidl.core.QualityOfServiceR\x10qualityOfService\x12\'\n\x0fmax_parallelism\x18\x12 \x01(\x05R\x0emaxParallelism\x12X\n\x16raw_output_data_config\x18\x13 \x01(\x0b\x32#.flyteidl.admin.RawOutputDataConfigR\x13rawOutputDataConfig\x12P\n\x12\x63luster_assignment\x18\x14 \x01(\x0b\x32!.flyteidl.admin.ClusterAssignmentR\x11\x63lusterAssignment\x12@\n\rinterruptible\x18\x15 \x01(\x0b\x32\x1a.google.protobuf.BoolValueR\rinterruptible\x12\'\n\x0foverwrite_cache\x18\x16 \x01(\x08R\x0eoverwriteCache\x12(\n\x04\x65nvs\x18\x17 \x01(\x0b\x32\x14.flyteidl.admin.EnvsR\x04\x65nvs\x12\x16\n\x04tags\x18\x18 \x03(\tB\x02\x18\x01R\x04tags\x12]\n\x17\x65xecution_cluster_label\x18\x19 \x01(\x0b\x32%.flyteidl.admin.ExecutionClusterLabelR\x15\x65xecutionClusterLabel\x12\x61\n\x19\x65xecution_env_assignments\x18\x1a \x03(\x0b\x32%.flyteidl.core.ExecutionEnvAssignmentR\x17\x65xecutionEnvAssignmentsB\x18\n\x16notification_overridesJ\x04\x08\x04\x10\x05\"m\n\x19\x45xecutionTerminateRequest\x12:\n\x02id\x18\x01 \x01(\x0b\x32*.flyteidl.core.WorkflowExecutionIdentifierR\x02id\x12\x14\n\x05\x63\x61use\x18\x02 \x01(\tR\x05\x63\x61use\"\x1c\n\x1a\x45xecutionTerminateResponse\"]\n\x1fWorkflowExecutionGetDataRequest\x12:\n\x02id\x18\x01 \x01(\x0b\x32*.flyteidl.core.WorkflowExecutionIdentifierR\x02id\"\x88\x02\n 
WorkflowExecutionGetDataResponse\x12\x35\n\x07outputs\x18\x01 \x01(\x0b\x32\x17.flyteidl.admin.UrlBlobB\x02\x18\x01R\x07outputs\x12\x33\n\x06inputs\x18\x02 \x01(\x0b\x32\x17.flyteidl.admin.UrlBlobB\x02\x18\x01R\x06inputs\x12:\n\x0b\x66ull_inputs\x18\x03 \x01(\x0b\x32\x19.flyteidl.core.LiteralMapR\nfullInputs\x12<\n\x0c\x66ull_outputs\x18\x04 \x01(\x0b\x32\x19.flyteidl.core.LiteralMapR\x0b\x66ullOutputs\"\x8a\x01\n\x16\x45xecutionUpdateRequest\x12:\n\x02id\x18\x01 \x01(\x0b\x32*.flyteidl.core.WorkflowExecutionIdentifierR\x02id\x12\x34\n\x05state\x18\x02 \x01(\x0e\x32\x1e.flyteidl.admin.ExecutionStateR\x05state\"\xae\x01\n\x1b\x45xecutionStateChangeDetails\x12\x34\n\x05state\x18\x01 \x01(\x0e\x32\x1e.flyteidl.admin.ExecutionStateR\x05state\x12;\n\x0boccurred_at\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\noccurredAt\x12\x1c\n\tprincipal\x18\x03 \x01(\tR\tprincipal\"\x19\n\x17\x45xecutionUpdateResponse\"v\n\"WorkflowExecutionGetMetricsRequest\x12:\n\x02id\x18\x01 \x01(\x0b\x32*.flyteidl.core.WorkflowExecutionIdentifierR\x02id\x12\x14\n\x05\x64\x65pth\x18\x02 \x01(\x05R\x05\x64\x65pth\"N\n#WorkflowExecutionGetMetricsResponse\x12\'\n\x04span\x18\x01 \x01(\x0b\x32\x13.flyteidl.core.SpanR\x04span*>\n\x0e\x45xecutionState\x12\x14\n\x10\x45XECUTION_ACTIVE\x10\x00\x12\x16\n\x12\x45XECUTION_ARCHIVED\x10\x01\x42\xba\x01\n\x12\x63om.flyteidl.adminB\x0e\x45xecutionProtoP\x01Z;github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin\xa2\x02\x03\x46\x41X\xaa\x02\x0e\x46lyteidl.Admin\xca\x02\x0e\x46lyteidl\\Admin\xe2\x02\x1a\x46lyteidl\\Admin\\GPBMetadata\xea\x02\x0f\x46lyteidl::Adminb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -55,8 +55,8 @@ _WORKFLOWEXECUTIONGETDATARESPONSE.fields_by_name['outputs']._serialized_options = b'\030\001' _WORKFLOWEXECUTIONGETDATARESPONSE.fields_by_name['inputs']._options = None _WORKFLOWEXECUTIONGETDATARESPONSE.fields_by_name['inputs']._serialized_options = b'\030\001' - 
_globals['_EXECUTIONSTATE']._serialized_start=5697 - _globals['_EXECUTIONSTATE']._serialized_end=5759 + _globals['_EXECUTIONSTATE']._serialized_start=5703 + _globals['_EXECUTIONSTATE']._serialized_end=5765 _globals['_EXECUTIONCREATEREQUEST']._serialized_start=480 _globals['_EXECUTIONCREATEREQUEST']._serialized_end=694 _globals['_EXECUTIONRELAUNCHREQUEST']._serialized_start=697 @@ -80,29 +80,29 @@ _globals['_SYSTEMMETADATA']._serialized_start=2579 _globals['_SYSTEMMETADATA']._serialized_end=2670 _globals['_EXECUTIONMETADATA']._serialized_start=2673 - _globals['_EXECUTIONMETADATA']._serialized_end=3318 + _globals['_EXECUTIONMETADATA']._serialized_end=3324 _globals['_EXECUTIONMETADATA_EXECUTIONMODE']._serialized_start=3202 - _globals['_EXECUTIONMETADATA_EXECUTIONMODE']._serialized_end=3318 - _globals['_NOTIFICATIONLIST']._serialized_start=3320 - _globals['_NOTIFICATIONLIST']._serialized_end=3406 - _globals['_EXECUTIONSPEC']._serialized_start=3409 - _globals['_EXECUTIONSPEC']._serialized_end=4647 - _globals['_EXECUTIONTERMINATEREQUEST']._serialized_start=4649 - _globals['_EXECUTIONTERMINATEREQUEST']._serialized_end=4758 - _globals['_EXECUTIONTERMINATERESPONSE']._serialized_start=4760 - _globals['_EXECUTIONTERMINATERESPONSE']._serialized_end=4788 - _globals['_WORKFLOWEXECUTIONGETDATAREQUEST']._serialized_start=4790 - _globals['_WORKFLOWEXECUTIONGETDATAREQUEST']._serialized_end=4883 - _globals['_WORKFLOWEXECUTIONGETDATARESPONSE']._serialized_start=4886 - _globals['_WORKFLOWEXECUTIONGETDATARESPONSE']._serialized_end=5150 - _globals['_EXECUTIONUPDATEREQUEST']._serialized_start=5153 - _globals['_EXECUTIONUPDATEREQUEST']._serialized_end=5291 - _globals['_EXECUTIONSTATECHANGEDETAILS']._serialized_start=5294 - _globals['_EXECUTIONSTATECHANGEDETAILS']._serialized_end=5468 - _globals['_EXECUTIONUPDATERESPONSE']._serialized_start=5470 - _globals['_EXECUTIONUPDATERESPONSE']._serialized_end=5495 - _globals['_WORKFLOWEXECUTIONGETMETRICSREQUEST']._serialized_start=5497 - 
_globals['_WORKFLOWEXECUTIONGETMETRICSREQUEST']._serialized_end=5615 - _globals['_WORKFLOWEXECUTIONGETMETRICSRESPONSE']._serialized_start=5617 - _globals['_WORKFLOWEXECUTIONGETMETRICSRESPONSE']._serialized_end=5695 + _globals['_EXECUTIONMETADATA_EXECUTIONMODE']._serialized_end=3324 + _globals['_NOTIFICATIONLIST']._serialized_start=3326 + _globals['_NOTIFICATIONLIST']._serialized_end=3412 + _globals['_EXECUTIONSPEC']._serialized_start=3415 + _globals['_EXECUTIONSPEC']._serialized_end=4653 + _globals['_EXECUTIONTERMINATEREQUEST']._serialized_start=4655 + _globals['_EXECUTIONTERMINATEREQUEST']._serialized_end=4764 + _globals['_EXECUTIONTERMINATERESPONSE']._serialized_start=4766 + _globals['_EXECUTIONTERMINATERESPONSE']._serialized_end=4794 + _globals['_WORKFLOWEXECUTIONGETDATAREQUEST']._serialized_start=4796 + _globals['_WORKFLOWEXECUTIONGETDATAREQUEST']._serialized_end=4889 + _globals['_WORKFLOWEXECUTIONGETDATARESPONSE']._serialized_start=4892 + _globals['_WORKFLOWEXECUTIONGETDATARESPONSE']._serialized_end=5156 + _globals['_EXECUTIONUPDATEREQUEST']._serialized_start=5159 + _globals['_EXECUTIONUPDATEREQUEST']._serialized_end=5297 + _globals['_EXECUTIONSTATECHANGEDETAILS']._serialized_start=5300 + _globals['_EXECUTIONSTATECHANGEDETAILS']._serialized_end=5474 + _globals['_EXECUTIONUPDATERESPONSE']._serialized_start=5476 + _globals['_EXECUTIONUPDATERESPONSE']._serialized_end=5501 + _globals['_WORKFLOWEXECUTIONGETMETRICSREQUEST']._serialized_start=5503 + _globals['_WORKFLOWEXECUTIONGETMETRICSREQUEST']._serialized_end=5621 + _globals['_WORKFLOWEXECUTIONGETMETRICSRESPONSE']._serialized_start=5623 + _globals['_WORKFLOWEXECUTIONGETMETRICSRESPONSE']._serialized_end=5701 # @@protoc_insertion_point(module_scope) diff --git a/flyteidl/gen/pb_python/flyteidl/admin/node_execution_pb2.py b/flyteidl/gen/pb_python/flyteidl/admin/node_execution_pb2.py index 93a29df4d6..b3035a8318 100644 --- a/flyteidl/gen/pb_python/flyteidl/admin/node_execution_pb2.py +++ 
b/flyteidl/gen/pb_python/flyteidl/admin/node_execution_pb2.py @@ -21,7 +21,7 @@ from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n#flyteidl/admin/node_execution.proto\x12\x0e\x66lyteidl.admin\x1a\x1b\x66lyteidl/admin/common.proto\x1a\x1d\x66lyteidl/core/execution.proto\x1a\x1b\x66lyteidl/core/catalog.proto\x1a\x1c\x66lyteidl/core/compiler.proto\x1a\x1e\x66lyteidl/core/identifier.proto\x1a\x1c\x66lyteidl/core/literals.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/duration.proto\"Q\n\x17NodeExecutionGetRequest\x12\x36\n\x02id\x18\x01 \x01(\x0b\x32&.flyteidl.core.NodeExecutionIdentifierR\x02id\"\x99\x02\n\x18NodeExecutionListRequest\x12^\n\x15workflow_execution_id\x18\x01 \x01(\x0b\x32*.flyteidl.core.WorkflowExecutionIdentifierR\x13workflowExecutionId\x12\x14\n\x05limit\x18\x02 \x01(\rR\x05limit\x12\x14\n\x05token\x18\x03 \x01(\tR\x05token\x12\x18\n\x07\x66ilters\x18\x04 \x01(\tR\x07\x66ilters\x12-\n\x07sort_by\x18\x05 \x01(\x0b\x32\x14.flyteidl.admin.SortR\x06sortBy\x12(\n\x10unique_parent_id\x18\x06 \x01(\tR\x0euniqueParentId\"\xea\x01\n\x1fNodeExecutionForTaskListRequest\x12R\n\x11task_execution_id\x18\x01 \x01(\x0b\x32&.flyteidl.core.TaskExecutionIdentifierR\x0ftaskExecutionId\x12\x14\n\x05limit\x18\x02 \x01(\rR\x05limit\x12\x14\n\x05token\x18\x03 \x01(\tR\x05token\x12\x18\n\x07\x66ilters\x18\x04 \x01(\tR\x07\x66ilters\x12-\n\x07sort_by\x18\x05 \x01(\x0b\x32\x14.flyteidl.admin.SortR\x06sortBy\"\xe7\x01\n\rNodeExecution\x12\x36\n\x02id\x18\x01 \x01(\x0b\x32&.flyteidl.core.NodeExecutionIdentifierR\x02id\x12\x1b\n\tinput_uri\x18\x02 \x01(\tR\x08inputUri\x12>\n\x07\x63losure\x18\x03 \x01(\x0b\x32$.flyteidl.admin.NodeExecutionClosureR\x07\x63losure\x12\x41\n\x08metadata\x18\x04 \x01(\x0b\x32%.flyteidl.admin.NodeExecutionMetaDataR\x08metadata\"\xba\x01\n\x15NodeExecutionMetaData\x12\x1f\n\x0bretry_group\x18\x01 
\x01(\tR\nretryGroup\x12$\n\x0eis_parent_node\x18\x02 \x01(\x08R\x0cisParentNode\x12 \n\x0cspec_node_id\x18\x03 \x01(\tR\nspecNodeId\x12\x1d\n\nis_dynamic\x18\x04 \x01(\x08R\tisDynamic\x12\x19\n\x08is_array\x18\x05 \x01(\x08R\x07isArray\"q\n\x11NodeExecutionList\x12\x46\n\x0fnode_executions\x18\x01 \x03(\x0b\x32\x1d.flyteidl.admin.NodeExecutionR\x0enodeExecutions\x12\x14\n\x05token\x18\x02 \x01(\tR\x05token\"\xf6\x05\n\x14NodeExecutionClosure\x12#\n\noutput_uri\x18\x01 \x01(\tB\x02\x18\x01H\x00R\toutputUri\x12\x35\n\x05\x65rror\x18\x02 \x01(\x0b\x32\x1d.flyteidl.core.ExecutionErrorH\x00R\x05\x65rror\x12@\n\x0boutput_data\x18\n \x01(\x0b\x32\x19.flyteidl.core.LiteralMapB\x02\x18\x01H\x00R\noutputData\x12\x38\n\x05phase\x18\x03 \x01(\x0e\x32\".flyteidl.core.NodeExecution.PhaseR\x05phase\x12\x39\n\nstarted_at\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tstartedAt\x12\x35\n\x08\x64uration\x18\x05 \x01(\x0b\x32\x19.google.protobuf.DurationR\x08\x64uration\x12\x39\n\ncreated_at\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tcreatedAt\x12\x39\n\nupdated_at\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tupdatedAt\x12\\\n\x16workflow_node_metadata\x18\x08 \x01(\x0b\x32$.flyteidl.admin.WorkflowNodeMetadataH\x01R\x14workflowNodeMetadata\x12P\n\x12task_node_metadata\x18\t \x01(\x0b\x32 .flyteidl.admin.TaskNodeMetadataH\x01R\x10taskNodeMetadata\x12\x19\n\x08\x64\x65\x63k_uri\x18\x0b \x01(\tR\x07\x64\x65\x63kUri\x12/\n\x14\x64ynamic_job_spec_uri\x18\x0c \x01(\tR\x11\x64ynamicJobSpecUriB\x0f\n\routput_resultB\x11\n\x0ftarget_metadata\"d\n\x14WorkflowNodeMetadata\x12L\n\x0b\x65xecutionId\x18\x01 \x01(\x0b\x32*.flyteidl.core.WorkflowExecutionIdentifierR\x0b\x65xecutionId\"\xc0\x01\n\x10TaskNodeMetadata\x12\x44\n\x0c\x63\x61\x63he_status\x18\x01 \x01(\x0e\x32!.flyteidl.core.CatalogCacheStatusR\x0b\x63\x61\x63heStatus\x12?\n\x0b\x63\x61talog_key\x18\x02 \x01(\x0b\x32\x1e.flyteidl.core.CatalogMetadataR\ncatalogKey\x12%\n\x0e\x63heckpoint_uri\x18\x04 
\x01(\tR\rcheckpointUri\"\xce\x01\n\x1b\x44ynamicWorkflowNodeMetadata\x12)\n\x02id\x18\x01 \x01(\x0b\x32\x19.flyteidl.core.IdentifierR\x02id\x12S\n\x11\x63ompiled_workflow\x18\x02 \x01(\x0b\x32&.flyteidl.core.CompiledWorkflowClosureR\x10\x63ompiledWorkflow\x12/\n\x14\x64ynamic_job_spec_uri\x18\x03 \x01(\tR\x11\x64ynamicJobSpecUri\"U\n\x1bNodeExecutionGetDataRequest\x12\x36\n\x02id\x18\x01 \x01(\x0b\x32&.flyteidl.core.NodeExecutionIdentifierR\x02id\"\x96\x03\n\x1cNodeExecutionGetDataResponse\x12\x33\n\x06inputs\x18\x01 \x01(\x0b\x32\x17.flyteidl.admin.UrlBlobB\x02\x18\x01R\x06inputs\x12\x35\n\x07outputs\x18\x02 \x01(\x0b\x32\x17.flyteidl.admin.UrlBlobB\x02\x18\x01R\x07outputs\x12:\n\x0b\x66ull_inputs\x18\x03 \x01(\x0b\x32\x19.flyteidl.core.LiteralMapR\nfullInputs\x12<\n\x0c\x66ull_outputs\x18\x04 \x01(\x0b\x32\x19.flyteidl.core.LiteralMapR\x0b\x66ullOutputs\x12V\n\x10\x64ynamic_workflow\x18\x10 \x01(\x0b\x32+.flyteidl.admin.DynamicWorkflowNodeMetadataR\x0f\x64ynamicWorkflow\x12\x38\n\nflyte_urls\x18\x11 \x01(\x0b\x32\x19.flyteidl.admin.FlyteURLsR\tflyteUrls\"W\n\x1dGetDynamicNodeWorkflowRequest\x12\x36\n\x02id\x18\x01 \x01(\x0b\x32&.flyteidl.core.NodeExecutionIdentifierR\x02id\"r\n\x1b\x44ynamicNodeWorkflowResponse\x12S\n\x11\x63ompiled_workflow\x18\x01 \x01(\x0b\x32&.flyteidl.core.CompiledWorkflowClosureR\x10\x63ompiledWorkflowB\xbe\x01\n\x12\x63om.flyteidl.adminB\x12NodeExecutionProtoP\x01Z;github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin\xa2\x02\x03\x46\x41X\xaa\x02\x0e\x46lyteidl.Admin\xca\x02\x0e\x46lyteidl\\Admin\xe2\x02\x1a\x46lyteidl\\Admin\\GPBMetadata\xea\x02\x0f\x46lyteidl::Adminb\x06proto3') +DESCRIPTOR = 
_descriptor_pool.Default().AddSerializedFile(b'\n#flyteidl/admin/node_execution.proto\x12\x0e\x66lyteidl.admin\x1a\x1b\x66lyteidl/admin/common.proto\x1a\x1d\x66lyteidl/core/execution.proto\x1a\x1b\x66lyteidl/core/catalog.proto\x1a\x1c\x66lyteidl/core/compiler.proto\x1a\x1e\x66lyteidl/core/identifier.proto\x1a\x1c\x66lyteidl/core/literals.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/duration.proto\"Q\n\x17NodeExecutionGetRequest\x12\x36\n\x02id\x18\x01 \x01(\x0b\x32&.flyteidl.core.NodeExecutionIdentifierR\x02id\"\x99\x02\n\x18NodeExecutionListRequest\x12^\n\x15workflow_execution_id\x18\x01 \x01(\x0b\x32*.flyteidl.core.WorkflowExecutionIdentifierR\x13workflowExecutionId\x12\x14\n\x05limit\x18\x02 \x01(\rR\x05limit\x12\x14\n\x05token\x18\x03 \x01(\tR\x05token\x12\x18\n\x07\x66ilters\x18\x04 \x01(\tR\x07\x66ilters\x12-\n\x07sort_by\x18\x05 \x01(\x0b\x32\x14.flyteidl.admin.SortR\x06sortBy\x12(\n\x10unique_parent_id\x18\x06 \x01(\tR\x0euniqueParentId\"\xea\x01\n\x1fNodeExecutionForTaskListRequest\x12R\n\x11task_execution_id\x18\x01 \x01(\x0b\x32&.flyteidl.core.TaskExecutionIdentifierR\x0ftaskExecutionId\x12\x14\n\x05limit\x18\x02 \x01(\rR\x05limit\x12\x14\n\x05token\x18\x03 \x01(\tR\x05token\x12\x18\n\x07\x66ilters\x18\x04 \x01(\tR\x07\x66ilters\x12-\n\x07sort_by\x18\x05 \x01(\x0b\x32\x14.flyteidl.admin.SortR\x06sortBy\"\xe7\x01\n\rNodeExecution\x12\x36\n\x02id\x18\x01 \x01(\x0b\x32&.flyteidl.core.NodeExecutionIdentifierR\x02id\x12\x1b\n\tinput_uri\x18\x02 \x01(\tR\x08inputUri\x12>\n\x07\x63losure\x18\x03 \x01(\x0b\x32$.flyteidl.admin.NodeExecutionClosureR\x07\x63losure\x12\x41\n\x08metadata\x18\x04 \x01(\x0b\x32%.flyteidl.admin.NodeExecutionMetaDataR\x08metadata\"\xd5\x01\n\x15NodeExecutionMetaData\x12\x1f\n\x0bretry_group\x18\x01 \x01(\tR\nretryGroup\x12$\n\x0eis_parent_node\x18\x02 \x01(\x08R\x0cisParentNode\x12 \n\x0cspec_node_id\x18\x03 \x01(\tR\nspecNodeId\x12\x1d\n\nis_dynamic\x18\x04 \x01(\x08R\tisDynamic\x12\x19\n\x08is_array\x18\x05 
\x01(\x08R\x07isArray\x12\x19\n\x08is_eager\x18\x06 \x01(\x08R\x07isEager\"q\n\x11NodeExecutionList\x12\x46\n\x0fnode_executions\x18\x01 \x03(\x0b\x32\x1d.flyteidl.admin.NodeExecutionR\x0enodeExecutions\x12\x14\n\x05token\x18\x02 \x01(\tR\x05token\"\xf6\x05\n\x14NodeExecutionClosure\x12#\n\noutput_uri\x18\x01 \x01(\tB\x02\x18\x01H\x00R\toutputUri\x12\x35\n\x05\x65rror\x18\x02 \x01(\x0b\x32\x1d.flyteidl.core.ExecutionErrorH\x00R\x05\x65rror\x12@\n\x0boutput_data\x18\n \x01(\x0b\x32\x19.flyteidl.core.LiteralMapB\x02\x18\x01H\x00R\noutputData\x12\x38\n\x05phase\x18\x03 \x01(\x0e\x32\".flyteidl.core.NodeExecution.PhaseR\x05phase\x12\x39\n\nstarted_at\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tstartedAt\x12\x35\n\x08\x64uration\x18\x05 \x01(\x0b\x32\x19.google.protobuf.DurationR\x08\x64uration\x12\x39\n\ncreated_at\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tcreatedAt\x12\x39\n\nupdated_at\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tupdatedAt\x12\\\n\x16workflow_node_metadata\x18\x08 \x01(\x0b\x32$.flyteidl.admin.WorkflowNodeMetadataH\x01R\x14workflowNodeMetadata\x12P\n\x12task_node_metadata\x18\t \x01(\x0b\x32 .flyteidl.admin.TaskNodeMetadataH\x01R\x10taskNodeMetadata\x12\x19\n\x08\x64\x65\x63k_uri\x18\x0b \x01(\tR\x07\x64\x65\x63kUri\x12/\n\x14\x64ynamic_job_spec_uri\x18\x0c \x01(\tR\x11\x64ynamicJobSpecUriB\x0f\n\routput_resultB\x11\n\x0ftarget_metadata\"d\n\x14WorkflowNodeMetadata\x12L\n\x0b\x65xecutionId\x18\x01 \x01(\x0b\x32*.flyteidl.core.WorkflowExecutionIdentifierR\x0b\x65xecutionId\"\xc0\x01\n\x10TaskNodeMetadata\x12\x44\n\x0c\x63\x61\x63he_status\x18\x01 \x01(\x0e\x32!.flyteidl.core.CatalogCacheStatusR\x0b\x63\x61\x63heStatus\x12?\n\x0b\x63\x61talog_key\x18\x02 \x01(\x0b\x32\x1e.flyteidl.core.CatalogMetadataR\ncatalogKey\x12%\n\x0e\x63heckpoint_uri\x18\x04 \x01(\tR\rcheckpointUri\"\xce\x01\n\x1b\x44ynamicWorkflowNodeMetadata\x12)\n\x02id\x18\x01 
\x01(\x0b\x32\x19.flyteidl.core.IdentifierR\x02id\x12S\n\x11\x63ompiled_workflow\x18\x02 \x01(\x0b\x32&.flyteidl.core.CompiledWorkflowClosureR\x10\x63ompiledWorkflow\x12/\n\x14\x64ynamic_job_spec_uri\x18\x03 \x01(\tR\x11\x64ynamicJobSpecUri\"U\n\x1bNodeExecutionGetDataRequest\x12\x36\n\x02id\x18\x01 \x01(\x0b\x32&.flyteidl.core.NodeExecutionIdentifierR\x02id\"\x96\x03\n\x1cNodeExecutionGetDataResponse\x12\x33\n\x06inputs\x18\x01 \x01(\x0b\x32\x17.flyteidl.admin.UrlBlobB\x02\x18\x01R\x06inputs\x12\x35\n\x07outputs\x18\x02 \x01(\x0b\x32\x17.flyteidl.admin.UrlBlobB\x02\x18\x01R\x07outputs\x12:\n\x0b\x66ull_inputs\x18\x03 \x01(\x0b\x32\x19.flyteidl.core.LiteralMapR\nfullInputs\x12<\n\x0c\x66ull_outputs\x18\x04 \x01(\x0b\x32\x19.flyteidl.core.LiteralMapR\x0b\x66ullOutputs\x12V\n\x10\x64ynamic_workflow\x18\x10 \x01(\x0b\x32+.flyteidl.admin.DynamicWorkflowNodeMetadataR\x0f\x64ynamicWorkflow\x12\x38\n\nflyte_urls\x18\x11 \x01(\x0b\x32\x19.flyteidl.admin.FlyteURLsR\tflyteUrls\"W\n\x1dGetDynamicNodeWorkflowRequest\x12\x36\n\x02id\x18\x01 \x01(\x0b\x32&.flyteidl.core.NodeExecutionIdentifierR\x02id\"r\n\x1b\x44ynamicNodeWorkflowResponse\x12S\n\x11\x63ompiled_workflow\x18\x01 \x01(\x0b\x32&.flyteidl.core.CompiledWorkflowClosureR\x10\x63ompiledWorkflowB\xbe\x01\n\x12\x63om.flyteidl.adminB\x12NodeExecutionProtoP\x01Z;github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin\xa2\x02\x03\x46\x41X\xaa\x02\x0e\x46lyteidl.Admin\xca\x02\x0e\x46lyteidl\\Admin\xe2\x02\x1a\x46lyteidl\\Admin\\GPBMetadata\xea\x02\x0f\x46lyteidl::Adminb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -47,23 +47,23 @@ _globals['_NODEEXECUTION']._serialized_start=906 _globals['_NODEEXECUTION']._serialized_end=1137 _globals['_NODEEXECUTIONMETADATA']._serialized_start=1140 - _globals['_NODEEXECUTIONMETADATA']._serialized_end=1326 - _globals['_NODEEXECUTIONLIST']._serialized_start=1328 - _globals['_NODEEXECUTIONLIST']._serialized_end=1441 - 
_globals['_NODEEXECUTIONCLOSURE']._serialized_start=1444 - _globals['_NODEEXECUTIONCLOSURE']._serialized_end=2202 - _globals['_WORKFLOWNODEMETADATA']._serialized_start=2204 - _globals['_WORKFLOWNODEMETADATA']._serialized_end=2304 - _globals['_TASKNODEMETADATA']._serialized_start=2307 - _globals['_TASKNODEMETADATA']._serialized_end=2499 - _globals['_DYNAMICWORKFLOWNODEMETADATA']._serialized_start=2502 - _globals['_DYNAMICWORKFLOWNODEMETADATA']._serialized_end=2708 - _globals['_NODEEXECUTIONGETDATAREQUEST']._serialized_start=2710 - _globals['_NODEEXECUTIONGETDATAREQUEST']._serialized_end=2795 - _globals['_NODEEXECUTIONGETDATARESPONSE']._serialized_start=2798 - _globals['_NODEEXECUTIONGETDATARESPONSE']._serialized_end=3204 - _globals['_GETDYNAMICNODEWORKFLOWREQUEST']._serialized_start=3206 - _globals['_GETDYNAMICNODEWORKFLOWREQUEST']._serialized_end=3293 - _globals['_DYNAMICNODEWORKFLOWRESPONSE']._serialized_start=3295 - _globals['_DYNAMICNODEWORKFLOWRESPONSE']._serialized_end=3409 + _globals['_NODEEXECUTIONMETADATA']._serialized_end=1353 + _globals['_NODEEXECUTIONLIST']._serialized_start=1355 + _globals['_NODEEXECUTIONLIST']._serialized_end=1468 + _globals['_NODEEXECUTIONCLOSURE']._serialized_start=1471 + _globals['_NODEEXECUTIONCLOSURE']._serialized_end=2229 + _globals['_WORKFLOWNODEMETADATA']._serialized_start=2231 + _globals['_WORKFLOWNODEMETADATA']._serialized_end=2331 + _globals['_TASKNODEMETADATA']._serialized_start=2334 + _globals['_TASKNODEMETADATA']._serialized_end=2526 + _globals['_DYNAMICWORKFLOWNODEMETADATA']._serialized_start=2529 + _globals['_DYNAMICWORKFLOWNODEMETADATA']._serialized_end=2735 + _globals['_NODEEXECUTIONGETDATAREQUEST']._serialized_start=2737 + _globals['_NODEEXECUTIONGETDATAREQUEST']._serialized_end=2822 + _globals['_NODEEXECUTIONGETDATARESPONSE']._serialized_start=2825 + _globals['_NODEEXECUTIONGETDATARESPONSE']._serialized_end=3231 + _globals['_GETDYNAMICNODEWORKFLOWREQUEST']._serialized_start=3233 + 
_globals['_GETDYNAMICNODEWORKFLOWREQUEST']._serialized_end=3320 + _globals['_DYNAMICNODEWORKFLOWRESPONSE']._serialized_start=3322 + _globals['_DYNAMICNODEWORKFLOWRESPONSE']._serialized_end=3436 # @@protoc_insertion_point(module_scope) diff --git a/flyteidl/gen/pb_python/flyteidl/admin/node_execution_pb2.pyi b/flyteidl/gen/pb_python/flyteidl/admin/node_execution_pb2.pyi index 9bf601847d..091e61fb4e 100644 --- a/flyteidl/gen/pb_python/flyteidl/admin/node_execution_pb2.pyi +++ b/flyteidl/gen/pb_python/flyteidl/admin/node_execution_pb2.pyi @@ -62,18 +62,20 @@ class NodeExecution(_message.Message): def __init__(self, id: _Optional[_Union[_identifier_pb2.NodeExecutionIdentifier, _Mapping]] = ..., input_uri: _Optional[str] = ..., closure: _Optional[_Union[NodeExecutionClosure, _Mapping]] = ..., metadata: _Optional[_Union[NodeExecutionMetaData, _Mapping]] = ...) -> None: ... class NodeExecutionMetaData(_message.Message): - __slots__ = ["retry_group", "is_parent_node", "spec_node_id", "is_dynamic", "is_array"] + __slots__ = ["retry_group", "is_parent_node", "spec_node_id", "is_dynamic", "is_array", "is_eager"] RETRY_GROUP_FIELD_NUMBER: _ClassVar[int] IS_PARENT_NODE_FIELD_NUMBER: _ClassVar[int] SPEC_NODE_ID_FIELD_NUMBER: _ClassVar[int] IS_DYNAMIC_FIELD_NUMBER: _ClassVar[int] IS_ARRAY_FIELD_NUMBER: _ClassVar[int] + IS_EAGER_FIELD_NUMBER: _ClassVar[int] retry_group: str is_parent_node: bool spec_node_id: str is_dynamic: bool is_array: bool - def __init__(self, retry_group: _Optional[str] = ..., is_parent_node: bool = ..., spec_node_id: _Optional[str] = ..., is_dynamic: bool = ..., is_array: bool = ...) -> None: ... + is_eager: bool + def __init__(self, retry_group: _Optional[str] = ..., is_parent_node: bool = ..., spec_node_id: _Optional[str] = ..., is_dynamic: bool = ..., is_array: bool = ..., is_eager: bool = ...) -> None: ... 
class NodeExecutionList(_message.Message): __slots__ = ["node_executions", "token"] diff --git a/flyteidl/gen/pb_python/flyteidl/core/literals_pb2.py b/flyteidl/gen/pb_python/flyteidl/core/literals_pb2.py index 9b0a9f9ed8..b377ad108c 100644 --- a/flyteidl/gen/pb_python/flyteidl/core/literals_pb2.py +++ b/flyteidl/gen/pb_python/flyteidl/core/literals_pb2.py @@ -17,7 +17,7 @@ from flyteidl.core import types_pb2 as flyteidl_dot_core_dot_types__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1c\x66lyteidl/core/literals.proto\x12\rflyteidl.core\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x19\x66lyteidl/core/types.proto\"\x87\x02\n\tPrimitive\x12\x1a\n\x07integer\x18\x01 \x01(\x03H\x00R\x07integer\x12!\n\x0b\x66loat_value\x18\x02 \x01(\x01H\x00R\nfloatValue\x12#\n\x0cstring_value\x18\x03 \x01(\tH\x00R\x0bstringValue\x12\x1a\n\x07\x62oolean\x18\x04 \x01(\x08H\x00R\x07\x62oolean\x12\x38\n\x08\x64\x61tetime\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00R\x08\x64\x61tetime\x12\x37\n\x08\x64uration\x18\x06 \x01(\x0b\x32\x19.google.protobuf.DurationH\x00R\x08\x64urationB\x07\n\x05value\"\x06\n\x04Void\"Q\n\x04\x42lob\x12\x37\n\x08metadata\x18\x01 \x01(\x0b\x32\x1b.flyteidl.core.BlobMetadataR\x08metadata\x12\x10\n\x03uri\x18\x03 \x01(\tR\x03uri\";\n\x0c\x42lobMetadata\x12+\n\x04type\x18\x01 \x01(\x0b\x32\x17.flyteidl.core.BlobTypeR\x04type\"0\n\x06\x42inary\x12\x14\n\x05value\x18\x01 \x01(\x0cR\x05value\x12\x10\n\x03tag\x18\x02 \x01(\tR\x03tag\"I\n\x06Schema\x12\x10\n\x03uri\x18\x01 \x01(\tR\x03uri\x12-\n\x04type\x18\x03 \x01(\x0b\x32\x19.flyteidl.core.SchemaTypeR\x04type\"e\n\x05Union\x12,\n\x05value\x18\x01 \x01(\x0b\x32\x16.flyteidl.core.LiteralR\x05value\x12.\n\x04type\x18\x02 \x01(\x0b\x32\x1a.flyteidl.core.LiteralTypeR\x04type\"y\n\x19StructuredDatasetMetadata\x12\\\n\x17structured_dataset_type\x18\x01 
\x01(\x0b\x32$.flyteidl.core.StructuredDatasetTypeR\x15structuredDatasetType\"k\n\x11StructuredDataset\x12\x10\n\x03uri\x18\x01 \x01(\tR\x03uri\x12\x44\n\x08metadata\x18\x02 \x01(\x0b\x32(.flyteidl.core.StructuredDatasetMetadataR\x08metadata\"\xf0\x03\n\x06Scalar\x12\x38\n\tprimitive\x18\x01 \x01(\x0b\x32\x18.flyteidl.core.PrimitiveH\x00R\tprimitive\x12)\n\x04\x62lob\x18\x02 \x01(\x0b\x32\x13.flyteidl.core.BlobH\x00R\x04\x62lob\x12/\n\x06\x62inary\x18\x03 \x01(\x0b\x32\x15.flyteidl.core.BinaryH\x00R\x06\x62inary\x12/\n\x06schema\x18\x04 \x01(\x0b\x32\x15.flyteidl.core.SchemaH\x00R\x06schema\x12\x32\n\tnone_type\x18\x05 \x01(\x0b\x32\x13.flyteidl.core.VoidH\x00R\x08noneType\x12,\n\x05\x65rror\x18\x06 \x01(\x0b\x32\x14.flyteidl.core.ErrorH\x00R\x05\x65rror\x12\x33\n\x07generic\x18\x07 \x01(\x0b\x32\x17.google.protobuf.StructH\x00R\x07generic\x12Q\n\x12structured_dataset\x18\x08 \x01(\x0b\x32 .flyteidl.core.StructuredDatasetH\x00R\x11structuredDataset\x12,\n\x05union\x18\t \x01(\x0b\x32\x14.flyteidl.core.UnionH\x00R\x05unionB\x07\n\x05value\"\xaf\x03\n\x07Literal\x12/\n\x06scalar\x18\x01 \x01(\x0b\x32\x15.flyteidl.core.ScalarH\x00R\x06scalar\x12\x42\n\ncollection\x18\x02 \x01(\x0b\x32 .flyteidl.core.LiteralCollectionH\x00R\ncollection\x12-\n\x03map\x18\x03 \x01(\x0b\x32\x19.flyteidl.core.LiteralMapH\x00R\x03map\x12X\n\x12offloaded_metadata\x18\x08 \x01(\x0b\x32\'.flyteidl.core.LiteralOffloadedMetadataH\x00R\x11offloadedMetadata\x12\x12\n\x04hash\x18\x04 \x01(\tR\x04hash\x12@\n\x08metadata\x18\x05 \x03(\x0b\x32$.flyteidl.core.Literal.MetadataEntryR\x08metadata\x1a;\n\rMetadataEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\x42\x07\n\x05valueJ\x04\x08\x06\x10\x07J\x04\x08\x07\x10\x08\"\x8c\x01\n\x18LiteralOffloadedMetadata\x12\x10\n\x03uri\x18\x01 \x01(\tR\x03uri\x12\x1d\n\nsize_bytes\x18\x02 \x01(\x04R\tsizeBytes\x12?\n\rinferred_type\x18\x03 
\x01(\x0b\x32\x1a.flyteidl.core.LiteralTypeR\x0cinferredType\"G\n\x11LiteralCollection\x12\x32\n\x08literals\x18\x01 \x03(\x0b\x32\x16.flyteidl.core.LiteralR\x08literals\"\xa6\x01\n\nLiteralMap\x12\x43\n\x08literals\x18\x01 \x03(\x0b\x32\'.flyteidl.core.LiteralMap.LiteralsEntryR\x08literals\x1aS\n\rLiteralsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12,\n\x05value\x18\x02 \x01(\x0b\x32\x16.flyteidl.core.LiteralR\x05value:\x02\x38\x01\"O\n\x15\x42indingDataCollection\x12\x36\n\x08\x62indings\x18\x01 \x03(\x0b\x32\x1a.flyteidl.core.BindingDataR\x08\x62indings\"\xb2\x01\n\x0e\x42indingDataMap\x12G\n\x08\x62indings\x18\x01 \x03(\x0b\x32+.flyteidl.core.BindingDataMap.BindingsEntryR\x08\x62indings\x1aW\n\rBindingsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x30\n\x05value\x18\x02 \x01(\x0b\x32\x1a.flyteidl.core.BindingDataR\x05value:\x02\x38\x01\"G\n\tUnionInfo\x12:\n\ntargetType\x18\x01 \x01(\x0b\x32\x1a.flyteidl.core.LiteralTypeR\ntargetType\"\xae\x02\n\x0b\x42indingData\x12/\n\x06scalar\x18\x01 \x01(\x0b\x32\x15.flyteidl.core.ScalarH\x00R\x06scalar\x12\x46\n\ncollection\x18\x02 \x01(\x0b\x32$.flyteidl.core.BindingDataCollectionH\x00R\ncollection\x12:\n\x07promise\x18\x03 \x01(\x0b\x32\x1e.flyteidl.core.OutputReferenceH\x00R\x07promise\x12\x31\n\x03map\x18\x04 \x01(\x0b\x32\x1d.flyteidl.core.BindingDataMapH\x00R\x03map\x12.\n\x05union\x18\x05 \x01(\x0b\x32\x18.flyteidl.core.UnionInfoR\x05unionB\x07\n\x05value\"Q\n\x07\x42inding\x12\x10\n\x03var\x18\x01 \x01(\tR\x03var\x12\x34\n\x07\x62inding\x18\x02 \x01(\x0b\x32\x1a.flyteidl.core.BindingDataR\x07\x62inding\"6\n\x0cKeyValuePair\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value\")\n\rRetryStrategy\x12\x18\n\x07retries\x18\x05 
\x01(\rR\x07retriesB\xb3\x01\n\x11\x63om.flyteidl.coreB\rLiteralsProtoP\x01Z:github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core\xa2\x02\x03\x46\x43X\xaa\x02\rFlyteidl.Core\xca\x02\rFlyteidl\\Core\xe2\x02\x19\x46lyteidl\\Core\\GPBMetadata\xea\x02\x0e\x46lyteidl::Coreb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1c\x66lyteidl/core/literals.proto\x12\rflyteidl.core\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x19\x66lyteidl/core/types.proto\"\x87\x02\n\tPrimitive\x12\x1a\n\x07integer\x18\x01 \x01(\x03H\x00R\x07integer\x12!\n\x0b\x66loat_value\x18\x02 \x01(\x01H\x00R\nfloatValue\x12#\n\x0cstring_value\x18\x03 \x01(\tH\x00R\x0bstringValue\x12\x1a\n\x07\x62oolean\x18\x04 \x01(\x08H\x00R\x07\x62oolean\x12\x38\n\x08\x64\x61tetime\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00R\x08\x64\x61tetime\x12\x37\n\x08\x64uration\x18\x06 \x01(\x0b\x32\x19.google.protobuf.DurationH\x00R\x08\x64urationB\x07\n\x05value\"\x06\n\x04Void\"Q\n\x04\x42lob\x12\x37\n\x08metadata\x18\x01 \x01(\x0b\x32\x1b.flyteidl.core.BlobMetadataR\x08metadata\x12\x10\n\x03uri\x18\x03 \x01(\tR\x03uri\";\n\x0c\x42lobMetadata\x12+\n\x04type\x18\x01 \x01(\x0b\x32\x17.flyteidl.core.BlobTypeR\x04type\"0\n\x06\x42inary\x12\x14\n\x05value\x18\x01 \x01(\x0cR\x05value\x12\x10\n\x03tag\x18\x02 \x01(\tR\x03tag\"I\n\x06Schema\x12\x10\n\x03uri\x18\x01 \x01(\tR\x03uri\x12-\n\x04type\x18\x03 \x01(\x0b\x32\x19.flyteidl.core.SchemaTypeR\x04type\"e\n\x05Union\x12,\n\x05value\x18\x01 \x01(\x0b\x32\x16.flyteidl.core.LiteralR\x05value\x12.\n\x04type\x18\x02 \x01(\x0b\x32\x1a.flyteidl.core.LiteralTypeR\x04type\"y\n\x19StructuredDatasetMetadata\x12\\\n\x17structured_dataset_type\x18\x01 \x01(\x0b\x32$.flyteidl.core.StructuredDatasetTypeR\x15structuredDatasetType\"k\n\x11StructuredDataset\x12\x10\n\x03uri\x18\x01 \x01(\tR\x03uri\x12\x44\n\x08metadata\x18\x02 
\x01(\x0b\x32(.flyteidl.core.StructuredDatasetMetadataR\x08metadata\"\xf0\x03\n\x06Scalar\x12\x38\n\tprimitive\x18\x01 \x01(\x0b\x32\x18.flyteidl.core.PrimitiveH\x00R\tprimitive\x12)\n\x04\x62lob\x18\x02 \x01(\x0b\x32\x13.flyteidl.core.BlobH\x00R\x04\x62lob\x12/\n\x06\x62inary\x18\x03 \x01(\x0b\x32\x15.flyteidl.core.BinaryH\x00R\x06\x62inary\x12/\n\x06schema\x18\x04 \x01(\x0b\x32\x15.flyteidl.core.SchemaH\x00R\x06schema\x12\x32\n\tnone_type\x18\x05 \x01(\x0b\x32\x13.flyteidl.core.VoidH\x00R\x08noneType\x12,\n\x05\x65rror\x18\x06 \x01(\x0b\x32\x14.flyteidl.core.ErrorH\x00R\x05\x65rror\x12\x33\n\x07generic\x18\x07 \x01(\x0b\x32\x17.google.protobuf.StructH\x00R\x07generic\x12Q\n\x12structured_dataset\x18\x08 \x01(\x0b\x32 .flyteidl.core.StructuredDatasetH\x00R\x11structuredDataset\x12,\n\x05union\x18\t \x01(\x0b\x32\x14.flyteidl.core.UnionH\x00R\x05unionB\x07\n\x05value\"\xaf\x03\n\x07Literal\x12/\n\x06scalar\x18\x01 \x01(\x0b\x32\x15.flyteidl.core.ScalarH\x00R\x06scalar\x12\x42\n\ncollection\x18\x02 \x01(\x0b\x32 .flyteidl.core.LiteralCollectionH\x00R\ncollection\x12-\n\x03map\x18\x03 \x01(\x0b\x32\x19.flyteidl.core.LiteralMapH\x00R\x03map\x12X\n\x12offloaded_metadata\x18\x08 \x01(\x0b\x32\'.flyteidl.core.LiteralOffloadedMetadataH\x00R\x11offloadedMetadata\x12\x12\n\x04hash\x18\x04 \x01(\tR\x04hash\x12@\n\x08metadata\x18\x05 \x03(\x0b\x32$.flyteidl.core.Literal.MetadataEntryR\x08metadata\x1a;\n\rMetadataEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\x42\x07\n\x05valueJ\x04\x08\x06\x10\x07J\x04\x08\x07\x10\x08\"\x8c\x01\n\x18LiteralOffloadedMetadata\x12\x10\n\x03uri\x18\x01 \x01(\tR\x03uri\x12\x1d\n\nsize_bytes\x18\x02 \x01(\x04R\tsizeBytes\x12?\n\rinferred_type\x18\x03 \x01(\x0b\x32\x1a.flyteidl.core.LiteralTypeR\x0cinferredType\"G\n\x11LiteralCollection\x12\x32\n\x08literals\x18\x01 \x03(\x0b\x32\x16.flyteidl.core.LiteralR\x08literals\"\xa6\x01\n\nLiteralMap\x12\x43\n\x08literals\x18\x01 
\x03(\x0b\x32\'.flyteidl.core.LiteralMap.LiteralsEntryR\x08literals\x1aS\n\rLiteralsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12,\n\x05value\x18\x02 \x01(\x0b\x32\x16.flyteidl.core.LiteralR\x05value:\x02\x38\x01\"O\n\x15\x42indingDataCollection\x12\x36\n\x08\x62indings\x18\x01 \x03(\x0b\x32\x1a.flyteidl.core.BindingDataR\x08\x62indings\"\xb2\x01\n\x0e\x42indingDataMap\x12G\n\x08\x62indings\x18\x01 \x03(\x0b\x32+.flyteidl.core.BindingDataMap.BindingsEntryR\x08\x62indings\x1aW\n\rBindingsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x30\n\x05value\x18\x02 \x01(\x0b\x32\x1a.flyteidl.core.BindingDataR\x05value:\x02\x38\x01\"G\n\tUnionInfo\x12:\n\ntargetType\x18\x01 \x01(\x0b\x32\x1a.flyteidl.core.LiteralTypeR\ntargetType\"\x88\x03\n\x0b\x42indingData\x12/\n\x06scalar\x18\x01 \x01(\x0b\x32\x15.flyteidl.core.ScalarH\x00R\x06scalar\x12\x46\n\ncollection\x18\x02 \x01(\x0b\x32$.flyteidl.core.BindingDataCollectionH\x00R\ncollection\x12:\n\x07promise\x18\x03 \x01(\x0b\x32\x1e.flyteidl.core.OutputReferenceH\x00R\x07promise\x12\x31\n\x03map\x18\x04 \x01(\x0b\x32\x1d.flyteidl.core.BindingDataMapH\x00R\x03map\x12X\n\x12offloaded_metadata\x18\x06 \x01(\x0b\x32\'.flyteidl.core.LiteralOffloadedMetadataH\x00R\x11offloadedMetadata\x12.\n\x05union\x18\x05 \x01(\x0b\x32\x18.flyteidl.core.UnionInfoR\x05unionB\x07\n\x05value\"Q\n\x07\x42inding\x12\x10\n\x03var\x18\x01 \x01(\tR\x03var\x12\x34\n\x07\x62inding\x18\x02 \x01(\x0b\x32\x1a.flyteidl.core.BindingDataR\x07\x62inding\"6\n\x0cKeyValuePair\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value\")\n\rRetryStrategy\x12\x18\n\x07retries\x18\x05 \x01(\rR\x07retriesB\xb3\x01\n\x11\x63om.flyteidl.coreB\rLiteralsProtoP\x01Z:github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core\xa2\x02\x03\x46\x43X\xaa\x02\rFlyteidl.Core\xca\x02\rFlyteidl\\Core\xe2\x02\x19\x46lyteidl\\Core\\GPBMetadata\xea\x02\x0e\x46lyteidl::Coreb\x06proto3') _globals = globals() 
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -73,11 +73,11 @@ _globals['_UNIONINFO']._serialized_start=2627 _globals['_UNIONINFO']._serialized_end=2698 _globals['_BINDINGDATA']._serialized_start=2701 - _globals['_BINDINGDATA']._serialized_end=3003 - _globals['_BINDING']._serialized_start=3005 - _globals['_BINDING']._serialized_end=3086 - _globals['_KEYVALUEPAIR']._serialized_start=3088 - _globals['_KEYVALUEPAIR']._serialized_end=3142 - _globals['_RETRYSTRATEGY']._serialized_start=3144 - _globals['_RETRYSTRATEGY']._serialized_end=3185 + _globals['_BINDINGDATA']._serialized_end=3093 + _globals['_BINDING']._serialized_start=3095 + _globals['_BINDING']._serialized_end=3176 + _globals['_KEYVALUEPAIR']._serialized_start=3178 + _globals['_KEYVALUEPAIR']._serialized_end=3232 + _globals['_RETRYSTRATEGY']._serialized_start=3234 + _globals['_RETRYSTRATEGY']._serialized_end=3275 # @@protoc_insertion_point(module_scope) diff --git a/flyteidl/gen/pb_python/flyteidl/core/literals_pb2.pyi b/flyteidl/gen/pb_python/flyteidl/core/literals_pb2.pyi index e2337f8efa..9f15eea97b 100644 --- a/flyteidl/gen/pb_python/flyteidl/core/literals_pb2.pyi +++ b/flyteidl/gen/pb_python/flyteidl/core/literals_pb2.pyi @@ -181,18 +181,20 @@ class UnionInfo(_message.Message): def __init__(self, targetType: _Optional[_Union[_types_pb2.LiteralType, _Mapping]] = ...) -> None: ... 
class BindingData(_message.Message): - __slots__ = ["scalar", "collection", "promise", "map", "union"] + __slots__ = ["scalar", "collection", "promise", "map", "offloaded_metadata", "union"] SCALAR_FIELD_NUMBER: _ClassVar[int] COLLECTION_FIELD_NUMBER: _ClassVar[int] PROMISE_FIELD_NUMBER: _ClassVar[int] MAP_FIELD_NUMBER: _ClassVar[int] + OFFLOADED_METADATA_FIELD_NUMBER: _ClassVar[int] UNION_FIELD_NUMBER: _ClassVar[int] scalar: Scalar collection: BindingDataCollection promise: _types_pb2.OutputReference map: BindingDataMap + offloaded_metadata: LiteralOffloadedMetadata union: UnionInfo - def __init__(self, scalar: _Optional[_Union[Scalar, _Mapping]] = ..., collection: _Optional[_Union[BindingDataCollection, _Mapping]] = ..., promise: _Optional[_Union[_types_pb2.OutputReference, _Mapping]] = ..., map: _Optional[_Union[BindingDataMap, _Mapping]] = ..., union: _Optional[_Union[UnionInfo, _Mapping]] = ...) -> None: ... + def __init__(self, scalar: _Optional[_Union[Scalar, _Mapping]] = ..., collection: _Optional[_Union[BindingDataCollection, _Mapping]] = ..., promise: _Optional[_Union[_types_pb2.OutputReference, _Mapping]] = ..., map: _Optional[_Union[BindingDataMap, _Mapping]] = ..., offloaded_metadata: _Optional[_Union[LiteralOffloadedMetadata, _Mapping]] = ..., union: _Optional[_Union[UnionInfo, _Mapping]] = ...) -> None: ... 
class Binding(_message.Message): __slots__ = ["var", "binding"] diff --git a/flyteidl/gen/pb_python/flyteidl/core/tasks_pb2.py b/flyteidl/gen/pb_python/flyteidl/core/tasks_pb2.py index 6add4552b9..43beeeeca2 100644 --- a/flyteidl/gen/pb_python/flyteidl/core/tasks_pb2.py +++ b/flyteidl/gen/pb_python/flyteidl/core/tasks_pb2.py @@ -19,7 +19,7 @@ from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x19\x66lyteidl/core/tasks.proto\x12\rflyteidl.core\x1a\x1e\x66lyteidl/core/identifier.proto\x1a\x1d\x66lyteidl/core/interface.proto\x1a\x1c\x66lyteidl/core/literals.proto\x1a\x1c\x66lyteidl/core/security.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1cgoogle/protobuf/struct.proto\"\xd0\x02\n\tResources\x12\x42\n\x08requests\x18\x01 \x03(\x0b\x32&.flyteidl.core.Resources.ResourceEntryR\x08requests\x12>\n\x06limits\x18\x02 \x03(\x0b\x32&.flyteidl.core.Resources.ResourceEntryR\x06limits\x1a`\n\rResourceEntry\x12\x39\n\x04name\x18\x01 \x01(\x0e\x32%.flyteidl.core.Resources.ResourceNameR\x04name\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value\"]\n\x0cResourceName\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x07\n\x03\x43PU\x10\x01\x12\x07\n\x03GPU\x10\x02\x12\n\n\x06MEMORY\x10\x03\x12\x0b\n\x07STORAGE\x10\x04\x12\x15\n\x11\x45PHEMERAL_STORAGE\x10\x05\"\x91\x01\n\x0eGPUAccelerator\x12\x16\n\x06\x64\x65vice\x18\x01 \x01(\tR\x06\x64\x65vice\x12&\n\runpartitioned\x18\x02 \x01(\x08H\x00R\runpartitioned\x12\'\n\x0epartition_size\x18\x03 \x01(\tH\x00R\rpartitionSizeB\x16\n\x14partition_size_value\"[\n\x11\x45xtendedResources\x12\x46\n\x0fgpu_accelerator\x18\x01 \x01(\x0b\x32\x1d.flyteidl.core.GPUAcceleratorR\x0egpuAccelerator\"\xac\x01\n\x0fRuntimeMetadata\x12>\n\x04type\x18\x01 \x01(\x0e\x32*.flyteidl.core.RuntimeMetadata.RuntimeTypeR\x04type\x12\x18\n\x07version\x18\x02 \x01(\tR\x07version\x12\x16\n\x06\x66lavor\x18\x03 
\x01(\tR\x06\x66lavor\"\'\n\x0bRuntimeType\x12\t\n\x05OTHER\x10\x00\x12\r\n\tFLYTE_SDK\x10\x01\"\xac\x05\n\x0cTaskMetadata\x12\"\n\x0c\x64iscoverable\x18\x01 \x01(\x08R\x0c\x64iscoverable\x12\x38\n\x07runtime\x18\x02 \x01(\x0b\x32\x1e.flyteidl.core.RuntimeMetadataR\x07runtime\x12\x33\n\x07timeout\x18\x04 \x01(\x0b\x32\x19.google.protobuf.DurationR\x07timeout\x12\x36\n\x07retries\x18\x05 \x01(\x0b\x32\x1c.flyteidl.core.RetryStrategyR\x07retries\x12+\n\x11\x64iscovery_version\x18\x06 \x01(\tR\x10\x64iscoveryVersion\x12\x38\n\x18\x64\x65precated_error_message\x18\x07 \x01(\tR\x16\x64\x65precatedErrorMessage\x12&\n\rinterruptible\x18\x08 \x01(\x08H\x00R\rinterruptible\x12-\n\x12\x63\x61\x63he_serializable\x18\t \x01(\x08R\x11\x63\x61\x63heSerializable\x12%\n\x0egenerates_deck\x18\n \x01(\x08R\rgeneratesDeck\x12\x39\n\x04tags\x18\x0b \x03(\x0b\x32%.flyteidl.core.TaskMetadata.TagsEntryR\x04tags\x12*\n\x11pod_template_name\x18\x0c \x01(\tR\x0fpodTemplateName\x12\x35\n\x17\x63\x61\x63he_ignore_input_vars\x18\r \x03(\tR\x14\x63\x61\x63heIgnoreInputVars\x1a\x37\n\tTagsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\x42\x15\n\x13interruptible_value\"\xd6\x05\n\x0cTaskTemplate\x12)\n\x02id\x18\x01 \x01(\x0b\x32\x19.flyteidl.core.IdentifierR\x02id\x12\x12\n\x04type\x18\x02 \x01(\tR\x04type\x12\x37\n\x08metadata\x18\x03 \x01(\x0b\x32\x1b.flyteidl.core.TaskMetadataR\x08metadata\x12;\n\tinterface\x18\x04 \x01(\x0b\x32\x1d.flyteidl.core.TypedInterfaceR\tinterface\x12/\n\x06\x63ustom\x18\x05 \x01(\x0b\x32\x17.google.protobuf.StructR\x06\x63ustom\x12\x38\n\tcontainer\x18\x06 \x01(\x0b\x32\x18.flyteidl.core.ContainerH\x00R\tcontainer\x12\x30\n\x07k8s_pod\x18\x11 \x01(\x0b\x32\x15.flyteidl.core.K8sPodH\x00R\x06k8sPod\x12&\n\x03sql\x18\x12 \x01(\x0b\x32\x12.flyteidl.core.SqlH\x00R\x03sql\x12*\n\x11task_type_version\x18\x07 \x01(\x05R\x0ftaskTypeVersion\x12I\n\x10security_context\x18\x08 
\x01(\x0b\x32\x1e.flyteidl.core.SecurityContextR\x0fsecurityContext\x12O\n\x12\x65xtended_resources\x18\t \x01(\x0b\x32 .flyteidl.core.ExtendedResourcesR\x11\x65xtendedResources\x12?\n\x06\x63onfig\x18\x10 \x03(\x0b\x32\'.flyteidl.core.TaskTemplate.ConfigEntryR\x06\x63onfig\x1a\x39\n\x0b\x43onfigEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\x42\x08\n\x06target\"6\n\rContainerPort\x12%\n\x0e\x63ontainer_port\x18\x01 \x01(\rR\rcontainerPort\"\xfc\x03\n\tContainer\x12\x14\n\x05image\x18\x01 \x01(\tR\x05image\x12\x18\n\x07\x63ommand\x18\x02 \x03(\tR\x07\x63ommand\x12\x12\n\x04\x61rgs\x18\x03 \x03(\tR\x04\x61rgs\x12\x36\n\tresources\x18\x04 \x01(\x0b\x32\x18.flyteidl.core.ResourcesR\tresources\x12-\n\x03\x65nv\x18\x05 \x03(\x0b\x32\x1b.flyteidl.core.KeyValuePairR\x03\x65nv\x12\x37\n\x06\x63onfig\x18\x06 \x03(\x0b\x32\x1b.flyteidl.core.KeyValuePairB\x02\x18\x01R\x06\x63onfig\x12\x32\n\x05ports\x18\x07 \x03(\x0b\x32\x1c.flyteidl.core.ContainerPortR\x05ports\x12\x41\n\x0b\x64\x61ta_config\x18\t \x01(\x0b\x32 .flyteidl.core.DataLoadingConfigR\ndataConfig\x12I\n\x0c\x61rchitecture\x18\n \x01(\x0e\x32%.flyteidl.core.Container.ArchitectureR\x0c\x61rchitecture\"I\n\x0c\x41rchitecture\x12\x0b\n\x07UNKNOWN\x10\x00\x12\t\n\x05\x41MD64\x10\x01\x12\t\n\x05\x41RM64\x10\x02\x12\n\n\x06\x41RM_V6\x10\x03\x12\n\n\x06\x41RM_V7\x10\x04\"\xb5\x02\n\nIOStrategy\x12K\n\rdownload_mode\x18\x01 \x01(\x0e\x32&.flyteidl.core.IOStrategy.DownloadModeR\x0c\x64ownloadMode\x12\x45\n\x0bupload_mode\x18\x02 \x01(\x0e\x32$.flyteidl.core.IOStrategy.UploadModeR\nuploadMode\"L\n\x0c\x44ownloadMode\x12\x12\n\x0e\x44OWNLOAD_EAGER\x10\x00\x12\x13\n\x0f\x44OWNLOAD_STREAM\x10\x01\x12\x13\n\x0f\x44O_NOT_DOWNLOAD\x10\x02\"E\n\nUploadMode\x12\x12\n\x0eUPLOAD_ON_EXIT\x10\x00\x12\x10\n\x0cUPLOAD_EAGER\x10\x01\x12\x11\n\rDO_NOT_UPLOAD\x10\x02\"\xa7\x02\n\x11\x44\x61taLoadingConfig\x12\x18\n\x07\x65nabled\x18\x01 
\x01(\x08R\x07\x65nabled\x12\x1d\n\ninput_path\x18\x02 \x01(\tR\tinputPath\x12\x1f\n\x0boutput_path\x18\x03 \x01(\tR\noutputPath\x12I\n\x06\x66ormat\x18\x04 \x01(\x0e\x32\x31.flyteidl.core.DataLoadingConfig.LiteralMapFormatR\x06\x66ormat\x12:\n\x0bio_strategy\x18\x05 \x01(\x0b\x32\x19.flyteidl.core.IOStrategyR\nioStrategy\"1\n\x10LiteralMapFormat\x12\x08\n\x04JSON\x10\x00\x12\x08\n\x04YAML\x10\x01\x12\t\n\x05PROTO\x10\x02\"\xbd\x01\n\x06K8sPod\x12<\n\x08metadata\x18\x01 \x01(\x0b\x32 .flyteidl.core.K8sObjectMetadataR\x08metadata\x12\x32\n\x08pod_spec\x18\x02 \x01(\x0b\x32\x17.google.protobuf.StructR\x07podSpec\x12\x41\n\x0b\x64\x61ta_config\x18\x03 \x01(\x0b\x32 .flyteidl.core.DataLoadingConfigR\ndataConfig\"\xa9\x02\n\x11K8sObjectMetadata\x12\x44\n\x06labels\x18\x01 \x03(\x0b\x32,.flyteidl.core.K8sObjectMetadata.LabelsEntryR\x06labels\x12S\n\x0b\x61nnotations\x18\x02 \x03(\x0b\x32\x31.flyteidl.core.K8sObjectMetadata.AnnotationsEntryR\x0b\x61nnotations\x1a\x39\n\x0bLabelsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\x1a>\n\x10\x41nnotationsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\"\x92\x01\n\x03Sql\x12\x1c\n\tstatement\x18\x01 \x01(\tR\tstatement\x12\x34\n\x07\x64ialect\x18\x02 \x01(\x0e\x32\x1a.flyteidl.core.Sql.DialectR\x07\x64ialect\"7\n\x07\x44ialect\x12\r\n\tUNDEFINED\x10\x00\x12\x08\n\x04\x41NSI\x10\x01\x12\x08\n\x04HIVE\x10\x02\x12\t\n\x05OTHER\x10\x03\x42\xb0\x01\n\x11\x63om.flyteidl.coreB\nTasksProtoP\x01Z:github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core\xa2\x02\x03\x46\x43X\xaa\x02\rFlyteidl.Core\xca\x02\rFlyteidl\\Core\xe2\x02\x19\x46lyteidl\\Core\\GPBMetadata\xea\x02\x0e\x46lyteidl::Coreb\x06proto3') +DESCRIPTOR = 
_descriptor_pool.Default().AddSerializedFile(b'\n\x19\x66lyteidl/core/tasks.proto\x12\rflyteidl.core\x1a\x1e\x66lyteidl/core/identifier.proto\x1a\x1d\x66lyteidl/core/interface.proto\x1a\x1c\x66lyteidl/core/literals.proto\x1a\x1c\x66lyteidl/core/security.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1cgoogle/protobuf/struct.proto\"\xd0\x02\n\tResources\x12\x42\n\x08requests\x18\x01 \x03(\x0b\x32&.flyteidl.core.Resources.ResourceEntryR\x08requests\x12>\n\x06limits\x18\x02 \x03(\x0b\x32&.flyteidl.core.Resources.ResourceEntryR\x06limits\x1a`\n\rResourceEntry\x12\x39\n\x04name\x18\x01 \x01(\x0e\x32%.flyteidl.core.Resources.ResourceNameR\x04name\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value\"]\n\x0cResourceName\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x07\n\x03\x43PU\x10\x01\x12\x07\n\x03GPU\x10\x02\x12\n\n\x06MEMORY\x10\x03\x12\x0b\n\x07STORAGE\x10\x04\x12\x15\n\x11\x45PHEMERAL_STORAGE\x10\x05\"\x91\x01\n\x0eGPUAccelerator\x12\x16\n\x06\x64\x65vice\x18\x01 \x01(\tR\x06\x64\x65vice\x12&\n\runpartitioned\x18\x02 \x01(\x08H\x00R\runpartitioned\x12\'\n\x0epartition_size\x18\x03 \x01(\tH\x00R\rpartitionSizeB\x16\n\x14partition_size_value\"[\n\x11\x45xtendedResources\x12\x46\n\x0fgpu_accelerator\x18\x01 \x01(\x0b\x32\x1d.flyteidl.core.GPUAcceleratorR\x0egpuAccelerator\"\xac\x01\n\x0fRuntimeMetadata\x12>\n\x04type\x18\x01 \x01(\x0e\x32*.flyteidl.core.RuntimeMetadata.RuntimeTypeR\x04type\x12\x18\n\x07version\x18\x02 \x01(\tR\x07version\x12\x16\n\x06\x66lavor\x18\x03 \x01(\tR\x06\x66lavor\"\'\n\x0bRuntimeType\x12\t\n\x05OTHER\x10\x00\x12\r\n\tFLYTE_SDK\x10\x01\"\xc7\x05\n\x0cTaskMetadata\x12\"\n\x0c\x64iscoverable\x18\x01 \x01(\x08R\x0c\x64iscoverable\x12\x38\n\x07runtime\x18\x02 \x01(\x0b\x32\x1e.flyteidl.core.RuntimeMetadataR\x07runtime\x12\x33\n\x07timeout\x18\x04 \x01(\x0b\x32\x19.google.protobuf.DurationR\x07timeout\x12\x36\n\x07retries\x18\x05 \x01(\x0b\x32\x1c.flyteidl.core.RetryStrategyR\x07retries\x12+\n\x11\x64iscovery_version\x18\x06 
\x01(\tR\x10\x64iscoveryVersion\x12\x38\n\x18\x64\x65precated_error_message\x18\x07 \x01(\tR\x16\x64\x65precatedErrorMessage\x12&\n\rinterruptible\x18\x08 \x01(\x08H\x00R\rinterruptible\x12-\n\x12\x63\x61\x63he_serializable\x18\t \x01(\x08R\x11\x63\x61\x63heSerializable\x12%\n\x0egenerates_deck\x18\n \x01(\x08R\rgeneratesDeck\x12\x39\n\x04tags\x18\x0b \x03(\x0b\x32%.flyteidl.core.TaskMetadata.TagsEntryR\x04tags\x12*\n\x11pod_template_name\x18\x0c \x01(\tR\x0fpodTemplateName\x12\x35\n\x17\x63\x61\x63he_ignore_input_vars\x18\r \x03(\tR\x14\x63\x61\x63heIgnoreInputVars\x12\x19\n\x08is_eager\x18\x0e \x01(\x08R\x07isEager\x1a\x37\n\tTagsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\x42\x15\n\x13interruptible_value\"\xd6\x05\n\x0cTaskTemplate\x12)\n\x02id\x18\x01 \x01(\x0b\x32\x19.flyteidl.core.IdentifierR\x02id\x12\x12\n\x04type\x18\x02 \x01(\tR\x04type\x12\x37\n\x08metadata\x18\x03 \x01(\x0b\x32\x1b.flyteidl.core.TaskMetadataR\x08metadata\x12;\n\tinterface\x18\x04 \x01(\x0b\x32\x1d.flyteidl.core.TypedInterfaceR\tinterface\x12/\n\x06\x63ustom\x18\x05 \x01(\x0b\x32\x17.google.protobuf.StructR\x06\x63ustom\x12\x38\n\tcontainer\x18\x06 \x01(\x0b\x32\x18.flyteidl.core.ContainerH\x00R\tcontainer\x12\x30\n\x07k8s_pod\x18\x11 \x01(\x0b\x32\x15.flyteidl.core.K8sPodH\x00R\x06k8sPod\x12&\n\x03sql\x18\x12 \x01(\x0b\x32\x12.flyteidl.core.SqlH\x00R\x03sql\x12*\n\x11task_type_version\x18\x07 \x01(\x05R\x0ftaskTypeVersion\x12I\n\x10security_context\x18\x08 \x01(\x0b\x32\x1e.flyteidl.core.SecurityContextR\x0fsecurityContext\x12O\n\x12\x65xtended_resources\x18\t \x01(\x0b\x32 .flyteidl.core.ExtendedResourcesR\x11\x65xtendedResources\x12?\n\x06\x63onfig\x18\x10 \x03(\x0b\x32\'.flyteidl.core.TaskTemplate.ConfigEntryR\x06\x63onfig\x1a\x39\n\x0b\x43onfigEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\x42\x08\n\x06target\"6\n\rContainerPort\x12%\n\x0e\x63ontainer_port\x18\x01 
\x01(\rR\rcontainerPort\"\xfc\x03\n\tContainer\x12\x14\n\x05image\x18\x01 \x01(\tR\x05image\x12\x18\n\x07\x63ommand\x18\x02 \x03(\tR\x07\x63ommand\x12\x12\n\x04\x61rgs\x18\x03 \x03(\tR\x04\x61rgs\x12\x36\n\tresources\x18\x04 \x01(\x0b\x32\x18.flyteidl.core.ResourcesR\tresources\x12-\n\x03\x65nv\x18\x05 \x03(\x0b\x32\x1b.flyteidl.core.KeyValuePairR\x03\x65nv\x12\x37\n\x06\x63onfig\x18\x06 \x03(\x0b\x32\x1b.flyteidl.core.KeyValuePairB\x02\x18\x01R\x06\x63onfig\x12\x32\n\x05ports\x18\x07 \x03(\x0b\x32\x1c.flyteidl.core.ContainerPortR\x05ports\x12\x41\n\x0b\x64\x61ta_config\x18\t \x01(\x0b\x32 .flyteidl.core.DataLoadingConfigR\ndataConfig\x12I\n\x0c\x61rchitecture\x18\n \x01(\x0e\x32%.flyteidl.core.Container.ArchitectureR\x0c\x61rchitecture\"I\n\x0c\x41rchitecture\x12\x0b\n\x07UNKNOWN\x10\x00\x12\t\n\x05\x41MD64\x10\x01\x12\t\n\x05\x41RM64\x10\x02\x12\n\n\x06\x41RM_V6\x10\x03\x12\n\n\x06\x41RM_V7\x10\x04\"\xb5\x02\n\nIOStrategy\x12K\n\rdownload_mode\x18\x01 \x01(\x0e\x32&.flyteidl.core.IOStrategy.DownloadModeR\x0c\x64ownloadMode\x12\x45\n\x0bupload_mode\x18\x02 \x01(\x0e\x32$.flyteidl.core.IOStrategy.UploadModeR\nuploadMode\"L\n\x0c\x44ownloadMode\x12\x12\n\x0e\x44OWNLOAD_EAGER\x10\x00\x12\x13\n\x0f\x44OWNLOAD_STREAM\x10\x01\x12\x13\n\x0f\x44O_NOT_DOWNLOAD\x10\x02\"E\n\nUploadMode\x12\x12\n\x0eUPLOAD_ON_EXIT\x10\x00\x12\x10\n\x0cUPLOAD_EAGER\x10\x01\x12\x11\n\rDO_NOT_UPLOAD\x10\x02\"\xa7\x02\n\x11\x44\x61taLoadingConfig\x12\x18\n\x07\x65nabled\x18\x01 \x01(\x08R\x07\x65nabled\x12\x1d\n\ninput_path\x18\x02 \x01(\tR\tinputPath\x12\x1f\n\x0boutput_path\x18\x03 \x01(\tR\noutputPath\x12I\n\x06\x66ormat\x18\x04 \x01(\x0e\x32\x31.flyteidl.core.DataLoadingConfig.LiteralMapFormatR\x06\x66ormat\x12:\n\x0bio_strategy\x18\x05 \x01(\x0b\x32\x19.flyteidl.core.IOStrategyR\nioStrategy\"1\n\x10LiteralMapFormat\x12\x08\n\x04JSON\x10\x00\x12\x08\n\x04YAML\x10\x01\x12\t\n\x05PROTO\x10\x02\"\xbd\x01\n\x06K8sPod\x12<\n\x08metadata\x18\x01 \x01(\x0b\x32 
.flyteidl.core.K8sObjectMetadataR\x08metadata\x12\x32\n\x08pod_spec\x18\x02 \x01(\x0b\x32\x17.google.protobuf.StructR\x07podSpec\x12\x41\n\x0b\x64\x61ta_config\x18\x03 \x01(\x0b\x32 .flyteidl.core.DataLoadingConfigR\ndataConfig\"\xa9\x02\n\x11K8sObjectMetadata\x12\x44\n\x06labels\x18\x01 \x03(\x0b\x32,.flyteidl.core.K8sObjectMetadata.LabelsEntryR\x06labels\x12S\n\x0b\x61nnotations\x18\x02 \x03(\x0b\x32\x31.flyteidl.core.K8sObjectMetadata.AnnotationsEntryR\x0b\x61nnotations\x1a\x39\n\x0bLabelsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\x1a>\n\x10\x41nnotationsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\"\x92\x01\n\x03Sql\x12\x1c\n\tstatement\x18\x01 \x01(\tR\tstatement\x12\x34\n\x07\x64ialect\x18\x02 \x01(\x0e\x32\x1a.flyteidl.core.Sql.DialectR\x07\x64ialect\"7\n\x07\x44ialect\x12\r\n\tUNDEFINED\x10\x00\x12\x08\n\x04\x41NSI\x10\x01\x12\x08\n\x04HIVE\x10\x02\x12\t\n\x05OTHER\x10\x03\x42\xb0\x01\n\x11\x63om.flyteidl.coreB\nTasksProtoP\x01Z:github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core\xa2\x02\x03\x46\x43X\xaa\x02\rFlyteidl.Core\xca\x02\rFlyteidl\\Core\xe2\x02\x19\x46lyteidl\\Core\\GPBMetadata\xea\x02\x0e\x46lyteidl::Coreb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -53,39 +53,39 @@ _globals['_RUNTIMEMETADATA_RUNTIMETYPE']._serialized_start=943 _globals['_RUNTIMEMETADATA_RUNTIMETYPE']._serialized_end=982 _globals['_TASKMETADATA']._serialized_start=985 - _globals['_TASKMETADATA']._serialized_end=1669 - _globals['_TASKMETADATA_TAGSENTRY']._serialized_start=1591 - _globals['_TASKMETADATA_TAGSENTRY']._serialized_end=1646 - _globals['_TASKTEMPLATE']._serialized_start=1672 - _globals['_TASKTEMPLATE']._serialized_end=2398 - _globals['_TASKTEMPLATE_CONFIGENTRY']._serialized_start=2331 - _globals['_TASKTEMPLATE_CONFIGENTRY']._serialized_end=2388 - 
_globals['_CONTAINERPORT']._serialized_start=2400 - _globals['_CONTAINERPORT']._serialized_end=2454 - _globals['_CONTAINER']._serialized_start=2457 - _globals['_CONTAINER']._serialized_end=2965 - _globals['_CONTAINER_ARCHITECTURE']._serialized_start=2892 - _globals['_CONTAINER_ARCHITECTURE']._serialized_end=2965 - _globals['_IOSTRATEGY']._serialized_start=2968 - _globals['_IOSTRATEGY']._serialized_end=3277 - _globals['_IOSTRATEGY_DOWNLOADMODE']._serialized_start=3130 - _globals['_IOSTRATEGY_DOWNLOADMODE']._serialized_end=3206 - _globals['_IOSTRATEGY_UPLOADMODE']._serialized_start=3208 - _globals['_IOSTRATEGY_UPLOADMODE']._serialized_end=3277 - _globals['_DATALOADINGCONFIG']._serialized_start=3280 - _globals['_DATALOADINGCONFIG']._serialized_end=3575 - _globals['_DATALOADINGCONFIG_LITERALMAPFORMAT']._serialized_start=3526 - _globals['_DATALOADINGCONFIG_LITERALMAPFORMAT']._serialized_end=3575 - _globals['_K8SPOD']._serialized_start=3578 - _globals['_K8SPOD']._serialized_end=3767 - _globals['_K8SOBJECTMETADATA']._serialized_start=3770 - _globals['_K8SOBJECTMETADATA']._serialized_end=4067 - _globals['_K8SOBJECTMETADATA_LABELSENTRY']._serialized_start=3946 - _globals['_K8SOBJECTMETADATA_LABELSENTRY']._serialized_end=4003 - _globals['_K8SOBJECTMETADATA_ANNOTATIONSENTRY']._serialized_start=4005 - _globals['_K8SOBJECTMETADATA_ANNOTATIONSENTRY']._serialized_end=4067 - _globals['_SQL']._serialized_start=4070 - _globals['_SQL']._serialized_end=4216 - _globals['_SQL_DIALECT']._serialized_start=4161 - _globals['_SQL_DIALECT']._serialized_end=4216 + _globals['_TASKMETADATA']._serialized_end=1696 + _globals['_TASKMETADATA_TAGSENTRY']._serialized_start=1618 + _globals['_TASKMETADATA_TAGSENTRY']._serialized_end=1673 + _globals['_TASKTEMPLATE']._serialized_start=1699 + _globals['_TASKTEMPLATE']._serialized_end=2425 + _globals['_TASKTEMPLATE_CONFIGENTRY']._serialized_start=2358 + _globals['_TASKTEMPLATE_CONFIGENTRY']._serialized_end=2415 + 
_globals['_CONTAINERPORT']._serialized_start=2427 + _globals['_CONTAINERPORT']._serialized_end=2481 + _globals['_CONTAINER']._serialized_start=2484 + _globals['_CONTAINER']._serialized_end=2992 + _globals['_CONTAINER_ARCHITECTURE']._serialized_start=2919 + _globals['_CONTAINER_ARCHITECTURE']._serialized_end=2992 + _globals['_IOSTRATEGY']._serialized_start=2995 + _globals['_IOSTRATEGY']._serialized_end=3304 + _globals['_IOSTRATEGY_DOWNLOADMODE']._serialized_start=3157 + _globals['_IOSTRATEGY_DOWNLOADMODE']._serialized_end=3233 + _globals['_IOSTRATEGY_UPLOADMODE']._serialized_start=3235 + _globals['_IOSTRATEGY_UPLOADMODE']._serialized_end=3304 + _globals['_DATALOADINGCONFIG']._serialized_start=3307 + _globals['_DATALOADINGCONFIG']._serialized_end=3602 + _globals['_DATALOADINGCONFIG_LITERALMAPFORMAT']._serialized_start=3553 + _globals['_DATALOADINGCONFIG_LITERALMAPFORMAT']._serialized_end=3602 + _globals['_K8SPOD']._serialized_start=3605 + _globals['_K8SPOD']._serialized_end=3794 + _globals['_K8SOBJECTMETADATA']._serialized_start=3797 + _globals['_K8SOBJECTMETADATA']._serialized_end=4094 + _globals['_K8SOBJECTMETADATA_LABELSENTRY']._serialized_start=3973 + _globals['_K8SOBJECTMETADATA_LABELSENTRY']._serialized_end=4030 + _globals['_K8SOBJECTMETADATA_ANNOTATIONSENTRY']._serialized_start=4032 + _globals['_K8SOBJECTMETADATA_ANNOTATIONSENTRY']._serialized_end=4094 + _globals['_SQL']._serialized_start=4097 + _globals['_SQL']._serialized_end=4243 + _globals['_SQL_DIALECT']._serialized_start=4188 + _globals['_SQL_DIALECT']._serialized_end=4243 # @@protoc_insertion_point(module_scope) diff --git a/flyteidl/gen/pb_python/flyteidl/core/tasks_pb2.pyi b/flyteidl/gen/pb_python/flyteidl/core/tasks_pb2.pyi index 98d1792aee..9e79c295ec 100644 --- a/flyteidl/gen/pb_python/flyteidl/core/tasks_pb2.pyi +++ b/flyteidl/gen/pb_python/flyteidl/core/tasks_pb2.pyi @@ -74,7 +74,7 @@ class RuntimeMetadata(_message.Message): def __init__(self, type: _Optional[_Union[RuntimeMetadata.RuntimeType, 
str]] = ..., version: _Optional[str] = ..., flavor: _Optional[str] = ...) -> None: ... class TaskMetadata(_message.Message): - __slots__ = ["discoverable", "runtime", "timeout", "retries", "discovery_version", "deprecated_error_message", "interruptible", "cache_serializable", "generates_deck", "tags", "pod_template_name", "cache_ignore_input_vars"] + __slots__ = ["discoverable", "runtime", "timeout", "retries", "discovery_version", "deprecated_error_message", "interruptible", "cache_serializable", "generates_deck", "tags", "pod_template_name", "cache_ignore_input_vars", "is_eager"] class TagsEntry(_message.Message): __slots__ = ["key", "value"] KEY_FIELD_NUMBER: _ClassVar[int] @@ -94,6 +94,7 @@ class TaskMetadata(_message.Message): TAGS_FIELD_NUMBER: _ClassVar[int] POD_TEMPLATE_NAME_FIELD_NUMBER: _ClassVar[int] CACHE_IGNORE_INPUT_VARS_FIELD_NUMBER: _ClassVar[int] + IS_EAGER_FIELD_NUMBER: _ClassVar[int] discoverable: bool runtime: RuntimeMetadata timeout: _duration_pb2.Duration @@ -106,7 +107,8 @@ class TaskMetadata(_message.Message): tags: _containers.ScalarMap[str, str] pod_template_name: str cache_ignore_input_vars: _containers.RepeatedScalarFieldContainer[str] - def __init__(self, discoverable: bool = ..., runtime: _Optional[_Union[RuntimeMetadata, _Mapping]] = ..., timeout: _Optional[_Union[_duration_pb2.Duration, _Mapping]] = ..., retries: _Optional[_Union[_literals_pb2.RetryStrategy, _Mapping]] = ..., discovery_version: _Optional[str] = ..., deprecated_error_message: _Optional[str] = ..., interruptible: bool = ..., cache_serializable: bool = ..., generates_deck: bool = ..., tags: _Optional[_Mapping[str, str]] = ..., pod_template_name: _Optional[str] = ..., cache_ignore_input_vars: _Optional[_Iterable[str]] = ...) -> None: ... 
+ is_eager: bool + def __init__(self, discoverable: bool = ..., runtime: _Optional[_Union[RuntimeMetadata, _Mapping]] = ..., timeout: _Optional[_Union[_duration_pb2.Duration, _Mapping]] = ..., retries: _Optional[_Union[_literals_pb2.RetryStrategy, _Mapping]] = ..., discovery_version: _Optional[str] = ..., deprecated_error_message: _Optional[str] = ..., interruptible: bool = ..., cache_serializable: bool = ..., generates_deck: bool = ..., tags: _Optional[_Mapping[str, str]] = ..., pod_template_name: _Optional[str] = ..., cache_ignore_input_vars: _Optional[_Iterable[str]] = ..., is_eager: bool = ...) -> None: ... class TaskTemplate(_message.Message): __slots__ = ["id", "type", "metadata", "interface", "custom", "container", "k8s_pod", "sql", "task_type_version", "security_context", "extended_resources", "config"] diff --git a/flyteidl/gen/pb_python/flyteidl/core/workflow_pb2.py b/flyteidl/gen/pb_python/flyteidl/core/workflow_pb2.py index 0c62aca3ad..4f30f4d632 100644 --- a/flyteidl/gen/pb_python/flyteidl/core/workflow_pb2.py +++ b/flyteidl/gen/pb_python/flyteidl/core/workflow_pb2.py @@ -23,7 +23,7 @@ from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1c\x66lyteidl/core/workflow.proto\x12\rflyteidl.core\x1a\x1d\x66lyteidl/core/condition.proto\x1a\x1d\x66lyteidl/core/execution.proto\x1a\x1e\x66lyteidl/core/identifier.proto\x1a\x1d\x66lyteidl/core/interface.proto\x1a\x1c\x66lyteidl/core/literals.proto\x1a\x19\x66lyteidl/core/tasks.proto\x1a\x19\x66lyteidl/core/types.proto\x1a\x1c\x66lyteidl/core/security.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1egoogle/protobuf/wrappers.proto\"{\n\x07IfBlock\x12>\n\tcondition\x18\x01 \x01(\x0b\x32 .flyteidl.core.BooleanExpressionR\tcondition\x12\x30\n\tthen_node\x18\x02 \x01(\x0b\x32\x13.flyteidl.core.NodeR\x08thenNode\"\xd4\x01\n\x0bIfElseBlock\x12*\n\x04\x63\x61se\x18\x01 
\x01(\x0b\x32\x16.flyteidl.core.IfBlockR\x04\x63\x61se\x12,\n\x05other\x18\x02 \x03(\x0b\x32\x16.flyteidl.core.IfBlockR\x05other\x12\x32\n\telse_node\x18\x03 \x01(\x0b\x32\x13.flyteidl.core.NodeH\x00R\x08\x65lseNode\x12,\n\x05\x65rror\x18\x04 \x01(\x0b\x32\x14.flyteidl.core.ErrorH\x00R\x05\x65rrorB\t\n\x07\x64\x65\x66\x61ult\"A\n\nBranchNode\x12\x33\n\x07if_else\x18\x01 \x01(\x0b\x32\x1a.flyteidl.core.IfElseBlockR\x06ifElse\"\x97\x01\n\x08TaskNode\x12>\n\x0creference_id\x18\x01 \x01(\x0b\x32\x19.flyteidl.core.IdentifierH\x00R\x0breferenceId\x12>\n\toverrides\x18\x02 \x01(\x0b\x32 .flyteidl.core.TaskNodeOverridesR\toverridesB\x0b\n\treference\"\xa6\x01\n\x0cWorkflowNode\x12\x42\n\x0elaunchplan_ref\x18\x01 \x01(\x0b\x32\x19.flyteidl.core.IdentifierH\x00R\rlaunchplanRef\x12\x45\n\x10sub_workflow_ref\x18\x02 \x01(\x0b\x32\x19.flyteidl.core.IdentifierH\x00R\x0esubWorkflowRefB\x0b\n\treference\"/\n\x10\x41pproveCondition\x12\x1b\n\tsignal_id\x18\x01 \x01(\tR\x08signalId\"\x90\x01\n\x0fSignalCondition\x12\x1b\n\tsignal_id\x18\x01 \x01(\tR\x08signalId\x12.\n\x04type\x18\x02 \x01(\x0b\x32\x1a.flyteidl.core.LiteralTypeR\x04type\x12\x30\n\x14output_variable_name\x18\x03 \x01(\tR\x12outputVariableName\"G\n\x0eSleepCondition\x12\x35\n\x08\x64uration\x18\x01 \x01(\x0b\x32\x19.google.protobuf.DurationR\x08\x64uration\"\xc5\x01\n\x08GateNode\x12;\n\x07\x61pprove\x18\x01 \x01(\x0b\x32\x1f.flyteidl.core.ApproveConditionH\x00R\x07\x61pprove\x12\x38\n\x06signal\x18\x02 \x01(\x0b\x32\x1e.flyteidl.core.SignalConditionH\x00R\x06signal\x12\x35\n\x05sleep\x18\x03 \x01(\x0b\x32\x1d.flyteidl.core.SleepConditionH\x00R\x05sleepB\x0b\n\tcondition\"\xda\x02\n\tArrayNode\x12\'\n\x04node\x18\x01 \x01(\x0b\x32\x13.flyteidl.core.NodeR\x04node\x12\"\n\x0bparallelism\x18\x02 \x01(\rH\x00R\x0bparallelism\x12%\n\rmin_successes\x18\x03 \x01(\rH\x01R\x0cminSuccesses\x12,\n\x11min_success_ratio\x18\x04 \x01(\x02H\x01R\x0fminSuccessRatio\x12M\n\x0e\x65xecution_mode\x18\x05 
\x01(\x0e\x32&.flyteidl.core.ArrayNode.ExecutionModeR\rexecutionMode\"2\n\rExecutionMode\x12\x11\n\rMINIMAL_STATE\x10\x00\x12\x0e\n\nFULL_STATE\x10\x01\x42\x14\n\x12parallelism_optionB\x12\n\x10success_criteria\"\x8c\x03\n\x0cNodeMetadata\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12\x33\n\x07timeout\x18\x04 \x01(\x0b\x32\x19.google.protobuf.DurationR\x07timeout\x12\x36\n\x07retries\x18\x05 \x01(\x0b\x32\x1c.flyteidl.core.RetryStrategyR\x07retries\x12&\n\rinterruptible\x18\x06 \x01(\x08H\x00R\rinterruptible\x12\x1e\n\tcacheable\x18\x07 \x01(\x08H\x01R\tcacheable\x12%\n\rcache_version\x18\x08 \x01(\tH\x02R\x0c\x63\x61\x63heVersion\x12/\n\x12\x63\x61\x63he_serializable\x18\t \x01(\x08H\x03R\x11\x63\x61\x63heSerializableB\x15\n\x13interruptible_valueB\x11\n\x0f\x63\x61\x63heable_valueB\x15\n\x13\x63\x61\x63he_version_valueB\x1a\n\x18\x63\x61\x63he_serializable_value\"/\n\x05\x41lias\x12\x10\n\x03var\x18\x01 \x01(\tR\x03var\x12\x14\n\x05\x61lias\x18\x02 \x01(\tR\x05\x61lias\"\x9f\x04\n\x04Node\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12\x37\n\x08metadata\x18\x02 \x01(\x0b\x32\x1b.flyteidl.core.NodeMetadataR\x08metadata\x12.\n\x06inputs\x18\x03 \x03(\x0b\x32\x16.flyteidl.core.BindingR\x06inputs\x12*\n\x11upstream_node_ids\x18\x04 \x03(\tR\x0fupstreamNodeIds\x12;\n\x0eoutput_aliases\x18\x05 \x03(\x0b\x32\x14.flyteidl.core.AliasR\routputAliases\x12\x36\n\ttask_node\x18\x06 \x01(\x0b\x32\x17.flyteidl.core.TaskNodeH\x00R\x08taskNode\x12\x42\n\rworkflow_node\x18\x07 \x01(\x0b\x32\x1b.flyteidl.core.WorkflowNodeH\x00R\x0cworkflowNode\x12<\n\x0b\x62ranch_node\x18\x08 \x01(\x0b\x32\x19.flyteidl.core.BranchNodeH\x00R\nbranchNode\x12\x36\n\tgate_node\x18\t \x01(\x0b\x32\x17.flyteidl.core.GateNodeH\x00R\x08gateNode\x12\x39\n\narray_node\x18\n \x01(\x0b\x32\x18.flyteidl.core.ArrayNodeH\x00R\tarrayNodeB\x08\n\x06target\"\xfc\x02\n\x10WorkflowMetadata\x12M\n\x12quality_of_service\x18\x01 \x01(\x0b\x32\x1f.flyteidl.core.QualityOfServiceR\x10qualityOfService\x12N\n\non_failure\x18\x02 
\x01(\x0e\x32/.flyteidl.core.WorkflowMetadata.OnFailurePolicyR\tonFailure\x12=\n\x04tags\x18\x03 \x03(\x0b\x32).flyteidl.core.WorkflowMetadata.TagsEntryR\x04tags\x1a\x37\n\tTagsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\"Q\n\x0fOnFailurePolicy\x12\x14\n\x10\x46\x41IL_IMMEDIATELY\x10\x00\x12(\n$FAIL_AFTER_EXECUTABLE_NODES_COMPLETE\x10\x01\"@\n\x18WorkflowMetadataDefaults\x12$\n\rinterruptible\x18\x01 \x01(\x08R\rinterruptible\"\xa2\x03\n\x10WorkflowTemplate\x12)\n\x02id\x18\x01 \x01(\x0b\x32\x19.flyteidl.core.IdentifierR\x02id\x12;\n\x08metadata\x18\x02 \x01(\x0b\x32\x1f.flyteidl.core.WorkflowMetadataR\x08metadata\x12;\n\tinterface\x18\x03 \x01(\x0b\x32\x1d.flyteidl.core.TypedInterfaceR\tinterface\x12)\n\x05nodes\x18\x04 \x03(\x0b\x32\x13.flyteidl.core.NodeR\x05nodes\x12\x30\n\x07outputs\x18\x05 \x03(\x0b\x32\x16.flyteidl.core.BindingR\x07outputs\x12\x36\n\x0c\x66\x61ilure_node\x18\x06 \x01(\x0b\x32\x13.flyteidl.core.NodeR\x0b\x66\x61ilureNode\x12T\n\x11metadata_defaults\x18\x07 \x01(\x0b\x32\'.flyteidl.core.WorkflowMetadataDefaultsR\x10metadataDefaults\"\xc5\x01\n\x11TaskNodeOverrides\x12\x36\n\tresources\x18\x01 \x01(\x0b\x32\x18.flyteidl.core.ResourcesR\tresources\x12O\n\x12\x65xtended_resources\x18\x02 \x01(\x0b\x32 .flyteidl.core.ExtendedResourcesR\x11\x65xtendedResources\x12\'\n\x0f\x63ontainer_image\x18\x03 \x01(\tR\x0e\x63ontainerImage\"\xba\x01\n\x12LaunchPlanTemplate\x12)\n\x02id\x18\x01 \x01(\x0b\x32\x19.flyteidl.core.IdentifierR\x02id\x12;\n\tinterface\x18\x02 \x01(\x0b\x32\x1d.flyteidl.core.TypedInterfaceR\tinterface\x12<\n\x0c\x66ixed_inputs\x18\x03 \x01(\x0b\x32\x19.flyteidl.core.LiteralMapR\x0b\x66ixedInputsB\xb3\x01\n\x11\x63om.flyteidl.coreB\rWorkflowProtoP\x01Z:github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core\xa2\x02\x03\x46\x43X\xaa\x02\rFlyteidl.Core\xca\x02\rFlyteidl\\Core\xe2\x02\x19\x46lyteidl\\Core\\GPBMetadata\xea\x02\x0e\x46lyteidl::Coreb\x06proto3') +DESCRIPTOR = 
_descriptor_pool.Default().AddSerializedFile(b'\n\x1c\x66lyteidl/core/workflow.proto\x12\rflyteidl.core\x1a\x1d\x66lyteidl/core/condition.proto\x1a\x1d\x66lyteidl/core/execution.proto\x1a\x1e\x66lyteidl/core/identifier.proto\x1a\x1d\x66lyteidl/core/interface.proto\x1a\x1c\x66lyteidl/core/literals.proto\x1a\x19\x66lyteidl/core/tasks.proto\x1a\x19\x66lyteidl/core/types.proto\x1a\x1c\x66lyteidl/core/security.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1egoogle/protobuf/wrappers.proto\"{\n\x07IfBlock\x12>\n\tcondition\x18\x01 \x01(\x0b\x32 .flyteidl.core.BooleanExpressionR\tcondition\x12\x30\n\tthen_node\x18\x02 \x01(\x0b\x32\x13.flyteidl.core.NodeR\x08thenNode\"\xd4\x01\n\x0bIfElseBlock\x12*\n\x04\x63\x61se\x18\x01 \x01(\x0b\x32\x16.flyteidl.core.IfBlockR\x04\x63\x61se\x12,\n\x05other\x18\x02 \x03(\x0b\x32\x16.flyteidl.core.IfBlockR\x05other\x12\x32\n\telse_node\x18\x03 \x01(\x0b\x32\x13.flyteidl.core.NodeH\x00R\x08\x65lseNode\x12,\n\x05\x65rror\x18\x04 \x01(\x0b\x32\x14.flyteidl.core.ErrorH\x00R\x05\x65rrorB\t\n\x07\x64\x65\x66\x61ult\"A\n\nBranchNode\x12\x33\n\x07if_else\x18\x01 \x01(\x0b\x32\x1a.flyteidl.core.IfElseBlockR\x06ifElse\"\x97\x01\n\x08TaskNode\x12>\n\x0creference_id\x18\x01 \x01(\x0b\x32\x19.flyteidl.core.IdentifierH\x00R\x0breferenceId\x12>\n\toverrides\x18\x02 \x01(\x0b\x32 .flyteidl.core.TaskNodeOverridesR\toverridesB\x0b\n\treference\"\xa6\x01\n\x0cWorkflowNode\x12\x42\n\x0elaunchplan_ref\x18\x01 \x01(\x0b\x32\x19.flyteidl.core.IdentifierH\x00R\rlaunchplanRef\x12\x45\n\x10sub_workflow_ref\x18\x02 \x01(\x0b\x32\x19.flyteidl.core.IdentifierH\x00R\x0esubWorkflowRefB\x0b\n\treference\"/\n\x10\x41pproveCondition\x12\x1b\n\tsignal_id\x18\x01 \x01(\tR\x08signalId\"\x90\x01\n\x0fSignalCondition\x12\x1b\n\tsignal_id\x18\x01 \x01(\tR\x08signalId\x12.\n\x04type\x18\x02 \x01(\x0b\x32\x1a.flyteidl.core.LiteralTypeR\x04type\x12\x30\n\x14output_variable_name\x18\x03 \x01(\tR\x12outputVariableName\"G\n\x0eSleepCondition\x12\x35\n\x08\x64uration\x18\x01 
\x01(\x0b\x32\x19.google.protobuf.DurationR\x08\x64uration\"\xc5\x01\n\x08GateNode\x12;\n\x07\x61pprove\x18\x01 \x01(\x0b\x32\x1f.flyteidl.core.ApproveConditionH\x00R\x07\x61pprove\x12\x38\n\x06signal\x18\x02 \x01(\x0b\x32\x1e.flyteidl.core.SignalConditionH\x00R\x06signal\x12\x35\n\x05sleep\x18\x03 \x01(\x0b\x32\x1d.flyteidl.core.SleepConditionH\x00R\x05sleepB\x0b\n\tcondition\"\xb9\x04\n\tArrayNode\x12\'\n\x04node\x18\x01 \x01(\x0b\x32\x13.flyteidl.core.NodeR\x04node\x12\"\n\x0bparallelism\x18\x02 \x01(\rH\x00R\x0bparallelism\x12%\n\rmin_successes\x18\x03 \x01(\rH\x01R\x0cminSuccesses\x12,\n\x11min_success_ratio\x18\x04 \x01(\x02H\x01R\x0fminSuccessRatio\x12M\n\x0e\x65xecution_mode\x18\x05 \x01(\x0e\x32&.flyteidl.core.ArrayNode.ExecutionModeR\rexecutionMode\x12^\n\x1eis_original_sub_node_interface\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.BoolValueR\x1aisOriginalSubNodeInterface\x12>\n\tdata_mode\x18\x07 \x01(\x0e\x32!.flyteidl.core.ArrayNode.DataModeR\x08\x64\x61taMode\"2\n\rExecutionMode\x12\x11\n\rMINIMAL_STATE\x10\x00\x12\x0e\n\nFULL_STATE\x10\x01\"=\n\x08\x44\x61taMode\x12\x15\n\x11SINGLE_INPUT_FILE\x10\x00\x12\x1a\n\x16INDIVIDUAL_INPUT_FILES\x10\x01\x42\x14\n\x12parallelism_optionB\x12\n\x10success_criteria\"\x8c\x03\n\x0cNodeMetadata\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12\x33\n\x07timeout\x18\x04 \x01(\x0b\x32\x19.google.protobuf.DurationR\x07timeout\x12\x36\n\x07retries\x18\x05 \x01(\x0b\x32\x1c.flyteidl.core.RetryStrategyR\x07retries\x12&\n\rinterruptible\x18\x06 \x01(\x08H\x00R\rinterruptible\x12\x1e\n\tcacheable\x18\x07 \x01(\x08H\x01R\tcacheable\x12%\n\rcache_version\x18\x08 \x01(\tH\x02R\x0c\x63\x61\x63heVersion\x12/\n\x12\x63\x61\x63he_serializable\x18\t \x01(\x08H\x03R\x11\x63\x61\x63heSerializableB\x15\n\x13interruptible_valueB\x11\n\x0f\x63\x61\x63heable_valueB\x15\n\x13\x63\x61\x63he_version_valueB\x1a\n\x18\x63\x61\x63he_serializable_value\"/\n\x05\x41lias\x12\x10\n\x03var\x18\x01 \x01(\tR\x03var\x12\x14\n\x05\x61lias\x18\x02 
\x01(\tR\x05\x61lias\"\x9f\x04\n\x04Node\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12\x37\n\x08metadata\x18\x02 \x01(\x0b\x32\x1b.flyteidl.core.NodeMetadataR\x08metadata\x12.\n\x06inputs\x18\x03 \x03(\x0b\x32\x16.flyteidl.core.BindingR\x06inputs\x12*\n\x11upstream_node_ids\x18\x04 \x03(\tR\x0fupstreamNodeIds\x12;\n\x0eoutput_aliases\x18\x05 \x03(\x0b\x32\x14.flyteidl.core.AliasR\routputAliases\x12\x36\n\ttask_node\x18\x06 \x01(\x0b\x32\x17.flyteidl.core.TaskNodeH\x00R\x08taskNode\x12\x42\n\rworkflow_node\x18\x07 \x01(\x0b\x32\x1b.flyteidl.core.WorkflowNodeH\x00R\x0cworkflowNode\x12<\n\x0b\x62ranch_node\x18\x08 \x01(\x0b\x32\x19.flyteidl.core.BranchNodeH\x00R\nbranchNode\x12\x36\n\tgate_node\x18\t \x01(\x0b\x32\x17.flyteidl.core.GateNodeH\x00R\x08gateNode\x12\x39\n\narray_node\x18\n \x01(\x0b\x32\x18.flyteidl.core.ArrayNodeH\x00R\tarrayNodeB\x08\n\x06target\"\xfc\x02\n\x10WorkflowMetadata\x12M\n\x12quality_of_service\x18\x01 \x01(\x0b\x32\x1f.flyteidl.core.QualityOfServiceR\x10qualityOfService\x12N\n\non_failure\x18\x02 \x01(\x0e\x32/.flyteidl.core.WorkflowMetadata.OnFailurePolicyR\tonFailure\x12=\n\x04tags\x18\x03 \x03(\x0b\x32).flyteidl.core.WorkflowMetadata.TagsEntryR\x04tags\x1a\x37\n\tTagsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\"Q\n\x0fOnFailurePolicy\x12\x14\n\x10\x46\x41IL_IMMEDIATELY\x10\x00\x12(\n$FAIL_AFTER_EXECUTABLE_NODES_COMPLETE\x10\x01\"@\n\x18WorkflowMetadataDefaults\x12$\n\rinterruptible\x18\x01 \x01(\x08R\rinterruptible\"\xa2\x03\n\x10WorkflowTemplate\x12)\n\x02id\x18\x01 \x01(\x0b\x32\x19.flyteidl.core.IdentifierR\x02id\x12;\n\x08metadata\x18\x02 \x01(\x0b\x32\x1f.flyteidl.core.WorkflowMetadataR\x08metadata\x12;\n\tinterface\x18\x03 \x01(\x0b\x32\x1d.flyteidl.core.TypedInterfaceR\tinterface\x12)\n\x05nodes\x18\x04 \x03(\x0b\x32\x13.flyteidl.core.NodeR\x05nodes\x12\x30\n\x07outputs\x18\x05 \x03(\x0b\x32\x16.flyteidl.core.BindingR\x07outputs\x12\x36\n\x0c\x66\x61ilure_node\x18\x06 
\x01(\x0b\x32\x13.flyteidl.core.NodeR\x0b\x66\x61ilureNode\x12T\n\x11metadata_defaults\x18\x07 \x01(\x0b\x32\'.flyteidl.core.WorkflowMetadataDefaultsR\x10metadataDefaults\"\xc5\x01\n\x11TaskNodeOverrides\x12\x36\n\tresources\x18\x01 \x01(\x0b\x32\x18.flyteidl.core.ResourcesR\tresources\x12O\n\x12\x65xtended_resources\x18\x02 \x01(\x0b\x32 .flyteidl.core.ExtendedResourcesR\x11\x65xtendedResources\x12\'\n\x0f\x63ontainer_image\x18\x03 \x01(\tR\x0e\x63ontainerImage\"\xba\x01\n\x12LaunchPlanTemplate\x12)\n\x02id\x18\x01 \x01(\x0b\x32\x19.flyteidl.core.IdentifierR\x02id\x12;\n\tinterface\x18\x02 \x01(\x0b\x32\x1d.flyteidl.core.TypedInterfaceR\tinterface\x12<\n\x0c\x66ixed_inputs\x18\x03 \x01(\x0b\x32\x19.flyteidl.core.LiteralMapR\x0b\x66ixedInputsB\xb3\x01\n\x11\x63om.flyteidl.coreB\rWorkflowProtoP\x01Z:github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core\xa2\x02\x03\x46\x43X\xaa\x02\rFlyteidl.Core\xca\x02\rFlyteidl\\Core\xe2\x02\x19\x46lyteidl\\Core\\GPBMetadata\xea\x02\x0e\x46lyteidl::Coreb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -53,27 +53,29 @@ _globals['_GATENODE']._serialized_start=1350 _globals['_GATENODE']._serialized_end=1547 _globals['_ARRAYNODE']._serialized_start=1550 - _globals['_ARRAYNODE']._serialized_end=1896 - _globals['_ARRAYNODE_EXECUTIONMODE']._serialized_start=1804 - _globals['_ARRAYNODE_EXECUTIONMODE']._serialized_end=1854 - _globals['_NODEMETADATA']._serialized_start=1899 - _globals['_NODEMETADATA']._serialized_end=2295 - _globals['_ALIAS']._serialized_start=2297 - _globals['_ALIAS']._serialized_end=2344 - _globals['_NODE']._serialized_start=2347 - _globals['_NODE']._serialized_end=2890 - _globals['_WORKFLOWMETADATA']._serialized_start=2893 - _globals['_WORKFLOWMETADATA']._serialized_end=3273 - _globals['_WORKFLOWMETADATA_TAGSENTRY']._serialized_start=3135 - _globals['_WORKFLOWMETADATA_TAGSENTRY']._serialized_end=3190 - 
_globals['_WORKFLOWMETADATA_ONFAILUREPOLICY']._serialized_start=3192 - _globals['_WORKFLOWMETADATA_ONFAILUREPOLICY']._serialized_end=3273 - _globals['_WORKFLOWMETADATADEFAULTS']._serialized_start=3275 - _globals['_WORKFLOWMETADATADEFAULTS']._serialized_end=3339 - _globals['_WORKFLOWTEMPLATE']._serialized_start=3342 - _globals['_WORKFLOWTEMPLATE']._serialized_end=3760 - _globals['_TASKNODEOVERRIDES']._serialized_start=3763 - _globals['_TASKNODEOVERRIDES']._serialized_end=3960 - _globals['_LAUNCHPLANTEMPLATE']._serialized_start=3963 - _globals['_LAUNCHPLANTEMPLATE']._serialized_end=4149 + _globals['_ARRAYNODE']._serialized_end=2119 + _globals['_ARRAYNODE_EXECUTIONMODE']._serialized_start=1964 + _globals['_ARRAYNODE_EXECUTIONMODE']._serialized_end=2014 + _globals['_ARRAYNODE_DATAMODE']._serialized_start=2016 + _globals['_ARRAYNODE_DATAMODE']._serialized_end=2077 + _globals['_NODEMETADATA']._serialized_start=2122 + _globals['_NODEMETADATA']._serialized_end=2518 + _globals['_ALIAS']._serialized_start=2520 + _globals['_ALIAS']._serialized_end=2567 + _globals['_NODE']._serialized_start=2570 + _globals['_NODE']._serialized_end=3113 + _globals['_WORKFLOWMETADATA']._serialized_start=3116 + _globals['_WORKFLOWMETADATA']._serialized_end=3496 + _globals['_WORKFLOWMETADATA_TAGSENTRY']._serialized_start=3358 + _globals['_WORKFLOWMETADATA_TAGSENTRY']._serialized_end=3413 + _globals['_WORKFLOWMETADATA_ONFAILUREPOLICY']._serialized_start=3415 + _globals['_WORKFLOWMETADATA_ONFAILUREPOLICY']._serialized_end=3496 + _globals['_WORKFLOWMETADATADEFAULTS']._serialized_start=3498 + _globals['_WORKFLOWMETADATADEFAULTS']._serialized_end=3562 + _globals['_WORKFLOWTEMPLATE']._serialized_start=3565 + _globals['_WORKFLOWTEMPLATE']._serialized_end=3983 + _globals['_TASKNODEOVERRIDES']._serialized_start=3986 + _globals['_TASKNODEOVERRIDES']._serialized_end=4183 + _globals['_LAUNCHPLANTEMPLATE']._serialized_start=4186 + _globals['_LAUNCHPLANTEMPLATE']._serialized_end=4372 # 
@@protoc_insertion_point(module_scope) diff --git a/flyteidl/gen/pb_python/flyteidl/core/workflow_pb2.pyi b/flyteidl/gen/pb_python/flyteidl/core/workflow_pb2.pyi index 664581b0f4..bc93b230b3 100644 --- a/flyteidl/gen/pb_python/flyteidl/core/workflow_pb2.pyi +++ b/flyteidl/gen/pb_python/flyteidl/core/workflow_pb2.pyi @@ -91,24 +91,34 @@ class GateNode(_message.Message): def __init__(self, approve: _Optional[_Union[ApproveCondition, _Mapping]] = ..., signal: _Optional[_Union[SignalCondition, _Mapping]] = ..., sleep: _Optional[_Union[SleepCondition, _Mapping]] = ...) -> None: ... class ArrayNode(_message.Message): - __slots__ = ["node", "parallelism", "min_successes", "min_success_ratio", "execution_mode"] + __slots__ = ["node", "parallelism", "min_successes", "min_success_ratio", "execution_mode", "is_original_sub_node_interface", "data_mode"] class ExecutionMode(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): __slots__ = [] MINIMAL_STATE: _ClassVar[ArrayNode.ExecutionMode] FULL_STATE: _ClassVar[ArrayNode.ExecutionMode] MINIMAL_STATE: ArrayNode.ExecutionMode FULL_STATE: ArrayNode.ExecutionMode + class DataMode(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): + __slots__ = [] + SINGLE_INPUT_FILE: _ClassVar[ArrayNode.DataMode] + INDIVIDUAL_INPUT_FILES: _ClassVar[ArrayNode.DataMode] + SINGLE_INPUT_FILE: ArrayNode.DataMode + INDIVIDUAL_INPUT_FILES: ArrayNode.DataMode NODE_FIELD_NUMBER: _ClassVar[int] PARALLELISM_FIELD_NUMBER: _ClassVar[int] MIN_SUCCESSES_FIELD_NUMBER: _ClassVar[int] MIN_SUCCESS_RATIO_FIELD_NUMBER: _ClassVar[int] EXECUTION_MODE_FIELD_NUMBER: _ClassVar[int] + IS_ORIGINAL_SUB_NODE_INTERFACE_FIELD_NUMBER: _ClassVar[int] + DATA_MODE_FIELD_NUMBER: _ClassVar[int] node: Node parallelism: int min_successes: int min_success_ratio: float execution_mode: ArrayNode.ExecutionMode - def __init__(self, node: _Optional[_Union[Node, _Mapping]] = ..., parallelism: _Optional[int] = ..., min_successes: _Optional[int] = ..., min_success_ratio: _Optional[float] = ..., 
execution_mode: _Optional[_Union[ArrayNode.ExecutionMode, str]] = ...) -> None: ... + is_original_sub_node_interface: _wrappers_pb2.BoolValue + data_mode: ArrayNode.DataMode + def __init__(self, node: _Optional[_Union[Node, _Mapping]] = ..., parallelism: _Optional[int] = ..., min_successes: _Optional[int] = ..., min_success_ratio: _Optional[float] = ..., execution_mode: _Optional[_Union[ArrayNode.ExecutionMode, str]] = ..., is_original_sub_node_interface: _Optional[_Union[_wrappers_pb2.BoolValue, _Mapping]] = ..., data_mode: _Optional[_Union[ArrayNode.DataMode, str]] = ...) -> None: ... class NodeMetadata(_message.Message): __slots__ = ["name", "timeout", "retries", "interruptible", "cacheable", "cache_version", "cache_serializable"] diff --git a/flyteidl/gen/pb_python/flyteidl/event/cloudevents_pb2.py b/flyteidl/gen/pb_python/flyteidl/event/cloudevents_pb2.py index 7addfe281f..606897f097 100644 --- a/flyteidl/gen/pb_python/flyteidl/event/cloudevents_pb2.py +++ b/flyteidl/gen/pb_python/flyteidl/event/cloudevents_pb2.py @@ -19,7 +19,7 @@ from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n flyteidl/event/cloudevents.proto\x12\x0e\x66lyteidl.event\x1a\x1a\x66lyteidl/event/event.proto\x1a\x1c\x66lyteidl/core/literals.proto\x1a\x1d\x66lyteidl/core/interface.proto\x1a\x1f\x66lyteidl/core/artifact_id.proto\x1a\x1e\x66lyteidl/core/identifier.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\xa6\x03\n\x1b\x43loudEventWorkflowExecution\x12\x43\n\traw_event\x18\x01 \x01(\x0b\x32&.flyteidl.event.WorkflowExecutionEventR\x08rawEvent\x12H\n\x10output_interface\x18\x02 \x01(\x0b\x32\x1d.flyteidl.core.TypedInterfaceR\x0foutputInterface\x12<\n\x0c\x61rtifact_ids\x18\x03 \x03(\x0b\x32\x19.flyteidl.core.ArtifactIDR\x0b\x61rtifactIds\x12[\n\x13reference_execution\x18\x04 \x01(\x0b\x32*.flyteidl.core.WorkflowExecutionIdentifierR\x12referenceExecution\x12\x1c\n\tprincipal\x18\x05 
\x01(\tR\tprincipal\x12?\n\x0elaunch_plan_id\x18\x06 \x01(\x0b\x32\x19.flyteidl.core.IdentifierR\x0claunchPlanId\"\x8b\x03\n\x17\x43loudEventNodeExecution\x12?\n\traw_event\x18\x01 \x01(\x0b\x32\".flyteidl.event.NodeExecutionEventR\x08rawEvent\x12H\n\x0ctask_exec_id\x18\x02 \x01(\x0b\x32&.flyteidl.core.TaskExecutionIdentifierR\ntaskExecId\x12H\n\x10output_interface\x18\x03 \x01(\x0b\x32\x1d.flyteidl.core.TypedInterfaceR\x0foutputInterface\x12<\n\x0c\x61rtifact_ids\x18\x04 \x03(\x0b\x32\x19.flyteidl.core.ArtifactIDR\x0b\x61rtifactIds\x12\x1c\n\tprincipal\x18\x05 \x01(\tR\tprincipal\x12?\n\x0elaunch_plan_id\x18\x06 \x01(\x0b\x32\x19.flyteidl.core.IdentifierR\x0claunchPlanId\"Z\n\x17\x43loudEventTaskExecution\x12?\n\traw_event\x18\x01 \x01(\x0b\x32\".flyteidl.event.TaskExecutionEventR\x08rawEvent\"\xef\x02\n\x18\x43loudEventExecutionStart\x12M\n\x0c\x65xecution_id\x18\x01 \x01(\x0b\x32*.flyteidl.core.WorkflowExecutionIdentifierR\x0b\x65xecutionId\x12?\n\x0elaunch_plan_id\x18\x02 \x01(\x0b\x32\x19.flyteidl.core.IdentifierR\x0claunchPlanId\x12:\n\x0bworkflow_id\x18\x03 \x01(\x0b\x32\x19.flyteidl.core.IdentifierR\nworkflowId\x12<\n\x0c\x61rtifact_ids\x18\x04 \x03(\x0b\x32\x19.flyteidl.core.ArtifactIDR\x0b\x61rtifactIds\x12+\n\x11\x61rtifact_trackers\x18\x05 \x03(\tR\x10\x61rtifactTrackers\x12\x1c\n\tprincipal\x18\x06 \x01(\tR\tprincipalB\xbc\x01\n\x12\x63om.flyteidl.eventB\x10\x43loudeventsProtoP\x01Z;github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/event\xa2\x02\x03\x46\x45X\xaa\x02\x0e\x46lyteidl.Event\xca\x02\x0e\x46lyteidl\\Event\xe2\x02\x1a\x46lyteidl\\Event\\GPBMetadata\xea\x02\x0f\x46lyteidl::Eventb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n 
flyteidl/event/cloudevents.proto\x12\x0e\x66lyteidl.event\x1a\x1a\x66lyteidl/event/event.proto\x1a\x1c\x66lyteidl/core/literals.proto\x1a\x1d\x66lyteidl/core/interface.proto\x1a\x1f\x66lyteidl/core/artifact_id.proto\x1a\x1e\x66lyteidl/core/identifier.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\xb2\x04\n\x1b\x43loudEventWorkflowExecution\x12\x43\n\traw_event\x18\x01 \x01(\x0b\x32&.flyteidl.event.WorkflowExecutionEventR\x08rawEvent\x12H\n\x10output_interface\x18\x02 \x01(\x0b\x32\x1d.flyteidl.core.TypedInterfaceR\x0foutputInterface\x12<\n\x0c\x61rtifact_ids\x18\x03 \x03(\x0b\x32\x19.flyteidl.core.ArtifactIDR\x0b\x61rtifactIds\x12[\n\x13reference_execution\x18\x04 \x01(\x0b\x32*.flyteidl.core.WorkflowExecutionIdentifierR\x12referenceExecution\x12\x1c\n\tprincipal\x18\x05 \x01(\tR\tprincipal\x12?\n\x0elaunch_plan_id\x18\x06 \x01(\x0b\x32\x19.flyteidl.core.IdentifierR\x0claunchPlanId\x12O\n\x06labels\x18\x07 \x03(\x0b\x32\x37.flyteidl.event.CloudEventWorkflowExecution.LabelsEntryR\x06labels\x1a\x39\n\x0bLabelsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\"\x93\x04\n\x17\x43loudEventNodeExecution\x12?\n\traw_event\x18\x01 \x01(\x0b\x32\".flyteidl.event.NodeExecutionEventR\x08rawEvent\x12H\n\x0ctask_exec_id\x18\x02 \x01(\x0b\x32&.flyteidl.core.TaskExecutionIdentifierR\ntaskExecId\x12H\n\x10output_interface\x18\x03 \x01(\x0b\x32\x1d.flyteidl.core.TypedInterfaceR\x0foutputInterface\x12<\n\x0c\x61rtifact_ids\x18\x04 \x03(\x0b\x32\x19.flyteidl.core.ArtifactIDR\x0b\x61rtifactIds\x12\x1c\n\tprincipal\x18\x05 \x01(\tR\tprincipal\x12?\n\x0elaunch_plan_id\x18\x06 \x01(\x0b\x32\x19.flyteidl.core.IdentifierR\x0claunchPlanId\x12K\n\x06labels\x18\x07 \x03(\x0b\x32\x33.flyteidl.event.CloudEventNodeExecution.LabelsEntryR\x06labels\x1a\x39\n\x0bLabelsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\"\xe2\x01\n\x17\x43loudEventTaskExecution\x12?\n\traw_event\x18\x01 
\x01(\x0b\x32\".flyteidl.event.TaskExecutionEventR\x08rawEvent\x12K\n\x06labels\x18\x02 \x03(\x0b\x32\x33.flyteidl.event.CloudEventTaskExecution.LabelsEntryR\x06labels\x1a\x39\n\x0bLabelsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\"\xef\x02\n\x18\x43loudEventExecutionStart\x12M\n\x0c\x65xecution_id\x18\x01 \x01(\x0b\x32*.flyteidl.core.WorkflowExecutionIdentifierR\x0b\x65xecutionId\x12?\n\x0elaunch_plan_id\x18\x02 \x01(\x0b\x32\x19.flyteidl.core.IdentifierR\x0claunchPlanId\x12:\n\x0bworkflow_id\x18\x03 \x01(\x0b\x32\x19.flyteidl.core.IdentifierR\nworkflowId\x12<\n\x0c\x61rtifact_ids\x18\x04 \x03(\x0b\x32\x19.flyteidl.core.ArtifactIDR\x0b\x61rtifactIds\x12+\n\x11\x61rtifact_trackers\x18\x05 \x03(\tR\x10\x61rtifactTrackers\x12\x1c\n\tprincipal\x18\x06 \x01(\tR\tprincipalB\xbc\x01\n\x12\x63om.flyteidl.eventB\x10\x43loudeventsProtoP\x01Z;github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/event\xa2\x02\x03\x46\x45X\xaa\x02\x0e\x46lyteidl.Event\xca\x02\x0e\x46lyteidl\\Event\xe2\x02\x1a\x46lyteidl\\Event\\GPBMetadata\xea\x02\x0f\x46lyteidl::Eventb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -28,12 +28,24 @@ DESCRIPTOR._options = None DESCRIPTOR._serialized_options = b'\n\022com.flyteidl.eventB\020CloudeventsProtoP\001Z;github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/event\242\002\003FEX\252\002\016Flyteidl.Event\312\002\016Flyteidl\\Event\342\002\032Flyteidl\\Event\\GPBMetadata\352\002\017Flyteidl::Event' + _CLOUDEVENTWORKFLOWEXECUTION_LABELSENTRY._options = None + _CLOUDEVENTWORKFLOWEXECUTION_LABELSENTRY._serialized_options = b'8\001' + _CLOUDEVENTNODEEXECUTION_LABELSENTRY._options = None + _CLOUDEVENTNODEEXECUTION_LABELSENTRY._serialized_options = b'8\001' + _CLOUDEVENTTASKEXECUTION_LABELSENTRY._options = None + _CLOUDEVENTTASKEXECUTION_LABELSENTRY._serialized_options = b'8\001' _globals['_CLOUDEVENTWORKFLOWEXECUTION']._serialized_start=240 - 
_globals['_CLOUDEVENTWORKFLOWEXECUTION']._serialized_end=662 - _globals['_CLOUDEVENTNODEEXECUTION']._serialized_start=665 - _globals['_CLOUDEVENTNODEEXECUTION']._serialized_end=1060 - _globals['_CLOUDEVENTTASKEXECUTION']._serialized_start=1062 - _globals['_CLOUDEVENTTASKEXECUTION']._serialized_end=1152 - _globals['_CLOUDEVENTEXECUTIONSTART']._serialized_start=1155 - _globals['_CLOUDEVENTEXECUTIONSTART']._serialized_end=1522 + _globals['_CLOUDEVENTWORKFLOWEXECUTION']._serialized_end=802 + _globals['_CLOUDEVENTWORKFLOWEXECUTION_LABELSENTRY']._serialized_start=745 + _globals['_CLOUDEVENTWORKFLOWEXECUTION_LABELSENTRY']._serialized_end=802 + _globals['_CLOUDEVENTNODEEXECUTION']._serialized_start=805 + _globals['_CLOUDEVENTNODEEXECUTION']._serialized_end=1336 + _globals['_CLOUDEVENTNODEEXECUTION_LABELSENTRY']._serialized_start=745 + _globals['_CLOUDEVENTNODEEXECUTION_LABELSENTRY']._serialized_end=802 + _globals['_CLOUDEVENTTASKEXECUTION']._serialized_start=1339 + _globals['_CLOUDEVENTTASKEXECUTION']._serialized_end=1565 + _globals['_CLOUDEVENTTASKEXECUTION_LABELSENTRY']._serialized_start=745 + _globals['_CLOUDEVENTTASKEXECUTION_LABELSENTRY']._serialized_end=802 + _globals['_CLOUDEVENTEXECUTIONSTART']._serialized_start=1568 + _globals['_CLOUDEVENTEXECUTIONSTART']._serialized_end=1935 # @@protoc_insertion_point(module_scope) diff --git a/flyteidl/gen/pb_python/flyteidl/event/cloudevents_pb2.pyi b/flyteidl/gen/pb_python/flyteidl/event/cloudevents_pb2.pyi index b79750c9ca..6673a9c63c 100644 --- a/flyteidl/gen/pb_python/flyteidl/event/cloudevents_pb2.pyi +++ b/flyteidl/gen/pb_python/flyteidl/event/cloudevents_pb2.pyi @@ -12,42 +12,69 @@ from typing import ClassVar as _ClassVar, Iterable as _Iterable, Mapping as _Map DESCRIPTOR: _descriptor.FileDescriptor class CloudEventWorkflowExecution(_message.Message): - __slots__ = ["raw_event", "output_interface", "artifact_ids", "reference_execution", "principal", "launch_plan_id"] + __slots__ = ["raw_event", "output_interface", 
"artifact_ids", "reference_execution", "principal", "launch_plan_id", "labels"] + class LabelsEntry(_message.Message): + __slots__ = ["key", "value"] + KEY_FIELD_NUMBER: _ClassVar[int] + VALUE_FIELD_NUMBER: _ClassVar[int] + key: str + value: str + def __init__(self, key: _Optional[str] = ..., value: _Optional[str] = ...) -> None: ... RAW_EVENT_FIELD_NUMBER: _ClassVar[int] OUTPUT_INTERFACE_FIELD_NUMBER: _ClassVar[int] ARTIFACT_IDS_FIELD_NUMBER: _ClassVar[int] REFERENCE_EXECUTION_FIELD_NUMBER: _ClassVar[int] PRINCIPAL_FIELD_NUMBER: _ClassVar[int] LAUNCH_PLAN_ID_FIELD_NUMBER: _ClassVar[int] + LABELS_FIELD_NUMBER: _ClassVar[int] raw_event: _event_pb2.WorkflowExecutionEvent output_interface: _interface_pb2.TypedInterface artifact_ids: _containers.RepeatedCompositeFieldContainer[_artifact_id_pb2.ArtifactID] reference_execution: _identifier_pb2.WorkflowExecutionIdentifier principal: str launch_plan_id: _identifier_pb2.Identifier - def __init__(self, raw_event: _Optional[_Union[_event_pb2.WorkflowExecutionEvent, _Mapping]] = ..., output_interface: _Optional[_Union[_interface_pb2.TypedInterface, _Mapping]] = ..., artifact_ids: _Optional[_Iterable[_Union[_artifact_id_pb2.ArtifactID, _Mapping]]] = ..., reference_execution: _Optional[_Union[_identifier_pb2.WorkflowExecutionIdentifier, _Mapping]] = ..., principal: _Optional[str] = ..., launch_plan_id: _Optional[_Union[_identifier_pb2.Identifier, _Mapping]] = ...) -> None: ... 
+ labels: _containers.ScalarMap[str, str] + def __init__(self, raw_event: _Optional[_Union[_event_pb2.WorkflowExecutionEvent, _Mapping]] = ..., output_interface: _Optional[_Union[_interface_pb2.TypedInterface, _Mapping]] = ..., artifact_ids: _Optional[_Iterable[_Union[_artifact_id_pb2.ArtifactID, _Mapping]]] = ..., reference_execution: _Optional[_Union[_identifier_pb2.WorkflowExecutionIdentifier, _Mapping]] = ..., principal: _Optional[str] = ..., launch_plan_id: _Optional[_Union[_identifier_pb2.Identifier, _Mapping]] = ..., labels: _Optional[_Mapping[str, str]] = ...) -> None: ... class CloudEventNodeExecution(_message.Message): - __slots__ = ["raw_event", "task_exec_id", "output_interface", "artifact_ids", "principal", "launch_plan_id"] + __slots__ = ["raw_event", "task_exec_id", "output_interface", "artifact_ids", "principal", "launch_plan_id", "labels"] + class LabelsEntry(_message.Message): + __slots__ = ["key", "value"] + KEY_FIELD_NUMBER: _ClassVar[int] + VALUE_FIELD_NUMBER: _ClassVar[int] + key: str + value: str + def __init__(self, key: _Optional[str] = ..., value: _Optional[str] = ...) -> None: ... 
RAW_EVENT_FIELD_NUMBER: _ClassVar[int] TASK_EXEC_ID_FIELD_NUMBER: _ClassVar[int] OUTPUT_INTERFACE_FIELD_NUMBER: _ClassVar[int] ARTIFACT_IDS_FIELD_NUMBER: _ClassVar[int] PRINCIPAL_FIELD_NUMBER: _ClassVar[int] LAUNCH_PLAN_ID_FIELD_NUMBER: _ClassVar[int] + LABELS_FIELD_NUMBER: _ClassVar[int] raw_event: _event_pb2.NodeExecutionEvent task_exec_id: _identifier_pb2.TaskExecutionIdentifier output_interface: _interface_pb2.TypedInterface artifact_ids: _containers.RepeatedCompositeFieldContainer[_artifact_id_pb2.ArtifactID] principal: str launch_plan_id: _identifier_pb2.Identifier - def __init__(self, raw_event: _Optional[_Union[_event_pb2.NodeExecutionEvent, _Mapping]] = ..., task_exec_id: _Optional[_Union[_identifier_pb2.TaskExecutionIdentifier, _Mapping]] = ..., output_interface: _Optional[_Union[_interface_pb2.TypedInterface, _Mapping]] = ..., artifact_ids: _Optional[_Iterable[_Union[_artifact_id_pb2.ArtifactID, _Mapping]]] = ..., principal: _Optional[str] = ..., launch_plan_id: _Optional[_Union[_identifier_pb2.Identifier, _Mapping]] = ...) -> None: ... + labels: _containers.ScalarMap[str, str] + def __init__(self, raw_event: _Optional[_Union[_event_pb2.NodeExecutionEvent, _Mapping]] = ..., task_exec_id: _Optional[_Union[_identifier_pb2.TaskExecutionIdentifier, _Mapping]] = ..., output_interface: _Optional[_Union[_interface_pb2.TypedInterface, _Mapping]] = ..., artifact_ids: _Optional[_Iterable[_Union[_artifact_id_pb2.ArtifactID, _Mapping]]] = ..., principal: _Optional[str] = ..., launch_plan_id: _Optional[_Union[_identifier_pb2.Identifier, _Mapping]] = ..., labels: _Optional[_Mapping[str, str]] = ...) -> None: ... class CloudEventTaskExecution(_message.Message): - __slots__ = ["raw_event"] + __slots__ = ["raw_event", "labels"] + class LabelsEntry(_message.Message): + __slots__ = ["key", "value"] + KEY_FIELD_NUMBER: _ClassVar[int] + VALUE_FIELD_NUMBER: _ClassVar[int] + key: str + value: str + def __init__(self, key: _Optional[str] = ..., value: _Optional[str] = ...) 
-> None: ... RAW_EVENT_FIELD_NUMBER: _ClassVar[int] + LABELS_FIELD_NUMBER: _ClassVar[int] raw_event: _event_pb2.TaskExecutionEvent - def __init__(self, raw_event: _Optional[_Union[_event_pb2.TaskExecutionEvent, _Mapping]] = ...) -> None: ... + labels: _containers.ScalarMap[str, str] + def __init__(self, raw_event: _Optional[_Union[_event_pb2.TaskExecutionEvent, _Mapping]] = ..., labels: _Optional[_Mapping[str, str]] = ...) -> None: ... class CloudEventExecutionStart(_message.Message): __slots__ = ["execution_id", "launch_plan_id", "workflow_id", "artifact_ids", "artifact_trackers", "principal"] diff --git a/flyteidl/gen/pb_python/flyteidl/event/event_pb2.py b/flyteidl/gen/pb_python/flyteidl/event/event_pb2.py index 5974dfe477..cd4d519762 100644 --- a/flyteidl/gen/pb_python/flyteidl/event/event_pb2.py +++ b/flyteidl/gen/pb_python/flyteidl/event/event_pb2.py @@ -20,7 +20,7 @@ from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1a\x66lyteidl/event/event.proto\x12\x0e\x66lyteidl.event\x1a\x1c\x66lyteidl/core/literals.proto\x1a\x1c\x66lyteidl/core/compiler.proto\x1a\x1d\x66lyteidl/core/execution.proto\x1a\x1e\x66lyteidl/core/identifier.proto\x1a\x1b\x66lyteidl/core/catalog.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1cgoogle/protobuf/struct.proto\"\xaa\x03\n\x16WorkflowExecutionEvent\x12M\n\x0c\x65xecution_id\x18\x01 \x01(\x0b\x32*.flyteidl.core.WorkflowExecutionIdentifierR\x0b\x65xecutionId\x12\x1f\n\x0bproducer_id\x18\x02 \x01(\tR\nproducerId\x12<\n\x05phase\x18\x03 \x01(\x0e\x32&.flyteidl.core.WorkflowExecution.PhaseR\x05phase\x12;\n\x0boccurred_at\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\noccurredAt\x12\x1f\n\noutput_uri\x18\x05 \x01(\tH\x00R\toutputUri\x12\x35\n\x05\x65rror\x18\x06 \x01(\x0b\x32\x1d.flyteidl.core.ExecutionErrorH\x00R\x05\x65rror\x12<\n\x0boutput_data\x18\x07 
\x01(\x0b\x32\x19.flyteidl.core.LiteralMapH\x00R\noutputDataB\x0f\n\routput_result\"\x99\n\n\x12NodeExecutionEvent\x12\x36\n\x02id\x18\x01 \x01(\x0b\x32&.flyteidl.core.NodeExecutionIdentifierR\x02id\x12\x1f\n\x0bproducer_id\x18\x02 \x01(\tR\nproducerId\x12\x38\n\x05phase\x18\x03 \x01(\x0e\x32\".flyteidl.core.NodeExecution.PhaseR\x05phase\x12;\n\x0boccurred_at\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\noccurredAt\x12\x1d\n\tinput_uri\x18\x05 \x01(\tH\x00R\x08inputUri\x12:\n\ninput_data\x18\x14 \x01(\x0b\x32\x19.flyteidl.core.LiteralMapH\x00R\tinputData\x12\x1f\n\noutput_uri\x18\x06 \x01(\tH\x01R\toutputUri\x12\x35\n\x05\x65rror\x18\x07 \x01(\x0b\x32\x1d.flyteidl.core.ExecutionErrorH\x01R\x05\x65rror\x12<\n\x0boutput_data\x18\x0f \x01(\x0b\x32\x19.flyteidl.core.LiteralMapH\x01R\noutputData\x12\\\n\x16workflow_node_metadata\x18\x08 \x01(\x0b\x32$.flyteidl.event.WorkflowNodeMetadataH\x02R\x14workflowNodeMetadata\x12P\n\x12task_node_metadata\x18\x0e \x01(\x0b\x32 .flyteidl.event.TaskNodeMetadataH\x02R\x10taskNodeMetadata\x12]\n\x14parent_task_metadata\x18\t \x01(\x0b\x32+.flyteidl.event.ParentTaskExecutionMetadataR\x12parentTaskMetadata\x12]\n\x14parent_node_metadata\x18\n \x01(\x0b\x32+.flyteidl.event.ParentNodeExecutionMetadataR\x12parentNodeMetadata\x12\x1f\n\x0bretry_group\x18\x0b \x01(\tR\nretryGroup\x12 \n\x0cspec_node_id\x18\x0c \x01(\tR\nspecNodeId\x12\x1b\n\tnode_name\x18\r \x01(\tR\x08nodeName\x12#\n\revent_version\x18\x10 \x01(\x05R\x0c\x65ventVersion\x12\x1b\n\tis_parent\x18\x11 \x01(\x08R\x08isParent\x12\x1d\n\nis_dynamic\x18\x12 \x01(\x08R\tisDynamic\x12\x19\n\x08\x64\x65\x63k_uri\x18\x13 \x01(\tR\x07\x64\x65\x63kUri\x12;\n\x0breported_at\x18\x15 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\nreportedAt\x12\x19\n\x08is_array\x18\x16 \x01(\x08R\x07isArray\x12>\n\rtarget_entity\x18\x17 \x01(\x0b\x32\x19.flyteidl.core.IdentifierR\x0ctargetEntity\x12-\n\x13is_in_dynamic_chain\x18\x18 
\x01(\x08R\x10isInDynamicChainB\r\n\x0binput_valueB\x0f\n\routput_resultB\x11\n\x0ftarget_metadata\"e\n\x14WorkflowNodeMetadata\x12M\n\x0c\x65xecution_id\x18\x01 \x01(\x0b\x32*.flyteidl.core.WorkflowExecutionIdentifierR\x0b\x65xecutionId\"\xf1\x02\n\x10TaskNodeMetadata\x12\x44\n\x0c\x63\x61\x63he_status\x18\x01 \x01(\x0e\x32!.flyteidl.core.CatalogCacheStatusR\x0b\x63\x61\x63heStatus\x12?\n\x0b\x63\x61talog_key\x18\x02 \x01(\x0b\x32\x1e.flyteidl.core.CatalogMetadataR\ncatalogKey\x12W\n\x12reservation_status\x18\x03 \x01(\x0e\x32(.flyteidl.core.CatalogReservation.StatusR\x11reservationStatus\x12%\n\x0e\x63heckpoint_uri\x18\x04 \x01(\tR\rcheckpointUri\x12V\n\x10\x64ynamic_workflow\x18\x10 \x01(\x0b\x32+.flyteidl.event.DynamicWorkflowNodeMetadataR\x0f\x64ynamicWorkflow\"\xce\x01\n\x1b\x44ynamicWorkflowNodeMetadata\x12)\n\x02id\x18\x01 \x01(\x0b\x32\x19.flyteidl.core.IdentifierR\x02id\x12S\n\x11\x63ompiled_workflow\x18\x02 \x01(\x0b\x32&.flyteidl.core.CompiledWorkflowClosureR\x10\x63ompiledWorkflow\x12/\n\x14\x64ynamic_job_spec_uri\x18\x03 \x01(\tR\x11\x64ynamicJobSpecUri\"U\n\x1bParentTaskExecutionMetadata\x12\x36\n\x02id\x18\x01 \x01(\x0b\x32&.flyteidl.core.TaskExecutionIdentifierR\x02id\"6\n\x1bParentNodeExecutionMetadata\x12\x17\n\x07node_id\x18\x01 \x01(\tR\x06nodeId\"b\n\x0b\x45ventReason\x12\x16\n\x06reason\x18\x01 \x01(\tR\x06reason\x12;\n\x0boccurred_at\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\noccurredAt\"\x97\x08\n\x12TaskExecutionEvent\x12\x32\n\x07task_id\x18\x01 \x01(\x0b\x32\x19.flyteidl.core.IdentifierR\x06taskId\x12_\n\x18parent_node_execution_id\x18\x02 \x01(\x0b\x32&.flyteidl.core.NodeExecutionIdentifierR\x15parentNodeExecutionId\x12#\n\rretry_attempt\x18\x03 \x01(\rR\x0cretryAttempt\x12\x38\n\x05phase\x18\x04 \x01(\x0e\x32\".flyteidl.core.TaskExecution.PhaseR\x05phase\x12\x1f\n\x0bproducer_id\x18\x05 \x01(\tR\nproducerId\x12*\n\x04logs\x18\x06 \x03(\x0b\x32\x16.flyteidl.core.TaskLogR\x04logs\x12;\n\x0boccurred_at\x18\x07 
\x01(\x0b\x32\x1a.google.protobuf.TimestampR\noccurredAt\x12\x1d\n\tinput_uri\x18\x08 \x01(\tH\x00R\x08inputUri\x12:\n\ninput_data\x18\x13 \x01(\x0b\x32\x19.flyteidl.core.LiteralMapH\x00R\tinputData\x12\x1f\n\noutput_uri\x18\t \x01(\tH\x01R\toutputUri\x12\x35\n\x05\x65rror\x18\n \x01(\x0b\x32\x1d.flyteidl.core.ExecutionErrorH\x01R\x05\x65rror\x12<\n\x0boutput_data\x18\x11 \x01(\x0b\x32\x19.flyteidl.core.LiteralMapH\x01R\noutputData\x12\x38\n\x0b\x63ustom_info\x18\x0b \x01(\x0b\x32\x17.google.protobuf.StructR\ncustomInfo\x12#\n\rphase_version\x18\x0c \x01(\rR\x0cphaseVersion\x12\x1a\n\x06reason\x18\r \x01(\tB\x02\x18\x01R\x06reason\x12\x35\n\x07reasons\x18\x15 \x03(\x0b\x32\x1b.flyteidl.event.EventReasonR\x07reasons\x12\x1b\n\ttask_type\x18\x0e \x01(\tR\x08taskType\x12\x41\n\x08metadata\x18\x10 \x01(\x0b\x32%.flyteidl.event.TaskExecutionMetadataR\x08metadata\x12#\n\revent_version\x18\x12 \x01(\x05R\x0c\x65ventVersion\x12;\n\x0breported_at\x18\x14 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\nreportedAtB\r\n\x0binput_valueB\x0f\n\routput_result\"\x9e\x02\n\x14\x45xternalResourceInfo\x12\x1f\n\x0b\x65xternal_id\x18\x01 \x01(\tR\nexternalId\x12\x14\n\x05index\x18\x02 \x01(\rR\x05index\x12#\n\rretry_attempt\x18\x03 \x01(\rR\x0cretryAttempt\x12\x38\n\x05phase\x18\x04 \x01(\x0e\x32\".flyteidl.core.TaskExecution.PhaseR\x05phase\x12\x44\n\x0c\x63\x61\x63he_status\x18\x05 \x01(\x0e\x32!.flyteidl.core.CatalogCacheStatusR\x0b\x63\x61\x63heStatus\x12*\n\x04logs\x18\x06 \x03(\x0b\x32\x16.flyteidl.core.TaskLogR\x04logs\"[\n\x10ResourcePoolInfo\x12)\n\x10\x61llocation_token\x18\x01 \x01(\tR\x0f\x61llocationToken\x12\x1c\n\tnamespace\x18\x02 \x01(\tR\tnamespace\"\x9d\x03\n\x15TaskExecutionMetadata\x12%\n\x0egenerated_name\x18\x01 \x01(\tR\rgeneratedName\x12S\n\x12\x65xternal_resources\x18\x02 \x03(\x0b\x32$.flyteidl.event.ExternalResourceInfoR\x11\x65xternalResources\x12N\n\x12resource_pool_info\x18\x03 \x03(\x0b\x32 
.flyteidl.event.ResourcePoolInfoR\x10resourcePoolInfo\x12+\n\x11plugin_identifier\x18\x04 \x01(\tR\x10pluginIdentifier\x12Z\n\x0einstance_class\x18\x10 \x01(\x0e\x32\x33.flyteidl.event.TaskExecutionMetadata.InstanceClassR\rinstanceClass\"/\n\rInstanceClass\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x00\x12\x11\n\rINTERRUPTIBLE\x10\x01\x42\xb6\x01\n\x12\x63om.flyteidl.eventB\nEventProtoP\x01Z;github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/event\xa2\x02\x03\x46\x45X\xaa\x02\x0e\x46lyteidl.Event\xca\x02\x0e\x46lyteidl\\Event\xe2\x02\x1a\x46lyteidl\\Event\\GPBMetadata\xea\x02\x0f\x46lyteidl::Eventb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1a\x66lyteidl/event/event.proto\x12\x0e\x66lyteidl.event\x1a\x1c\x66lyteidl/core/literals.proto\x1a\x1c\x66lyteidl/core/compiler.proto\x1a\x1d\x66lyteidl/core/execution.proto\x1a\x1e\x66lyteidl/core/identifier.proto\x1a\x1b\x66lyteidl/core/catalog.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1cgoogle/protobuf/struct.proto\"\xaa\x03\n\x16WorkflowExecutionEvent\x12M\n\x0c\x65xecution_id\x18\x01 \x01(\x0b\x32*.flyteidl.core.WorkflowExecutionIdentifierR\x0b\x65xecutionId\x12\x1f\n\x0bproducer_id\x18\x02 \x01(\tR\nproducerId\x12<\n\x05phase\x18\x03 \x01(\x0e\x32&.flyteidl.core.WorkflowExecution.PhaseR\x05phase\x12;\n\x0boccurred_at\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\noccurredAt\x12\x1f\n\noutput_uri\x18\x05 \x01(\tH\x00R\toutputUri\x12\x35\n\x05\x65rror\x18\x06 \x01(\x0b\x32\x1d.flyteidl.core.ExecutionErrorH\x00R\x05\x65rror\x12<\n\x0boutput_data\x18\x07 \x01(\x0b\x32\x19.flyteidl.core.LiteralMapH\x00R\noutputDataB\x0f\n\routput_result\"\xb4\n\n\x12NodeExecutionEvent\x12\x36\n\x02id\x18\x01 \x01(\x0b\x32&.flyteidl.core.NodeExecutionIdentifierR\x02id\x12\x1f\n\x0bproducer_id\x18\x02 \x01(\tR\nproducerId\x12\x38\n\x05phase\x18\x03 \x01(\x0e\x32\".flyteidl.core.NodeExecution.PhaseR\x05phase\x12;\n\x0boccurred_at\x18\x04 
\x01(\x0b\x32\x1a.google.protobuf.TimestampR\noccurredAt\x12\x1d\n\tinput_uri\x18\x05 \x01(\tH\x00R\x08inputUri\x12:\n\ninput_data\x18\x14 \x01(\x0b\x32\x19.flyteidl.core.LiteralMapH\x00R\tinputData\x12\x1f\n\noutput_uri\x18\x06 \x01(\tH\x01R\toutputUri\x12\x35\n\x05\x65rror\x18\x07 \x01(\x0b\x32\x1d.flyteidl.core.ExecutionErrorH\x01R\x05\x65rror\x12<\n\x0boutput_data\x18\x0f \x01(\x0b\x32\x19.flyteidl.core.LiteralMapH\x01R\noutputData\x12\\\n\x16workflow_node_metadata\x18\x08 \x01(\x0b\x32$.flyteidl.event.WorkflowNodeMetadataH\x02R\x14workflowNodeMetadata\x12P\n\x12task_node_metadata\x18\x0e \x01(\x0b\x32 .flyteidl.event.TaskNodeMetadataH\x02R\x10taskNodeMetadata\x12]\n\x14parent_task_metadata\x18\t \x01(\x0b\x32+.flyteidl.event.ParentTaskExecutionMetadataR\x12parentTaskMetadata\x12]\n\x14parent_node_metadata\x18\n \x01(\x0b\x32+.flyteidl.event.ParentNodeExecutionMetadataR\x12parentNodeMetadata\x12\x1f\n\x0bretry_group\x18\x0b \x01(\tR\nretryGroup\x12 \n\x0cspec_node_id\x18\x0c \x01(\tR\nspecNodeId\x12\x1b\n\tnode_name\x18\r \x01(\tR\x08nodeName\x12#\n\revent_version\x18\x10 \x01(\x05R\x0c\x65ventVersion\x12\x1b\n\tis_parent\x18\x11 \x01(\x08R\x08isParent\x12\x1d\n\nis_dynamic\x18\x12 \x01(\x08R\tisDynamic\x12\x19\n\x08\x64\x65\x63k_uri\x18\x13 \x01(\tR\x07\x64\x65\x63kUri\x12;\n\x0breported_at\x18\x15 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\nreportedAt\x12\x19\n\x08is_array\x18\x16 \x01(\x08R\x07isArray\x12>\n\rtarget_entity\x18\x17 \x01(\x0b\x32\x19.flyteidl.core.IdentifierR\x0ctargetEntity\x12-\n\x13is_in_dynamic_chain\x18\x18 \x01(\x08R\x10isInDynamicChain\x12\x19\n\x08is_eager\x18\x19 \x01(\x08R\x07isEagerB\r\n\x0binput_valueB\x0f\n\routput_resultB\x11\n\x0ftarget_metadata\"e\n\x14WorkflowNodeMetadata\x12M\n\x0c\x65xecution_id\x18\x01 \x01(\x0b\x32*.flyteidl.core.WorkflowExecutionIdentifierR\x0b\x65xecutionId\"\xf1\x02\n\x10TaskNodeMetadata\x12\x44\n\x0c\x63\x61\x63he_status\x18\x01 
\x01(\x0e\x32!.flyteidl.core.CatalogCacheStatusR\x0b\x63\x61\x63heStatus\x12?\n\x0b\x63\x61talog_key\x18\x02 \x01(\x0b\x32\x1e.flyteidl.core.CatalogMetadataR\ncatalogKey\x12W\n\x12reservation_status\x18\x03 \x01(\x0e\x32(.flyteidl.core.CatalogReservation.StatusR\x11reservationStatus\x12%\n\x0e\x63heckpoint_uri\x18\x04 \x01(\tR\rcheckpointUri\x12V\n\x10\x64ynamic_workflow\x18\x10 \x01(\x0b\x32+.flyteidl.event.DynamicWorkflowNodeMetadataR\x0f\x64ynamicWorkflow\"\xce\x01\n\x1b\x44ynamicWorkflowNodeMetadata\x12)\n\x02id\x18\x01 \x01(\x0b\x32\x19.flyteidl.core.IdentifierR\x02id\x12S\n\x11\x63ompiled_workflow\x18\x02 \x01(\x0b\x32&.flyteidl.core.CompiledWorkflowClosureR\x10\x63ompiledWorkflow\x12/\n\x14\x64ynamic_job_spec_uri\x18\x03 \x01(\tR\x11\x64ynamicJobSpecUri\"U\n\x1bParentTaskExecutionMetadata\x12\x36\n\x02id\x18\x01 \x01(\x0b\x32&.flyteidl.core.TaskExecutionIdentifierR\x02id\"6\n\x1bParentNodeExecutionMetadata\x12\x17\n\x07node_id\x18\x01 \x01(\tR\x06nodeId\"b\n\x0b\x45ventReason\x12\x16\n\x06reason\x18\x01 \x01(\tR\x06reason\x12;\n\x0boccurred_at\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\noccurredAt\"\x97\x08\n\x12TaskExecutionEvent\x12\x32\n\x07task_id\x18\x01 \x01(\x0b\x32\x19.flyteidl.core.IdentifierR\x06taskId\x12_\n\x18parent_node_execution_id\x18\x02 \x01(\x0b\x32&.flyteidl.core.NodeExecutionIdentifierR\x15parentNodeExecutionId\x12#\n\rretry_attempt\x18\x03 \x01(\rR\x0cretryAttempt\x12\x38\n\x05phase\x18\x04 \x01(\x0e\x32\".flyteidl.core.TaskExecution.PhaseR\x05phase\x12\x1f\n\x0bproducer_id\x18\x05 \x01(\tR\nproducerId\x12*\n\x04logs\x18\x06 \x03(\x0b\x32\x16.flyteidl.core.TaskLogR\x04logs\x12;\n\x0boccurred_at\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\noccurredAt\x12\x1d\n\tinput_uri\x18\x08 \x01(\tH\x00R\x08inputUri\x12:\n\ninput_data\x18\x13 \x01(\x0b\x32\x19.flyteidl.core.LiteralMapH\x00R\tinputData\x12\x1f\n\noutput_uri\x18\t \x01(\tH\x01R\toutputUri\x12\x35\n\x05\x65rror\x18\n 
\x01(\x0b\x32\x1d.flyteidl.core.ExecutionErrorH\x01R\x05\x65rror\x12<\n\x0boutput_data\x18\x11 \x01(\x0b\x32\x19.flyteidl.core.LiteralMapH\x01R\noutputData\x12\x38\n\x0b\x63ustom_info\x18\x0b \x01(\x0b\x32\x17.google.protobuf.StructR\ncustomInfo\x12#\n\rphase_version\x18\x0c \x01(\rR\x0cphaseVersion\x12\x1a\n\x06reason\x18\r \x01(\tB\x02\x18\x01R\x06reason\x12\x35\n\x07reasons\x18\x15 \x03(\x0b\x32\x1b.flyteidl.event.EventReasonR\x07reasons\x12\x1b\n\ttask_type\x18\x0e \x01(\tR\x08taskType\x12\x41\n\x08metadata\x18\x10 \x01(\x0b\x32%.flyteidl.event.TaskExecutionMetadataR\x08metadata\x12#\n\revent_version\x18\x12 \x01(\x05R\x0c\x65ventVersion\x12;\n\x0breported_at\x18\x14 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\nreportedAtB\r\n\x0binput_valueB\x0f\n\routput_result\"\xc9\x03\n\x14\x45xternalResourceInfo\x12\x1f\n\x0b\x65xternal_id\x18\x01 \x01(\tR\nexternalId\x12\x14\n\x05index\x18\x02 \x01(\rR\x05index\x12#\n\rretry_attempt\x18\x03 \x01(\rR\x0cretryAttempt\x12\x38\n\x05phase\x18\x04 \x01(\x0e\x32\".flyteidl.core.TaskExecution.PhaseR\x05phase\x12\x44\n\x0c\x63\x61\x63he_status\x18\x05 \x01(\x0e\x32!.flyteidl.core.CatalogCacheStatusR\x0b\x63\x61\x63heStatus\x12*\n\x04logs\x18\x06 \x03(\x0b\x32\x16.flyteidl.core.TaskLogR\x04logs\x12\\\n\x16workflow_node_metadata\x18\x07 \x01(\x0b\x32$.flyteidl.event.WorkflowNodeMetadataH\x00R\x14workflowNodeMetadata\x12\x38\n\x0b\x63ustom_info\x18\x08 \x01(\x0b\x32\x17.google.protobuf.StructR\ncustomInfoB\x11\n\x0ftarget_metadata\"[\n\x10ResourcePoolInfo\x12)\n\x10\x61llocation_token\x18\x01 \x01(\tR\x0f\x61llocationToken\x12\x1c\n\tnamespace\x18\x02 \x01(\tR\tnamespace\"\x9d\x03\n\x15TaskExecutionMetadata\x12%\n\x0egenerated_name\x18\x01 \x01(\tR\rgeneratedName\x12S\n\x12\x65xternal_resources\x18\x02 \x03(\x0b\x32$.flyteidl.event.ExternalResourceInfoR\x11\x65xternalResources\x12N\n\x12resource_pool_info\x18\x03 \x03(\x0b\x32 .flyteidl.event.ResourcePoolInfoR\x10resourcePoolInfo\x12+\n\x11plugin_identifier\x18\x04 
\x01(\tR\x10pluginIdentifier\x12Z\n\x0einstance_class\x18\x10 \x01(\x0e\x32\x33.flyteidl.event.TaskExecutionMetadata.InstanceClassR\rinstanceClass\"/\n\rInstanceClass\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x00\x12\x11\n\rINTERRUPTIBLE\x10\x01\x42\xb6\x01\n\x12\x63om.flyteidl.eventB\nEventProtoP\x01Z;github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/event\xa2\x02\x03\x46\x45X\xaa\x02\x0e\x46lyteidl.Event\xca\x02\x0e\x46lyteidl\\Event\xe2\x02\x1a\x46lyteidl\\Event\\GPBMetadata\xea\x02\x0f\x46lyteidl::Eventb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -34,27 +34,27 @@ _globals['_WORKFLOWEXECUTIONEVENT']._serialized_start=262 _globals['_WORKFLOWEXECUTIONEVENT']._serialized_end=688 _globals['_NODEEXECUTIONEVENT']._serialized_start=691 - _globals['_NODEEXECUTIONEVENT']._serialized_end=1996 - _globals['_WORKFLOWNODEMETADATA']._serialized_start=1998 - _globals['_WORKFLOWNODEMETADATA']._serialized_end=2099 - _globals['_TASKNODEMETADATA']._serialized_start=2102 - _globals['_TASKNODEMETADATA']._serialized_end=2471 - _globals['_DYNAMICWORKFLOWNODEMETADATA']._serialized_start=2474 - _globals['_DYNAMICWORKFLOWNODEMETADATA']._serialized_end=2680 - _globals['_PARENTTASKEXECUTIONMETADATA']._serialized_start=2682 - _globals['_PARENTTASKEXECUTIONMETADATA']._serialized_end=2767 - _globals['_PARENTNODEEXECUTIONMETADATA']._serialized_start=2769 - _globals['_PARENTNODEEXECUTIONMETADATA']._serialized_end=2823 - _globals['_EVENTREASON']._serialized_start=2825 - _globals['_EVENTREASON']._serialized_end=2923 - _globals['_TASKEXECUTIONEVENT']._serialized_start=2926 - _globals['_TASKEXECUTIONEVENT']._serialized_end=3973 - _globals['_EXTERNALRESOURCEINFO']._serialized_start=3976 - _globals['_EXTERNALRESOURCEINFO']._serialized_end=4262 - _globals['_RESOURCEPOOLINFO']._serialized_start=4264 - _globals['_RESOURCEPOOLINFO']._serialized_end=4355 - _globals['_TASKEXECUTIONMETADATA']._serialized_start=4358 - 
_globals['_TASKEXECUTIONMETADATA']._serialized_end=4771 - _globals['_TASKEXECUTIONMETADATA_INSTANCECLASS']._serialized_start=4724 - _globals['_TASKEXECUTIONMETADATA_INSTANCECLASS']._serialized_end=4771 + _globals['_NODEEXECUTIONEVENT']._serialized_end=2023 + _globals['_WORKFLOWNODEMETADATA']._serialized_start=2025 + _globals['_WORKFLOWNODEMETADATA']._serialized_end=2126 + _globals['_TASKNODEMETADATA']._serialized_start=2129 + _globals['_TASKNODEMETADATA']._serialized_end=2498 + _globals['_DYNAMICWORKFLOWNODEMETADATA']._serialized_start=2501 + _globals['_DYNAMICWORKFLOWNODEMETADATA']._serialized_end=2707 + _globals['_PARENTTASKEXECUTIONMETADATA']._serialized_start=2709 + _globals['_PARENTTASKEXECUTIONMETADATA']._serialized_end=2794 + _globals['_PARENTNODEEXECUTIONMETADATA']._serialized_start=2796 + _globals['_PARENTNODEEXECUTIONMETADATA']._serialized_end=2850 + _globals['_EVENTREASON']._serialized_start=2852 + _globals['_EVENTREASON']._serialized_end=2950 + _globals['_TASKEXECUTIONEVENT']._serialized_start=2953 + _globals['_TASKEXECUTIONEVENT']._serialized_end=4000 + _globals['_EXTERNALRESOURCEINFO']._serialized_start=4003 + _globals['_EXTERNALRESOURCEINFO']._serialized_end=4460 + _globals['_RESOURCEPOOLINFO']._serialized_start=4462 + _globals['_RESOURCEPOOLINFO']._serialized_end=4553 + _globals['_TASKEXECUTIONMETADATA']._serialized_start=4556 + _globals['_TASKEXECUTIONMETADATA']._serialized_end=4969 + _globals['_TASKEXECUTIONMETADATA_INSTANCECLASS']._serialized_start=4922 + _globals['_TASKEXECUTIONMETADATA_INSTANCECLASS']._serialized_end=4969 # @@protoc_insertion_point(module_scope) diff --git a/flyteidl/gen/pb_python/flyteidl/event/event_pb2.pyi b/flyteidl/gen/pb_python/flyteidl/event/event_pb2.pyi index c159089083..75dbd41b78 100644 --- a/flyteidl/gen/pb_python/flyteidl/event/event_pb2.pyi +++ b/flyteidl/gen/pb_python/flyteidl/event/event_pb2.pyi @@ -32,7 +32,7 @@ class WorkflowExecutionEvent(_message.Message): def __init__(self, execution_id: 
_Optional[_Union[_identifier_pb2.WorkflowExecutionIdentifier, _Mapping]] = ..., producer_id: _Optional[str] = ..., phase: _Optional[_Union[_execution_pb2.WorkflowExecution.Phase, str]] = ..., occurred_at: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., output_uri: _Optional[str] = ..., error: _Optional[_Union[_execution_pb2.ExecutionError, _Mapping]] = ..., output_data: _Optional[_Union[_literals_pb2.LiteralMap, _Mapping]] = ...) -> None: ... class NodeExecutionEvent(_message.Message): - __slots__ = ["id", "producer_id", "phase", "occurred_at", "input_uri", "input_data", "output_uri", "error", "output_data", "workflow_node_metadata", "task_node_metadata", "parent_task_metadata", "parent_node_metadata", "retry_group", "spec_node_id", "node_name", "event_version", "is_parent", "is_dynamic", "deck_uri", "reported_at", "is_array", "target_entity", "is_in_dynamic_chain"] + __slots__ = ["id", "producer_id", "phase", "occurred_at", "input_uri", "input_data", "output_uri", "error", "output_data", "workflow_node_metadata", "task_node_metadata", "parent_task_metadata", "parent_node_metadata", "retry_group", "spec_node_id", "node_name", "event_version", "is_parent", "is_dynamic", "deck_uri", "reported_at", "is_array", "target_entity", "is_in_dynamic_chain", "is_eager"] ID_FIELD_NUMBER: _ClassVar[int] PRODUCER_ID_FIELD_NUMBER: _ClassVar[int] PHASE_FIELD_NUMBER: _ClassVar[int] @@ -57,6 +57,7 @@ class NodeExecutionEvent(_message.Message): IS_ARRAY_FIELD_NUMBER: _ClassVar[int] TARGET_ENTITY_FIELD_NUMBER: _ClassVar[int] IS_IN_DYNAMIC_CHAIN_FIELD_NUMBER: _ClassVar[int] + IS_EAGER_FIELD_NUMBER: _ClassVar[int] id: _identifier_pb2.NodeExecutionIdentifier producer_id: str phase: _execution_pb2.NodeExecution.Phase @@ -81,7 +82,8 @@ class NodeExecutionEvent(_message.Message): is_array: bool target_entity: _identifier_pb2.Identifier is_in_dynamic_chain: bool - def __init__(self, id: _Optional[_Union[_identifier_pb2.NodeExecutionIdentifier, _Mapping]] = ..., producer_id: 
_Optional[str] = ..., phase: _Optional[_Union[_execution_pb2.NodeExecution.Phase, str]] = ..., occurred_at: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., input_uri: _Optional[str] = ..., input_data: _Optional[_Union[_literals_pb2.LiteralMap, _Mapping]] = ..., output_uri: _Optional[str] = ..., error: _Optional[_Union[_execution_pb2.ExecutionError, _Mapping]] = ..., output_data: _Optional[_Union[_literals_pb2.LiteralMap, _Mapping]] = ..., workflow_node_metadata: _Optional[_Union[WorkflowNodeMetadata, _Mapping]] = ..., task_node_metadata: _Optional[_Union[TaskNodeMetadata, _Mapping]] = ..., parent_task_metadata: _Optional[_Union[ParentTaskExecutionMetadata, _Mapping]] = ..., parent_node_metadata: _Optional[_Union[ParentNodeExecutionMetadata, _Mapping]] = ..., retry_group: _Optional[str] = ..., spec_node_id: _Optional[str] = ..., node_name: _Optional[str] = ..., event_version: _Optional[int] = ..., is_parent: bool = ..., is_dynamic: bool = ..., deck_uri: _Optional[str] = ..., reported_at: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., is_array: bool = ..., target_entity: _Optional[_Union[_identifier_pb2.Identifier, _Mapping]] = ..., is_in_dynamic_chain: bool = ...) -> None: ... 
+ is_eager: bool + def __init__(self, id: _Optional[_Union[_identifier_pb2.NodeExecutionIdentifier, _Mapping]] = ..., producer_id: _Optional[str] = ..., phase: _Optional[_Union[_execution_pb2.NodeExecution.Phase, str]] = ..., occurred_at: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., input_uri: _Optional[str] = ..., input_data: _Optional[_Union[_literals_pb2.LiteralMap, _Mapping]] = ..., output_uri: _Optional[str] = ..., error: _Optional[_Union[_execution_pb2.ExecutionError, _Mapping]] = ..., output_data: _Optional[_Union[_literals_pb2.LiteralMap, _Mapping]] = ..., workflow_node_metadata: _Optional[_Union[WorkflowNodeMetadata, _Mapping]] = ..., task_node_metadata: _Optional[_Union[TaskNodeMetadata, _Mapping]] = ..., parent_task_metadata: _Optional[_Union[ParentTaskExecutionMetadata, _Mapping]] = ..., parent_node_metadata: _Optional[_Union[ParentNodeExecutionMetadata, _Mapping]] = ..., retry_group: _Optional[str] = ..., spec_node_id: _Optional[str] = ..., node_name: _Optional[str] = ..., event_version: _Optional[int] = ..., is_parent: bool = ..., is_dynamic: bool = ..., deck_uri: _Optional[str] = ..., reported_at: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., is_array: bool = ..., target_entity: _Optional[_Union[_identifier_pb2.Identifier, _Mapping]] = ..., is_in_dynamic_chain: bool = ..., is_eager: bool = ...) -> None: ... 
class WorkflowNodeMetadata(_message.Message): __slots__ = ["execution_id"] @@ -178,20 +180,24 @@ class TaskExecutionEvent(_message.Message): def __init__(self, task_id: _Optional[_Union[_identifier_pb2.Identifier, _Mapping]] = ..., parent_node_execution_id: _Optional[_Union[_identifier_pb2.NodeExecutionIdentifier, _Mapping]] = ..., retry_attempt: _Optional[int] = ..., phase: _Optional[_Union[_execution_pb2.TaskExecution.Phase, str]] = ..., producer_id: _Optional[str] = ..., logs: _Optional[_Iterable[_Union[_execution_pb2.TaskLog, _Mapping]]] = ..., occurred_at: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., input_uri: _Optional[str] = ..., input_data: _Optional[_Union[_literals_pb2.LiteralMap, _Mapping]] = ..., output_uri: _Optional[str] = ..., error: _Optional[_Union[_execution_pb2.ExecutionError, _Mapping]] = ..., output_data: _Optional[_Union[_literals_pb2.LiteralMap, _Mapping]] = ..., custom_info: _Optional[_Union[_struct_pb2.Struct, _Mapping]] = ..., phase_version: _Optional[int] = ..., reason: _Optional[str] = ..., reasons: _Optional[_Iterable[_Union[EventReason, _Mapping]]] = ..., task_type: _Optional[str] = ..., metadata: _Optional[_Union[TaskExecutionMetadata, _Mapping]] = ..., event_version: _Optional[int] = ..., reported_at: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ...) -> None: ... 
class ExternalResourceInfo(_message.Message): - __slots__ = ["external_id", "index", "retry_attempt", "phase", "cache_status", "logs"] + __slots__ = ["external_id", "index", "retry_attempt", "phase", "cache_status", "logs", "workflow_node_metadata", "custom_info"] EXTERNAL_ID_FIELD_NUMBER: _ClassVar[int] INDEX_FIELD_NUMBER: _ClassVar[int] RETRY_ATTEMPT_FIELD_NUMBER: _ClassVar[int] PHASE_FIELD_NUMBER: _ClassVar[int] CACHE_STATUS_FIELD_NUMBER: _ClassVar[int] LOGS_FIELD_NUMBER: _ClassVar[int] + WORKFLOW_NODE_METADATA_FIELD_NUMBER: _ClassVar[int] + CUSTOM_INFO_FIELD_NUMBER: _ClassVar[int] external_id: str index: int retry_attempt: int phase: _execution_pb2.TaskExecution.Phase cache_status: _catalog_pb2.CatalogCacheStatus logs: _containers.RepeatedCompositeFieldContainer[_execution_pb2.TaskLog] - def __init__(self, external_id: _Optional[str] = ..., index: _Optional[int] = ..., retry_attempt: _Optional[int] = ..., phase: _Optional[_Union[_execution_pb2.TaskExecution.Phase, str]] = ..., cache_status: _Optional[_Union[_catalog_pb2.CatalogCacheStatus, str]] = ..., logs: _Optional[_Iterable[_Union[_execution_pb2.TaskLog, _Mapping]]] = ...) -> None: ... + workflow_node_metadata: WorkflowNodeMetadata + custom_info: _struct_pb2.Struct + def __init__(self, external_id: _Optional[str] = ..., index: _Optional[int] = ..., retry_attempt: _Optional[int] = ..., phase: _Optional[_Union[_execution_pb2.TaskExecution.Phase, str]] = ..., cache_status: _Optional[_Union[_catalog_pb2.CatalogCacheStatus, str]] = ..., logs: _Optional[_Iterable[_Union[_execution_pb2.TaskLog, _Mapping]]] = ..., workflow_node_metadata: _Optional[_Union[WorkflowNodeMetadata, _Mapping]] = ..., custom_info: _Optional[_Union[_struct_pb2.Struct, _Mapping]] = ...) -> None: ... 
class ResourcePoolInfo(_message.Message): __slots__ = ["allocation_token", "namespace"] diff --git a/flyteidl/gen/pb_rust/flyteidl.admin.rs b/flyteidl/gen/pb_rust/flyteidl.admin.rs index 30f39ab45d..dd099daa67 100644 --- a/flyteidl/gen/pb_rust/flyteidl.admin.rs +++ b/flyteidl/gen/pb_rust/flyteidl.admin.rs @@ -177,6 +177,9 @@ pub struct Resource { /// Custom data specific to the agent. #[prost(message, optional, tag="6")] pub custom_info: ::core::option::Option<::prost_types::Struct>, + /// The error raised during execution + #[prost(message, optional, tag="7")] + pub agent_error: ::core::option::Option, } /// A message used to delete a task. #[allow(clippy::derive_partial_eq_without_eq)] @@ -348,6 +351,51 @@ pub mod get_task_logs_response { Body(super::GetTaskLogsResponseBody), } } +/// Error message to propagate detailed errors from agent executions to the execution +/// engine. +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct AgentError { + /// A simplified code for errors, so that we can provide a glossary of all possible errors. + #[prost(string, tag="1")] + pub code: ::prost::alloc::string::String, + /// An abstract error kind for this error. Defaults to Non_Recoverable if not specified. + #[prost(enumeration="agent_error::Kind", tag="3")] + pub kind: i32, + /// Defines the origin of the error (system, user, unknown). + #[prost(enumeration="super::core::execution_error::ErrorKind", tag="4")] + pub origin: i32, +} +/// Nested message and enum types in `AgentError`. +pub mod agent_error { + /// Defines a generic error type that dictates the behavior of the retry strategy. + #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] + #[repr(i32)] + pub enum Kind { + NonRecoverable = 0, + Recoverable = 1, + } + impl Kind { + /// String value of the enum field names used in the ProtoBuf definition. 
+ /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Kind::NonRecoverable => "NON_RECOVERABLE", + Kind::Recoverable => "RECOVERABLE", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. + pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "NON_RECOVERABLE" => Some(Self::NonRecoverable), + "RECOVERABLE" => Some(Self::Recoverable), + _ => None, + } + } + } +} /// The state of the execution is used to control its visibility in the UI/CLI. #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] #[repr(i32)] @@ -2274,6 +2322,9 @@ pub struct NodeExecutionMetaData { /// array nodes from other nodes which can have is_parent_node as true. #[prost(bool, tag="5")] pub is_array: bool, + /// Whether this node is an eager node. + #[prost(bool, tag="6")] + pub is_eager: bool, } /// Request structure to retrieve a list of node execution entities. /// See :ref:`ref_flyteidl.admin.NodeExecution` for more details diff --git a/flyteidl/gen/pb_rust/flyteidl.core.rs b/flyteidl/gen/pb_rust/flyteidl.core.rs index a97a209a47..62ad0b1602 100644 --- a/flyteidl/gen/pb_rust/flyteidl.core.rs +++ b/flyteidl/gen/pb_rust/flyteidl.core.rs @@ -568,7 +568,7 @@ pub struct UnionInfo { pub struct BindingData { #[prost(message, optional, tag="5")] pub union: ::core::option::Option, - #[prost(oneof="binding_data::Value", tags="1, 2, 3, 4")] + #[prost(oneof="binding_data::Value", tags="1, 2, 3, 4, 6")] pub value: ::core::option::Option, } /// Nested message and enum types in `BindingData`. @@ -589,6 +589,11 @@ pub mod binding_data { /// A map of bindings. The key is always a string. 
#[prost(message, tag="4")] Map(super::BindingDataMap), + /// Offloaded literal metadata + /// When you deserialize the offloaded metadata, it would be of Literal and its type would be defined by LiteralType stored in offloaded_metadata. + /// Used for nodes that don't have promises from upstream nodes such as ArrayNode subNodes. + #[prost(message, tag="6")] + OffloadedMetadata(super::LiteralOffloadedMetadata), } } /// An input/output binding of a variable to either static value or a node output. @@ -1385,6 +1390,10 @@ pub struct TaskMetadata { /// cache_ignore_input_vars is the input variables that should not be included when calculating hash for cache. #[prost(string, repeated, tag="13")] pub cache_ignore_input_vars: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, + /// is_eager indicates whether the task is eager or not. + /// This would be used by CreateTask endpoint. + #[prost(bool, tag="14")] + pub is_eager: bool, // For interruptible we will populate it at the node level but require it be part of TaskMetadata // for a user to set the value. // We are using oneof instead of bool because otherwise we would be unable to distinguish between value being @@ -2431,6 +2440,12 @@ pub struct ArrayNode { /// execution_mode determines the execution path for ArrayNode. 
#[prost(enumeration="array_node::ExecutionMode", tag="5")] pub execution_mode: i32, + /// Indicates whether the sub node's original interface was altered + #[prost(message, optional, tag="6")] + pub is_original_sub_node_interface: ::core::option::Option, + /// data_mode determines how input data is passed to the sub-nodes + #[prost(enumeration="array_node::DataMode", tag="7")] + pub data_mode: i32, #[prost(oneof="array_node::ParallelismOption", tags="2")] pub parallelism_option: ::core::option::Option, #[prost(oneof="array_node::SuccessCriteria", tags="3, 4")] @@ -2468,6 +2483,40 @@ pub mod array_node { } } } + #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] + #[repr(i32)] + pub enum DataMode { + /// Indicates the ArrayNode's input is a list of input values that map to subNode executions. + /// The file path set for the subNode will be the ArrayNode's input file, but the in-memory + /// value utilized in propeller will be the individual value for each subNode execution. + /// SubNode executions need to be able to read in and parse the individual value to execute correctly. + SingleInputFile = 0, + /// Indicates the ArrayNode's input is a list of input values that map to subNode executions. + /// Propeller will create input files for each ArrayNode subNode by parsing the inputs and + /// setting the InputBindings on each subNodeSpec. Both the file path and in-memory input values will + /// be the individual value for each subNode execution. + IndividualInputFiles = 1, + } + impl DataMode { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. 
+ pub fn as_str_name(&self) -> &'static str { + match self { + DataMode::SingleInputFile => "SINGLE_INPUT_FILE", + DataMode::IndividualInputFiles => "INDIVIDUAL_INPUT_FILES", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. + pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "SINGLE_INPUT_FILE" => Some(Self::SingleInputFile), + "INDIVIDUAL_INPUT_FILES" => Some(Self::IndividualInputFiles), + _ => None, + } + } + } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, Copy, PartialEq, ::prost::Oneof)] pub enum ParallelismOption { diff --git a/flyteidl/gen/pb_rust/flyteidl.event.rs b/flyteidl/gen/pb_rust/flyteidl.event.rs index 80a8a11442..b1af2cbd9b 100644 --- a/flyteidl/gen/pb_rust/flyteidl.event.rs +++ b/flyteidl/gen/pb_rust/flyteidl.event.rs @@ -100,6 +100,9 @@ pub struct NodeExecutionEvent { /// as well as task IDs in any corresponding Task Executions, should not be used to looked up the task in Admin's db. #[prost(bool, tag="24")] pub is_in_dynamic_chain: bool, + /// Whether this node launched an eager task. + #[prost(bool, tag="25")] + pub is_eager: bool, #[prost(oneof="node_execution_event::InputValue", tags="5, 20")] pub input_value: ::core::option::Option, #[prost(oneof="node_execution_event::OutputResult", tags="6, 7, 15")] @@ -331,6 +334,26 @@ pub struct ExternalResourceInfo { /// log information for the external resource execution #[prost(message, repeated, tag="6")] pub logs: ::prost::alloc::vec::Vec, + /// Extensible field for custom, plugin-specific info + #[prost(message, optional, tag="8")] + pub custom_info: ::core::option::Option<::prost_types::Struct>, + /// Additional metadata to do with this event's node target based on the node type. We are + /// explicitly not including the task_node_metadata here because it is not clear if it is needed. + /// If we decide to include in the future, we should deprecate the cache_status field. 
+ #[prost(oneof="external_resource_info::TargetMetadata", tags="7")] + pub target_metadata: ::core::option::Option, +} +/// Nested message and enum types in `ExternalResourceInfo`. +pub mod external_resource_info { + /// Additional metadata to do with this event's node target based on the node type. We are + /// explicitly not including the task_node_metadata here because it is not clear if it is needed. + /// If we decide to include in the future, we should deprecate the cache_status field. + #[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Oneof)] + pub enum TargetMetadata { + #[prost(message, tag="7")] + WorkflowNodeMetadata(super::WorkflowNodeMetadata), + } } /// This message holds task execution metadata specific to resource allocation used to manage concurrent /// executions for a project namespace. @@ -421,6 +444,9 @@ pub struct CloudEventWorkflowExecution { /// Launch plan IDs are easier to get than workflow IDs so we'll use these for now. #[prost(message, optional, tag="6")] pub launch_plan_id: ::core::option::Option, + /// We can't have the ExecutionMetadata object directly because of import cycle + #[prost(map="string, string", tag="7")] + pub labels: ::std::collections::HashMap<::prost::alloc::string::String, ::prost::alloc::string::String>, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] @@ -444,12 +470,18 @@ pub struct CloudEventNodeExecution { /// Launch plan IDs are easier to get than workflow IDs so we'll use these for now. 
#[prost(message, optional, tag="6")] pub launch_plan_id: ::core::option::Option, + /// We can't have the ExecutionMetadata object directly because of import cycle + #[prost(map="string, string", tag="7")] + pub labels: ::std::collections::HashMap<::prost::alloc::string::String, ::prost::alloc::string::String>, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct CloudEventTaskExecution { #[prost(message, optional, tag="1")] pub raw_event: ::core::option::Option, + /// We can't have the ExecutionMetadata object directly because of import cycle + #[prost(map="string, string", tag="2")] + pub labels: ::std::collections::HashMap<::prost::alloc::string::String, ::prost::alloc::string::String>, } /// This event is to be sent by Admin after it creates an execution. #[allow(clippy::derive_partial_eq_without_eq)] diff --git a/flyteidl/go.mod b/flyteidl/go.mod index 0da94cea32..6653ce5a87 100644 --- a/flyteidl/go.mod +++ b/flyteidl/go.mod @@ -5,6 +5,7 @@ go 1.22 require ( github.com/flyteorg/flyte/flytestdlib v0.0.0-00010101000000-000000000000 github.com/go-test/deep v1.0.7 + github.com/golang-jwt/jwt/v5 v5.2.1 github.com/golang/protobuf v1.5.3 github.com/grpc-ecosystem/go-grpc-middleware v1.1.0 github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0 @@ -55,7 +56,6 @@ require ( github.com/go-openapi/jsonreference v0.20.2 // indirect github.com/go-openapi/swag v0.22.3 // indirect github.com/gogo/protobuf v1.3.2 // indirect - github.com/golang-jwt/jwt/v5 v5.2.1 // indirect github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect github.com/google/gnostic-models v0.6.8 // indirect github.com/google/go-cmp v0.6.0 // indirect diff --git a/flyteidl/protos/flyteidl/admin/agent.proto b/flyteidl/protos/flyteidl/admin/agent.proto index 931c27785f..96fbb087b8 100644 --- a/flyteidl/protos/flyteidl/admin/agent.proto +++ b/flyteidl/protos/flyteidl/admin/agent.proto @@ -138,6 +138,8 @@ message Resource { 
core.TaskExecution.Phase phase = 5; // Custom data specific to the agent. google.protobuf.Struct custom_info = 6; + // The error raised during execution + AgentError agent_error = 7; } // A message used to delete a task. @@ -256,3 +258,22 @@ message GetTaskLogsResponse { GetTaskLogsResponseBody body = 2; } } + +// Error message to propagate detailed errors from agent executions to the execution +// engine. +message AgentError { + // A simplified code for errors, so that we can provide a glossary of all possible errors. + string code = 1; + + // Defines a generic error type that dictates the behavior of the retry strategy. + enum Kind { + NON_RECOVERABLE = 0; + RECOVERABLE = 1; + } + + // An abstract error kind for this error. Defaults to Non_Recoverable if not specified. + Kind kind = 3; + + // Defines the origin of the error (system, user, unknown). + core.ExecutionError.ErrorKind origin = 4; +} diff --git a/flyteidl/protos/flyteidl/admin/execution.proto b/flyteidl/protos/flyteidl/admin/execution.proto index 6197576bd9..d3802057a6 100644 --- a/flyteidl/protos/flyteidl/admin/execution.proto +++ b/flyteidl/protos/flyteidl/admin/execution.proto @@ -206,6 +206,8 @@ message SystemMetadata { message ExecutionMetadata { // The method by which this execution was launched. enum ExecutionMode { + reserved 7; + // The default execution mode, MANUAL implies that an execution was launched by an individual. MANUAL = 0; diff --git a/flyteidl/protos/flyteidl/admin/node_execution.proto b/flyteidl/protos/flyteidl/admin/node_execution.proto index 411201ea45..7ae7aa7ee1 100644 --- a/flyteidl/protos/flyteidl/admin/node_execution.proto +++ b/flyteidl/protos/flyteidl/admin/node_execution.proto @@ -118,6 +118,9 @@ message NodeExecutionMetaData { // Boolean flag indicating if the node is an array node. This is intended to uniquely identify // array nodes from other nodes which can have is_parent_node as true. bool is_array = 5; + + // Whether this node is an eager node. 
+ bool is_eager = 6; } // Request structure to retrieve a list of node execution entities. diff --git a/flyteidl/protos/flyteidl/core/literals.proto b/flyteidl/protos/flyteidl/core/literals.proto index 66e4821867..92891066c0 100644 --- a/flyteidl/protos/flyteidl/core/literals.proto +++ b/flyteidl/protos/flyteidl/core/literals.proto @@ -169,6 +169,11 @@ message BindingData { // A map of bindings. The key is always a string. BindingDataMap map = 4; + + // Offloaded literal metadata + // When you deserialize the offloaded metadata, it would be of Literal and its type would be defined by LiteralType stored in offloaded_metadata. + // Used for nodes that don't have promises from upstream nodes such as ArrayNode subNodes. + LiteralOffloadedMetadata offloaded_metadata = 6; } UnionInfo union = 5; diff --git a/flyteidl/protos/flyteidl/core/tasks.proto b/flyteidl/protos/flyteidl/core/tasks.proto index 20a1fa0cbf..332f9fdad4 100644 --- a/flyteidl/protos/flyteidl/core/tasks.proto +++ b/flyteidl/protos/flyteidl/core/tasks.proto @@ -134,6 +134,9 @@ message TaskMetadata { // cache_ignore_input_vars is the input variables that should not be included when calculating hash for cache. repeated string cache_ignore_input_vars = 13; + // is_eager indicates whether the task is eager or not. + // This would be used by CreateTask endpoint. + bool is_eager = 14; } // A Task structure that uniquely identifies a task in the system diff --git a/flyteidl/protos/flyteidl/core/workflow.proto b/flyteidl/protos/flyteidl/core/workflow.proto index 3df4b2422f..5e95531976 100644 --- a/flyteidl/protos/flyteidl/core/workflow.proto +++ b/flyteidl/protos/flyteidl/core/workflow.proto @@ -147,6 +147,26 @@ message ArrayNode { // execution_mode determines the execution path for ArrayNode. 
ExecutionMode execution_mode = 5; + + // Indicates whether the sub node's original interface was altered + google.protobuf.BoolValue is_original_sub_node_interface = 6; + + enum DataMode { + // Indicates the ArrayNode's input is a list of input values that map to subNode executions. + // The file path set for the subNode will be the ArrayNode's input file, but the in-memory + // value utilized in propeller will be the individual value for each subNode execution. + // SubNode executions need to be able to read in and parse the individual value to execute correctly. + SINGLE_INPUT_FILE = 0; + + // Indicates the ArrayNode's input is a list of input values that map to subNode executions. + // Propeller will create input files for each ArrayNode subNode by parsing the inputs and + // setting the InputBindings on each subNodeSpec. Both the file path and in-memory input values will + // be the individual value for each subNode execution. + INDIVIDUAL_INPUT_FILES = 1; + } + + // data_mode determines how input data is passed to the sub-nodes + DataMode data_mode = 7; } // Defines extra information about the Node. diff --git a/flyteidl/protos/flyteidl/event/cloudevents.proto b/flyteidl/protos/flyteidl/event/cloudevents.proto index d02c5ff516..167e614a1c 100644 --- a/flyteidl/protos/flyteidl/event/cloudevents.proto +++ b/flyteidl/protos/flyteidl/event/cloudevents.proto @@ -28,6 +28,9 @@ message CloudEventWorkflowExecution { // Here for provenance information. // Launch plan IDs are easier to get than workflow IDs so we'll use these for now. core.Identifier launch_plan_id = 6; + + // We can't have the ExecutionMetadata object directly because of import cycle + map labels = 7; } message CloudEventNodeExecution { @@ -48,10 +51,15 @@ message CloudEventNodeExecution { // Here for provenance information. // Launch plan IDs are easier to get than workflow IDs so we'll use these for now. 
core.Identifier launch_plan_id = 6; + + // We can't have the ExecutionMetadata object directly because of import cycle + map labels = 7; } message CloudEventTaskExecution { event.TaskExecutionEvent raw_event = 1; + // We can't have the ExecutionMetadata object directly because of import cycle + map labels = 2; } // This event is to be sent by Admin after it creates an execution. diff --git a/flyteidl/protos/flyteidl/event/event.proto b/flyteidl/protos/flyteidl/event/event.proto index 640b4804e9..67f204232f 100644 --- a/flyteidl/protos/flyteidl/event/event.proto +++ b/flyteidl/protos/flyteidl/event/event.proto @@ -127,6 +127,9 @@ message NodeExecutionEvent { // if the relevant execution entity is was registered, or dynamic. This field indicates that the target_entity ID, // as well as task IDs in any corresponding Task Executions, should not be used to looked up the task in Admin's db. bool is_in_dynamic_chain = 24; + + // Whether this node launched an eager task. + bool is_eager = 25; } // For Workflow Nodes we need to send information about the workflow that's launched @@ -284,6 +287,16 @@ message ExternalResourceInfo { // log information for the external resource execution repeated core.TaskLog logs = 6; + + // Additional metadata to do with this event's node target based on the node type. We are + // explicitly not including the task_node_metadata here because it is not clear if it is needed. + // If we decide to include in the future, we should deprecate the cache_status field. + oneof target_metadata { + WorkflowNodeMetadata workflow_node_metadata = 7; + } + + // Extensible field for custom, plugin-specific info + google.protobuf.Struct custom_info = 8; } diff --git a/flyteplugins/.golangci.yml b/flyteplugins/.golangci.yml index 6d13f4a3b6..9b6ab1e86d 100644 --- a/flyteplugins/.golangci.yml +++ b/flyteplugins/.golangci.yml @@ -1,35 +1,25 @@ -# WARNING: THIS FILE IS MANAGED IN THE 'BOILERPLATE' REPO AND COPIED TO OTHER REPOSITORIES. 
-# ONLY EDIT THIS FILE FROM WITHIN THE 'FLYTEORG/BOILERPLATE' REPOSITORY: -# -# TO OPT OUT OF UPDATES, SEE https://github.com/flyteorg/boilerplate/blob/master/Readme.rst - run: skip-dirs: - pkg/client - linters: disable-all: true enable: - - deadcode - errcheck - - gas + - gosec - gci - goconst - goimports - - golint - gosimple - govet - ineffassign - misspell - nakedret - staticcheck - - structcheck - typecheck - unconvert - unparam - unused - - varcheck - + - protogetter linters-settings: gci: custom-order: true diff --git a/flyteplugins/go.mod b/flyteplugins/go.mod index 7616900390..11962b93c9 100644 --- a/flyteplugins/go.mod +++ b/flyteplugins/go.mod @@ -11,6 +11,7 @@ require ( github.com/coocood/freecache v1.1.1 github.com/dask/dask-kubernetes/v2023 v2023.0.0-20230626103304-abd02cd17b26 github.com/flyteorg/flyte/flyteidl v0.0.0-00010101000000-000000000000 + github.com/flyteorg/flyte/flytepropeller v0.0.0-00010101000000-000000000000 github.com/flyteorg/flyte/flytestdlib v0.0.0-00010101000000-000000000000 github.com/go-test/deep v1.0.7 github.com/golang/protobuf v1.5.3 @@ -53,6 +54,7 @@ require ( github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0 // indirect github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.4.0 // indirect github.com/AzureAD/microsoft-authentication-library-for-go v1.2.2 // indirect + github.com/Masterminds/semver v1.5.0 // indirect github.com/aws/aws-sdk-go-v2/credentials v1.0.0 // indirect github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.0.0 // indirect github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.0.0 // indirect @@ -117,7 +119,7 @@ require ( github.com/stretchr/objx v0.5.2 // indirect github.com/subosito/gotenv v1.2.0 // indirect go.opencensus.io v0.24.0 // indirect - go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.46.1 // indirect + go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.47.0 // indirect 
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.46.1 // indirect go.opentelemetry.io/otel v1.24.0 // indirect go.opentelemetry.io/otel/exporters/jaeger v1.17.0 // indirect diff --git a/flyteplugins/go.sum b/flyteplugins/go.sum index d11f6b60a3..18242a8638 100644 --- a/flyteplugins/go.sum +++ b/flyteplugins/go.sum @@ -62,6 +62,8 @@ github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03 github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= github.com/GoogleCloudPlatform/spark-on-k8s-operator v0.0.0-20200723154620-6f35a1152625 h1:cQyO5JQ2iuHnEcF3v24kdDMsgh04RjyFPDtuvD6PCE0= github.com/GoogleCloudPlatform/spark-on-k8s-operator v0.0.0-20200723154620-6f35a1152625/go.mod h1:6PnrZv6zUDkrNMw0mIoGRmGBR7i9LulhKPmxFq4rUiM= +github.com/Masterminds/semver v1.5.0 h1:H65muMkzWKEuNDnfl9d70GUjFniHKHRbFPGBuZ3QEww= +github.com/Masterminds/semver v1.5.0/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF078ddwwvV3Y= github.com/OneOfOne/xxhash v1.2.2 h1:KMrpdQIwFcEqXDklaen+P1axHaj9BSKzvpUUfnHldSE= github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= github.com/aws/aws-sdk-go v1.47.11 h1:Dol+MA+hQblbnXUI3Vk9qvoekU6O1uDEuAItezjiWNQ= @@ -310,6 +312,7 @@ github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ= github.com/ncw/swift v1.0.53 h1:luHjjTNtekIEvHg5KdAFIBaH7bWfNkefwFnpDffSIks= github.com/ncw/swift v1.0.53/go.mod h1:23YIA4yWVnGwv2dQlN4bB7egfYX6YLn0Yo/S6zZO/ZM= +github.com/onsi/ginkgo v1.16.5 h1:8xi0RTUf59SOSfEtZMvwTvXYMzG4gV23XVHOZiXNtnE= github.com/onsi/ginkgo/v2 v2.11.0 h1:WgqUCUt/lT6yXoQ8Wef0fsNn5cAuMK7+KT9UFRz2tcU= github.com/onsi/ginkgo/v2 v2.11.0/go.mod h1:ZhrRA5XmEE3x3rhlzamx/JJvujdZoJ2uvgI7kR0iZvM= github.com/onsi/gomega v1.27.10 h1:naR28SdDFlqrG6kScpT8VWpu1xWY5nJRCF3XaYyBjhI= @@ -389,8 +392,8 @@ 
go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0= go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo= -go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.46.1 h1:SpGay3w+nEwMpfVnbqOLH5gY52/foP8RE8UzTZ1pdSE= -go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.46.1/go.mod h1:4UoMYEZOC0yN/sPGH76KPkkU7zgiEWYWL9vwmbnTJPE= +go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.47.0 h1:UNQQKPfTDe1J81ViolILjTKPr9WetKW6uei2hFgJmFs= +go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.47.0/go.mod h1:r9vWsPS/3AQItv3OSlEJ/E4mbrhUbbw18meOjArPtKQ= go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.46.1 h1:aFJWCqJMNjENlcleuuOkGAPH82y0yULBScfXcIEdS24= go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.46.1/go.mod h1:sEGXWArGqc3tVa+ekntsN65DmVbVeW+7lTKTjZF3/Fo= go.opentelemetry.io/otel v1.24.0 h1:0LAOdjNmQeSTzGBzduGe/rU4tZhMwL5rWgtp9Ku5Jfo= diff --git a/flyteplugins/go/tasks/logs/logging_utils.go b/flyteplugins/go/tasks/logs/logging_utils.go index 4bfff0dd17..c237fc15c8 100644 --- a/flyteplugins/go/tasks/logs/logging_utils.go +++ b/flyteplugins/go/tasks/logs/logging_utils.go @@ -24,12 +24,14 @@ func GetLogsForContainerInPod(ctx context.Context, logPlugin tasklog.Plugin, tas return nil, nil } + // #nosec G115 if uint32(len(pod.Spec.Containers)) <= index { logger.Errorf(ctx, "container IndexOutOfBound, requested [%d], but total containers [%d] in pod phase [%v]", index, len(pod.Spec.Containers), pod.Status.Phase) return nil, nil } containerID := v1.ContainerStatus{}.ContainerID + // #nosec G115 if uint32(len(pod.Status.ContainerStatuses)) <= index { msg := fmt.Sprintf("containerStatus IndexOutOfBound, requested [%d], but total 
containerStatuses [%d] in pod phase [%v]", index, len(pod.Status.ContainerStatuses), pod.Status.Phase) if pod.Status.Phase == v1.PodPending { @@ -129,6 +131,8 @@ func InitializeLogPlugins(cfg *LogConfig) (tasklog.Plugin, error) { DisplayName: dynamicLogLink.DisplayName, DynamicTemplateURIs: dynamicLogLink.TemplateURIs, MessageFormat: core.TaskLog_JSON, + ShowWhilePending: dynamicLogLink.ShowWhilePending, + HideOnceFinished: dynamicLogLink.HideOnceFinished, }) } diff --git a/flyteplugins/go/tasks/pluginmachinery/bundle/fail_fast.go b/flyteplugins/go/tasks/pluginmachinery/bundle/fail_fast.go index a36edf20ea..0447b488cd 100644 --- a/flyteplugins/go/tasks/pluginmachinery/bundle/fail_fast.go +++ b/flyteplugins/go/tasks/pluginmachinery/bundle/fail_fast.go @@ -31,7 +31,7 @@ func (h failFastHandler) Handle(ctx context.Context, tCtx core.TaskExecutionCont } return core.DoTransition(core.PhaseInfoFailure("AlwaysFail", fmt.Sprintf("Task [%s] type [%+v] not supported by platform for this project/domain/workflow", - taskTemplate.Type, tCtx.TaskExecutionMetadata().GetTaskExecutionID()), &core.TaskInfo{ + taskTemplate.GetType(), tCtx.TaskExecutionMetadata().GetTaskExecutionID()), &core.TaskInfo{ OccurredAt: &occuredAt, })), nil } diff --git a/flyteplugins/go/tasks/pluginmachinery/bundle/fail_fast_test.go b/flyteplugins/go/tasks/pluginmachinery/bundle/fail_fast_test.go index c302db8c32..bf856290d1 100644 --- a/flyteplugins/go/tasks/pluginmachinery/bundle/fail_fast_test.go +++ b/flyteplugins/go/tasks/pluginmachinery/bundle/fail_fast_test.go @@ -48,8 +48,8 @@ func TestHandleAlwaysFails(t *testing.T) { transition, err := testHandler.Handle(context.TODO(), taskCtx) assert.NoError(t, err) assert.Equal(t, core.PhasePermanentFailure, transition.Info().Phase()) - assert.Equal(t, "AlwaysFail", transition.Info().Err().Code) - assert.Contains(t, transition.Info().Err().Message, "Task [unsupportedtype]") + assert.Equal(t, "AlwaysFail", transition.Info().Err().GetCode()) + assert.Contains(t, 
transition.Info().Err().GetMessage(), "Task [unsupportedtype]") } func TestAbort(t *testing.T) { diff --git a/flyteplugins/go/tasks/pluginmachinery/catalog/async_client_impl.go b/flyteplugins/go/tasks/pluginmachinery/catalog/async_client_impl.go index c056989905..224cfd612e 100644 --- a/flyteplugins/go/tasks/pluginmachinery/catalog/async_client_impl.go +++ b/flyteplugins/go/tasks/pluginmachinery/catalog/async_client_impl.go @@ -41,7 +41,7 @@ func consistentHash(str string) (string, error) { func hashInputs(ctx context.Context, key Key) (string, error) { inputs := &core.LiteralMap{} - if key.TypedInterface.Inputs != nil { + if key.TypedInterface.GetInputs() != nil { retInputs, err := key.InputReader.Get(ctx) if err != nil { return "", err @@ -88,7 +88,7 @@ func (c AsyncClientImpl) Download(ctx context.Context, requests ...DownloadReque } if readerWorkItem.IsCached() { - cachedResults.Set(uint(idx)) + cachedResults.Set(uint(idx)) // #nosec G115 cachedCount++ } case workqueue.WorkStatusFailed: diff --git a/flyteplugins/go/tasks/pluginmachinery/catalog/client_test.go b/flyteplugins/go/tasks/pluginmachinery/catalog/client_test.go index 15a4347351..7bdb435761 100644 --- a/flyteplugins/go/tasks/pluginmachinery/catalog/client_test.go +++ b/flyteplugins/go/tasks/pluginmachinery/catalog/client_test.go @@ -49,11 +49,11 @@ func TestStatus(t *testing.T) { status := NewStatus(cacheStatus, &catalogMetadata) assert.Equal(t, status.GetCacheStatus(), cacheStatus) - assert.Equal(t, status.GetMetadata().DatasetId.Project, catalogMetadata.DatasetId.Project) - assert.Equal(t, status.GetMetadata().DatasetId.Domain, catalogMetadata.DatasetId.Domain) - assert.Equal(t, status.GetMetadata().DatasetId.Name, catalogMetadata.DatasetId.Name) - assert.Equal(t, status.GetMetadata().ArtifactTag.ArtifactId, catalogMetadata.ArtifactTag.ArtifactId) - assert.Equal(t, status.GetMetadata().ArtifactTag.Name, catalogMetadata.ArtifactTag.Name) + assert.Equal(t, 
status.GetMetadata().GetDatasetId().GetProject(), catalogMetadata.GetDatasetId().GetProject()) + assert.Equal(t, status.GetMetadata().GetDatasetId().GetDomain(), catalogMetadata.GetDatasetId().GetDomain()) + assert.Equal(t, status.GetMetadata().GetDatasetId().GetName(), catalogMetadata.GetDatasetId().GetName()) + assert.Equal(t, status.GetMetadata().GetArtifactTag().GetArtifactId(), catalogMetadata.GetArtifactTag().GetArtifactId()) + assert.Equal(t, status.GetMetadata().GetArtifactTag().GetName(), catalogMetadata.GetArtifactTag().GetName()) } func TestEntry(t *testing.T) { @@ -75,11 +75,11 @@ func TestEntry(t *testing.T) { t.Run(tt.name, func(t *testing.T) { status := tt.entry.GetStatus() assert.Equal(t, status.GetCacheStatus(), cacheStatus) - assert.Equal(t, status.GetMetadata().DatasetId.Project, catalogMetadata.DatasetId.Project) - assert.Equal(t, status.GetMetadata().DatasetId.Domain, catalogMetadata.DatasetId.Domain) - assert.Equal(t, status.GetMetadata().DatasetId.Name, catalogMetadata.DatasetId.Name) - assert.Equal(t, status.GetMetadata().ArtifactTag.ArtifactId, catalogMetadata.ArtifactTag.ArtifactId) - assert.Equal(t, status.GetMetadata().ArtifactTag.Name, catalogMetadata.ArtifactTag.Name) + assert.Equal(t, status.GetMetadata().GetDatasetId().GetProject(), catalogMetadata.GetDatasetId().GetProject()) + assert.Equal(t, status.GetMetadata().GetDatasetId().GetDomain(), catalogMetadata.GetDatasetId().GetDomain()) + assert.Equal(t, status.GetMetadata().GetDatasetId().GetName(), catalogMetadata.GetDatasetId().GetName()) + assert.Equal(t, status.GetMetadata().GetArtifactTag().GetArtifactId(), catalogMetadata.GetArtifactTag().GetArtifactId()) + assert.Equal(t, status.GetMetadata().GetArtifactTag().GetName(), catalogMetadata.GetArtifactTag().GetName()) }) } } diff --git a/flyteplugins/go/tasks/pluginmachinery/catalog/hashing.go b/flyteplugins/go/tasks/pluginmachinery/catalog/hashing.go index 4cc2fbd5cd..7dda4afa97 100644 --- 
a/flyteplugins/go/tasks/pluginmachinery/catalog/hashing.go +++ b/flyteplugins/go/tasks/pluginmachinery/catalog/hashing.go @@ -27,7 +27,7 @@ func hashify(literal *core.Literal) *core.Literal { // 1. A collection of literals or // 2. A map of literals if literal.GetCollection() != nil { - literals := literal.GetCollection().Literals + literals := literal.GetCollection().GetLiterals() literalsHash := make([]*core.Literal, 0) for _, lit := range literals { literalsHash = append(literalsHash, hashify(lit)) @@ -42,7 +42,7 @@ func hashify(literal *core.Literal) *core.Literal { } if literal.GetMap() != nil { literalsMap := make(map[string]*core.Literal) - for key, lit := range literal.GetMap().Literals { + for key, lit := range literal.GetMap().GetLiterals() { literalsMap[key] = hashify(lit) } return &core.Literal{ @@ -58,14 +58,14 @@ func hashify(literal *core.Literal) *core.Literal { } func HashLiteralMap(ctx context.Context, literalMap *core.LiteralMap, cacheIgnoreInputVars []string) (string, error) { - if literalMap == nil || len(literalMap.Literals) == 0 { + if literalMap == nil || len(literalMap.GetLiterals()) == 0 { literalMap = &emptyLiteralMap } // Hashify, i.e. generate a copy of the literal map where each literal value is removed // in case the corresponding hash is set. 
- hashifiedLiteralMap := make(map[string]*core.Literal, len(literalMap.Literals)) - for name, literal := range literalMap.Literals { + hashifiedLiteralMap := make(map[string]*core.Literal, len(literalMap.GetLiterals())) + for name, literal := range literalMap.GetLiterals() { if !slices.Contains(cacheIgnoreInputVars, name) { hashifiedLiteralMap[name] = hashify(literal) } diff --git a/flyteplugins/go/tasks/pluginmachinery/core/exec_metadata.go b/flyteplugins/go/tasks/pluginmachinery/core/exec_metadata.go index 5969d44661..9a020bd188 100644 --- a/flyteplugins/go/tasks/pluginmachinery/core/exec_metadata.go +++ b/flyteplugins/go/tasks/pluginmachinery/core/exec_metadata.go @@ -27,7 +27,7 @@ type TaskExecutionID interface { GetGeneratedNameWith(minLength, maxLength int) (string, error) // GetID returns the underlying idl task identifier. - GetID() core.TaskExecutionIdentifier + GetID() core.TaskExecutionIdentifier // TODO (whynopointer) // GetUniqueNodeID returns the fully-qualified Node ID that is unique within a // given workflow execution. 
@@ -48,7 +48,7 @@ type TaskExecutionMetadata interface { GetMaxAttempts() uint32 GetAnnotations() map[string]string GetK8sServiceAccount() string - GetSecurityContext() core.SecurityContext + GetSecurityContext() core.SecurityContext // TODO (whynopointer) IsInterruptible() bool GetPlatformResources() *v1.ResourceRequirements GetInterruptibleFailureThreshold() int32 diff --git a/flyteplugins/go/tasks/pluginmachinery/core/phase.go b/flyteplugins/go/tasks/pluginmachinery/core/phase.go index 376f261fac..5929a1f2e3 100644 --- a/flyteplugins/go/tasks/pluginmachinery/core/phase.go +++ b/flyteplugins/go/tasks/pluginmachinery/core/phase.go @@ -89,6 +89,8 @@ type ExternalResource struct { RetryAttempt uint32 // Phase (if exists) associated with the external resource Phase Phase + // Extensible field for custom, plugin-specific info + CustomInfo *structpb.Struct } type ReasonInfo struct { diff --git a/flyteplugins/go/tasks/pluginmachinery/core/template/template.go b/flyteplugins/go/tasks/pluginmachinery/core/template/template.go index 5aea60c4b9..9192cf851c 100644 --- a/flyteplugins/go/tasks/pluginmachinery/core/template/template.go +++ b/flyteplugins/go/tasks/pluginmachinery/core/template/template.go @@ -162,7 +162,7 @@ func render(ctx context.Context, inputTemplate string, params Parameters, perRet } func transformVarNameToStringVal(ctx context.Context, varName string, inputs *idlCore.LiteralMap) (string, error) { - inputVal, exists := inputs.Literals[varName] + inputVal, exists := inputs.GetLiterals()[varName] if !exists { return "", fmt.Errorf("requested input is not found [%s]", varName) } @@ -175,7 +175,7 @@ func transformVarNameToStringVal(ctx context.Context, varName string, inputs *id } func serializePrimitive(p *idlCore.Primitive) (string, error) { - switch o := p.Value.(type) { + switch o := p.GetValue().(type) { case *idlCore.Primitive_Integer: return fmt.Sprintf("%v", o.Integer), nil case *idlCore.Primitive_Boolean: @@ -189,22 +189,22 @@ func 
serializePrimitive(p *idlCore.Primitive) (string, error) { case *idlCore.Primitive_StringValue: return o.StringValue, nil default: - return "", fmt.Errorf("received an unexpected primitive type [%v]", reflect.TypeOf(p.Value)) + return "", fmt.Errorf("received an unexpected primitive type [%v]", reflect.TypeOf(p.GetValue())) } } func serializeLiteralScalar(l *idlCore.Scalar) (string, error) { - switch o := l.Value.(type) { + switch o := l.GetValue().(type) { case *idlCore.Scalar_Primitive: return serializePrimitive(o.Primitive) case *idlCore.Scalar_Blob: - return o.Blob.Uri, nil + return o.Blob.GetUri(), nil case *idlCore.Scalar_Schema: - return o.Schema.Uri, nil + return o.Schema.GetUri(), nil case *idlCore.Scalar_Binary: - binaryBytes := o.Binary.Value + binaryBytes := o.Binary.GetValue() var currVal any - if o.Binary.Tag == coreutils.MESSAGEPACK { + if o.Binary.GetTag() == coreutils.MESSAGEPACK { err := msgpack.Unmarshal(binaryBytes, &currVal) if err != nil { return "", fmt.Errorf("failed to unmarshal messagepack bytes with literal:[%v], err:[%v]", l, err) @@ -212,18 +212,18 @@ func serializeLiteralScalar(l *idlCore.Scalar) (string, error) { // TODO: Try to support Primitive_Datetime, Primitive_Duration, Flyte File, and Flyte Directory. 
return fmt.Sprintf("%v", currVal), nil } - return "", fmt.Errorf("unsupported binary tag [%v]", o.Binary.Tag) + return "", fmt.Errorf("unsupported binary tag [%v]", o.Binary.GetTag()) default: - return "", fmt.Errorf("received an unexpected scalar type [%v]", reflect.TypeOf(l.Value)) + return "", fmt.Errorf("received an unexpected scalar type [%v]", reflect.TypeOf(l.GetValue())) } } func serializeLiteral(ctx context.Context, l *idlCore.Literal) (string, error) { - switch o := l.Value.(type) { + switch o := l.GetValue().(type) { case *idlCore.Literal_Collection: - res := make([]string, 0, len(o.Collection.Literals)) - for _, sub := range o.Collection.Literals { + res := make([]string, 0, len(o.Collection.GetLiterals())) + for _, sub := range o.Collection.GetLiterals() { s, err := serializeLiteral(ctx, sub) if err != nil { return "", err @@ -237,6 +237,6 @@ func serializeLiteral(ctx context.Context, l *idlCore.Literal) (string, error) { return serializeLiteralScalar(o.Scalar) default: logger.Debugf(ctx, "received unexpected primitive type") - return "", fmt.Errorf("received an unexpected primitive type [%v]", reflect.TypeOf(l.Value)) + return "", fmt.Errorf("received an unexpected primitive type [%v]", reflect.TypeOf(l.GetValue())) } } diff --git a/flyteplugins/go/tasks/pluginmachinery/flytek8s/config/config.go b/flyteplugins/go/tasks/pluginmachinery/flytek8s/config/config.go index eb19015586..95be69699d 100644 --- a/flyteplugins/go/tasks/pluginmachinery/flytek8s/config/config.go +++ b/flyteplugins/go/tasks/pluginmachinery/flytek8s/config/config.go @@ -64,8 +64,9 @@ var ( DefaultPodTemplateResync: config2.Duration{ Duration: 30 * time.Second, }, - UpdateBaseBackoffDuration: 10, - UpdateBackoffRetries: 5, + UpdateBaseBackoffDuration: 10, + UpdateBackoffRetries: 5, + AddTolerationsForExtendedResources: []string{}, } // K8sPluginConfigSection provides a singular top level config section for all plugins. 
@@ -214,6 +215,11 @@ type K8sPluginConfig struct { // Number of retries for exponential backoff when updating a resource. UpdateBackoffRetries int `json:"update-backoff-retries" pflag:",Number of retries for exponential backoff when updating a resource."` + + // Extended resources that should be added to the tolerations automatically. + AddTolerationsForExtendedResources []string `json:"add-tolerations-for-extended-resources" pflag:",Name of the extended resources for which tolerations should be added."` + + EnableDistributedErrorAggregation bool `json:"enable-distributed-error-aggregation" pflag:",If true, will aggregate errors of different worker pods for distributed tasks."` } // FlyteCoPilotConfig specifies configuration for the Flyte CoPilot system. FlyteCoPilot, allows running flytekit-less containers diff --git a/flyteplugins/go/tasks/pluginmachinery/flytek8s/config/k8spluginconfig_flags.go b/flyteplugins/go/tasks/pluginmachinery/flytek8s/config/k8spluginconfig_flags.go index 4652d0bfd4..caa485ff39 100755 --- a/flyteplugins/go/tasks/pluginmachinery/flytek8s/config/k8spluginconfig_flags.go +++ b/flyteplugins/go/tasks/pluginmachinery/flytek8s/config/k8spluginconfig_flags.go @@ -69,5 +69,6 @@ func (cfg K8sPluginConfig) GetPFlagSet(prefix string) *pflag.FlagSet { cmdFlags.Bool(fmt.Sprintf("%v%v", prefix, "send-object-events"), defaultK8sConfig.SendObjectEvents, "If true, will send k8s object events in TaskExecutionEvent updates.") cmdFlags.Int(fmt.Sprintf("%v%v", prefix, "update-base-backoff-duration"), defaultK8sConfig.UpdateBaseBackoffDuration, "Initial delay in exponential backoff when updating a resource in milliseconds.") cmdFlags.Int(fmt.Sprintf("%v%v", prefix, "update-backoff-retries"), defaultK8sConfig.UpdateBackoffRetries, "Number of retries for exponential backoff when updating a resource.") + cmdFlags.StringSlice(fmt.Sprintf("%v%v", prefix, "add-tolerations-for-extended-resources"), defaultK8sConfig.AddTolerationsForExtendedResources, "Name of the 
extended resources for which tolerations should be added.") return cmdFlags } diff --git a/flyteplugins/go/tasks/pluginmachinery/flytek8s/config/k8spluginconfig_flags_test.go b/flyteplugins/go/tasks/pluginmachinery/flytek8s/config/k8spluginconfig_flags_test.go index cc46ffa466..cb50078620 100755 --- a/flyteplugins/go/tasks/pluginmachinery/flytek8s/config/k8spluginconfig_flags_test.go +++ b/flyteplugins/go/tasks/pluginmachinery/flytek8s/config/k8spluginconfig_flags_test.go @@ -365,4 +365,18 @@ func TestK8sPluginConfig_SetFlags(t *testing.T) { } }) }) + t.Run("Test_add-tolerations-for-extended-resources", func(t *testing.T) { + + t.Run("Override", func(t *testing.T) { + testValue := join_K8sPluginConfig(defaultK8sConfig.AddTolerationsForExtendedResources, ",") + + cmdFlags.Set("add-tolerations-for-extended-resources", testValue) + if vStringSlice, err := cmdFlags.GetStringSlice("add-tolerations-for-extended-resources"); err == nil { + testDecodeRaw_K8sPluginConfig(t, join_K8sPluginConfig(vStringSlice, ","), &actual.AddTolerationsForExtendedResources) + + } else { + assert.FailNow(t, err.Error()) + } + }) + }) } diff --git a/flyteplugins/go/tasks/pluginmachinery/flytek8s/container_helper.go b/flyteplugins/go/tasks/pluginmachinery/flytek8s/container_helper.go index 32d2e0180e..501798c798 100644 --- a/flyteplugins/go/tasks/pluginmachinery/flytek8s/container_helper.go +++ b/flyteplugins/go/tasks/pluginmachinery/flytek8s/container_helper.go @@ -213,7 +213,7 @@ func BuildRawContainer(ctx context.Context, tCtx pluginscore.TaskExecutionContex containerName = rand.String(4) } - res, err := ToK8sResourceRequirements(taskContainer.Resources) + res, err := ToK8sResourceRequirements(taskContainer.GetResources()) if err != nil { return nil, err } diff --git a/flyteplugins/go/tasks/pluginmachinery/flytek8s/container_helper_test.go b/flyteplugins/go/tasks/pluginmachinery/flytek8s/container_helper_test.go index 3b7aa88aeb..4e609c72b2 100644 --- 
a/flyteplugins/go/tasks/pluginmachinery/flytek8s/container_helper_test.go +++ b/flyteplugins/go/tasks/pluginmachinery/flytek8s/container_helper_test.go @@ -525,7 +525,7 @@ func TestAddFlyteCustomizationsToContainer(t *testing.T) { assert.EqualValues(t, container.Command, []string{"s3://input/path"}) assert.Len(t, container.Resources.Limits, 3) assert.Len(t, container.Resources.Requests, 3) - assert.Len(t, container.Env, 13) + assert.Len(t, container.Env, 12) } func TestAddFlyteCustomizationsToContainer_Resources(t *testing.T) { diff --git a/flyteplugins/go/tasks/pluginmachinery/flytek8s/copilot.go b/flyteplugins/go/tasks/pluginmachinery/flytek8s/copilot.go index eaee5bce6c..a5e5e70099 100644 --- a/flyteplugins/go/tasks/pluginmachinery/flytek8s/copilot.go +++ b/flyteplugins/go/tasks/pluginmachinery/flytek8s/copilot.go @@ -20,7 +20,7 @@ import ( ) const ( - flyteSidecarContainerName = "sidecar" + flyteSidecarContainerName = "uploader" flyteInitContainerName = "downloader" ) @@ -162,7 +162,7 @@ func CalculateStorageSize(requirements *v1.ResourceRequirements) *resource.Quant } func AddCoPilotToContainer(ctx context.Context, cfg config.FlyteCoPilotConfig, c *v1.Container, iFace *core.TypedInterface, pilot *core.DataLoadingConfig) error { - if pilot == nil || !pilot.Enabled { + if pilot == nil || !pilot.GetEnabled() { return nil } logger.Infof(ctx, "Enabling CoPilot on main container [%s]", c.Name) @@ -175,7 +175,7 @@ func AddCoPilotToContainer(ctx context.Context, cfg config.FlyteCoPilotConfig, c c.SecurityContext.Capabilities.Add = append(c.SecurityContext.Capabilities.Add, pTraceCapability) if iFace != nil { - if iFace.Inputs != nil && len(iFace.Inputs.Variables) > 0 { + if iFace.GetInputs() != nil && len(iFace.GetInputs().GetVariables()) > 0 { inPath := cfg.DefaultInputDataPath if pilot.GetInputPath() != "" { inPath = pilot.GetInputPath() @@ -187,7 +187,7 @@ func AddCoPilotToContainer(ctx context.Context, cfg config.FlyteCoPilotConfig, c }) } - if iFace.Outputs != 
nil && len(iFace.Outputs.Variables) > 0 { + if iFace.GetOutputs() != nil && len(iFace.GetOutputs().GetVariables()) > 0 { outPath := cfg.DefaultOutputPath if pilot.GetOutputPath() != "" { outPath = pilot.GetOutputPath() @@ -202,16 +202,17 @@ func AddCoPilotToContainer(ctx context.Context, cfg config.FlyteCoPilotConfig, c } func AddCoPilotToPod(ctx context.Context, cfg config.FlyteCoPilotConfig, coPilotPod *v1.PodSpec, iFace *core.TypedInterface, taskExecMetadata core2.TaskExecutionMetadata, inputPaths io.InputFilePaths, outputPaths io.OutputFilePaths, pilot *core.DataLoadingConfig) (string, error) { - if pilot == nil || !pilot.Enabled { + if pilot == nil || !pilot.GetEnabled() { return "", nil } - logger.Infof(ctx, "CoPilot Enabled for task [%s]", taskExecMetadata.GetTaskExecutionID().GetID().TaskId.Name) + //nolint:protogetter + logger.Infof(ctx, "CoPilot Enabled for task [%s]", taskExecMetadata.GetTaskExecutionID().GetID().TaskId.GetName()) shareProcessNamespaceEnabled := true coPilotPod.ShareProcessNamespace = &shareProcessNamespaceEnabled primaryInitContainerName := "" if iFace != nil { - if iFace.Inputs != nil && len(iFace.Inputs.Variables) > 0 { + if iFace.GetInputs() != nil && len(iFace.GetInputs().GetVariables()) > 0 { inPath := cfg.DefaultInputDataPath if pilot.GetInputPath() != "" { inPath = pilot.GetInputPath() @@ -219,18 +220,19 @@ func AddCoPilotToPod(ctx context.Context, cfg config.FlyteCoPilotConfig, coPilot // TODO we should calculate input volume size based on the size of the inputs which is known ahead of time. 
We should store that as part of the metadata size := CalculateStorageSize(taskExecMetadata.GetOverrides().GetResources()) - logger.Infof(ctx, "Adding Input path [%s] of Size [%d] for Task [%s]", inPath, size, taskExecMetadata.GetTaskExecutionID().GetID().TaskId.Name) + //nolint:protogetter + logger.Infof(ctx, "Adding Input path [%s] of Size [%d] for Task [%s]", inPath, size, taskExecMetadata.GetTaskExecutionID().GetID().TaskId.GetName()) inputsVolumeMount := v1.VolumeMount{ Name: cfg.InputVolumeName, MountPath: inPath, } - format := pilot.Format + format := pilot.GetFormat() // Lets add the InputsVolume coPilotPod.Volumes = append(coPilotPod.Volumes, DataVolume(cfg.InputVolumeName, size)) // Lets add the Inputs init container - args, err := DownloadCommandArgs(inputPaths.GetInputPath(), outputPaths.GetOutputPrefixPath(), inPath, format, iFace.Inputs) + args, err := DownloadCommandArgs(inputPaths.GetInputPath(), outputPaths.GetOutputPrefixPath(), inPath, format, iFace.GetInputs()) if err != nil { return primaryInitContainerName, err } @@ -242,14 +244,15 @@ func AddCoPilotToPod(ctx context.Context, cfg config.FlyteCoPilotConfig, coPilot primaryInitContainerName = downloader.Name } - if iFace.Outputs != nil && len(iFace.Outputs.Variables) > 0 { + if iFace.GetOutputs() != nil && len(iFace.GetOutputs().GetVariables()) > 0 { outPath := cfg.DefaultOutputPath if pilot.GetOutputPath() != "" { outPath = pilot.GetOutputPath() } size := CalculateStorageSize(taskExecMetadata.GetOverrides().GetResources()) - logger.Infof(ctx, "Adding Output path [%s] of size [%d] for Task [%s]", size, outPath, taskExecMetadata.GetTaskExecutionID().GetID().TaskId.Name) + //nolint:protogetter + logger.Infof(ctx, "Adding Output path [%s] of size [%d] for Task [%s]", size, outPath, taskExecMetadata.GetTaskExecutionID().GetID().TaskId.GetName()) outputsVolumeMount := v1.VolumeMount{ Name: cfg.OutputVolumeName, diff --git a/flyteplugins/go/tasks/pluginmachinery/flytek8s/copilot_test.go 
b/flyteplugins/go/tasks/pluginmachinery/flytek8s/copilot_test.go index aba18c85ac..f989e57567 100644 --- a/flyteplugins/go/tasks/pluginmachinery/flytek8s/copilot_test.go +++ b/flyteplugins/go/tasks/pluginmachinery/flytek8s/copilot_test.go @@ -4,6 +4,7 @@ import ( "context" "encoding/base64" "reflect" + "strings" "testing" "time" @@ -107,6 +108,12 @@ func TestFlyteCoPilotContainer(t *testing.T) { assert.Error(t, err) cfg.Memory = old }) + + t.Run("sidecar-container-name-change", func(t *testing.T) { + c, err := FlyteCoPilotContainer(flyteSidecarContainerName, cfg, []string{"hello"}) + assert.NoError(t, err) + assert.Equal(t, "uploader", strings.Split(c.Name, "-")[1]) + }) } func TestDownloadCommandArgs(t *testing.T) { @@ -132,11 +139,11 @@ func TestDownloadCommandArgs(t *testing.T) { if assert.NoError(t, err) { vm := &core.VariableMap{} assert.NoError(t, proto.Unmarshal(serIFaceBytes, vm)) - assert.Len(t, vm.Variables, 2) - for k, v := range iFace.Variables { - v2, ok := vm.Variables[k] + assert.Len(t, vm.GetVariables(), 2) + for k, v := range iFace.GetVariables() { + v2, ok := vm.GetVariables()[k] assert.True(t, ok) - assert.Equal(t, v.Type.GetSimple(), v2.Type.GetSimple(), "for %s, types do not match", k) + assert.Equal(t, v.GetType().GetSimple(), v2.GetType().GetSimple(), "for %s, types do not match", k) } } } @@ -167,11 +174,11 @@ func TestSidecarCommandArgs(t *testing.T) { if assert.NoError(t, err) { if2 := &core.TypedInterface{} assert.NoError(t, proto.Unmarshal(serIFaceBytes, if2)) - assert.Len(t, if2.Outputs.Variables, 2) - for k, v := range iFace.Outputs.Variables { - v2, ok := if2.Outputs.Variables[k] + assert.Len(t, if2.GetOutputs().GetVariables(), 2) + for k, v := range iFace.GetOutputs().GetVariables() { + v2, ok := if2.GetOutputs().GetVariables()[k] assert.True(t, ok) - assert.Equal(t, v.Type.GetSimple(), v2.Type.GetSimple(), "for %s, types do not match", k) + assert.Equal(t, v.GetType().GetSimple(), v2.GetType().GetSimple(), "for %s, types do not 
match", k) } } } @@ -196,20 +203,20 @@ func assertContainerHasVolumeMounts(t *testing.T, cfg config.FlyteCoPilotConfig, for _, v := range c.VolumeMounts { vmap[v.Name] = v } - if iFace.Inputs != nil { + if iFace.GetInputs() != nil { path := cfg.DefaultInputDataPath - if pilot.InputPath != "" { - path = pilot.InputPath + if pilot.GetInputPath() != "" { + path = pilot.GetInputPath() } v, found := vmap[cfg.InputVolumeName] assert.Equal(t, path, v.MountPath, "Input Path does not match") assert.True(t, found, "Input volume mount expected but not found!") } - if iFace.Outputs != nil { + if iFace.GetOutputs() != nil { path := cfg.DefaultOutputPath - if pilot.OutputPath != "" { - path = pilot.OutputPath + if pilot.GetOutputPath() != "" { + path = pilot.GetOutputPath() } v, found := vmap[cfg.OutputVolumeName] assert.Equal(t, path, v.MountPath, "Output Path does not match") @@ -260,10 +267,10 @@ func assertPodHasCoPilot(t *testing.T, cfg config.FlyteCoPilotConfig, pilot *cor for _, v := range c.VolumeMounts { vmap[v.Name] = v } - if iFace.Inputs != nil { + if iFace.GetInputs() != nil { path := cfg.DefaultInputDataPath if pilot != nil { - path = pilot.InputPath + path = pilot.GetInputPath() } v, found := vmap[cfg.InputVolumeName] if c.Name == cfg.NamePrefix+flyteInitContainerName { @@ -274,10 +281,10 @@ func assertPodHasCoPilot(t *testing.T, cfg config.FlyteCoPilotConfig, pilot *cor } } - if iFace.Outputs != nil { + if iFace.GetOutputs() != nil { path := cfg.DefaultOutputPath if pilot != nil { - path = pilot.OutputPath + path = pilot.GetOutputPath() } v, found := vmap[cfg.OutputVolumeName] if c.Name == cfg.NamePrefix+flyteInitContainerName { diff --git a/flyteplugins/go/tasks/pluginmachinery/flytek8s/k8s_resource_adds.go b/flyteplugins/go/tasks/pluginmachinery/flytek8s/k8s_resource_adds.go index 3cd000dd40..df74771961 100644 --- a/flyteplugins/go/tasks/pluginmachinery/flytek8s/k8s_resource_adds.go +++ b/flyteplugins/go/tasks/pluginmachinery/flytek8s/k8s_resource_adds.go @@ 
-12,6 +12,7 @@ import ( pluginsCore "github.com/flyteorg/flyte/flyteplugins/go/tasks/pluginmachinery/core" "github.com/flyteorg/flyte/flyteplugins/go/tasks/pluginmachinery/flytek8s/config" + propellerCfg "github.com/flyteorg/flyte/flytepropeller/pkg/controller/config" "github.com/flyteorg/flyte/flytestdlib/contextutils" ) @@ -43,105 +44,115 @@ func GetContextEnvVars(ownerCtx context.Context) []v1.EnvVar { func GetExecutionEnvVars(id pluginsCore.TaskExecutionID, consoleURL string) []v1.EnvVar { - if id == nil || id.GetID().NodeExecutionId == nil || id.GetID().NodeExecutionId.ExecutionId == nil { + //nolint:protogetter + if id == nil || id.GetID().NodeExecutionId == nil || id.GetID().NodeExecutionId.GetExecutionId() == nil { return []v1.EnvVar{} } // Execution level env variables. - nodeExecutionID := id.GetID().NodeExecutionId.ExecutionId - attemptNumber := strconv.Itoa(int(id.GetID().RetryAttempt)) + nodeExecutionID := id.GetID().NodeExecutionId.GetExecutionId() //nolint:protogetter + attemptNumber := strconv.Itoa(int(id.GetID().RetryAttempt)) //nolint:protogetter envVars := []v1.EnvVar{ { Name: "FLYTE_INTERNAL_EXECUTION_ID", - Value: nodeExecutionID.Name, + Value: nodeExecutionID.GetName(), }, { Name: "FLYTE_INTERNAL_EXECUTION_PROJECT", - Value: nodeExecutionID.Project, + Value: nodeExecutionID.GetProject(), }, { Name: "FLYTE_INTERNAL_EXECUTION_DOMAIN", - Value: nodeExecutionID.Domain, - }, - { - // FLYTE_INTERNAL_POD_NAME - Name: "_F_PN", - ValueFrom: &v1.EnvVarSource{ - FieldRef: &v1.ObjectFieldSelector{ - FieldPath: "metadata.name", - }, - }, + Value: nodeExecutionID.GetDomain(), }, { Name: "FLYTE_ATTEMPT_NUMBER", Value: attemptNumber, }, - // TODO: Fill in these - // { - // Name: "FLYTE_INTERNAL_EXECUTION_WORKFLOW", - // Value: "", - // }, - // { - // Name: "FLYTE_INTERNAL_EXECUTION_LAUNCHPLAN", - // Value: "", - // }, } if len(consoleURL) > 0 { consoleURL = strings.TrimRight(consoleURL, "/") envVars = append(envVars, v1.EnvVar{ Name: flyteExecutionURL, - 
Value: fmt.Sprintf("%s/projects/%s/domains/%s/executions/%s/nodeId/%s/nodes", consoleURL, nodeExecutionID.Project, nodeExecutionID.Domain, nodeExecutionID.Name, id.GetUniqueNodeID()), + Value: fmt.Sprintf("%s/projects/%s/domains/%s/executions/%s/nodeId/%s/nodes", consoleURL, nodeExecutionID.GetProject(), nodeExecutionID.GetDomain(), nodeExecutionID.GetName(), id.GetUniqueNodeID()), }) } // Task definition Level env variables. - if id.GetID().TaskId != nil { - taskID := id.GetID().TaskId + if id.GetID().TaskId != nil { //nolint:protogetter + taskID := id.GetID().TaskId //nolint:protogetter envVars = append(envVars, v1.EnvVar{ Name: "FLYTE_INTERNAL_TASK_PROJECT", - Value: taskID.Project, + Value: taskID.GetProject(), }, v1.EnvVar{ Name: "FLYTE_INTERNAL_TASK_DOMAIN", - Value: taskID.Domain, + Value: taskID.GetDomain(), }, v1.EnvVar{ Name: "FLYTE_INTERNAL_TASK_NAME", - Value: taskID.Name, + Value: taskID.GetName(), }, v1.EnvVar{ Name: "FLYTE_INTERNAL_TASK_VERSION", - Value: taskID.Version, + Value: taskID.GetVersion(), }, // Historic Task Definition Level env variables. // Remove these once SDK is migrated to use the new ones. 
v1.EnvVar{ Name: "FLYTE_INTERNAL_PROJECT", - Value: taskID.Project, + Value: taskID.GetProject(), }, v1.EnvVar{ Name: "FLYTE_INTERNAL_DOMAIN", - Value: taskID.Domain, + Value: taskID.GetDomain(), }, v1.EnvVar{ Name: "FLYTE_INTERNAL_NAME", - Value: taskID.Name, + Value: taskID.GetName(), }, v1.EnvVar{ Name: "FLYTE_INTERNAL_VERSION", - Value: taskID.Version, + Value: taskID.GetVersion(), }) } return envVars } +func GetLiteralOffloadingEnvVars() []v1.EnvVar { + propellerConfig := propellerCfg.GetConfig() + if !propellerConfig.LiteralOffloadingConfig.Enabled { + return []v1.EnvVar{} + } + + envVars := []v1.EnvVar{} + if propellerConfig.LiteralOffloadingConfig.MinSizeInMBForOffloading > 0 { + envVars = append(envVars, + v1.EnvVar{ + Name: "_F_L_MIN_SIZE_MB", + Value: strconv.FormatInt(propellerConfig.LiteralOffloadingConfig.MinSizeInMBForOffloading, 10), + }, + ) + } + if propellerConfig.LiteralOffloadingConfig.MaxSizeInMBForOffloading > 0 { + envVars = append(envVars, + v1.EnvVar{ + Name: "_F_L_MAX_SIZE_MB", + Value: strconv.FormatInt(propellerConfig.LiteralOffloadingConfig.MaxSizeInMBForOffloading, 10), + }, + ) + } + return envVars +} + func DecorateEnvVars(ctx context.Context, envVars []v1.EnvVar, envFroms []v1.EnvFromSource, taskEnvironmentVariables map[string]string, id pluginsCore.TaskExecutionID, consoleURL string) ([]v1.EnvVar, []v1.EnvFromSource) { envVars = append(envVars, GetContextEnvVars(ctx)...) envVars = append(envVars, GetExecutionEnvVars(id, consoleURL)...) + envVars = append(envVars, GetLiteralOffloadingEnvVars()...) 
for k, v := range taskEnvironmentVariables { envVars = append(envVars, v1.EnvVar{Name: k, Value: v}) diff --git a/flyteplugins/go/tasks/pluginmachinery/flytek8s/k8s_resource_adds_test.go b/flyteplugins/go/tasks/pluginmachinery/flytek8s/k8s_resource_adds_test.go index fd4828fbbd..0ed5fc0337 100644 --- a/flyteplugins/go/tasks/pluginmachinery/flytek8s/k8s_resource_adds_test.go +++ b/flyteplugins/go/tasks/pluginmachinery/flytek8s/k8s_resource_adds_test.go @@ -14,6 +14,7 @@ import ( "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core" pluginsCore "github.com/flyteorg/flyte/flyteplugins/go/tasks/pluginmachinery/core" "github.com/flyteorg/flyte/flyteplugins/go/tasks/pluginmachinery/flytek8s/config" + propellerCfg "github.com/flyteorg/flyte/flytepropeller/pkg/controller/config" "github.com/flyteorg/flyte/flytestdlib/contextutils" ) @@ -27,13 +28,13 @@ func TestGetExecutionEnvVars(t *testing.T) { }{ { "no-console-url", - 13, + 12, "", nil, }, { "with-console-url", - 14, + 13, "scheme://host/path", &v12.EnvVar{ Name: "FLYTE_EXECUTION_URL", @@ -42,7 +43,7 @@ func TestGetExecutionEnvVars(t *testing.T) { }, { "with-console-url-ending-in-single-slash", - 14, + 13, "scheme://host/path/", &v12.EnvVar{ Name: "FLYTE_EXECUTION_URL", @@ -51,7 +52,7 @@ func TestGetExecutionEnvVars(t *testing.T) { }, { "with-console-url-ending-in-multiple-slashes", - 14, + 13, "scheme://host/path////", &v12.EnvVar{ Name: "FLYTE_EXECUTION_URL", @@ -63,7 +64,7 @@ func TestGetExecutionEnvVars(t *testing.T) { envVars := GetExecutionEnvVars(mock, tt.consoleURL) assert.Len(t, envVars, tt.expectedEnvVars) if tt.expectedEnvVar != nil { - assert.True(t, proto.Equal(&envVars[5], tt.expectedEnvVar)) + assert.True(t, proto.Equal(&envVars[4], tt.expectedEnvVar)) } } } @@ -304,6 +305,8 @@ func TestDecorateEnvVars(t *testing.T) { expected := append(defaultEnv, GetContextEnvVars(ctx)...) expected = append(expected, GetExecutionEnvVars(mockTaskExecutionIdentifier{}, "")...) 
+ expectedOffloaded := append(expected, v12.EnvVar{Name: "_F_L_MIN_SIZE_MB", Value: "1"}) + expectedOffloaded = append(expectedOffloaded, v12.EnvVar{Name: "_F_L_MAX_SIZE_MB", Value: "42"}) aggregated := append(expected, v12.EnvVar{Name: "k", Value: "v"}) type args struct { @@ -315,17 +318,77 @@ func TestDecorateEnvVars(t *testing.T) { args args additionEnvVar map[string]string additionEnvVarFromEnv map[string]string + offloadingEnabled bool + offloadingEnvVar map[string]string executionEnvVar map[string]string consoleURL string want []v12.EnvVar }{ - {"no-additional", args{envVars: defaultEnv, id: mockTaskExecutionIdentifier{}}, emptyEnvVar, emptyEnvVar, emptyEnvVar, "", expected}, - {"with-additional", args{envVars: defaultEnv, id: mockTaskExecutionIdentifier{}}, additionalEnv, emptyEnvVar, emptyEnvVar, "", aggregated}, - {"from-env", args{envVars: defaultEnv, id: mockTaskExecutionIdentifier{}}, emptyEnvVar, envVarsFromEnv, emptyEnvVar, "", aggregated}, - {"from-execution-metadata", args{envVars: defaultEnv, id: mockTaskExecutionIdentifier{}}, emptyEnvVar, emptyEnvVar, additionalEnv, "", aggregated}, + { + "no-additional", + args{envVars: defaultEnv, id: mockTaskExecutionIdentifier{}}, + emptyEnvVar, + emptyEnvVar, + false, + emptyEnvVar, + emptyEnvVar, + "", + expected, + }, + { + "no-additional-offloading-enabled", + args{envVars: defaultEnv, id: mockTaskExecutionIdentifier{}}, + emptyEnvVar, + emptyEnvVar, + true, + emptyEnvVar, + emptyEnvVar, + "", + expectedOffloaded, + }, + { + "with-additional", + args{envVars: defaultEnv, id: mockTaskExecutionIdentifier{}}, + additionalEnv, + emptyEnvVar, + false, + emptyEnvVar, + emptyEnvVar, + "", + aggregated, + }, + { + "from-env", + args{envVars: defaultEnv, id: mockTaskExecutionIdentifier{}}, + emptyEnvVar, + envVarsFromEnv, + false, + emptyEnvVar, + emptyEnvVar, + "", + aggregated, + }, + { + "from-execution-metadata", + args{envVars: defaultEnv, id: mockTaskExecutionIdentifier{}}, + emptyEnvVar, + emptyEnvVar, + 
false, + emptyEnvVar, + additionalEnv, + "", + aggregated, + }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { + cfg := propellerCfg.GetConfig() + cfg.LiteralOffloadingConfig = propellerCfg.LiteralOffloadingConfig{ + Enabled: tt.offloadingEnabled, + MinSizeInMBForOffloading: 1, + MaxSizeInMBForOffloading: 42, + } + assert.NoError(t, config.SetK8sPluginConfig(&config.K8sPluginConfig{ DefaultEnvVars: tt.additionEnvVar, DefaultEnvVarsFromEnv: tt.additionEnvVarFromEnv, diff --git a/flyteplugins/go/tasks/pluginmachinery/flytek8s/pod_helper.go b/flyteplugins/go/tasks/pluginmachinery/flytek8s/pod_helper.go index 229f963968..6beca78f54 100644 --- a/flyteplugins/go/tasks/pluginmachinery/flytek8s/pod_helper.go +++ b/flyteplugins/go/tasks/pluginmachinery/flytek8s/pod_helper.go @@ -11,6 +11,7 @@ import ( "github.com/imdario/mergo" v1 "k8s.io/api/core/v1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" + "k8s.io/apimachinery/pkg/util/sets" "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core" pluginserrors "github.com/flyteorg/flyte/flyteplugins/go/tasks/errors" @@ -287,15 +288,15 @@ func BuildRawPod(ctx context.Context, tCtx pluginsCore.TaskExecutionContext) (*v } case *core.TaskTemplate_K8SPod: // handles pod tasks that marshal the pod spec to the k8s_pod task target. 
- if target.K8SPod.PodSpec == nil { + if target.K8SPod.GetPodSpec() == nil { return nil, nil, "", pluginserrors.Errorf(pluginserrors.BadTaskSpecification, "Pod tasks with task type version > 1 should specify their target as a K8sPod with a defined pod spec") } - err := utils.UnmarshalStructToObj(target.K8SPod.PodSpec, &podSpec) + err := utils.UnmarshalStructToObj(target.K8SPod.GetPodSpec(), &podSpec) if err != nil { return nil, nil, "", pluginserrors.Errorf(pluginserrors.BadTaskSpecification, - "Unable to unmarshal task k8s pod [%v], Err: [%v]", target.K8SPod.PodSpec, err.Error()) + "Unable to unmarshal task k8s pod [%v], Err: [%v]", target.K8SPod.GetPodSpec(), err.Error()) } // get primary container name @@ -306,9 +307,9 @@ func BuildRawPod(ctx context.Context, tCtx pluginsCore.TaskExecutionContext) (*v } // update annotations and labels - if taskTemplate.GetK8SPod().Metadata != nil { - mergeMapInto(target.K8SPod.Metadata.Annotations, objectMeta.Annotations) - mergeMapInto(target.K8SPod.Metadata.Labels, objectMeta.Labels) + if taskTemplate.GetK8SPod().GetMetadata() != nil { + mergeMapInto(target.K8SPod.GetMetadata().GetAnnotations(), objectMeta.Annotations) + mergeMapInto(target.K8SPod.GetMetadata().GetLabels(), objectMeta.Labels) } default: return nil, nil, "", pluginserrors.Errorf(pluginserrors.BadTaskSpecification, @@ -393,7 +394,7 @@ func ApplyFlytePodConfiguration(ctx context.Context, tCtx pluginsCore.TaskExecut if dataLoadingConfig != nil { if err := AddCoPilotToContainer(ctx, config.GetK8sPluginConfig().CoPilot, - primaryContainer, taskTemplate.Interface, dataLoadingConfig); err != nil { + primaryContainer, taskTemplate.GetInterface(), dataLoadingConfig); err != nil { return nil, nil, err } @@ -445,6 +446,54 @@ func ApplyContainerImageOverride(podSpec *v1.PodSpec, containerImage string, pri } } +func addTolerationInPodSpec(podSpec *v1.PodSpec, toleration *v1.Toleration) *v1.PodSpec { + podTolerations := podSpec.Tolerations + + var newTolerations 
[]v1.Toleration + for i := range podTolerations { + if toleration.MatchToleration(&podTolerations[i]) { + return podSpec + } + newTolerations = append(newTolerations, podTolerations[i]) + } + newTolerations = append(newTolerations, *toleration) + podSpec.Tolerations = newTolerations + return podSpec +} + +func AddTolerationsForExtendedResources(podSpec *v1.PodSpec) *v1.PodSpec { + if podSpec == nil { + podSpec = &v1.PodSpec{} + } + + resources := sets.NewString() + for _, container := range podSpec.Containers { + for _, extendedResource := range config.GetK8sPluginConfig().AddTolerationsForExtendedResources { + if _, ok := container.Resources.Requests[v1.ResourceName(extendedResource)]; ok { + resources.Insert(extendedResource) + } + } + } + + for _, container := range podSpec.InitContainers { + for _, extendedResource := range config.GetK8sPluginConfig().AddTolerationsForExtendedResources { + if _, ok := container.Resources.Requests[v1.ResourceName(extendedResource)]; ok { + resources.Insert(extendedResource) + } + } + } + + for _, resource := range resources.List() { + addTolerationInPodSpec(podSpec, &v1.Toleration{ + Key: resource, + Operator: v1.TolerationOpExists, + Effect: v1.TaintEffectNoSchedule, + }) + } + + return podSpec +} + // ToK8sPodSpec builds a PodSpec and ObjectMeta based on the definition passed by the TaskExecutionContext. This // involves parsing the raw PodSpec definition and applying all Flyte configuration options. 
func ToK8sPodSpec(ctx context.Context, tCtx pluginsCore.TaskExecutionContext) (*v1.PodSpec, *metav1.ObjectMeta, string, error) { @@ -460,6 +509,8 @@ func ToK8sPodSpec(ctx context.Context, tCtx pluginsCore.TaskExecutionContext) (* return nil, nil, "", err } + podSpec = AddTolerationsForExtendedResources(podSpec) + return podSpec, objectMeta, primaryContainerName, nil } @@ -483,11 +534,11 @@ func getBasePodTemplate(ctx context.Context, tCtx pluginsCore.TaskExecutionConte } var podTemplate *v1.PodTemplate - if taskTemplate.Metadata != nil && len(taskTemplate.Metadata.PodTemplateName) > 0 { + if taskTemplate.GetMetadata() != nil && len(taskTemplate.GetMetadata().GetPodTemplateName()) > 0 { // retrieve PodTemplate by name from PodTemplateStore - podTemplate = podTemplateStore.LoadOrDefault(tCtx.TaskExecutionMetadata().GetNamespace(), taskTemplate.Metadata.PodTemplateName) + podTemplate = podTemplateStore.LoadOrDefault(tCtx.TaskExecutionMetadata().GetNamespace(), taskTemplate.GetMetadata().GetPodTemplateName()) if podTemplate == nil { - return nil, pluginserrors.Errorf(pluginserrors.BadTaskSpecification, "PodTemplate '%s' does not exist", taskTemplate.Metadata.PodTemplateName) + return nil, pluginserrors.Errorf(pluginserrors.BadTaskSpecification, "PodTemplate '%s' does not exist", taskTemplate.GetMetadata().GetPodTemplateName()) } } else { // check for default PodTemplate diff --git a/flyteplugins/go/tasks/pluginmachinery/flytek8s/pod_helper_test.go b/flyteplugins/go/tasks/pluginmachinery/flytek8s/pod_helper_test.go index 9797b5e05b..0a70cdd895 100644 --- a/flyteplugins/go/tasks/pluginmachinery/flytek8s/pod_helper_test.go +++ b/flyteplugins/go/tasks/pluginmachinery/flytek8s/pod_helper_test.go @@ -3,6 +3,7 @@ package flytek8s import ( "context" "encoding/json" + "fmt" "io/ioutil" "path/filepath" "reflect" @@ -1529,7 +1530,7 @@ func TestDemystifyPendingTimeout(t *testing.T) { taskStatus, err := DemystifyPending(s, pluginsCore.TaskInfo{}) assert.NoError(t, err) 
assert.Equal(t, pluginsCore.PhaseRetryableFailure, taskStatus.Phase()) - assert.Equal(t, "PodPendingTimeout", taskStatus.Err().Code) + assert.Equal(t, "PodPendingTimeout", taskStatus.Err().GetCode()) assert.True(t, taskStatus.CleanupOnFailure()) }) } @@ -1549,7 +1550,7 @@ func TestDemystifySuccess(t *testing.T) { }, pluginsCore.TaskInfo{}) assert.Nil(t, err) assert.Equal(t, pluginsCore.PhaseRetryableFailure, phaseInfo.Phase()) - assert.Equal(t, "OOMKilled", phaseInfo.Err().Code) + assert.Equal(t, "OOMKilled", phaseInfo.Err().GetCode()) }) t.Run("InitContainer OOMKilled", func(t *testing.T) { @@ -1566,7 +1567,7 @@ func TestDemystifySuccess(t *testing.T) { }, pluginsCore.TaskInfo{}) assert.Nil(t, err) assert.Equal(t, pluginsCore.PhaseRetryableFailure, phaseInfo.Phase()) - assert.Equal(t, "OOMKilled", phaseInfo.Err().Code) + assert.Equal(t, "OOMKilled", phaseInfo.Err().GetCode()) }) t.Run("success", func(t *testing.T) { @@ -1581,16 +1582,16 @@ func TestDemystifyFailure(t *testing.T) { phaseInfo, err := DemystifyFailure(v1.PodStatus{}, pluginsCore.TaskInfo{}) assert.Nil(t, err) assert.Equal(t, pluginsCore.PhaseRetryableFailure, phaseInfo.Phase()) - assert.Equal(t, "UnknownError", phaseInfo.Err().Code) - assert.Equal(t, core.ExecutionError_USER, phaseInfo.Err().Kind) + assert.Equal(t, "UnknownError", phaseInfo.Err().GetCode()) + assert.Equal(t, core.ExecutionError_USER, phaseInfo.Err().GetKind()) }) t.Run("known-error", func(t *testing.T) { phaseInfo, err := DemystifyFailure(v1.PodStatus{Reason: "hello"}, pluginsCore.TaskInfo{}) assert.Nil(t, err) assert.Equal(t, pluginsCore.PhaseRetryableFailure, phaseInfo.Phase()) - assert.Equal(t, "hello", phaseInfo.Err().Code) - assert.Equal(t, core.ExecutionError_USER, phaseInfo.Err().Kind) + assert.Equal(t, "hello", phaseInfo.Err().GetCode()) + assert.Equal(t, core.ExecutionError_USER, phaseInfo.Err().GetKind()) }) t.Run("OOMKilled", func(t *testing.T) { @@ -1608,8 +1609,8 @@ func TestDemystifyFailure(t *testing.T) { }, 
pluginsCore.TaskInfo{}) assert.Nil(t, err) assert.Equal(t, pluginsCore.PhaseRetryableFailure, phaseInfo.Phase()) - assert.Equal(t, "OOMKilled", phaseInfo.Err().Code) - assert.Equal(t, core.ExecutionError_USER, phaseInfo.Err().Kind) + assert.Equal(t, "OOMKilled", phaseInfo.Err().GetCode()) + assert.Equal(t, core.ExecutionError_USER, phaseInfo.Err().GetKind()) }) t.Run("SIGKILL", func(t *testing.T) { @@ -1627,8 +1628,8 @@ func TestDemystifyFailure(t *testing.T) { }, pluginsCore.TaskInfo{}) assert.Nil(t, err) assert.Equal(t, pluginsCore.PhaseRetryableFailure, phaseInfo.Phase()) - assert.Equal(t, "Interrupted", phaseInfo.Err().Code) - assert.Equal(t, core.ExecutionError_USER, phaseInfo.Err().Kind) + assert.Equal(t, "Interrupted", phaseInfo.Err().GetCode()) + assert.Equal(t, core.ExecutionError_USER, phaseInfo.Err().GetKind()) }) t.Run("GKE kubelet graceful node shutdown", func(t *testing.T) { @@ -1649,9 +1650,9 @@ func TestDemystifyFailure(t *testing.T) { }, pluginsCore.TaskInfo{}) assert.Nil(t, err) assert.Equal(t, pluginsCore.PhaseRetryableFailure, phaseInfo.Phase()) - assert.Equal(t, "Interrupted", phaseInfo.Err().Code) - assert.Equal(t, core.ExecutionError_SYSTEM, phaseInfo.Err().Kind) - assert.Contains(t, phaseInfo.Err().Message, containerReason) + assert.Equal(t, "Interrupted", phaseInfo.Err().GetCode()) + assert.Equal(t, core.ExecutionError_SYSTEM, phaseInfo.Err().GetKind()) + assert.Contains(t, phaseInfo.Err().GetMessage(), containerReason) }) t.Run("GKE kubelet graceful node shutdown", func(t *testing.T) { @@ -1672,9 +1673,9 @@ func TestDemystifyFailure(t *testing.T) { }, pluginsCore.TaskInfo{}) assert.Nil(t, err) assert.Equal(t, pluginsCore.PhaseRetryableFailure, phaseInfo.Phase()) - assert.Equal(t, "Interrupted", phaseInfo.Err().Code) - assert.Equal(t, core.ExecutionError_SYSTEM, phaseInfo.Err().Kind) - assert.Contains(t, phaseInfo.Err().Message, containerReason) + assert.Equal(t, "Interrupted", phaseInfo.Err().GetCode()) + assert.Equal(t, 
core.ExecutionError_SYSTEM, phaseInfo.Err().GetKind()) + assert.Contains(t, phaseInfo.Err().GetMessage(), containerReason) }) } @@ -1705,8 +1706,8 @@ func TestDemystifyPending_testcases(t *testing.T) { assert.NotNil(t, p) assert.Equal(t, p.Phase(), pluginsCore.PhaseRetryableFailure) if assert.NotNil(t, p.Err()) { - assert.Equal(t, p.Err().Code, tt.errCode) - assert.Equal(t, p.Err().Message, tt.message) + assert.Equal(t, p.Err().GetCode(), tt.errCode) + assert.Equal(t, p.Err().GetMessage(), tt.message) } } } @@ -1765,8 +1766,8 @@ func TestDeterminePrimaryContainerPhase(t *testing.T) { }, }, info) assert.Equal(t, pluginsCore.PhaseRetryableFailure, phaseInfo.Phase()) - assert.Equal(t, "foo", phaseInfo.Err().Code) - assert.Equal(t, "\r\n[primary] terminated with exit code (1). Reason [foo]. Message: \nfoo failed.", phaseInfo.Err().Message) + assert.Equal(t, "foo", phaseInfo.Err().GetCode()) + assert.Equal(t, "\r\n[primary] terminated with exit code (1). Reason [foo]. Message: \nfoo failed.", phaseInfo.Err().GetMessage()) }) t.Run("primary container succeeded", func(t *testing.T) { phaseInfo := DeterminePrimaryContainerPhase(primaryContainerName, []v1.ContainerStatus{ @@ -1786,8 +1787,8 @@ func TestDeterminePrimaryContainerPhase(t *testing.T) { secondaryContainer, }, info) assert.Equal(t, pluginsCore.PhasePermanentFailure, phaseInfo.Phase()) - assert.Equal(t, PrimaryContainerNotFound, phaseInfo.Err().Code) - assert.Equal(t, "Primary container [primary] not found in pod's container statuses", phaseInfo.Err().Message) + assert.Equal(t, PrimaryContainerNotFound, phaseInfo.Err().GetCode()) + assert.Equal(t, "Primary container [primary] not found in pod's container statuses", phaseInfo.Err().GetMessage()) }) t.Run("primary container failed with OOMKilled", func(t *testing.T) { phaseInfo := DeterminePrimaryContainerPhase(primaryContainerName, []v1.ContainerStatus{ @@ -1803,8 +1804,8 @@ func TestDeterminePrimaryContainerPhase(t *testing.T) { }, }, info) assert.Equal(t, 
pluginsCore.PhaseRetryableFailure, phaseInfo.Phase()) - assert.Equal(t, OOMKilled, phaseInfo.Err().Code) - assert.Equal(t, "\r\n[primary] terminated with exit code (0). Reason [OOMKilled]. Message: \nfoo failed.", phaseInfo.Err().Message) + assert.Equal(t, OOMKilled, phaseInfo.Err().GetCode()) + assert.Equal(t, "\r\n[primary] terminated with exit code (0). Reason [OOMKilled]. Message: \nfoo failed.", phaseInfo.Err().GetMessage()) }) } @@ -2244,3 +2245,112 @@ func TestAddFlyteCustomizationsToContainer_SetConsoleUrl(t *testing.T) { }) } } + +func TestAddTolerationsForExtendedResources(t *testing.T) { + gpuResourceName := v1.ResourceName("nvidia.com/gpu") + addTolerationResourceName := v1.ResourceName("foo/bar") + noTolerationResourceName := v1.ResourceName("foo/baz") + assert.NoError(t, config.SetK8sPluginConfig(&config.K8sPluginConfig{ + GpuResourceName: gpuResourceName, + AddTolerationsForExtendedResources: []string{ + gpuResourceName.String(), + addTolerationResourceName.String(), + }, + })) + + podSpec := &v1.PodSpec{ + Containers: []v1.Container{ + v1.Container{ + Resources: v1.ResourceRequirements{ + Requests: v1.ResourceList{ + gpuResourceName: resource.MustParse("1"), + addTolerationResourceName: resource.MustParse("1"), + noTolerationResourceName: resource.MustParse("1"), + }, + }, + }, + }, + Tolerations: []v1.Toleration{ + { + Key: "foo", + Operator: v1.TolerationOpExists, + Effect: v1.TaintEffectNoSchedule, + }, + }, + } + + podSpec = AddTolerationsForExtendedResources(podSpec) + fmt.Printf("%v\n", podSpec.Tolerations) + assert.Equal(t, 3, len(podSpec.Tolerations)) + assert.Equal(t, addTolerationResourceName.String(), podSpec.Tolerations[1].Key) + assert.Equal(t, v1.TolerationOpExists, podSpec.Tolerations[1].Operator) + assert.Equal(t, v1.TaintEffectNoSchedule, podSpec.Tolerations[1].Effect) + assert.Equal(t, gpuResourceName.String(), podSpec.Tolerations[2].Key) + assert.Equal(t, v1.TolerationOpExists, podSpec.Tolerations[2].Operator) + assert.Equal(t, 
v1.TaintEffectNoSchedule, podSpec.Tolerations[2].Effect) + + podSpec = &v1.PodSpec{ + InitContainers: []v1.Container{ + v1.Container{ + Resources: v1.ResourceRequirements{ + Requests: v1.ResourceList{ + gpuResourceName: resource.MustParse("1"), + addTolerationResourceName: resource.MustParse("1"), + noTolerationResourceName: resource.MustParse("1"), + }, + }, + }, + }, + Tolerations: []v1.Toleration{ + { + Key: "foo", + Operator: v1.TolerationOpExists, + Effect: v1.TaintEffectNoSchedule, + }, + }, + } + + podSpec = AddTolerationsForExtendedResources(podSpec) + assert.Equal(t, 3, len(podSpec.Tolerations)) + assert.Equal(t, addTolerationResourceName.String(), podSpec.Tolerations[1].Key) + assert.Equal(t, v1.TolerationOpExists, podSpec.Tolerations[1].Operator) + assert.Equal(t, v1.TaintEffectNoSchedule, podSpec.Tolerations[1].Effect) + assert.Equal(t, gpuResourceName.String(), podSpec.Tolerations[2].Key) + assert.Equal(t, v1.TolerationOpExists, podSpec.Tolerations[2].Operator) + assert.Equal(t, v1.TaintEffectNoSchedule, podSpec.Tolerations[2].Effect) + + podSpec = &v1.PodSpec{ + Containers: []v1.Container{ + v1.Container{ + Resources: v1.ResourceRequirements{ + Requests: v1.ResourceList{ + gpuResourceName: resource.MustParse("1"), + addTolerationResourceName: resource.MustParse("1"), + noTolerationResourceName: resource.MustParse("1"), + }, + }, + }, + }, + Tolerations: []v1.Toleration{ + { + Key: "foo", + Operator: v1.TolerationOpExists, + Effect: v1.TaintEffectNoSchedule, + }, + { + Key: gpuResourceName.String(), + Operator: v1.TolerationOpExists, + Effect: v1.TaintEffectNoSchedule, + }, + }, + } + + podSpec = AddTolerationsForExtendedResources(podSpec) + assert.Equal(t, 3, len(podSpec.Tolerations)) + assert.Equal(t, gpuResourceName.String(), podSpec.Tolerations[1].Key) + assert.Equal(t, v1.TolerationOpExists, podSpec.Tolerations[1].Operator) + assert.Equal(t, v1.TaintEffectNoSchedule, podSpec.Tolerations[1].Effect) + assert.Equal(t, 
addTolerationResourceName.String(), podSpec.Tolerations[2].Key) + assert.Equal(t, v1.TolerationOpExists, podSpec.Tolerations[2].Operator) + assert.Equal(t, v1.TaintEffectNoSchedule, podSpec.Tolerations[2].Effect) +} diff --git a/flyteplugins/go/tasks/pluginmachinery/flytek8s/utils.go b/flyteplugins/go/tasks/pluginmachinery/flytek8s/utils.go index ef7807aadd..fab4f84997 100644 --- a/flyteplugins/go/tasks/pluginmachinery/flytek8s/utils.go +++ b/flyteplugins/go/tasks/pluginmachinery/flytek8s/utils.go @@ -12,7 +12,7 @@ import ( func ToK8sEnvVar(env []*core.KeyValuePair) []v1.EnvVar { envVars := make([]v1.EnvVar, 0, len(env)) for _, kv := range env { - envVars = append(envVars, v1.EnvVar{Name: kv.Key, Value: kv.Value}) + envVars = append(envVars, v1.EnvVar{Name: kv.GetKey(), Value: kv.GetValue()}) } return envVars } @@ -22,12 +22,12 @@ func ToK8sEnvVar(env []*core.KeyValuePair) []v1.EnvVar { func ToK8sResourceList(resources []*core.Resources_ResourceEntry) (v1.ResourceList, error) { k8sResources := make(v1.ResourceList, len(resources)) for _, r := range resources { - rVal := r.Value + rVal := r.GetValue() v, err := resource.ParseQuantity(rVal) if err != nil { return nil, errors.Wrap(err, "Failed to parse resource as a valid quantity.") } - switch r.Name { + switch r.GetName() { case core.Resources_CPU: if !v.IsZero() { k8sResources[v1.ResourceCPU] = v @@ -54,11 +54,11 @@ func ToK8sResourceRequirements(resources *core.Resources) (*v1.ResourceRequireme if resources == nil { return res, nil } - req, err := ToK8sResourceList(resources.Requests) + req, err := ToK8sResourceList(resources.GetRequests()) if err != nil { return res, err } - lim, err := ToK8sResourceList(resources.Limits) + lim, err := ToK8sResourceList(resources.GetLimits()) if err != nil { return res, err } diff --git a/flyteplugins/go/tasks/pluginmachinery/internal/webapi/cache.go b/flyteplugins/go/tasks/pluginmachinery/internal/webapi/cache.go index 7569abd90e..b9efcd7372 100644 --- 
a/flyteplugins/go/tasks/pluginmachinery/internal/webapi/cache.go +++ b/flyteplugins/go/tasks/pluginmachinery/internal/webapi/cache.go @@ -179,7 +179,7 @@ func NewResourceCache(ctx context.Context, name string, client Client, cfg webap workqueue.NewMaxOfRateLimiter( workqueue.NewItemExponentialFailureRateLimiter(5*time.Millisecond, 1000*time.Second), &workqueue.BucketRateLimiter{Limiter: rate.NewLimiter(rate.Limit(rateCfg.QPS), rateCfg.Burst)}, - ), cfg.ResyncInterval.Duration, cfg.Workers, cfg.Size, + ), cfg.ResyncInterval.Duration, uint(cfg.Workers), uint(cfg.Size), // #nosec G115 scope.NewSubScope("cache")) if err != nil { diff --git a/flyteplugins/go/tasks/pluginmachinery/io/mocks/error_reader.go b/flyteplugins/go/tasks/pluginmachinery/io/mocks/error_reader.go new file mode 100644 index 0000000000..161e863f49 --- /dev/null +++ b/flyteplugins/go/tasks/pluginmachinery/io/mocks/error_reader.go @@ -0,0 +1,93 @@ +// Code generated by mockery v1.0.1. DO NOT EDIT. + +package mocks + +import ( + context "context" + + io "github.com/flyteorg/flyte/flyteplugins/go/tasks/pluginmachinery/io" + mock "github.com/stretchr/testify/mock" +) + +// ErrorReader is an autogenerated mock type for the ErrorReader type +type ErrorReader struct { + mock.Mock +} + +type ErrorReader_IsError struct { + *mock.Call +} + +func (_m ErrorReader_IsError) Return(_a0 bool, _a1 error) *ErrorReader_IsError { + return &ErrorReader_IsError{Call: _m.Call.Return(_a0, _a1)} +} + +func (_m *ErrorReader) OnIsError(ctx context.Context) *ErrorReader_IsError { + c_call := _m.On("IsError", ctx) + return &ErrorReader_IsError{Call: c_call} +} + +func (_m *ErrorReader) OnIsErrorMatch(matchers ...interface{}) *ErrorReader_IsError { + c_call := _m.On("IsError", matchers...) 
+ return &ErrorReader_IsError{Call: c_call} +} + +// IsError provides a mock function with given fields: ctx +func (_m *ErrorReader) IsError(ctx context.Context) (bool, error) { + ret := _m.Called(ctx) + + var r0 bool + if rf, ok := ret.Get(0).(func(context.Context) bool); ok { + r0 = rf(ctx) + } else { + r0 = ret.Get(0).(bool) + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context) error); ok { + r1 = rf(ctx) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +type ErrorReader_ReadError struct { + *mock.Call +} + +func (_m ErrorReader_ReadError) Return(_a0 io.ExecutionError, _a1 error) *ErrorReader_ReadError { + return &ErrorReader_ReadError{Call: _m.Call.Return(_a0, _a1)} +} + +func (_m *ErrorReader) OnReadError(ctx context.Context) *ErrorReader_ReadError { + c_call := _m.On("ReadError", ctx) + return &ErrorReader_ReadError{Call: c_call} +} + +func (_m *ErrorReader) OnReadErrorMatch(matchers ...interface{}) *ErrorReader_ReadError { + c_call := _m.On("ReadError", matchers...) 
+ return &ErrorReader_ReadError{Call: c_call} +} + +// ReadError provides a mock function with given fields: ctx +func (_m *ErrorReader) ReadError(ctx context.Context) (io.ExecutionError, error) { + ret := _m.Called(ctx) + + var r0 io.ExecutionError + if rf, ok := ret.Get(0).(func(context.Context) io.ExecutionError); ok { + r0 = rf(ctx) + } else { + r0 = ret.Get(0).(io.ExecutionError) + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context) error); ok { + r1 = rf(ctx) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} diff --git a/flyteplugins/go/tasks/pluginmachinery/ioutils/in_memory_output_reader_test.go b/flyteplugins/go/tasks/pluginmachinery/ioutils/in_memory_output_reader_test.go index ad82fca8a3..52b58b732d 100644 --- a/flyteplugins/go/tasks/pluginmachinery/ioutils/in_memory_output_reader_test.go +++ b/flyteplugins/go/tasks/pluginmachinery/ioutils/in_memory_output_reader_test.go @@ -39,7 +39,7 @@ func TestInMemoryOutputReader(t *testing.T) { assert.NoError(t, err) literalMap, executionErr, err := or.Read(ctx) - assert.Equal(t, lt, literalMap.Literals) + assert.Equal(t, lt, literalMap.GetLiterals()) assert.Nil(t, executionErr) assert.NoError(t, err) } diff --git a/flyteplugins/go/tasks/pluginmachinery/ioutils/precomputed_shardselector.go b/flyteplugins/go/tasks/pluginmachinery/ioutils/precomputed_shardselector.go index 2a20272f6e..909d1fedfa 100644 --- a/flyteplugins/go/tasks/pluginmachinery/ioutils/precomputed_shardselector.go +++ b/flyteplugins/go/tasks/pluginmachinery/ioutils/precomputed_shardselector.go @@ -68,6 +68,6 @@ func NewBase36PrefixShardSelector(ctx context.Context) (ShardSelector, error) { func NewConstantShardSelector(shards []string) ShardSelector { return &PrecomputedShardSelector{ precomputedPrefixes: shards, - buckets: uint32(len(shards)), + buckets: uint32(len(shards)), // #nosec G115 } } diff --git a/flyteplugins/go/tasks/pluginmachinery/ioutils/remote_file_output_reader.go 
b/flyteplugins/go/tasks/pluginmachinery/ioutils/remote_file_output_reader.go index ae880f3640..c28f5ac94d 100644 --- a/flyteplugins/go/tasks/pluginmachinery/ioutils/remote_file_output_reader.go +++ b/flyteplugins/go/tasks/pluginmachinery/ioutils/remote_file_output_reader.go @@ -105,7 +105,7 @@ func (s *singleFileErrorReader) IsError(ctx context.Context) (bool, error) { } func errorDoc2ExecutionError(errorDoc *core.ErrorDocument, errorFilePath storage.DataReference) io.ExecutionError { - if errorDoc.Error == nil { + if errorDoc.GetError() == nil { return io.ExecutionError{ IsRecoverable: true, ExecutionError: &core.ExecutionError{ @@ -117,15 +117,15 @@ func errorDoc2ExecutionError(errorDoc *core.ErrorDocument, errorFilePath storage } executionError := io.ExecutionError{ ExecutionError: &core.ExecutionError{ - Code: errorDoc.Error.Code, - Message: errorDoc.Error.Message, - Kind: errorDoc.Error.Origin, - Timestamp: errorDoc.Error.Timestamp, - Worker: errorDoc.Error.Worker, + Code: errorDoc.GetError().GetCode(), + Message: errorDoc.GetError().GetMessage(), + Kind: errorDoc.GetError().GetOrigin(), + Timestamp: errorDoc.GetError().GetTimestamp(), + Worker: errorDoc.GetError().GetWorker(), }, } - if errorDoc.Error.Kind == core.ContainerError_RECOVERABLE { + if errorDoc.GetError().GetKind() == core.ContainerError_RECOVERABLE { executionError.IsRecoverable = true } @@ -201,7 +201,7 @@ func (e *earliestFileErrorReader) ReadError(ctx context.Context) (io.ExecutionEr if err != nil { return io.ExecutionError{}, errors.Wrapf(err, "failed to read error file @[%s]", errorFilePath.String()) } - timestamp := errorDoc.Error.GetTimestamp().AsTime() + timestamp := errorDoc.GetError().GetTimestamp().AsTime() if earliestTimestamp == nil || earliestTimestamp.After(timestamp) { earliestExecutionError = errorDoc2ExecutionError(errorDoc, errorFilePath) earliestTimestamp = ×tamp diff --git a/flyteplugins/go/tasks/pluginmachinery/ioutils/remote_file_output_reader_test.go 
b/flyteplugins/go/tasks/pluginmachinery/ioutils/remote_file_output_reader_test.go index 1cd7099f78..b2dbb0ba55 100644 --- a/flyteplugins/go/tasks/pluginmachinery/ioutils/remote_file_output_reader_test.go +++ b/flyteplugins/go/tasks/pluginmachinery/ioutils/remote_file_output_reader_test.go @@ -91,7 +91,7 @@ func TestReadOrigin(t *testing.T) { incomingErrorDoc := args.Get(2) assert.NotNil(t, incomingErrorDoc) casted := incomingErrorDoc.(*core.ErrorDocument) - casted.Error = errorDoc.Error + casted.Error = errorDoc.GetError() }).Return(nil) store.OnHead(ctx, storage.DataReference("deck.html")).Return(MemoryMetadata{ @@ -129,7 +129,7 @@ func TestReadOrigin(t *testing.T) { incomingErrorDoc := args.Get(2) assert.NotNil(t, incomingErrorDoc) casted := incomingErrorDoc.(*core.ErrorDocument) - casted.Error = errorDoc.Error + casted.Error = errorDoc.GetError() }).Return(nil) maxPayloadSize := int64(0) @@ -168,21 +168,21 @@ func TestReadOrigin(t *testing.T) { incomingErrorDoc := args.Get(2) assert.NotNil(t, incomingErrorDoc) casted := incomingErrorDoc.(*core.ErrorDocument) - casted.Error = errorDoc.Error + casted.Error = errorDoc.GetError() }).Return(nil) store.OnList(ctx, storage.DataReference("s3://errors/error"), 1000, storage.NewCursorAtStart()).Return( - []storage.DataReference{"error-0.pb", "error-1.pb", "error-2.pb"}, storage.NewCursorAtEnd(), nil) + []storage.DataReference{"s3://errors/error-0.pb", "s3://errors/error-1.pb", "s3://errors/error-2.pb"}, storage.NewCursorAtEnd(), nil) - store.OnHead(ctx, storage.DataReference("error-0.pb")).Return(MemoryMetadata{ + store.OnHead(ctx, storage.DataReference("s3://errors/error-0.pb")).Return(MemoryMetadata{ exists: true, }, nil) - store.OnHead(ctx, storage.DataReference("error-1.pb")).Return(MemoryMetadata{ + store.OnHead(ctx, storage.DataReference("s3://errors/error-1.pb")).Return(MemoryMetadata{ exists: true, }, nil) - store.OnHead(ctx, storage.DataReference("error-2.pb")).Return(MemoryMetadata{ + store.OnHead(ctx, 
storage.DataReference("s3://errors/error-2.pb")).Return(MemoryMetadata{ exists: true, }, nil) @@ -227,13 +227,13 @@ func TestReadOrigin(t *testing.T) { incomingErrorDoc := args.Get(2) assert.NotNil(t, incomingErrorDoc) casted := incomingErrorDoc.(*core.ErrorDocument) - casted.Error = errorDoc.Error + casted.Error = errorDoc.GetError() }).Return(nil) store.OnList(ctx, storage.DataReference("s3://errors/error"), 1000, storage.NewCursorAtStart()).Return( - []storage.DataReference{"error.pb"}, storage.NewCursorAtEnd(), nil) + []storage.DataReference{"s3://errors/error.pb"}, storage.NewCursorAtEnd(), nil) - store.OnHead(ctx, storage.DataReference("error.pb")).Return(MemoryMetadata{ + store.OnHead(ctx, storage.DataReference("s3://errors/error.pb")).Return(MemoryMetadata{ exists: true, }, nil) diff --git a/flyteplugins/go/tasks/pluginmachinery/tasklog/template.go b/flyteplugins/go/tasks/pluginmachinery/tasklog/template.go index 19aae6ba7c..e8b7a4abed 100644 --- a/flyteplugins/go/tasks/pluginmachinery/tasklog/template.go +++ b/flyteplugins/go/tasks/pluginmachinery/tasklog/template.go @@ -120,44 +120,44 @@ func (input Input) templateVars() []TemplateVar { }, TemplateVar{ defaultRegexes.TaskRetryAttempt, - strconv.FormatUint(uint64(taskExecutionIdentifier.RetryAttempt), 10), + strconv.FormatUint(uint64(taskExecutionIdentifier.GetRetryAttempt()), 10), }, ) - if taskExecutionIdentifier.TaskId != nil { + if taskExecutionIdentifier.GetTaskId() != nil { vars = append( vars, TemplateVar{ defaultRegexes.TaskID, - taskExecutionIdentifier.TaskId.Name, + taskExecutionIdentifier.GetTaskId().GetName(), }, TemplateVar{ defaultRegexes.TaskVersion, - taskExecutionIdentifier.TaskId.Version, + taskExecutionIdentifier.GetTaskId().GetVersion(), }, TemplateVar{ defaultRegexes.TaskProject, - taskExecutionIdentifier.TaskId.Project, + taskExecutionIdentifier.GetTaskId().GetProject(), }, TemplateVar{ defaultRegexes.TaskDomain, - taskExecutionIdentifier.TaskId.Domain, + 
taskExecutionIdentifier.GetTaskId().GetDomain(), }, ) } - if taskExecutionIdentifier.NodeExecutionId != nil && taskExecutionIdentifier.NodeExecutionId.ExecutionId != nil { + if taskExecutionIdentifier.GetNodeExecutionId() != nil && taskExecutionIdentifier.GetNodeExecutionId().GetExecutionId() != nil { vars = append( vars, TemplateVar{ defaultRegexes.ExecutionName, - taskExecutionIdentifier.NodeExecutionId.ExecutionId.Name, + taskExecutionIdentifier.GetNodeExecutionId().GetExecutionId().GetName(), }, TemplateVar{ defaultRegexes.ExecutionProject, - taskExecutionIdentifier.NodeExecutionId.ExecutionId.Project, + taskExecutionIdentifier.GetNodeExecutionId().GetExecutionId().GetProject(), }, TemplateVar{ defaultRegexes.ExecutionDomain, - taskExecutionIdentifier.NodeExecutionId.ExecutionId.Domain, + taskExecutionIdentifier.GetNodeExecutionId().GetExecutionId().GetDomain(), }, ) } @@ -219,9 +219,11 @@ func (p TemplateLogPlugin) GetTaskLogs(input Input) (Output, error) { } } taskLogs = append(taskLogs, &core.TaskLog{ - Uri: replaceAll(dynamicTemplateURI, templateVars), - Name: p.DisplayName + input.LogName, - MessageFormat: p.MessageFormat, + Uri: replaceAll(dynamicTemplateURI, templateVars), + Name: p.DisplayName + input.LogName, + MessageFormat: p.MessageFormat, + ShowWhilePending: p.ShowWhilePending, + HideOnceFinished: p.HideOnceFinished, }) } } diff --git a/flyteplugins/go/tasks/pluginmachinery/tasklog/template_test.go b/flyteplugins/go/tasks/pluginmachinery/tasklog/template_test.go index 42226bd7c0..2e885732f8 100644 --- a/flyteplugins/go/tasks/pluginmachinery/tasklog/template_test.go +++ b/flyteplugins/go/tasks/pluginmachinery/tasklog/template_test.go @@ -478,6 +478,37 @@ func TestTemplateLogPlugin(t *testing.T) { }, }, }, + { + "flyteinteractive", + TemplateLogPlugin{ + Name: "vscode", + DynamicTemplateURIs: []TemplateURI{"vscode://flyteinteractive:{{ .taskConfig.port }}/{{ .podName }}"}, + MessageFormat: core.TaskLog_JSON, + HideOnceFinished: true, + 
ShowWhilePending: true, + }, + args{ + input: Input{ + PodName: "my-pod-name", + TaskTemplate: &core.TaskTemplate{ + Config: map[string]string{ + "link_type": "vscode", + "port": "1234", + }, + }, + }, + }, + Output{ + TaskLogs: []*core.TaskLog{ + { + Uri: "vscode://flyteinteractive:1234/my-pod-name", + MessageFormat: core.TaskLog_JSON, + ShowWhilePending: true, + HideOnceFinished: true, + }, + }, + }, + }, { "flyteinteractive - no link_type in task template", TemplateLogPlugin{ diff --git a/flyteplugins/go/tasks/pluginmachinery/utils/secrets/marshaler.go b/flyteplugins/go/tasks/pluginmachinery/utils/secrets/marshaler.go index b6ea59020b..91423a9929 100644 --- a/flyteplugins/go/tasks/pluginmachinery/utils/secrets/marshaler.go +++ b/flyteplugins/go/tasks/pluginmachinery/utils/secrets/marshaler.go @@ -53,8 +53,8 @@ func unmarshalSecret(encoded string) (*core.Secret, error) { func MarshalSecretsToMapStrings(secrets []*core.Secret) (map[string]string, error) { res := make(map[string]string, len(secrets)) for index, s := range secrets { - if _, found := core.Secret_MountType_name[int32(s.MountRequirement)]; !found { - return nil, fmt.Errorf("invalid mount requirement [%v]", s.MountRequirement) + if _, found := core.Secret_MountType_name[int32(s.GetMountRequirement())]; !found { + return nil, fmt.Errorf("invalid mount requirement [%v]", s.GetMountRequirement()) } encodedSecret := marshalSecret(s) diff --git a/flyteplugins/go/tasks/pluginmachinery/workqueue/queue.go b/flyteplugins/go/tasks/pluginmachinery/workqueue/queue.go index fce1acde89..cf5fc8e451 100644 --- a/flyteplugins/go/tasks/pluginmachinery/workqueue/queue.go +++ b/flyteplugins/go/tasks/pluginmachinery/workqueue/queue.go @@ -227,6 +227,7 @@ func (q *queue) Start(ctx context.Context) error { wrapper.retryCount++ wrapper.err = err + // #nosec G115 if wrapper.retryCount >= uint(q.maxRetries) { logger.Debugf(ctx, "WorkItem [%v] exhausted all retries. 
Last Error: %v.", wrapper.ID(), err) diff --git a/flyteplugins/go/tasks/plugins/array/arraystatus/status_test.go b/flyteplugins/go/tasks/plugins/array/arraystatus/status_test.go index 96aabcfcc6..a9ebea2825 100644 --- a/flyteplugins/go/tasks/plugins/array/arraystatus/status_test.go +++ b/flyteplugins/go/tasks/plugins/array/arraystatus/status_test.go @@ -29,7 +29,7 @@ func TestArrayStatus_HashCode(t *testing.T) { }) t.Run("Populated Equal", func(t *testing.T) { - expectedDetailed, err := bitarray.NewCompactArray(size, bitarray.Item(len(types.Phases)-1)) + expectedDetailed, err := bitarray.NewCompactArray(size, bitarray.Item(len(types.Phases)-1)) // #nosec G115 assert.Nil(t, err) expected := ArrayStatus{ Detailed: expectedDetailed, @@ -37,7 +37,7 @@ func TestArrayStatus_HashCode(t *testing.T) { expectedHashCode, err := expected.HashCode() assert.Nil(t, err) - actualDetailed, err := bitarray.NewCompactArray(size, bitarray.Item(len(types.Phases)-1)) + actualDetailed, err := bitarray.NewCompactArray(size, bitarray.Item(len(types.Phases)-1)) // #nosec G115 assert.Nil(t, err) actual := ArrayStatus{ Detailed: actualDetailed, @@ -49,7 +49,7 @@ func TestArrayStatus_HashCode(t *testing.T) { }) t.Run("Updated Not Equal", func(t *testing.T) { - expectedDetailed, err := bitarray.NewCompactArray(size, bitarray.Item(len(types.Phases)-1)) + expectedDetailed, err := bitarray.NewCompactArray(size, bitarray.Item(len(types.Phases)-1)) // #nosec G115 assert.Nil(t, err) expectedDetailed.SetItem(0, uint64(1)) expected := ArrayStatus{ @@ -58,7 +58,7 @@ func TestArrayStatus_HashCode(t *testing.T) { expectedHashCode, err := expected.HashCode() assert.Nil(t, err) - actualDetailed, err := bitarray.NewCompactArray(size, bitarray.Item(len(types.Phases)-1)) + actualDetailed, err := bitarray.NewCompactArray(size, bitarray.Item(len(types.Phases)-1)) // #nosec G115 assert.Nil(t, err) actual := ArrayStatus{ Detailed: actualDetailed, @@ -70,7 +70,7 @@ func TestArrayStatus_HashCode(t *testing.T) { }) 
t.Run("Updated Equal", func(t *testing.T) { - expectedDetailed, err := bitarray.NewCompactArray(size, bitarray.Item(len(types.Phases)-1)) + expectedDetailed, err := bitarray.NewCompactArray(size, bitarray.Item(len(types.Phases)-1)) // #nosec G115 assert.Nil(t, err) expectedDetailed.SetItem(0, uint64(1)) expected := ArrayStatus{ @@ -79,7 +79,7 @@ func TestArrayStatus_HashCode(t *testing.T) { expectedHashCode, err := expected.HashCode() assert.Nil(t, err) - actualDetailed, err := bitarray.NewCompactArray(size, bitarray.Item(len(types.Phases)-1)) + actualDetailed, err := bitarray.NewCompactArray(size, bitarray.Item(len(types.Phases)-1)) // #nosec G115 actualDetailed.SetItem(0, uint64(1)) assert.Nil(t, err) actual := ArrayStatus{ diff --git a/flyteplugins/go/tasks/plugins/array/awsbatch/client_test.go b/flyteplugins/go/tasks/plugins/array/awsbatch/client_test.go index fe35f74e2a..e135aee020 100644 --- a/flyteplugins/go/tasks/plugins/array/awsbatch/client_test.go +++ b/flyteplugins/go/tasks/plugins/array/awsbatch/client_test.go @@ -29,7 +29,7 @@ func TestClient_SubmitJob(t *testing.T) { c := NewCustomBatchClient(mocks.NewMockAwsBatchClient(), "account-id", "test-region", rateLimiter, rateLimiter).(*client) store, err := NewJobStore(ctx, c, config.JobStoreConfig{ CacheSize: 1, - Parallelizm: 1, + Parallelism: 1, BatchChunkSize: 1, ResyncPeriod: stdConfig.Duration{Duration: 1000}, }, EventHandler{}, promutils.NewTestScope()) diff --git a/flyteplugins/go/tasks/plugins/array/awsbatch/config/config.go b/flyteplugins/go/tasks/plugins/array/awsbatch/config/config.go index 7b8a484140..7815c23ff5 100644 --- a/flyteplugins/go/tasks/plugins/array/awsbatch/config/config.go +++ b/flyteplugins/go/tasks/plugins/array/awsbatch/config/config.go @@ -30,7 +30,7 @@ type Config struct { type JobStoreConfig struct { CacheSize int `json:"jacheSize" pflag:",Maximum informer cache size as number of items. 
Caches are used as an optimization to lessen the load on AWS Services."` - Parallelizm int `json:"parallelizm"` + Parallelism int `json:"parallelism"` BatchChunkSize int `json:"batchChunkSize" pflag:",Determines the size of each batch sent to GetJobDetails api."` ResyncPeriod config.Duration `json:"resyncPeriod" pflag:",Defines the duration for syncing job details from AWS Batch."` } @@ -39,7 +39,7 @@ var ( defaultConfig = &Config{ JobStoreConfig: JobStoreConfig{ CacheSize: 10000, - Parallelizm: 20, + Parallelism: 20, BatchChunkSize: 100, ResyncPeriod: config.Duration{Duration: 30 * time.Second}, }, diff --git a/flyteplugins/go/tasks/plugins/array/awsbatch/config/config_flags.go b/flyteplugins/go/tasks/plugins/array/awsbatch/config/config_flags.go index a4cb6b3b0c..5b482fb99d 100755 --- a/flyteplugins/go/tasks/plugins/array/awsbatch/config/config_flags.go +++ b/flyteplugins/go/tasks/plugins/array/awsbatch/config/config_flags.go @@ -51,7 +51,7 @@ func (Config) mustMarshalJSON(v json.Marshaler) string { func (cfg Config) GetPFlagSet(prefix string) *pflag.FlagSet { cmdFlags := pflag.NewFlagSet("Config", pflag.ExitOnError) cmdFlags.Int(fmt.Sprintf("%v%v", prefix, "jobStoreConfig.jacheSize"), defaultConfig.JobStoreConfig.CacheSize, "Maximum informer cache size as number of items. 
Caches are used as an optimization to lessen the load on AWS Services.") - cmdFlags.Int(fmt.Sprintf("%v%v", prefix, "jobStoreConfig.parallelizm"), defaultConfig.JobStoreConfig.Parallelizm, "") + cmdFlags.Int(fmt.Sprintf("%v%v", prefix, "jobStoreConfig.parallelism"), defaultConfig.JobStoreConfig.Parallelism, "") cmdFlags.Int(fmt.Sprintf("%v%v", prefix, "jobStoreConfig.batchChunkSize"), defaultConfig.JobStoreConfig.BatchChunkSize, "Determines the size of each batch sent to GetJobDetails api.") cmdFlags.String(fmt.Sprintf("%v%v", prefix, "jobStoreConfig.resyncPeriod"), defaultConfig.JobStoreConfig.ResyncPeriod.String(), "Defines the duration for syncing job details from AWS Batch.") cmdFlags.Int(fmt.Sprintf("%v%v", prefix, "defCacheSize"), defaultConfig.JobDefCacheSize, "Maximum job definition cache size as number of items. Caches are used as an optimization to lessen the load on AWS Services.") diff --git a/flyteplugins/go/tasks/plugins/array/awsbatch/config/config_flags_test.go b/flyteplugins/go/tasks/plugins/array/awsbatch/config/config_flags_test.go index 62d8dc5ac2..9d06838911 100755 --- a/flyteplugins/go/tasks/plugins/array/awsbatch/config/config_flags_test.go +++ b/flyteplugins/go/tasks/plugins/array/awsbatch/config/config_flags_test.go @@ -113,14 +113,14 @@ func TestConfig_SetFlags(t *testing.T) { } }) }) - t.Run("Test_jobStoreConfig.parallelizm", func(t *testing.T) { + t.Run("Test_jobStoreConfig.parallelism", func(t *testing.T) { t.Run("Override", func(t *testing.T) { testValue := "1" - cmdFlags.Set("jobStoreConfig.parallelizm", testValue) - if vInt, err := cmdFlags.GetInt("jobStoreConfig.parallelizm"); err == nil { - testDecodeJson_Config(t, fmt.Sprintf("%v", vInt), &actual.JobStoreConfig.Parallelizm) + cmdFlags.Set("jobStoreConfig.parallelism", testValue) + if vInt, err := cmdFlags.GetInt("jobStoreConfig.parallelism"); err == nil { + testDecodeJson_Config(t, fmt.Sprintf("%v", vInt), &actual.JobStoreConfig.Parallelism) } else { assert.FailNow(t, err.Error()) 
diff --git a/flyteplugins/go/tasks/plugins/array/awsbatch/job_config.go b/flyteplugins/go/tasks/plugins/array/awsbatch/job_config.go index 1ca10cb39d..2831dd28ae 100644 --- a/flyteplugins/go/tasks/plugins/array/awsbatch/job_config.go +++ b/flyteplugins/go/tasks/plugins/array/awsbatch/job_config.go @@ -41,7 +41,7 @@ func (j *JobConfig) setKeyIfKnown(key, value string) bool { func (j *JobConfig) MergeFromKeyValuePairs(pairs []*core.KeyValuePair) *JobConfig { for _, entry := range pairs { - j.setKeyIfKnown(entry.Key, entry.Value) + j.setKeyIfKnown(entry.GetKey(), entry.GetValue()) } return j diff --git a/flyteplugins/go/tasks/plugins/array/awsbatch/job_definition.go b/flyteplugins/go/tasks/plugins/array/awsbatch/job_definition.go index 1ef9e4ec5b..acd5f124dd 100644 --- a/flyteplugins/go/tasks/plugins/array/awsbatch/job_definition.go +++ b/flyteplugins/go/tasks/plugins/array/awsbatch/job_definition.go @@ -19,8 +19,8 @@ const defaultComputeEngine = "EC2" const platformCapabilitiesConfigKey = "platformCapabilities" func getContainerImage(_ context.Context, task *core.TaskTemplate) string { - if task.GetContainer() != nil && len(task.GetContainer().Image) > 0 { - return task.GetContainer().Image + if task.GetContainer() != nil && len(task.GetContainer().GetImage()) > 0 { + return task.GetContainer().GetImage() } return "" diff --git a/flyteplugins/go/tasks/plugins/array/awsbatch/jobs_store.go b/flyteplugins/go/tasks/plugins/array/awsbatch/jobs_store.go index 16d44b490e..06a1d7d155 100644 --- a/flyteplugins/go/tasks/plugins/array/awsbatch/jobs_store.go +++ b/flyteplugins/go/tasks/plugins/array/awsbatch/jobs_store.go @@ -372,7 +372,7 @@ func NewJobStore(ctx context.Context, batchClient Client, cfg config.JobStoreCon autoCache, err := cache.NewAutoRefreshBatchedCache("aws-batch-jobs", batchJobsForSync(ctx, cfg.BatchChunkSize), syncBatches(ctx, store, handler, cfg.BatchChunkSize), workqueue.DefaultControllerRateLimiter(), cfg.ResyncPeriod.Duration, - cfg.Parallelizm, 
cfg.CacheSize, scope) + uint(cfg.Parallelism), uint(cfg.CacheSize), scope) // #nosec G115 store.AutoRefresh = autoCache return store, err diff --git a/flyteplugins/go/tasks/plugins/array/awsbatch/jobs_store_test.go b/flyteplugins/go/tasks/plugins/array/awsbatch/jobs_store_test.go index 8196925e15..122d03c71a 100644 --- a/flyteplugins/go/tasks/plugins/array/awsbatch/jobs_store_test.go +++ b/flyteplugins/go/tasks/plugins/array/awsbatch/jobs_store_test.go @@ -35,7 +35,7 @@ func newJobsStore(t testing.TB, batchClient Client) *JobStore { func newJobsStoreWithSize(t testing.TB, batchClient Client, size int) *JobStore { store, err := NewJobStore(context.TODO(), batchClient, config.JobStoreConfig{ CacheSize: size, - Parallelizm: 1, + Parallelism: 1, BatchChunkSize: 2, ResyncPeriod: config2.Duration{Duration: 1000}, }, EventHandler{}, promutils.NewTestScope()) diff --git a/flyteplugins/go/tasks/plugins/array/awsbatch/launcher.go b/flyteplugins/go/tasks/plugins/array/awsbatch/launcher.go index 609bab6cf7..d42c5ea0fe 100644 --- a/flyteplugins/go/tasks/plugins/array/awsbatch/launcher.go +++ b/flyteplugins/go/tasks/plugins/array/awsbatch/launcher.go @@ -33,8 +33,8 @@ func LaunchSubTasks(ctx context.Context, tCtx core.TaskExecutionContext, batchCl } // If the original job was marked as an array (not a single job), then make sure to set it up correctly. - if t.Type == arrayTaskType { - logger.Debugf(ctx, "Task is of type [%v]. Will setup task index env vars.", t.Type) + if t.GetType() == arrayTaskType { + logger.Debugf(ctx, "Task is of type [%v]. 
Will setup task index env vars.", t.GetType()) batchInput = UpdateBatchInputForArray(ctx, batchInput, int64(size)) } @@ -46,7 +46,7 @@ func LaunchSubTasks(ctx context.Context, tCtx core.TaskExecutionContext, batchCl metrics.SubTasksSubmitted.Add(ctx, float64(size)) - retryAttemptsArray, err := bitarray.NewCompactArray(uint(size), bitarray.Item(pluginConfig.MaxRetries)) + retryAttemptsArray, err := bitarray.NewCompactArray(uint(size), bitarray.Item(pluginConfig.MaxRetries)) // #nosec G115 if err != nil { logger.Errorf(context.Background(), "Failed to create attempts compact array with [count: %v, maxValue: %v]", size, pluginConfig.MaxRetries) return nil, err @@ -58,7 +58,7 @@ func LaunchSubTasks(ctx context.Context, tCtx core.TaskExecutionContext, batchCl Summary: arraystatus.ArraySummary{ core.PhaseQueued: int64(size), }, - Detailed: arrayCore.NewPhasesCompactArray(uint(size)), + Detailed: arrayCore.NewPhasesCompactArray(uint(size)), // #nosec G115 }). SetReason("Successfully launched subtasks."). 
SetRetryAttempts(retryAttemptsArray) diff --git a/flyteplugins/go/tasks/plugins/array/awsbatch/monitor.go b/flyteplugins/go/tasks/plugins/array/awsbatch/monitor.go index 62bc5103dc..d5c05f6cd0 100644 --- a/flyteplugins/go/tasks/plugins/array/awsbatch/monitor.go +++ b/flyteplugins/go/tasks/plugins/array/awsbatch/monitor.go @@ -45,7 +45,7 @@ func CheckSubTasksState(ctx context.Context, tCtx core.TaskExecutionContext, job } else if taskTemplate == nil { return nil, errors.Errorf(errors.BadTaskSpecification, "Required value not set, taskTemplate is nil") } - retry := toRetryStrategy(ctx, toBackoffLimit(taskTemplate.Metadata), cfg.MinRetries, cfg.MaxRetries) + retry := toRetryStrategy(ctx, toBackoffLimit(taskTemplate.GetMetadata()), cfg.MinRetries, cfg.MaxRetries) // If job isn't currently being monitored (recovering from a restart?), add it to the sync-cache and return if job == nil { @@ -67,7 +67,7 @@ func CheckSubTasksState(ctx context.Context, tCtx core.TaskExecutionContext, job msg := errorcollector.NewErrorMessageCollector() newArrayStatus := arraystatus.ArrayStatus{ Summary: arraystatus.ArraySummary{}, - Detailed: arrayCore.NewPhasesCompactArray(uint(currentState.GetExecutionArraySize())), + Detailed: arrayCore.NewPhasesCompactArray(uint(currentState.GetExecutionArraySize())), // #nosec G115 } currentSubTaskPhaseHash, err := currentState.GetArrayStatus().HashCode() @@ -126,7 +126,7 @@ func CheckSubTasksState(ctx context.Context, tCtx core.TaskExecutionContext, job } } - newArrayStatus.Detailed.SetItem(childIdx, bitarray.Item(actualPhase)) + newArrayStatus.Detailed.SetItem(childIdx, bitarray.Item(actualPhase)) // #nosec G115 newArrayStatus.Summary.Inc(actualPhase) parentState.RetryAttempts.SetItem(childIdx, bitarray.Item(len(subJob.Attempts))) } diff --git a/flyteplugins/go/tasks/plugins/array/awsbatch/task_links.go b/flyteplugins/go/tasks/plugins/array/awsbatch/task_links.go index caf2e51a38..64b64ac168 100644 --- 
a/flyteplugins/go/tasks/plugins/array/awsbatch/task_links.go +++ b/flyteplugins/go/tasks/plugins/array/awsbatch/task_links.go @@ -89,9 +89,9 @@ func GetTaskLinks(ctx context.Context, taskMeta pluginCore.TaskExecutionMetadata externalResources = append(externalResources, &pluginCore.ExternalResource{ ExternalID: subJob.ID, - Index: uint32(originalIndex), + Index: uint32(originalIndex), // #nosec G115 Logs: subTaskLogLinks, - RetryAttempt: uint32(retryAttempt), + RetryAttempt: uint32(retryAttempt), // #nosec G115 Phase: finalPhase, }) } diff --git a/flyteplugins/go/tasks/plugins/array/awsbatch/transformer.go b/flyteplugins/go/tasks/plugins/array/awsbatch/transformer.go index 1eaef150d0..936269f2b1 100644 --- a/flyteplugins/go/tasks/plugins/array/awsbatch/transformer.go +++ b/flyteplugins/go/tasks/plugins/array/awsbatch/transformer.go @@ -109,9 +109,9 @@ func FlyteTaskToBatchInput(ctx context.Context, tCtx pluginCore.TaskExecutionCon } submitJobInput.SetJobName(tCtx.TaskExecutionMetadata().GetTaskExecutionID().GetGeneratedName()). SetJobDefinition(jobDefinition).SetJobQueue(jobConfig.DynamicTaskQueue). - SetRetryStrategy(toRetryStrategy(ctx, toBackoffLimit(taskTemplate.Metadata), cfg.MinRetries, cfg.MaxRetries)). + SetRetryStrategy(toRetryStrategy(ctx, toBackoffLimit(taskTemplate.GetMetadata()), cfg.MinRetries, cfg.MaxRetries)). SetContainerOverrides(toContainerOverrides(ctx, append(cmd, args...), &resources, envVars)). 
- SetTimeout(toTimeout(taskTemplate.Metadata.GetTimeout(), cfg.DefaultTimeOut.Duration)) + SetTimeout(toTimeout(taskTemplate.GetMetadata().GetTimeout(), cfg.DefaultTimeOut.Duration)) return submitJobInput, nil } @@ -159,7 +159,7 @@ func getEnvVarsForTask(ctx context.Context, execID pluginCore.TaskExecutionID, c } func toTimeout(templateTimeout *duration.Duration, defaultTimeout time.Duration) *batch.JobTimeout { - if templateTimeout != nil && templateTimeout.Seconds > 0 { + if templateTimeout != nil && templateTimeout.GetSeconds() > 0 { return (&batch.JobTimeout{}).SetAttemptDurationSeconds(templateTimeout.GetSeconds()) } @@ -239,11 +239,11 @@ func toRetryStrategy(_ context.Context, backoffLimit *int32, minRetryAttempts, m } func toBackoffLimit(metadata *idlCore.TaskMetadata) *int32 { - if metadata == nil || metadata.Retries == nil { + if metadata == nil || metadata.GetRetries() == nil { return nil } - i := int32(metadata.Retries.Retries) + i := int32(metadata.GetRetries().GetRetries()) // #nosec G115 return &i } diff --git a/flyteplugins/go/tasks/plugins/array/awsbatch/transformer_test.go b/flyteplugins/go/tasks/plugins/array/awsbatch/transformer_test.go index bbe8c88995..642493346e 100644 --- a/flyteplugins/go/tasks/plugins/array/awsbatch/transformer_test.go +++ b/flyteplugins/go/tasks/plugins/array/awsbatch/transformer_test.go @@ -198,7 +198,7 @@ func TestArrayJobToBatchInput(t *testing.T) { batchInput, err := FlyteTaskToBatchInput(ctx, taskCtx, "", &config.Config{}) assert.NoError(t, err) - batchInput = UpdateBatchInputForArray(ctx, batchInput, input.Size) + batchInput = UpdateBatchInputForArray(ctx, batchInput, input.GetSize()) assert.NotNil(t, batchInput) assert.Equal(t, *expectedBatchInput, *batchInput) diff --git a/flyteplugins/go/tasks/plugins/array/catalog.go b/flyteplugins/go/tasks/plugins/array/catalog.go index d6bf5e8820..60b4b224ac 100644 --- a/flyteplugins/go/tasks/plugins/array/catalog.go +++ b/flyteplugins/go/tasks/plugins/array/catalog.go @@ -39,7 
+39,7 @@ func DetermineDiscoverability(ctx context.Context, tCtx core.TaskExecutionContex // Extract the custom plugin pb var arrayJob *idlPlugins.ArrayJob - if taskTemplate.Type == AwsBatchTaskType { + if taskTemplate.GetType() == AwsBatchTaskType { arrayJob = &idlPlugins.ArrayJob{ Parallelism: 1, Size: 1, @@ -48,7 +48,7 @@ func DetermineDiscoverability(ctx context.Context, tCtx core.TaskExecutionContex }, } } else { - arrayJob, err = arrayCore.ToArrayJob(taskTemplate.GetCustom(), taskTemplate.TaskTypeVersion) + arrayJob, err = arrayCore.ToArrayJob(taskTemplate.GetCustom(), taskTemplate.GetTaskTypeVersion()) } if err != nil { return state, err @@ -58,9 +58,9 @@ func DetermineDiscoverability(ctx context.Context, tCtx core.TaskExecutionContex var inputReaders []io.InputReader // Save this in the state - if taskTemplate.TaskTypeVersion == 0 { - state = state.SetOriginalArraySize(arrayJob.Size) - arrayJobSize = arrayJob.Size + if taskTemplate.GetTaskTypeVersion() == 0 { + state = state.SetOriginalArraySize(arrayJob.GetSize()) + arrayJobSize = arrayJob.GetSize() state = state.SetOriginalMinSuccesses(arrayJob.GetMinSuccesses()) // build input readers @@ -77,15 +77,15 @@ func DetermineDiscoverability(ctx context.Context, tCtx core.TaskExecutionContex // identify and validate the size of the array job size := -1 var literalCollection *idlCore.LiteralCollection - for _, literal := range inputs.Literals { + for _, literal := range inputs.GetLiterals() { if literalCollection = literal.GetCollection(); literalCollection != nil { // validate length of input list - if size != -1 && size != len(literalCollection.Literals) { + if size != -1 && size != len(literalCollection.GetLiterals()) { state = state.SetPhase(arrayCore.PhasePermanentFailure, 0).SetReason("all maptask input lists must be the same length") return state, nil } - size = len(literalCollection.Literals) + size = len(literalCollection.GetLiterals()) } } @@ -106,7 +106,7 @@ func DetermineDiscoverability(ctx 
context.Context, tCtx core.TaskExecutionContex arrayJobSize = int64(size) // build input readers - inputReaders = ConstructStaticInputReaders(tCtx.InputReader(), inputs.Literals, size) + inputReaders = ConstructStaticInputReaders(tCtx.InputReader(), inputs.GetLiterals(), size) } if arrayJobSize > maxArrayJobSize { @@ -117,10 +117,10 @@ func DetermineDiscoverability(ctx context.Context, tCtx core.TaskExecutionContex } // If the task is not discoverable, then skip data catalog work and move directly to launch - if taskTemplate.Metadata == nil || !taskTemplate.Metadata.Discoverable { + if taskTemplate.GetMetadata() == nil || !taskTemplate.GetMetadata().GetDiscoverable() { logger.Infof(ctx, "Task is not discoverable, moving to launch phase...") // Set an all set indexes to cache. This task won't try to write to catalog anyway. - state = state.SetIndexesToCache(arrayCore.InvertBitSet(bitarray.NewBitSet(uint(arrayJobSize)), uint(arrayJobSize))) + state = state.SetIndexesToCache(arrayCore.InvertBitSet(bitarray.NewBitSet(uint(arrayJobSize)), uint(arrayJobSize))) // #nosec G115 state = state.SetPhase(arrayCore.PhasePreLaunch, core.DefaultPhaseVersion).SetReason("Task is not discoverable.") state.SetExecutionArraySize(int(arrayJobSize)) @@ -165,7 +165,7 @@ func DetermineDiscoverability(ctx context.Context, tCtx core.TaskExecutionContex // TODO: maybe add a config option to decide the behavior on catalog failure. logger.Warnf(ctx, "Failing to lookup catalog. Will move on to launching the task. 
Error: %v", err) - state = state.SetIndexesToCache(arrayCore.InvertBitSet(bitarray.NewBitSet(uint(arrayJobSize)), uint(arrayJobSize))) + state = state.SetIndexesToCache(arrayCore.InvertBitSet(bitarray.NewBitSet(uint(arrayJobSize)), uint(arrayJobSize))) // #nosec G115 state = state.SetExecutionArraySize(int(arrayJobSize)) state = state.SetPhase(arrayCore.PhasePreLaunch, core.DefaultPhaseVersion).SetReason(fmt.Sprintf("Skipping cache check due to err [%v]", err)) return state, nil @@ -178,7 +178,7 @@ func DetermineDiscoverability(ctx context.Context, tCtx core.TaskExecutionContex } cachedResults := resp.GetCachedResults() - state = state.SetIndexesToCache(arrayCore.InvertBitSet(cachedResults, uint(arrayJobSize))) + state = state.SetIndexesToCache(arrayCore.InvertBitSet(cachedResults, uint(arrayJobSize))) // #nosec G115 state = state.SetExecutionArraySize(int(arrayJobSize) - resp.GetCachedCount()) // If all the sub-tasks are actually done, then we can just move on. @@ -223,14 +223,14 @@ func WriteToDiscovery(ctx context.Context, tCtx core.TaskExecutionContext, state return state, externalResources, errors.Errorf(errors.BadTaskSpecification, "Required value not set, taskTemplate is nil") } - if tMeta := taskTemplate.Metadata; tMeta == nil || !tMeta.Discoverable { + if tMeta := taskTemplate.GetMetadata(); tMeta == nil || !tMeta.GetDiscoverable() { logger.Debugf(ctx, "Task is not marked as discoverable. 
Moving to [%v] phase.", phaseOnSuccess) return state.SetPhase(phaseOnSuccess, versionOnSuccess).SetReason("Task is not discoverable."), externalResources, nil } var inputReaders []io.InputReader arrayJobSize := int(state.GetOriginalArraySize()) - if taskTemplate.TaskTypeVersion == 0 { + if taskTemplate.GetTaskTypeVersion() == 0 { // input readers inputReaders, err = ConstructRemoteFileInputReaders(ctx, tCtx.DataStore(), tCtx.InputReader().GetInputPrefixPath(), arrayJobSize) if err != nil { @@ -242,7 +242,7 @@ func WriteToDiscovery(ctx context.Context, tCtx core.TaskExecutionContext, state return state, externalResources, errors.Errorf(errors.MetadataAccessFailed, "Could not read inputs and therefore failed to determine array job size") } - inputReaders = ConstructStaticInputReaders(tCtx.InputReader(), inputs.Literals, arrayJobSize) + inputReaders = ConstructStaticInputReaders(tCtx.InputReader(), inputs.GetLiterals(), arrayJobSize) } // output reader @@ -251,8 +251,8 @@ func WriteToDiscovery(ctx context.Context, tCtx core.TaskExecutionContext, state return nil, externalResources, err } - iface := *taskTemplate.Interface - iface.Outputs = makeSingularTaskInterface(iface.Outputs) + iface := taskTemplate.GetInterface() + iface.Outputs = makeSingularTaskInterface(iface.GetOutputs()) // Do not cache failed tasks. Retrieve the final phase from array status and unset the non-successful ones. 
@@ -262,14 +262,15 @@ func WriteToDiscovery(ctx context.Context, tCtx core.TaskExecutionContext, state if !phase.IsSuccess() { // tasksToCache is built on the originalArraySize and ArrayStatus.Detailed is the executionArraySize originalIdx := arrayCore.CalculateOriginalIndex(idx, state.GetIndexesToCache()) - tasksToCache.Clear(uint(originalIdx)) + tasksToCache.Clear(uint(originalIdx)) // #nosec G115 } } // Create catalog put items, but only put the ones that were not originally cached (as read from the catalog results bitset) - catalogWriterItems, err := ConstructCatalogUploadRequests(*tCtx.TaskExecutionMetadata().GetTaskExecutionID().GetID().TaskId, - tCtx.TaskExecutionMetadata().GetTaskExecutionID().GetID(), taskTemplate.Metadata.DiscoveryVersion, - taskTemplate.Metadata.CacheIgnoreInputVars, iface, &tasksToCache, inputReaders, outputReaders) + //nolint:protogetter + catalogWriterItems, err := ConstructCatalogUploadRequests(tCtx.TaskExecutionMetadata().GetTaskExecutionID().GetID().TaskId, + tCtx.TaskExecutionMetadata().GetTaskExecutionID().GetID(), taskTemplate.GetMetadata().GetDiscoveryVersion(), + taskTemplate.GetMetadata().GetCacheIgnoreInputVars(), iface, &tasksToCache, inputReaders, outputReaders) if err != nil { return nil, externalResources, err @@ -292,6 +293,7 @@ func WriteToDiscovery(ctx context.Context, tCtx core.TaskExecutionContext, state externalResources = make([]*core.ExternalResource, 0) for idx, phaseIdx := range state.ArrayStatus.Detailed.GetItems() { originalIdx := arrayCore.CalculateOriginalIndex(idx, state.GetIndexesToCache()) + // #nosec G115 if !tasksToCache.IsSet(uint(originalIdx)) { continue } @@ -299,8 +301,8 @@ func WriteToDiscovery(ctx context.Context, tCtx core.TaskExecutionContext, state externalResources = append(externalResources, &core.ExternalResource{ CacheStatus: idlCore.CatalogCacheStatus_CACHE_POPULATED, - Index: uint32(originalIdx), - RetryAttempt: uint32(state.RetryAttempts.GetItem(idx)), + Index: uint32(originalIdx), // 
#nosec G115 + RetryAttempt: uint32(state.RetryAttempts.GetItem(idx)), // #nosec G115 Phase: core.Phases[phaseIdx], }, ) @@ -337,8 +339,8 @@ func WriteToCatalog(ctx context.Context, ownerSignal core.SignalAsync, catalogCl return false, nil } -func ConstructCatalogUploadRequests(keyID idlCore.Identifier, taskExecID idlCore.TaskExecutionIdentifier, - cacheVersion string, cacheIgnoreInputVars []string, taskInterface idlCore.TypedInterface, whichTasksToCache *bitarray.BitSet, +func ConstructCatalogUploadRequests(keyID *idlCore.Identifier, taskExecID idlCore.TaskExecutionIdentifier, + cacheVersion string, cacheIgnoreInputVars []string, taskInterface *idlCore.TypedInterface, whichTasksToCache *bitarray.BitSet, inputReaders []io.InputReader, outputReaders []io.OutputReader) ([]catalog.UploadRequest, error) { writerWorkItems := make([]catalog.UploadRequest, 0, len(inputReaders)) @@ -349,17 +351,18 @@ func ConstructCatalogUploadRequests(keyID idlCore.Identifier, taskExecID idlCore } for idx, input := range inputReaders { + // #nosec G115 if !whichTasksToCache.IsSet(uint(idx)) { continue } wi := catalog.UploadRequest{ Key: catalog.Key{ - Identifier: keyID, + Identifier: *keyID, InputReader: input, CacheVersion: cacheVersion, CacheIgnoreInputVars: cacheIgnoreInputVars, - TypedInterface: taskInterface, + TypedInterface: *taskInterface, }, ArtifactData: outputReaders[idx], ArtifactMetadata: catalog.Metadata{ @@ -400,6 +403,7 @@ func NewLiteralScalarOfInteger(number int64) *idlCore.Literal { func CatalogBitsetToLiteralCollection(catalogResults *bitarray.BitSet, size int) *idlCore.LiteralCollection { literals := make([]*idlCore.Literal, 0, size) for i := 0; i < size; i++ { + // #nosec G115 if !catalogResults.IsSet(uint(i)) { literals = append(literals, NewLiteralScalarOfInteger(int64(i))) } @@ -410,15 +414,15 @@ func CatalogBitsetToLiteralCollection(catalogResults *bitarray.BitSet, size int) } func makeSingularTaskInterface(varMap *idlCore.VariableMap) *idlCore.VariableMap { - if 
varMap == nil || len(varMap.Variables) == 0 { + if varMap == nil || len(varMap.GetVariables()) == 0 { return varMap } res := &idlCore.VariableMap{ - Variables: make(map[string]*idlCore.Variable, len(varMap.Variables)), + Variables: make(map[string]*idlCore.Variable, len(varMap.GetVariables())), } - for key, val := range varMap.Variables { + for key, val := range varMap.GetVariables() { if val.GetType().GetCollectionType() != nil { res.Variables[key] = &idlCore.Variable{Type: val.GetType().GetCollectionType()} } else { @@ -440,17 +444,17 @@ func ConstructCatalogReaderWorkItems(ctx context.Context, taskReader core.TaskRe workItems := make([]catalog.DownloadRequest, 0, len(inputs)) - iface := *t.Interface - iface.Outputs = makeSingularTaskInterface(iface.Outputs) + iface := t.GetInterface() + iface.Outputs = makeSingularTaskInterface(iface.GetOutputs()) for idx, inputReader := range inputs { // TODO: Check if Identifier or Interface are empty and return err item := catalog.DownloadRequest{ Key: catalog.Key{ - Identifier: *t.Id, - CacheVersion: t.GetMetadata().DiscoveryVersion, + Identifier: *t.Id, //nolint:protogetter + CacheVersion: t.GetMetadata().GetDiscoveryVersion(), InputReader: inputReader, - TypedInterface: iface, + TypedInterface: *iface, }, Target: outputs[idx], } @@ -471,7 +475,7 @@ func ConstructStaticInputReaders(inputPaths io.InputFilePaths, inputLiterals map for inputName, inputLiteral := range inputLiterals { if literalCollection = inputLiteral.GetCollection(); literalCollection != nil { // if literal is a collection then we need to retrieve the specific literal for this subtask index - literals[inputName] = literalCollection.Literals[i] + literals[inputName] = literalCollection.GetLiterals()[i] } else { literals[inputName] = inputLiteral } diff --git a/flyteplugins/go/tasks/plugins/array/catalog_test.go b/flyteplugins/go/tasks/plugins/array/catalog_test.go index 15a36a4dcf..296d2283d4 100644 --- a/flyteplugins/go/tasks/plugins/array/catalog_test.go 
+++ b/flyteplugins/go/tasks/plugins/array/catalog_test.go @@ -102,19 +102,19 @@ var ( func TestNewLiteralScalarOfInteger(t *testing.T) { l := NewLiteralScalarOfInteger(int64(65)) - assert.Equal(t, int64(65), l.Value.(*core.Literal_Scalar).Scalar.Value.(*core.Scalar_Primitive). - Primitive.Value.(*core.Primitive_Integer).Integer) + assert.Equal(t, int64(65), l.GetValue().(*core.Literal_Scalar).Scalar.GetValue().(*core.Scalar_Primitive). + Primitive.GetValue().(*core.Primitive_Integer).Integer) } func TestCatalogBitsetToLiteralCollection(t *testing.T) { ba := bitarray.NewBitSet(3) ba.Set(1) lc := CatalogBitsetToLiteralCollection(ba, 3) - assert.Equal(t, 2, len(lc.Literals)) - assert.Equal(t, int64(0), lc.Literals[0].Value.(*core.Literal_Scalar).Scalar.Value.(*core.Scalar_Primitive). - Primitive.Value.(*core.Primitive_Integer).Integer) - assert.Equal(t, int64(2), lc.Literals[1].Value.(*core.Literal_Scalar).Scalar.Value.(*core.Scalar_Primitive). - Primitive.Value.(*core.Primitive_Integer).Integer) + assert.Equal(t, 2, len(lc.GetLiterals())) + assert.Equal(t, int64(0), lc.GetLiterals()[0].GetValue().(*core.Literal_Scalar).Scalar.GetValue().(*core.Scalar_Primitive). + Primitive.GetValue().(*core.Primitive_Integer).Integer) + assert.Equal(t, int64(2), lc.GetLiterals()[1].GetValue().(*core.Literal_Scalar).Scalar.GetValue().(*core.Scalar_Primitive). 
+ Primitive.GetValue().(*core.Primitive_Integer).Integer) } func runDetermineDiscoverabilityTest(t testing.TB, taskTemplate *core.TaskTemplate, future catalog.DownloadFuture, diff --git a/flyteplugins/go/tasks/plugins/array/core/metadata.go b/flyteplugins/go/tasks/plugins/array/core/metadata.go index 4ac7c71b4c..fcbaa3456d 100644 --- a/flyteplugins/go/tasks/plugins/array/core/metadata.go +++ b/flyteplugins/go/tasks/plugins/array/core/metadata.go @@ -29,10 +29,11 @@ func InitializeExternalResources(ctx context.Context, tCtx core.TaskExecutionCon var childIndex int var phase core.Phase + // #nosec G115 if state.IndexesToCache.IsSet(uint(i)) { // if not cached set to PhaseUndefined and set cacheStatus according to Discoverable phase = core.PhaseUndefined - if taskTemplate.Metadata == nil || !taskTemplate.Metadata.Discoverable { + if taskTemplate.GetMetadata() == nil || !taskTemplate.GetMetadata().GetDiscoverable() { cacheStatus = idlCore.CatalogCacheStatus_CACHE_DISABLED } else { cacheStatus = idlCore.CatalogCacheStatus_CACHE_MISS @@ -54,7 +55,7 @@ func InitializeExternalResources(ctx context.Context, tCtx core.TaskExecutionCon externalResources[i] = &core.ExternalResource{ ExternalID: subTaskID, CacheStatus: cacheStatus, - Index: uint32(i), + Index: uint32(i), // #nosec G115 Logs: nil, RetryAttempt: 0, Phase: phase, diff --git a/flyteplugins/go/tasks/plugins/array/core/metadata_test.go b/flyteplugins/go/tasks/plugins/array/core/metadata_test.go index 262bd3b822..370af258c7 100644 --- a/flyteplugins/go/tasks/plugins/array/core/metadata_test.go +++ b/flyteplugins/go/tasks/plugins/array/core/metadata_test.go @@ -17,9 +17,9 @@ func TestInitializeExternalResources(t *testing.T) { subTaskCount := 10 cachedCount := 4 - indexesToCache := InvertBitSet(bitarray.NewBitSet(uint(subTaskCount)), uint(subTaskCount)) + indexesToCache := InvertBitSet(bitarray.NewBitSet(uint(subTaskCount)), uint(subTaskCount)) // #nosec G115 for i := 0; i < cachedCount; i++ { - 
indexesToCache.Clear(uint(i)) + indexesToCache.Clear(uint(i)) // #nosec G115 } tr := &mocks.TaskReader{} @@ -54,7 +54,7 @@ func TestInitializeExternalResources(t *testing.T) { assert.Nil(t, err) assert.Equal(t, subTaskCount, len(externalResources)) for i, externalResource := range externalResources { - assert.Equal(t, uint32(i), externalResource.Index) + assert.Equal(t, uint32(i), externalResource.Index) // #nosec G115 assert.Equal(t, 0, len(externalResource.Logs)) assert.Equal(t, uint32(0), externalResource.RetryAttempt) if i < cachedCount { diff --git a/flyteplugins/go/tasks/plugins/array/core/state.go b/flyteplugins/go/tasks/plugins/array/core/state.go index a540359b0a..8fcc85946b 100644 --- a/flyteplugins/go/tasks/plugins/array/core/state.go +++ b/flyteplugins/go/tasks/plugins/array/core/state.go @@ -303,7 +303,7 @@ func InvertBitSet(input *bitarray.BitSet, limit uint) *bitarray.BitSet { func NewPhasesCompactArray(count uint) bitarray.CompactArray { // TODO: This is fragile, we should introduce a TaskPhaseCount as the last element in the enum - a, err := bitarray.NewCompactArray(count, bitarray.Item(len(core.Phases)-1)) + a, err := bitarray.NewCompactArray(count, bitarray.Item(len(core.Phases)-1)) // #nosec G115 if err != nil { logger.Warnf(context.Background(), "Failed to create compact array with provided parameters [count: %v]", count) @@ -322,7 +322,7 @@ func CalculateOriginalIndex(childIdx int, toCache *bitarray.BitSet) int { } if childIdx+1 == sum { - return int(i) + return int(i) // #nosec G115 } } diff --git a/flyteplugins/go/tasks/plugins/array/core/state_test.go b/flyteplugins/go/tasks/plugins/array/core/state_test.go index 969c98df20..84ac17d315 100644 --- a/flyteplugins/go/tasks/plugins/array/core/state_test.go +++ b/flyteplugins/go/tasks/plugins/array/core/state_test.go @@ -27,7 +27,7 @@ func TestInvertBitSet(t *testing.T) { assertBitSetsEqual(t, expected, actual, 4) } -func assertBitSetsEqual(t testing.TB, b1, b2 *bitarray.BitSet, len int) { +func 
assertBitSetsEqual(t testing.TB, b1, b2 *bitarray.BitSet, len uint) { if b1 == nil { assert.Nil(t, b2) } else if b2 == nil { @@ -35,7 +35,7 @@ func assertBitSetsEqual(t testing.TB, b1, b2 *bitarray.BitSet, len int) { } assert.Equal(t, b1.Cap(), b2.Cap()) - for i := uint(0); i < uint(len); i++ { + for i := uint(0); i < len; i++ { assert.Equal(t, b1.IsSet(i), b2.IsSet(i), "At index %v", i) } } @@ -43,11 +43,11 @@ func assertBitSetsEqual(t testing.TB, b1, b2 *bitarray.BitSet, len int) { func TestMapArrayStateToPluginPhase(t *testing.T) { ctx := context.Background() - subTaskCount := 3 + subTaskCount := uint(3) - detailedArray := NewPhasesCompactArray(uint(subTaskCount)) - indexesToCache := InvertBitSet(bitarray.NewBitSet(uint(subTaskCount)), uint(subTaskCount)) - retryAttemptsArray, err := bitarray.NewCompactArray(uint(subTaskCount), bitarray.Item(1)) + detailedArray := NewPhasesCompactArray(subTaskCount) + indexesToCache := InvertBitSet(bitarray.NewBitSet(subTaskCount), subTaskCount) + retryAttemptsArray, err := bitarray.NewCompactArray(subTaskCount, bitarray.Item(1)) assert.NoError(t, err) t.Run("start", func(t *testing.T) { diff --git a/flyteplugins/go/tasks/plugins/array/inputs.go b/flyteplugins/go/tasks/plugins/array/inputs.go index e0a7035181..8e4e746530 100644 --- a/flyteplugins/go/tasks/plugins/array/inputs.go +++ b/flyteplugins/go/tasks/plugins/array/inputs.go @@ -20,7 +20,7 @@ func (i arrayJobInputReader) GetInputPath() storage.DataReference { } func GetInputReader(tCtx core.TaskExecutionContext, taskTemplate *idlCore.TaskTemplate) io.InputReader { - if taskTemplate.GetTaskTypeVersion() == 0 && taskTemplate.Type != AwsBatchTaskType { + if taskTemplate.GetTaskTypeVersion() == 0 && taskTemplate.GetType() != AwsBatchTaskType { // Prior to task type version == 1, dynamic type tasks (including array tasks) would write input files for each // individual array task instance. In this case we use a modified input reader to only pass in the parent input // directory. 
diff --git a/flyteplugins/go/tasks/plugins/array/k8s/management.go b/flyteplugins/go/tasks/plugins/array/k8s/management.go index 12eea118cc..e64c3e601a 100644 --- a/flyteplugins/go/tasks/plugins/array/k8s/management.go +++ b/flyteplugins/go/tasks/plugins/array/k8s/management.go @@ -69,7 +69,7 @@ func LaunchAndCheckSubTasksState(ctx context.Context, tCtx core.TaskExecutionCon messageCollector := errorcollector.NewErrorMessageCollector() newArrayStatus := &arraystatus.ArrayStatus{ Summary: arraystatus.ArraySummary{}, - Detailed: arrayCore.NewPhasesCompactArray(uint(currentState.GetExecutionArraySize())), + Detailed: arrayCore.NewPhasesCompactArray(uint(currentState.GetExecutionArraySize())), // #nosec G115 } externalResources = make([]*core.ExternalResource, 0, len(currentState.GetArrayStatus().Detailed.GetItems())) @@ -82,7 +82,7 @@ func LaunchAndCheckSubTasksState(ctx context.Context, tCtx core.TaskExecutionCon // If the current State is newly minted then we must initialize RetryAttempts to track how many // times each subtask is executed. if len(currentState.RetryAttempts.GetItems()) == 0 { - count := uint(currentState.GetExecutionArraySize()) + count := uint(currentState.GetExecutionArraySize()) // #nosec G115 maxValue := bitarray.Item(tCtx.TaskExecutionMetadata().GetMaxAttempts()) retryAttemptsArray, err := bitarray.NewCompactArray(count, maxValue) @@ -104,7 +104,7 @@ func LaunchAndCheckSubTasksState(ctx context.Context, tCtx core.TaskExecutionCon // times the subtask failed due to system issues, this is necessary to correctly evaluate // interruptible subtasks. 
if len(currentState.SystemFailures.GetItems()) == 0 { - count := uint(currentState.GetExecutionArraySize()) + count := uint(currentState.GetExecutionArraySize()) // #nosec G115 maxValue := bitarray.Item(tCtx.TaskExecutionMetadata().GetMaxAttempts()) systemFailuresArray, err := bitarray.NewCompactArray(count, maxValue) @@ -134,13 +134,13 @@ func LaunchAndCheckSubTasksState(ctx context.Context, tCtx core.TaskExecutionCon return currentState, externalResources, errors.Errorf(errors.BadTaskSpecification, "Required value not set, taskTemplate is nil") } - arrayJob, err := arrayCore.ToArrayJob(taskTemplate.GetCustom(), taskTemplate.TaskTypeVersion) + arrayJob, err := arrayCore.ToArrayJob(taskTemplate.GetCustom(), taskTemplate.GetTaskTypeVersion()) if err != nil { return currentState, externalResources, err } currentParallelism := 0 - maxParallelism := int(arrayJob.Parallelism) + maxParallelism := int(arrayJob.GetParallelism()) currentSubTaskPhaseHash, err := currentState.GetArrayStatus().HashCode() if err != nil { @@ -155,7 +155,7 @@ func LaunchAndCheckSubTasksState(ctx context.Context, tCtx core.TaskExecutionCon retryAttempt++ newState.RetryAttempts.SetItem(childIdx, retryAttempt) } else if existingPhase.IsTerminal() { - newArrayStatus.Detailed.SetItem(childIdx, bitarray.Item(existingPhase)) + newArrayStatus.Detailed.SetItem(childIdx, bitarray.Item(existingPhase)) // #nosec G115 continue } @@ -246,12 +246,13 @@ func LaunchAndCheckSubTasksState(ctx context.Context, tCtx core.TaskExecutionCon } } + // #nosec G115 if actualPhase == core.PhaseRetryableFailure && uint32(retryAttempt+1) >= stCtx.TaskExecutionMetadata().GetMaxAttempts() { // If we see a retryable failure we must check if the number of retries exceeds the maximum // attempts. If so, transition to a permanent failure so that is not attempted again. 
actualPhase = core.PhasePermanentFailure } - newArrayStatus.Detailed.SetItem(childIdx, bitarray.Item(actualPhase)) + newArrayStatus.Detailed.SetItem(childIdx, bitarray.Item(actualPhase)) // #nosec G115 if actualPhase.IsTerminal() { err = deallocateResource(ctx, stCtx, config, podName) @@ -275,9 +276,9 @@ func LaunchAndCheckSubTasksState(ctx context.Context, tCtx core.TaskExecutionCon externalResources = append(externalResources, &core.ExternalResource{ ExternalID: podName, - Index: uint32(originalIdx), + Index: uint32(originalIdx), // #nosec G115 Logs: logLinks, - RetryAttempt: uint32(retryAttempt), + RetryAttempt: uint32(retryAttempt), // #nosec G115 Phase: actualPhase, }) @@ -383,15 +384,15 @@ func TerminateSubTasks(ctx context.Context, tCtx core.TaskExecutionContext, kube } else { externalResources = append(externalResources, &core.ExternalResource{ ExternalID: stCtx.TaskExecutionMetadata().GetTaskExecutionID().GetGeneratedName(), - Index: uint32(originalIdx), - RetryAttempt: uint32(retryAttempt), + Index: uint32(originalIdx), // #nosec G115 + RetryAttempt: uint32(retryAttempt), // #nosec G115 Phase: core.PhaseAborted, }) } } if messageCollector.Length() > 0 { - return currentState, externalResources, fmt.Errorf(messageCollector.Summary(config.MaxErrorStringLength)) + return currentState, externalResources, fmt.Errorf(messageCollector.Summary(config.MaxErrorStringLength)) //nolint } return currentState.SetPhase(arrayCore.PhaseWriteToDiscoveryThenFail, currentState.PhaseVersion+1), externalResources, nil diff --git a/flyteplugins/go/tasks/plugins/array/k8s/management_test.go b/flyteplugins/go/tasks/plugins/array/k8s/management_test.go index 7100fbc34c..d1628f98a2 100644 --- a/flyteplugins/go/tasks/plugins/array/k8s/management_test.go +++ b/flyteplugins/go/tasks/plugins/array/k8s/management_test.go @@ -217,8 +217,10 @@ func TestCheckSubTasksState(t *testing.T) { OriginalArraySize: int64(subtaskCount), OriginalMinSuccesses: int64(subtaskCount), ArrayStatus: 
arraystatus.ArrayStatus{ + // #nosec G115 Detailed: arrayCore.NewPhasesCompactArray(uint(subtaskCount)), // set all tasks to core.PhaseUndefined }, + // #nosec G115 IndexesToCache: arrayCore.InvertBitSet(bitarray.NewBitSet(uint(subtaskCount)), uint(subtaskCount)), // set all tasks to be cached } @@ -254,8 +256,10 @@ func TestCheckSubTasksState(t *testing.T) { OriginalArraySize: int64(subtaskCount), OriginalMinSuccesses: int64(subtaskCount), ArrayStatus: arraystatus.ArrayStatus{ + // #nosec G115 Detailed: arrayCore.NewPhasesCompactArray(uint(subtaskCount)), // set all tasks to core.PhaseUndefined }, + // #nosec G115 IndexesToCache: arrayCore.InvertBitSet(bitarray.NewBitSet(uint(subtaskCount)), uint(subtaskCount)), // set all tasks to be cached } @@ -296,8 +300,10 @@ func TestCheckSubTasksState(t *testing.T) { OriginalArraySize: int64(subtaskCount), OriginalMinSuccesses: int64(subtaskCount), ArrayStatus: arraystatus.ArrayStatus{ + // #nosec G115 Detailed: arrayCore.NewPhasesCompactArray(uint(subtaskCount)), // set all tasks to core.PhaseUndefined }, + // #nosec G115 IndexesToCache: arrayCore.InvertBitSet(bitarray.NewBitSet(uint(subtaskCount)), uint(subtaskCount)), // set all tasks to be cached } @@ -342,12 +348,12 @@ func TestCheckSubTasksState(t *testing.T) { tCtx := getMockTaskExecutionContext(ctx, 0) tCtx.OnResourceManager().Return(&resourceManager) - detailed := arrayCore.NewPhasesCompactArray(uint(subtaskCount)) + detailed := arrayCore.NewPhasesCompactArray(uint(subtaskCount)) // #nosec G115 for i := 0; i < subtaskCount; i++ { detailed.SetItem(i, bitarray.Item(core.PhaseRetryableFailure)) // set all tasks to core.PhaseRetryableFailure } - retryAttemptsArray, err := bitarray.NewCompactArray(uint(subtaskCount), bitarray.Item(1)) + retryAttemptsArray, err := bitarray.NewCompactArray(uint(subtaskCount), bitarray.Item(1)) // #nosec G115 assert.NoError(t, err) currentState := &arrayCore.State{ @@ -358,6 +364,7 @@ func TestCheckSubTasksState(t *testing.T) { 
ArrayStatus: arraystatus.ArrayStatus{ Detailed: detailed, }, + // #nosec G115 IndexesToCache: arrayCore.InvertBitSet(bitarray.NewBitSet(uint(subtaskCount)), uint(subtaskCount)), // set all tasks to be cached RetryAttempts: retryAttemptsArray, } @@ -411,8 +418,9 @@ func TestCheckSubTasksState(t *testing.T) { tCtx := getMockTaskExecutionContext(ctx, 0) tCtx.OnResourceManager().Return(&resourceManager) - detailed := arrayCore.NewPhasesCompactArray(uint(subtaskCount)) + detailed := arrayCore.NewPhasesCompactArray(uint(subtaskCount)) // #nosec G115 for i := 0; i < subtaskCount; i++ { + // #nosec G115 detailed.SetItem(i, bitarray.Item(core.PhaseRunning)) // set all tasks to core.PhaseRunning } @@ -424,6 +432,7 @@ func TestCheckSubTasksState(t *testing.T) { ArrayStatus: arraystatus.ArrayStatus{ Detailed: detailed, }, + // #nosec G115 IndexesToCache: arrayCore.InvertBitSet(bitarray.NewBitSet(uint(subtaskCount)), uint(subtaskCount)), // set all tasks to be cached } @@ -445,10 +454,10 @@ func TestCheckSubTasksState(t *testing.T) { logLinks := externalResource.Logs assert.Equal(t, 2, len(logLinks)) - assert.Equal(t, fmt.Sprintf("Kubernetes Logs #0-%d", i), logLinks[0].Name) - assert.Equal(t, fmt.Sprintf("k8s/log/a-n-b/notfound-%d/pod?namespace=a-n-b", i), logLinks[0].Uri) - assert.Equal(t, fmt.Sprintf("Cloudwatch Logs #0-%d", i), logLinks[1].Name) - assert.Equal(t, fmt.Sprintf("https://console.aws.amazon.com/cloudwatch/home?region=us-east-1#logStream:group=/kubernetes/flyte;prefix=var.log.containers.notfound-%d;streamFilter=typeLogStreamPrefix", i), logLinks[1].Uri) + assert.Equal(t, fmt.Sprintf("Kubernetes Logs #0-%d", i), logLinks[0].GetName()) + assert.Equal(t, fmt.Sprintf("k8s/log/a-n-b/notfound-%d/pod?namespace=a-n-b", i), logLinks[0].GetUri()) + assert.Equal(t, fmt.Sprintf("Cloudwatch Logs #0-%d", i), logLinks[1].GetName()) + assert.Equal(t, 
fmt.Sprintf("https://console.aws.amazon.com/cloudwatch/home?region=us-east-1#logStream:group=/kubernetes/flyte;prefix=var.log.containers.notfound-%d;streamFilter=typeLogStreamPrefix", i), logLinks[1].GetUri()) } }) @@ -464,12 +473,13 @@ func TestCheckSubTasksState(t *testing.T) { tCtx := getMockTaskExecutionContext(ctx, 0) tCtx.OnResourceManager().Return(&resourceManager) - detailed := arrayCore.NewPhasesCompactArray(uint(subtaskCount)) + detailed := arrayCore.NewPhasesCompactArray(uint(subtaskCount)) // #nosec G115 for i := 0; i < subtaskCount; i++ { + // #nosec G115 detailed.SetItem(i, bitarray.Item(core.PhaseRunning)) // set all tasks to core.PhaseRunning } - retryAttemptsArray, err := bitarray.NewCompactArray(uint(subtaskCount), bitarray.Item(1)) + retryAttemptsArray, err := bitarray.NewCompactArray(uint(subtaskCount), bitarray.Item(1)) // #nosec G115 assert.NoError(t, err) currentState := &arrayCore.State{ @@ -480,6 +490,7 @@ func TestCheckSubTasksState(t *testing.T) { ArrayStatus: arraystatus.ArrayStatus{ Detailed: detailed, }, + // #nosec G115 IndexesToCache: arrayCore.InvertBitSet(bitarray.NewBitSet(uint(subtaskCount)), uint(subtaskCount)), // set all tasks to be cached RetryAttempts: retryAttemptsArray, } @@ -509,11 +520,13 @@ func TestCheckSubTasksState(t *testing.T) { tCtx := getMockTaskExecutionContext(ctx, 0) tCtx.OnResourceManager().Return(&resourceManager) + // #nosec G115 detailed := arrayCore.NewPhasesCompactArray(uint(subtaskCount)) for i := 0; i < subtaskCount; i++ { detailed.SetItem(i, bitarray.Item(core.PhaseRunning)) // set all tasks to core.PhaseRunning } + // #nosec G115 retryAttemptsArray, err := bitarray.NewCompactArray(uint(subtaskCount), bitarray.Item(1)) assert.NoError(t, err) @@ -529,6 +542,7 @@ func TestCheckSubTasksState(t *testing.T) { ArrayStatus: arraystatus.ArrayStatus{ Detailed: detailed, }, + // #nosec G115 IndexesToCache: arrayCore.InvertBitSet(bitarray.NewBitSet(uint(subtaskCount)), uint(subtaskCount)), // set all tasks to be 
cached RetryAttempts: retryAttemptsArray, } @@ -561,7 +575,7 @@ func TestTerminateSubTasksOnAbort(t *testing.T) { kubeClient.OnGetClient().Return(mocks.NewFakeKubeClient()) kubeClient.OnGetCache().Return(mocks.NewFakeKubeCache()) - compactArray := arrayCore.NewPhasesCompactArray(uint(subtaskCount)) + compactArray := arrayCore.NewPhasesCompactArray(uint(subtaskCount)) // #nosec G115 for i := 0; i < subtaskCount; i++ { compactArray.SetItem(i, 5) } @@ -574,6 +588,7 @@ func TestTerminateSubTasksOnAbort(t *testing.T) { ArrayStatus: arraystatus.ArrayStatus{ Detailed: compactArray, }, + // #nosec G115 IndexesToCache: arrayCore.InvertBitSet(bitarray.NewBitSet(uint(subtaskCount)), uint(subtaskCount)), } @@ -652,9 +667,10 @@ func TestTerminateSubTasks(t *testing.T) { for _, test := range tests { t.Run(test.name, func(t *testing.T) { + // #nosec G115 compactArray := arrayCore.NewPhasesCompactArray(uint(subtaskCount)) for i, phaseIdx := range test.initialPhaseIndices { - compactArray.SetItem(i, bitarray.Item(phaseIdx)) + compactArray.SetItem(i, bitarray.Item(phaseIdx)) // #nosec G115 } currentState := &arrayCore.State{ CurrentPhase: arrayCore.PhaseCheckingSubTaskExecutions, @@ -665,6 +681,7 @@ func TestTerminateSubTasks(t *testing.T) { ArrayStatus: arraystatus.ArrayStatus{ Detailed: compactArray, }, + // #nosec G115 IndexesToCache: arrayCore.InvertBitSet(bitarray.NewBitSet(uint(subtaskCount)), uint(subtaskCount)), } diff --git a/flyteplugins/go/tasks/plugins/array/k8s/subtask_exec_context.go b/flyteplugins/go/tasks/plugins/array/k8s/subtask_exec_context.go index b76fe70d28..d0e483257d 100644 --- a/flyteplugins/go/tasks/plugins/array/k8s/subtask_exec_context.go +++ b/flyteplugins/go/tasks/plugins/array/k8s/subtask_exec_context.go @@ -192,7 +192,7 @@ func (s SubTaskExecutionID) TemplateVarsByScheme() []tasklog.TemplateVar { {Regex: LogTemplateRegexes.ParentName, Value: s.parentName}, { Regex: LogTemplateRegexes.ExecutionIndex, - Value: 
strconv.FormatUint(uint64(s.executionIndex), 10), + Value: strconv.FormatUint(uint64(s.executionIndex), 10), // #nosec G115 }, { Regex: LogTemplateRegexes.RetryAttempt, @@ -212,7 +212,7 @@ func NewSubTaskExecutionID(taskExecutionID pluginsCore.TaskExecutionID, executio executionIndex, taskExecutionID.GetGeneratedName(), retryAttempt, - taskExecutionID.GetID().RetryAttempt, + taskExecutionID.GetID().RetryAttempt, //nolint:protogetter } } @@ -252,8 +252,8 @@ func NewSubTaskExecutionMetadata(taskExecutionMetadata pluginsCore.TaskExecution var err error secretsMap := make(map[string]string) injectSecretsLabel := make(map[string]string) - if taskTemplate.SecurityContext != nil && len(taskTemplate.SecurityContext.Secrets) > 0 { - secretsMap, err = secrets.MarshalSecretsToMapStrings(taskTemplate.SecurityContext.Secrets) + if taskTemplate.GetSecurityContext() != nil && len(taskTemplate.GetSecurityContext().GetSecrets()) > 0 { + secretsMap, err = secrets.MarshalSecretsToMapStrings(taskTemplate.GetSecurityContext().GetSecrets()) if err != nil { return SubTaskExecutionMetadata{}, err } @@ -264,6 +264,7 @@ func NewSubTaskExecutionMetadata(taskExecutionMetadata pluginsCore.TaskExecution } subTaskExecutionID := NewSubTaskExecutionID(taskExecutionMetadata.GetTaskExecutionID(), executionIndex, retryAttempt) + // #nosec G115 interruptible := taskExecutionMetadata.IsInterruptible() && int32(systemFailures) < taskExecutionMetadata.GetInterruptibleFailureThreshold() return SubTaskExecutionMetadata{ taskExecutionMetadata, diff --git a/flyteplugins/go/tasks/plugins/array/k8s/subtask_exec_context_test.go b/flyteplugins/go/tasks/plugins/array/k8s/subtask_exec_context_test.go index a7f5aa20b4..83aead4f5e 100644 --- a/flyteplugins/go/tasks/plugins/array/k8s/subtask_exec_context_test.go +++ b/flyteplugins/go/tasks/plugins/array/k8s/subtask_exec_context_test.go @@ -31,8 +31,8 @@ func TestSubTaskExecutionContext(t *testing.T) { subtaskTemplate, err := stCtx.TaskReader().Read(ctx) assert.Nil(t, 
err) - assert.Equal(t, int32(2), subtaskTemplate.TaskTypeVersion) - assert.Equal(t, podPlugin.ContainerTaskType, subtaskTemplate.Type) + assert.Equal(t, int32(2), subtaskTemplate.GetTaskTypeVersion()) + assert.Equal(t, podPlugin.ContainerTaskType, subtaskTemplate.GetType()) assert.Equal(t, storage.DataReference("/prefix/"), stCtx.OutputWriter().GetOutputPrefixPath()) assert.Equal(t, storage.DataReference("/raw_prefix/5/1"), stCtx.OutputWriter().GetRawOutputPrefix()) assert.Equal(t, diff --git a/flyteplugins/go/tasks/plugins/array/outputs.go b/flyteplugins/go/tasks/plugins/array/outputs.go index cb07fb0de1..611442de98 100644 --- a/flyteplugins/go/tasks/plugins/array/outputs.go +++ b/flyteplugins/go/tasks/plugins/array/outputs.go @@ -52,6 +52,7 @@ type assembleOutputsWorker struct { func (w assembleOutputsWorker) Process(ctx context.Context, workItem workqueue.WorkItem) (workqueue.WorkStatus, error) { i := workItem.(*outputAssembleItem) + // #nosec G115 outputReaders, err := ConstructOutputReaders(ctx, i.dataStore, i.outputPaths.GetOutputPrefixPath(), i.outputPaths.GetRawOutputPrefix(), int(i.finalPhases.ItemsCount)) if err != nil { logger.Warnf(ctx, "Failed to construct output readers. 
Error: %v", err) @@ -89,7 +90,7 @@ func (w assembleOutputsWorker) Process(ctx context.Context, workItem workqueue.W // to aggregate outputs here finalOutputs.Literals = output.GetLiterals() } else { - appendSubTaskOutput(finalOutputs, output, int64(i.finalPhases.ItemsCount)) + appendSubTaskOutput(finalOutputs, output, int64(i.finalPhases.ItemsCount)) // #nosec G115 continue } } @@ -110,7 +111,7 @@ func (w assembleOutputsWorker) Process(ctx context.Context, workItem workqueue.W } func appendOneItem(outputs *core.LiteralMap, varName string, literal *core.Literal, expectedSize int64) { - existingVal, found := outputs.Literals[varName] + existingVal, found := outputs.GetLiterals()[varName] var list *core.LiteralCollection if found { list = existingVal.GetCollection() @@ -155,7 +156,7 @@ func buildFinalPhases(executedTasks bitarray.CompactArray, indexes *bitarray.Bit // Set phases os already discovered tasks to success for i := uint(0); i < totalSize; i++ { if !indexes.IsSet(i) { - res.SetItem(int(i), bitarray.Item(pluginCore.PhaseSuccess)) + res.SetItem(int(i), bitarray.Item(pluginCore.PhaseSuccess)) // #nosec G115 } } @@ -199,14 +200,14 @@ func AssembleFinalOutputs(ctx context.Context, assemblyQueue OutputAssembler, tC } finalPhases := buildFinalPhases(state.GetArrayStatus().Detailed, - state.GetIndexesToCache(), uint(state.GetOriginalArraySize())) + state.GetIndexesToCache(), uint(state.GetOriginalArraySize())) // #nosec G115 err = assemblyQueue.Queue(ctx, workItemID, &outputAssembleItem{ varNames: varNames, finalPhases: finalPhases, outputPaths: tCtx.OutputWriter(), dataStore: tCtx.DataStore(), - isAwsSingleJob: taskTemplate.Type == AwsBatchTaskType, + isAwsSingleJob: taskTemplate.GetType() == AwsBatchTaskType, }) if err != nil { @@ -274,6 +275,7 @@ type assembleErrorsWorker struct { func (a assembleErrorsWorker) Process(ctx context.Context, workItem workqueue.WorkItem) (workqueue.WorkStatus, error) { w := workItem.(*outputAssembleItem) + // #nosec G115 
outputReaders, err := ConstructOutputReaders(ctx, w.dataStore, w.outputPaths.GetOutputPrefixPath(), w.outputPaths.GetRawOutputPrefix(), int(w.finalPhases.ItemsCount)) if err != nil { return workqueue.WorkStatusNotDone, err diff --git a/flyteplugins/go/tasks/plugins/hive/execution_state.go b/flyteplugins/go/tasks/plugins/hive/execution_state.go index 16ac3835bd..b1d971d0d3 100644 --- a/flyteplugins/go/tasks/plugins/hive/execution_state.go +++ b/flyteplugins/go/tasks/plugins/hive/execution_state.go @@ -116,7 +116,7 @@ func MapExecutionStateToPhaseInfo(state ExecutionState, _ client.QuboleClient) c if state.CreationFailureCount > 5 { phaseInfo = core.PhaseInfoSystemRetryableFailure("QuboleFailure", "Too many creation attempts", nil) } else { - phaseInfo = core.PhaseInfoQueued(t, uint32(state.CreationFailureCount), "Waiting for Qubole launch") + phaseInfo = core.PhaseInfoQueued(t, uint32(state.CreationFailureCount), "Waiting for Qubole launch") // #nosec G115 } case PhaseSubmitted: phaseInfo = core.PhaseInfoRunning(core.DefaultPhaseVersion, ConstructTaskInfo(state)) @@ -240,7 +240,7 @@ func GetAllocationToken(ctx context.Context, tCtx core.TaskExecutionContext, cur } func validateQuboleHiveJob(hiveJob plugins.QuboleHiveJob) error { - if hiveJob.Query == nil { + if hiveJob.GetQuery() == nil { return errors.Errorf(errors.BadTaskSpecification, "Query could not be found. 
Please ensure that you are at least on Flytekit version 0.3.0 or later.") } @@ -267,7 +267,7 @@ func GetQueryInfo(ctx context.Context, tCtx core.TaskExecutionContext) ( return "", "", []string{}, 0, "", err } - query := hiveJob.Query.GetQuery() + query := hiveJob.GetQuery().GetQuery() outputs, err := template.Render(ctx, []string{query}, template.Parameters{ @@ -281,10 +281,10 @@ func GetQueryInfo(ctx context.Context, tCtx core.TaskExecutionContext) ( } formattedQuery = outputs[0] - cluster = hiveJob.ClusterLabel - timeoutSec = hiveJob.Query.TimeoutSec - taskName = taskTemplate.Id.Name - tags = hiveJob.Tags + cluster = hiveJob.GetClusterLabel() + timeoutSec = hiveJob.GetQuery().GetTimeoutSec() + taskName = taskTemplate.GetId().GetName() + tags = hiveJob.GetTags() tags = append(tags, fmt.Sprintf("ns:%s", tCtx.TaskExecutionMetadata().GetNamespace())) for k, v := range tCtx.TaskExecutionMetadata().GetLabels() { tags = append(tags, fmt.Sprintf("%s:%s", k, v)) @@ -326,8 +326,8 @@ func mapLabelToPrimaryLabel(ctx context.Context, quboleCfg *config.Config, label func mapProjectDomainToDestinationClusterLabel(ctx context.Context, tCtx core.TaskExecutionContext, quboleCfg *config.Config) (string, bool) { tExecID := tCtx.TaskExecutionMetadata().GetTaskExecutionID().GetID() - project := tExecID.NodeExecutionId.GetExecutionId().GetProject() - domain := tExecID.NodeExecutionId.GetExecutionId().GetDomain() + project := tExecID.GetNodeExecutionId().GetExecutionId().GetProject() + domain := tExecID.GetNodeExecutionId().GetExecutionId().GetDomain() logger.Debugf(ctx, "No clusterLabelOverride. 
Finding the pre-defined cluster label for (project: %v, domain: %v)", project, domain) // Using a linear search because N is small for _, m := range quboleCfg.DestinationClusterConfigs { @@ -504,7 +504,7 @@ func WriteOutputs(ctx context.Context, tCtx core.TaskExecutionContext, currentSt } externalLocation := tCtx.OutputWriter().GetRawOutputPrefix() - outputs := taskTemplate.Interface.Outputs.GetVariables() + outputs := taskTemplate.GetInterface().GetOutputs().GetVariables() if len(outputs) != 0 && len(outputs) != 1 { return currentState, errors.Errorf(errors.BadTaskSpecification, "Hive tasks must have zero or one output: [%d] found", len(outputs)) } diff --git a/flyteplugins/go/tasks/plugins/hive/execution_state_test.go b/flyteplugins/go/tasks/plugins/hive/execution_state_test.go index 4e34a04593..d67a53bb10 100644 --- a/flyteplugins/go/tasks/plugins/hive/execution_state_test.go +++ b/flyteplugins/go/tasks/plugins/hive/execution_state_test.go @@ -101,7 +101,7 @@ func TestConstructTaskLog(t *testing.T) { u, err := url.Parse(expected) assert.NoError(t, err) taskLog := ConstructTaskLog(ExecutionState{CommandID: "123", URI: u.String()}) - assert.Equal(t, expected, taskLog.Uri) + assert.Equal(t, expected, taskLog.GetUri()) } func TestConstructTaskInfo(t *testing.T) { @@ -120,7 +120,7 @@ func TestConstructTaskInfo(t *testing.T) { } taskInfo := ConstructTaskInfo(e) - assert.Equal(t, "https://wellness.qubole.com/v2/analyze?command_id=123", taskInfo.Logs[0].Uri) + assert.Equal(t, "https://wellness.qubole.com/v2/analyze?command_id=123", taskInfo.Logs[0].GetUri()) assert.Len(t, taskInfo.ExternalResources, 1) assert.Equal(t, taskInfo.ExternalResources[0].ExternalID, "123") } @@ -358,7 +358,7 @@ func TestWriteOutputs(t *testing.T) { literals, err1, err2 := reader.Read(context.Background()) assert.Nil(t, err1) assert.NoError(t, err2) - assert.NotNil(t, literals.Literals["results"].GetScalar().GetSchema()) + assert.NotNil(t, 
literals.GetLiterals()["results"].GetScalar().GetSchema()) }) state := ExecutionState{} diff --git a/flyteplugins/go/tasks/plugins/hive/executions_cache.go b/flyteplugins/go/tasks/plugins/hive/executions_cache.go index 40885ab093..6ce2fcf6e2 100644 --- a/flyteplugins/go/tasks/plugins/hive/executions_cache.go +++ b/flyteplugins/go/tasks/plugins/hive/executions_cache.go @@ -39,7 +39,8 @@ func NewQuboleHiveExecutionsCache(ctx context.Context, quboleClient client.Qubol scope: scope, cfg: cfg, } - autoRefreshCache, err := cache.NewAutoRefreshCache("qubole", q.SyncQuboleQuery, workqueue.DefaultControllerRateLimiter(), ResyncDuration, cfg.Workers, cfg.LruCacheSize, scope) + // #nosec G115 + autoRefreshCache, err := cache.NewAutoRefreshCache("qubole", q.SyncQuboleQuery, workqueue.DefaultControllerRateLimiter(), ResyncDuration, uint(cfg.Workers), uint(cfg.LruCacheSize), scope) if err != nil { logger.Errorf(ctx, "Could not create AutoRefreshCache in QuboleHiveExecutor. [%s]", err) return q, errors.Wrapf(errors.CacheFailed, err, "Error creating AutoRefreshCache") diff --git a/flyteplugins/go/tasks/plugins/k8s/dask/dask.go b/flyteplugins/go/tasks/plugins/k8s/dask/dask.go index ae68a4c760..df0e4f3472 100644 --- a/flyteplugins/go/tasks/plugins/k8s/dask/dask.go +++ b/flyteplugins/go/tasks/plugins/k8s/dask/dask.go @@ -20,7 +20,7 @@ import ( "github.com/flyteorg/flyte/flyteplugins/go/tasks/pluginmachinery/flytek8s" "github.com/flyteorg/flyte/flyteplugins/go/tasks/pluginmachinery/k8s" "github.com/flyteorg/flyte/flyteplugins/go/tasks/pluginmachinery/tasklog" - "github.com/flyteorg/flyte/flyteplugins/go/tasks/pluginmachinery/utils" + "github.com/flyteorg/flyte/flytestdlib/utils" ) const ( @@ -66,7 +66,7 @@ func (p daskResourceHandler) BuildResource(ctx context.Context, taskCtx pluginsC } daskJob := plugins.DaskJob{} - err = utils.UnmarshalStruct(taskTemplate.GetCustom(), &daskJob) + err = utils.UnmarshalStructToPb(taskTemplate.GetCustom(), &daskJob) if err != nil { return nil, 
errors.Wrapf(errors.BadTaskSpecification, err, "invalid TaskSpecification [%v], failed to unmarshal", taskTemplate.GetCustom()) } @@ -85,13 +85,13 @@ func (p daskResourceHandler) BuildResource(ctx context.Context, taskCtx pluginsC mergeMapInto(taskCtx.TaskExecutionMetadata().GetAnnotations(), objectMeta.Annotations) mergeMapInto(taskCtx.TaskExecutionMetadata().GetLabels(), objectMeta.Labels) - workerSpec, err := createWorkerSpec(*daskJob.Workers, podSpec, primaryContainerName) + workerSpec, err := createWorkerSpec(daskJob.GetWorkers(), podSpec, primaryContainerName) if err != nil { return nil, err } clusterName := taskCtx.TaskExecutionMetadata().GetTaskExecutionID().GetGeneratedName() - schedulerSpec, err := createSchedulerSpec(*daskJob.Scheduler, clusterName, nonInterruptiblePodSpec, primaryContainerName) + schedulerSpec, err := createSchedulerSpec(daskJob.GetScheduler(), clusterName, nonInterruptiblePodSpec, primaryContainerName) if err != nil { return nil, err } @@ -112,7 +112,7 @@ func (p daskResourceHandler) BuildResource(ctx context.Context, taskCtx pluginsC return job, nil } -func createWorkerSpec(cluster plugins.DaskWorkerGroup, podSpec *v1.PodSpec, primaryContainerName string) (*daskAPI.WorkerSpec, error) { +func createWorkerSpec(cluster *plugins.DaskWorkerGroup, podSpec *v1.PodSpec, primaryContainerName string) (*daskAPI.WorkerSpec, error) { workerPodSpec := podSpec.DeepCopy() primaryContainer, err := flytek8s.GetContainer(workerPodSpec, primaryContainerName) if err != nil { @@ -128,7 +128,7 @@ func createWorkerSpec(cluster plugins.DaskWorkerGroup, podSpec *v1.PodSpec, prim // Set custom resources resources := &primaryContainer.Resources clusterResources := cluster.GetResources() - if len(clusterResources.Requests) >= 1 || len(clusterResources.Limits) >= 1 { + if len(clusterResources.GetRequests()) >= 1 || len(clusterResources.GetLimits()) >= 1 { resources, err = flytek8s.ToK8sResourceRequirements(cluster.GetResources()) if err != nil { return nil, err @@ 
-174,7 +174,7 @@ func createWorkerSpec(cluster plugins.DaskWorkerGroup, podSpec *v1.PodSpec, prim }, nil } -func createSchedulerSpec(scheduler plugins.DaskScheduler, clusterName string, podSpec *v1.PodSpec, primaryContainerName string) (*daskAPI.SchedulerSpec, error) { +func createSchedulerSpec(scheduler *plugins.DaskScheduler, clusterName string, podSpec *v1.PodSpec, primaryContainerName string) (*daskAPI.SchedulerSpec, error) { schedulerPodSpec := podSpec.DeepCopy() primaryContainer, err := flytek8s.GetContainer(schedulerPodSpec, primaryContainerName) if err != nil { @@ -190,7 +190,7 @@ func createSchedulerSpec(scheduler plugins.DaskScheduler, clusterName string, po // Override resources if applicable resources := &primaryContainer.Resources schedulerResources := scheduler.GetResources() - if len(schedulerResources.Requests) >= 1 || len(schedulerResources.Limits) >= 1 { + if len(schedulerResources.GetRequests()) >= 1 || len(schedulerResources.GetLimits()) >= 1 { resources, err = flytek8s.ToK8sResourceRequirements(scheduler.GetResources()) if err != nil { return nil, err diff --git a/flyteplugins/go/tasks/plugins/k8s/dask/dask_test.go b/flyteplugins/go/tasks/plugins/k8s/dask/dask_test.go index eba53067ef..bc8b4adef4 100644 --- a/flyteplugins/go/tasks/plugins/k8s/dask/dask_test.go +++ b/flyteplugins/go/tasks/plugins/k8s/dask/dask_test.go @@ -525,9 +525,10 @@ func TestBuildResouceDaskUsePodTemplate(t *testing.T) { func TestBuildResourceDaskExtendedResources(t *testing.T) { assert.NoError(t, config.SetK8sPluginConfig(&config.K8sPluginConfig{ - GpuDeviceNodeLabel: "gpu-node-label", - GpuPartitionSizeNodeLabel: "gpu-partition-size", - GpuResourceName: flytek8s.ResourceNvidiaGPU, + GpuDeviceNodeLabel: "gpu-node-label", + GpuPartitionSizeNodeLabel: "gpu-partition-size", + GpuResourceName: flytek8s.ResourceNvidiaGPU, + AddTolerationsForExtendedResources: []string{"nvidia.com/gpu"}, })) fixtures := []struct { @@ -569,6 +570,11 @@ func 
TestBuildResourceDaskExtendedResources(t *testing.T) { Operator: v1.TolerationOpEqual, Effect: v1.TaintEffectNoSchedule, }, + { + Key: "nvidia.com/gpu", + Operator: v1.TolerationOpExists, + Effect: v1.TaintEffectNoSchedule, + }, }, }, { @@ -620,6 +626,11 @@ func TestBuildResourceDaskExtendedResources(t *testing.T) { Operator: v1.TolerationOpEqual, Effect: v1.TaintEffectNoSchedule, }, + { + Key: "nvidia.com/gpu", + Operator: v1.TolerationOpExists, + Effect: v1.TaintEffectNoSchedule, + }, }, }, } diff --git a/flyteplugins/go/tasks/plugins/k8s/kfoperators/common/common_operator_test.go b/flyteplugins/go/tasks/plugins/k8s/kfoperators/common/common_operator_test.go index d0e154835c..9196c788cc 100644 --- a/flyteplugins/go/tasks/plugins/k8s/kfoperators/common/common_operator_test.go +++ b/flyteplugins/go/tasks/plugins/k8s/kfoperators/common/common_operator_test.go @@ -178,7 +178,7 @@ func TestGetLogs(t *testing.T) { jobLogs, err := GetLogs(taskCtx, MPITaskType, mpiJobObjectMeta, false, workers, launcher, 0, 0) assert.NoError(t, err) assert.Equal(t, 1, len(jobLogs)) - assert.Equal(t, fmt.Sprintf("k8s.com/#!/log/%s/%s-worker-0/pod?namespace=mpi-namespace", "mpi-namespace", "test"), jobLogs[0].Uri) + assert.Equal(t, fmt.Sprintf("k8s.com/#!/log/%s/%s-worker-0/pod?namespace=mpi-namespace", "mpi-namespace", "test"), jobLogs[0].GetUri()) pytorchJobObjectMeta := meta_v1.ObjectMeta{ Name: "test", @@ -187,8 +187,8 @@ func TestGetLogs(t *testing.T) { jobLogs, err = GetLogs(taskCtx, PytorchTaskType, pytorchJobObjectMeta, true, workers, launcher, 0, 0) assert.NoError(t, err) assert.Equal(t, 2, len(jobLogs)) - assert.Equal(t, fmt.Sprintf("k8s.com/#!/log/%s/%s-master-0/pod?namespace=pytorch-namespace", "pytorch-namespace", "test"), jobLogs[0].Uri) - assert.Equal(t, fmt.Sprintf("k8s.com/#!/log/%s/%s-worker-0/pod?namespace=pytorch-namespace", "pytorch-namespace", "test"), jobLogs[1].Uri) + assert.Equal(t, fmt.Sprintf("k8s.com/#!/log/%s/%s-master-0/pod?namespace=pytorch-namespace", 
"pytorch-namespace", "test"), jobLogs[0].GetUri()) + assert.Equal(t, fmt.Sprintf("k8s.com/#!/log/%s/%s-worker-0/pod?namespace=pytorch-namespace", "pytorch-namespace", "test"), jobLogs[1].GetUri()) tensorflowJobObjectMeta := meta_v1.ObjectMeta{ Name: "test", @@ -197,9 +197,9 @@ func TestGetLogs(t *testing.T) { jobLogs, err = GetLogs(taskCtx, TensorflowTaskType, tensorflowJobObjectMeta, false, workers, launcher, 1, 0) assert.NoError(t, err) assert.Equal(t, 3, len(jobLogs)) - assert.Equal(t, fmt.Sprintf("k8s.com/#!/log/%s/%s-worker-0/pod?namespace=tensorflow-namespace", "tensorflow-namespace", "test"), jobLogs[0].Uri) - assert.Equal(t, fmt.Sprintf("k8s.com/#!/log/%s/%s-psReplica-0/pod?namespace=tensorflow-namespace", "tensorflow-namespace", "test"), jobLogs[1].Uri) - assert.Equal(t, fmt.Sprintf("k8s.com/#!/log/%s/%s-chiefReplica-0/pod?namespace=tensorflow-namespace", "tensorflow-namespace", "test"), jobLogs[2].Uri) + assert.Equal(t, fmt.Sprintf("k8s.com/#!/log/%s/%s-worker-0/pod?namespace=tensorflow-namespace", "tensorflow-namespace", "test"), jobLogs[0].GetUri()) + assert.Equal(t, fmt.Sprintf("k8s.com/#!/log/%s/%s-psReplica-0/pod?namespace=tensorflow-namespace", "tensorflow-namespace", "test"), jobLogs[1].GetUri()) + assert.Equal(t, fmt.Sprintf("k8s.com/#!/log/%s/%s-chiefReplica-0/pod?namespace=tensorflow-namespace", "tensorflow-namespace", "test"), jobLogs[2].GetUri()) } @@ -221,8 +221,8 @@ func TestGetLogsTemplateUri(t *testing.T) { jobLogs, err := GetLogs(taskCtx, PytorchTaskType, pytorchJobObjectMeta, true, 1, 0, 0, 0) assert.NoError(t, err) assert.Equal(t, 2, len(jobLogs)) - assert.Equal(t, fmt.Sprintf("https://console.cloud.google.com/logs/query;query=resource.labels.pod_name=%s-master-0×tamp>%s", "test", "2022-01-01T12:00:00Z"), jobLogs[0].Uri) - assert.Equal(t, fmt.Sprintf("https://console.cloud.google.com/logs/query;query=resource.labels.pod_name=%s-worker-0×tamp>%s", "test", "2022-01-01T12:00:00Z"), jobLogs[1].Uri) + assert.Equal(t, 
fmt.Sprintf("https://console.cloud.google.com/logs/query;query=resource.labels.pod_name=%s-master-0×tamp>%s", "test", "2022-01-01T12:00:00Z"), jobLogs[0].GetUri()) + assert.Equal(t, fmt.Sprintf("https://console.cloud.google.com/logs/query;query=resource.labels.pod_name=%s-worker-0×tamp>%s", "test", "2022-01-01T12:00:00Z"), jobLogs[1].GetUri()) } func dummyPodSpec() v1.PodSpec { diff --git a/flyteplugins/go/tasks/plugins/k8s/kfoperators/mpi/mpi.go b/flyteplugins/go/tasks/plugins/k8s/kfoperators/mpi/mpi.go index 53e4d30ccb..7ba2c0cb86 100644 --- a/flyteplugins/go/tasks/plugins/k8s/kfoperators/mpi/mpi.go +++ b/flyteplugins/go/tasks/plugins/k8s/kfoperators/mpi/mpi.go @@ -60,7 +60,7 @@ func (mpiOperatorResourceHandler) BuildResource(ctx context.Context, taskCtx plu var launcherReplicaSpec, workerReplicaSpec *commonOp.ReplicaSpec - if taskTemplate.TaskTypeVersion == 0 { + if taskTemplate.GetTaskTypeVersion() == 0 { mpiTaskExtraArgs := plugins.DistributedMPITrainingTask{} err = utils.UnmarshalStruct(taskTemplate.GetCustom(), &mpiTaskExtraArgs) if err != nil { @@ -98,7 +98,7 @@ func (mpiOperatorResourceHandler) BuildResource(ctx context.Context, taskCtx plu } } - } else if taskTemplate.TaskTypeVersion == 1 { + } else if taskTemplate.GetTaskTypeVersion() == 1 { kfMPITaskExtraArgs := kfplugins.DistributedMPITrainingTask{} err = utils.UnmarshalStruct(taskTemplate.GetCustom(), &kfMPITaskExtraArgs) @@ -122,7 +122,7 @@ func (mpiOperatorResourceHandler) BuildResource(ctx context.Context, taskCtx plu } else { return nil, flyteerr.Errorf(flyteerr.BadTaskSpecification, - "Invalid TaskSpecification, unsupported task template version [%v] key", taskTemplate.TaskTypeVersion) + "Invalid TaskSpecification, unsupported task template version [%v] key", taskTemplate.GetTaskTypeVersion()) } if *workerReplicaSpec.Replicas <= 0 { diff --git a/flyteplugins/go/tasks/plugins/k8s/kfoperators/mpi/mpi_test.go b/flyteplugins/go/tasks/plugins/k8s/kfoperators/mpi/mpi_test.go index 
6c0080d45a..346b34adb6 100644 --- a/flyteplugins/go/tasks/plugins/k8s/kfoperators/mpi/mpi_test.go +++ b/flyteplugins/go/tasks/plugins/k8s/kfoperators/mpi/mpi_test.go @@ -368,9 +368,10 @@ func TestBuildResourceMPIForWrongInput(t *testing.T) { func TestBuildResourceMPIExtendedResources(t *testing.T) { assert.NoError(t, flytek8sConfig.SetK8sPluginConfig(&flytek8sConfig.K8sPluginConfig{ - GpuDeviceNodeLabel: "gpu-node-label", - GpuPartitionSizeNodeLabel: "gpu-partition-size", - GpuResourceName: flytek8s.ResourceNvidiaGPU, + GpuDeviceNodeLabel: "gpu-node-label", + GpuPartitionSizeNodeLabel: "gpu-partition-size", + GpuResourceName: flytek8s.ResourceNvidiaGPU, + AddTolerationsForExtendedResources: []string{"nvidia.com/gpu"}, })) fixtures := []struct { @@ -412,6 +413,11 @@ func TestBuildResourceMPIExtendedResources(t *testing.T) { Operator: corev1.TolerationOpEqual, Effect: corev1.TaintEffectNoSchedule, }, + { + Key: "nvidia.com/gpu", + Operator: corev1.TolerationOpExists, + Effect: corev1.TaintEffectNoSchedule, + }, }, }, { @@ -463,6 +469,11 @@ func TestBuildResourceMPIExtendedResources(t *testing.T) { Operator: corev1.TolerationOpEqual, Effect: corev1.TaintEffectNoSchedule, }, + { + Key: "nvidia.com/gpu", + Operator: corev1.TolerationOpExists, + Effect: corev1.TaintEffectNoSchedule, + }, }, }, } @@ -569,8 +580,8 @@ func TestGetLogs(t *testing.T) { jobLogs, err := common.GetLogs(taskCtx, common.MPITaskType, mpiJob.ObjectMeta, false, workers, launcher, 0, 0) assert.NoError(t, err) assert.Equal(t, 2, len(jobLogs)) - assert.Equal(t, fmt.Sprintf("k8s.com/#!/log/%s/%s-worker-0/pod?namespace=mpi-namespace", jobNamespace, jobName), jobLogs[0].Uri) - assert.Equal(t, fmt.Sprintf("k8s.com/#!/log/%s/%s-worker-1/pod?namespace=mpi-namespace", jobNamespace, jobName), jobLogs[1].Uri) + assert.Equal(t, fmt.Sprintf("k8s.com/#!/log/%s/%s-worker-0/pod?namespace=mpi-namespace", jobNamespace, jobName), jobLogs[0].GetUri()) + assert.Equal(t, 
fmt.Sprintf("k8s.com/#!/log/%s/%s-worker-1/pod?namespace=mpi-namespace", jobNamespace, jobName), jobLogs[1].GetUri()) } func TestGetProperties(t *testing.T) { diff --git a/flyteplugins/go/tasks/plugins/k8s/kfoperators/pytorch/pytorch.go b/flyteplugins/go/tasks/plugins/k8s/kfoperators/pytorch/pytorch.go index 6d7c80a7fd..3dc81ce41a 100644 --- a/flyteplugins/go/tasks/plugins/k8s/kfoperators/pytorch/pytorch.go +++ b/flyteplugins/go/tasks/plugins/k8s/kfoperators/pytorch/pytorch.go @@ -18,6 +18,7 @@ import ( "github.com/flyteorg/flyte/flyteplugins/go/tasks/pluginmachinery" pluginsCore "github.com/flyteorg/flyte/flyteplugins/go/tasks/pluginmachinery/core" pluginsK8s "github.com/flyteorg/flyte/flyteplugins/go/tasks/pluginmachinery/flytek8s" + k8sConfig "github.com/flyteorg/flyte/flyteplugins/go/tasks/pluginmachinery/flytek8s/config" "github.com/flyteorg/flyte/flyteplugins/go/tasks/pluginmachinery/k8s" "github.com/flyteorg/flyte/flyteplugins/go/tasks/pluginmachinery/utils" "github.com/flyteorg/flyte/flyteplugins/go/tasks/plugins/k8s/kfoperators/common" @@ -30,8 +31,14 @@ type pytorchOperatorResourceHandler struct { var _ k8s.Plugin = pytorchOperatorResourceHandler{} func (pytorchOperatorResourceHandler) GetProperties() k8s.PluginProperties { - return k8s.PluginProperties{ - ErrorAggregationStrategy: k8s.EarliestErrorAggregationStrategy, + config := k8sConfig.GetK8sPluginConfig() + + if config.EnableDistributedErrorAggregation { + return k8s.PluginProperties{ + ErrorAggregationStrategy: k8s.EarliestErrorAggregationStrategy, + } + } else { + return k8s.PluginProperties{} } } @@ -47,7 +54,7 @@ func (pytorchOperatorResourceHandler) BuildIdentityResource(ctx context.Context, } // Defines a func to create the full resource object that will be posted to k8s. 
-func (pytorchOperatorResourceHandler) BuildResource(ctx context.Context, taskCtx pluginsCore.TaskExecutionContext) (client.Object, error) { +func (p pytorchOperatorResourceHandler) BuildResource(ctx context.Context, taskCtx pluginsCore.TaskExecutionContext) (client.Object, error) { taskTemplate, err := taskCtx.TaskReader().Read(ctx) if err != nil { @@ -61,7 +68,7 @@ func (pytorchOperatorResourceHandler) BuildResource(ctx context.Context, taskCtx var masterReplicaSpec, workerReplicaSpec *commonOp.ReplicaSpec - if taskTemplate.TaskTypeVersion == 0 { + if taskTemplate.GetTaskTypeVersion() == 0 { pytorchTaskExtraArgs := plugins.DistributedPyTorchTrainingTask{} err = utils.UnmarshalStruct(taskTemplate.GetCustom(), &pytorchTaskExtraArgs) @@ -85,7 +92,7 @@ func (pytorchOperatorResourceHandler) BuildResource(ctx context.Context, taskCtx if elasticConfig != nil { elasticPolicy = ParseElasticConfig(elasticConfig) } - } else if taskTemplate.TaskTypeVersion == 1 { + } else if taskTemplate.GetTaskTypeVersion() == 1 { kfPytorchTaskExtraArgs := kfplugins.DistributedPyTorchTrainingTask{} err = utils.UnmarshalStruct(taskTemplate.GetCustom(), &kfPytorchTaskExtraArgs) @@ -115,10 +122,13 @@ func (pytorchOperatorResourceHandler) BuildResource(ctx context.Context, taskCtx }, }, }) - container.Env = append(container.Env, apiv1.EnvVar{ - Name: pluginsK8s.FlyteInternalDistErrorStrategyEnvVarKey, - Value: k8s.EarliestErrorAggregationStrategy.String(), - }) + + if p.GetProperties().ErrorAggregationStrategy == k8s.EarliestErrorAggregationStrategy { + container.Env = append(container.Env, apiv1.EnvVar{ + Name: pluginsK8s.FlyteInternalDistErrorStrategyEnvVarKey, + Value: k8s.EarliestErrorAggregationStrategy.String(), + }) + } } updateEnvVars(&workerReplicaSpec.Template.Spec.Containers[0]) @@ -132,7 +142,7 @@ func (pytorchOperatorResourceHandler) BuildResource(ctx context.Context, taskCtx } } else { return nil, flyteerr.Errorf(flyteerr.BadTaskSpecification, - "Invalid TaskSpecification, 
unsupported task template version [%v] key", taskTemplate.TaskTypeVersion) + "Invalid TaskSpecification, unsupported task template version [%v] key", taskTemplate.GetTaskTypeVersion()) } if *workerReplicaSpec.Replicas <= 0 { diff --git a/flyteplugins/go/tasks/plugins/k8s/kfoperators/pytorch/pytorch_test.go b/flyteplugins/go/tasks/plugins/k8s/kfoperators/pytorch/pytorch_test.go index 814b340fe6..ab319561cf 100644 --- a/flyteplugins/go/tasks/plugins/k8s/kfoperators/pytorch/pytorch_test.go +++ b/flyteplugins/go/tasks/plugins/k8s/kfoperators/pytorch/pytorch_test.go @@ -26,6 +26,7 @@ import ( "github.com/flyteorg/flyte/flyteplugins/go/tasks/pluginmachinery/flytek8s" pluginsK8s "github.com/flyteorg/flyte/flyteplugins/go/tasks/pluginmachinery/flytek8s" flytek8sConfig "github.com/flyteorg/flyte/flyteplugins/go/tasks/pluginmachinery/flytek8s/config" + k8sConfig "github.com/flyteorg/flyte/flyteplugins/go/tasks/pluginmachinery/flytek8s/config" pluginIOMocks "github.com/flyteorg/flyte/flyteplugins/go/tasks/pluginmachinery/io/mocks" "github.com/flyteorg/flyte/flyteplugins/go/tasks/pluginmachinery/k8s" "github.com/flyteorg/flyte/flyteplugins/go/tasks/pluginmachinery/utils" @@ -473,9 +474,10 @@ func TestBuildResourcePytorchContainerImage(t *testing.T) { func TestBuildResourcePytorchExtendedResources(t *testing.T) { assert.NoError(t, flytek8sConfig.SetK8sPluginConfig(&flytek8sConfig.K8sPluginConfig{ - GpuDeviceNodeLabel: "gpu-node-label", - GpuPartitionSizeNodeLabel: "gpu-partition-size", - GpuResourceName: flytek8s.ResourceNvidiaGPU, + GpuDeviceNodeLabel: "gpu-node-label", + GpuPartitionSizeNodeLabel: "gpu-partition-size", + GpuResourceName: flytek8s.ResourceNvidiaGPU, + AddTolerationsForExtendedResources: []string{"nvidia.com/gpu"}, })) fixtures := []struct { @@ -517,6 +519,11 @@ func TestBuildResourcePytorchExtendedResources(t *testing.T) { Operator: corev1.TolerationOpEqual, Effect: corev1.TaintEffectNoSchedule, }, + { + Key: "nvidia.com/gpu", + Operator: 
corev1.TolerationOpExists, + Effect: corev1.TaintEffectNoSchedule, + }, }, }, { @@ -568,6 +575,11 @@ func TestBuildResourcePytorchExtendedResources(t *testing.T) { Operator: corev1.TolerationOpEqual, Effect: corev1.TaintEffectNoSchedule, }, + { + Key: "nvidia.com/gpu", + Operator: corev1.TolerationOpExists, + Effect: corev1.TaintEffectNoSchedule, + }, }, }, } @@ -688,9 +700,9 @@ func TestGetLogs(t *testing.T) { jobLogs, err := common.GetLogs(taskCtx, common.PytorchTaskType, pytorchJob.ObjectMeta, hasMaster, workers, 0, 0, 0) assert.NoError(t, err) assert.Equal(t, 3, len(jobLogs)) - assert.Equal(t, fmt.Sprintf("k8s.com/#!/log/%s/%s-master-0/pod?namespace=pytorch-namespace", jobNamespace, jobName), jobLogs[0].Uri) - assert.Equal(t, fmt.Sprintf("k8s.com/#!/log/%s/%s-worker-0/pod?namespace=pytorch-namespace", jobNamespace, jobName), jobLogs[1].Uri) - assert.Equal(t, fmt.Sprintf("k8s.com/#!/log/%s/%s-worker-1/pod?namespace=pytorch-namespace", jobNamespace, jobName), jobLogs[2].Uri) + assert.Equal(t, fmt.Sprintf("k8s.com/#!/log/%s/%s-master-0/pod?namespace=pytorch-namespace", jobNamespace, jobName), jobLogs[0].GetUri()) + assert.Equal(t, fmt.Sprintf("k8s.com/#!/log/%s/%s-worker-0/pod?namespace=pytorch-namespace", jobNamespace, jobName), jobLogs[1].GetUri()) + assert.Equal(t, fmt.Sprintf("k8s.com/#!/log/%s/%s-worker-1/pod?namespace=pytorch-namespace", jobNamespace, jobName), jobLogs[2].GetUri()) } func TestGetLogsElastic(t *testing.T) { @@ -708,13 +720,19 @@ func TestGetLogsElastic(t *testing.T) { jobLogs, err := common.GetLogs(taskCtx, common.PytorchTaskType, pytorchJob.ObjectMeta, hasMaster, workers, 0, 0, 0) assert.NoError(t, err) assert.Equal(t, 2, len(jobLogs)) - assert.Equal(t, fmt.Sprintf("k8s.com/#!/log/%s/%s-worker-0/pod?namespace=pytorch-namespace", jobNamespace, jobName), jobLogs[0].Uri) - assert.Equal(t, fmt.Sprintf("k8s.com/#!/log/%s/%s-worker-1/pod?namespace=pytorch-namespace", jobNamespace, jobName), jobLogs[1].Uri) + assert.Equal(t, 
fmt.Sprintf("k8s.com/#!/log/%s/%s-worker-0/pod?namespace=pytorch-namespace", jobNamespace, jobName), jobLogs[0].GetUri()) + assert.Equal(t, fmt.Sprintf("k8s.com/#!/log/%s/%s-worker-1/pod?namespace=pytorch-namespace", jobNamespace, jobName), jobLogs[1].GetUri()) } func TestGetProperties(t *testing.T) { + config := k8sConfig.GetK8sPluginConfig() pytorchResourceHandler := pytorchOperatorResourceHandler{} - expected := k8s.PluginProperties{ + + expected := k8s.PluginProperties{} + assert.Equal(t, expected, pytorchResourceHandler.GetProperties()) + + config.EnableDistributedErrorAggregation = true + expected = k8s.PluginProperties{ ErrorAggregationStrategy: k8s.EarliestErrorAggregationStrategy, } assert.Equal(t, expected, pytorchResourceHandler.GetProperties()) @@ -850,6 +868,8 @@ func TestBuildResourcePytorchV1(t *testing.T) { }, } + config := k8sConfig.GetK8sPluginConfig() + config.EnableDistributedErrorAggregation = true pytorchResourceHandler := pytorchOperatorResourceHandler{} taskTemplate := dummyPytorchTaskTemplate("job4", taskConfig) diff --git a/flyteplugins/go/tasks/plugins/k8s/kfoperators/tensorflow/tensorflow.go b/flyteplugins/go/tasks/plugins/k8s/kfoperators/tensorflow/tensorflow.go index 93b4d91cd2..3c0a3e9485 100644 --- a/flyteplugins/go/tasks/plugins/k8s/kfoperators/tensorflow/tensorflow.go +++ b/flyteplugins/go/tasks/plugins/k8s/kfoperators/tensorflow/tensorflow.go @@ -55,7 +55,7 @@ func (tensorflowOperatorResourceHandler) BuildResource(ctx context.Context, task replicaSpecMap := make(map[commonOp.ReplicaType]*commonOp.ReplicaSpec) runPolicy := commonOp.RunPolicy{} - if taskTemplate.TaskTypeVersion == 0 { + if taskTemplate.GetTaskTypeVersion() == 0 { tensorflowTaskExtraArgs := plugins.DistributedTensorflowTrainingTask{} err = utils.UnmarshalStruct(taskTemplate.GetCustom(), &tensorflowTaskExtraArgs) @@ -83,7 +83,7 @@ func (tensorflowOperatorResourceHandler) BuildResource(ctx context.Context, task } } - } else if taskTemplate.TaskTypeVersion == 1 { + } 
else if taskTemplate.GetTaskTypeVersion() == 1 { kfTensorflowTaskExtraArgs := kfplugins.DistributedTensorflowTrainingTask{} err = utils.UnmarshalStruct(taskTemplate.GetCustom(), &kfTensorflowTaskExtraArgs) @@ -125,7 +125,7 @@ func (tensorflowOperatorResourceHandler) BuildResource(ctx context.Context, task } else { return nil, flyteerr.Errorf(flyteerr.BadTaskSpecification, - "Invalid TaskSpecification, unsupported task template version [%v] key", taskTemplate.TaskTypeVersion) + "Invalid TaskSpecification, unsupported task template version [%v] key", taskTemplate.GetTaskTypeVersion()) } if v, ok := replicaSpecMap[kubeflowv1.TFJobReplicaTypeWorker]; !ok || *v.Replicas <= 0 { diff --git a/flyteplugins/go/tasks/plugins/k8s/kfoperators/tensorflow/tensorflow_test.go b/flyteplugins/go/tasks/plugins/k8s/kfoperators/tensorflow/tensorflow_test.go index d4d6e6da17..22b750c22b 100644 --- a/flyteplugins/go/tasks/plugins/k8s/kfoperators/tensorflow/tensorflow_test.go +++ b/flyteplugins/go/tasks/plugins/k8s/kfoperators/tensorflow/tensorflow_test.go @@ -628,11 +628,11 @@ func TestGetLogs(t *testing.T) { workers, psReplicas, chiefReplicas, evaluatorReplicas) assert.NoError(t, err) assert.Equal(t, 5, len(jobLogs)) - assert.Equal(t, fmt.Sprintf("k8s.com/#!/log/%s/%s-worker-0/pod?namespace=tensorflow-namespace", jobNamespace, jobName), jobLogs[0].Uri) - assert.Equal(t, fmt.Sprintf("k8s.com/#!/log/%s/%s-worker-1/pod?namespace=tensorflow-namespace", jobNamespace, jobName), jobLogs[1].Uri) - assert.Equal(t, fmt.Sprintf("k8s.com/#!/log/%s/%s-psReplica-0/pod?namespace=tensorflow-namespace", jobNamespace, jobName), jobLogs[2].Uri) - assert.Equal(t, fmt.Sprintf("k8s.com/#!/log/%s/%s-chiefReplica-0/pod?namespace=tensorflow-namespace", jobNamespace, jobName), jobLogs[3].Uri) - assert.Equal(t, fmt.Sprintf("k8s.com/#!/log/%s/%s-evaluatorReplica-0/pod?namespace=tensorflow-namespace", jobNamespace, jobName), jobLogs[4].Uri) + assert.Equal(t, 
fmt.Sprintf("k8s.com/#!/log/%s/%s-worker-0/pod?namespace=tensorflow-namespace", jobNamespace, jobName), jobLogs[0].GetUri()) + assert.Equal(t, fmt.Sprintf("k8s.com/#!/log/%s/%s-worker-1/pod?namespace=tensorflow-namespace", jobNamespace, jobName), jobLogs[1].GetUri()) + assert.Equal(t, fmt.Sprintf("k8s.com/#!/log/%s/%s-psReplica-0/pod?namespace=tensorflow-namespace", jobNamespace, jobName), jobLogs[2].GetUri()) + assert.Equal(t, fmt.Sprintf("k8s.com/#!/log/%s/%s-chiefReplica-0/pod?namespace=tensorflow-namespace", jobNamespace, jobName), jobLogs[3].GetUri()) + assert.Equal(t, fmt.Sprintf("k8s.com/#!/log/%s/%s-evaluatorReplica-0/pod?namespace=tensorflow-namespace", jobNamespace, jobName), jobLogs[4].GetUri()) } func TestGetProperties(t *testing.T) { diff --git a/flyteplugins/go/tasks/plugins/k8s/pod/plugin.go b/flyteplugins/go/tasks/plugins/k8s/pod/plugin.go index 2a08cd0e6c..60b0d5c8d5 100644 --- a/flyteplugins/go/tasks/plugins/k8s/pod/plugin.go +++ b/flyteplugins/go/tasks/plugins/k8s/pod/plugin.go @@ -59,7 +59,7 @@ func (p plugin) BuildResource(ctx context.Context, taskCtx pluginsCore.TaskExecu } primaryContainerName := "" - if taskTemplate.Type == SidecarTaskType && taskTemplate.TaskTypeVersion == 0 { + if taskTemplate.GetType() == SidecarTaskType && taskTemplate.GetTaskTypeVersion() == 0 { // handles pod tasks when they are defined as Sidecar tasks and marshal the podspec using k8s proto. 
sidecarJob := sidecarJob{} err := utils.UnmarshalStructToObj(taskTemplate.GetCustom(), &sidecarJob) @@ -79,7 +79,7 @@ func (p plugin) BuildResource(ctx context.Context, taskCtx pluginsCore.TaskExecu // update annotations and labels objectMeta.Annotations = utils.UnionMaps(objectMeta.Annotations, sidecarJob.Annotations) objectMeta.Labels = utils.UnionMaps(objectMeta.Labels, sidecarJob.Labels) - } else if taskTemplate.Type == SidecarTaskType && taskTemplate.TaskTypeVersion == 1 { + } else if taskTemplate.GetType() == SidecarTaskType && taskTemplate.GetTaskTypeVersion() == 1 { // handles pod tasks that marshal the pod spec to the task custom. err := utils.UnmarshalStructToObj(taskTemplate.GetCustom(), &podSpec) if err != nil { @@ -100,9 +100,9 @@ func (p plugin) BuildResource(ctx context.Context, taskCtx pluginsCore.TaskExecu } // update annotations and labels - if taskTemplate.GetK8SPod() != nil && taskTemplate.GetK8SPod().Metadata != nil { - objectMeta.Annotations = utils.UnionMaps(objectMeta.Annotations, taskTemplate.GetK8SPod().Metadata.Annotations) - objectMeta.Labels = utils.UnionMaps(objectMeta.Labels, taskTemplate.GetK8SPod().Metadata.Labels) + if taskTemplate.GetK8SPod() != nil && taskTemplate.GetK8SPod().GetMetadata() != nil { + objectMeta.Annotations = utils.UnionMaps(objectMeta.Annotations, taskTemplate.GetK8SPod().GetMetadata().GetAnnotations()) + objectMeta.Labels = utils.UnionMaps(objectMeta.Labels, taskTemplate.GetK8SPod().GetMetadata().GetLabels()) } } else { // handles both container / pod tasks that use the TaskTemplate Container and K8sPod fields @@ -122,7 +122,7 @@ func (p plugin) BuildResource(ctx context.Context, taskCtx pluginsCore.TaskExecu // set primaryContainerKey annotation if this is a Sidecar task or, as an optimization, if there is only a single // container. 
this plugin marks the task complete if the primary Container is complete, so if there is only one // container we can mark the task as complete before the Pod has been marked complete. - if taskTemplate.Type == SidecarTaskType || len(podSpec.Containers) == 1 { + if taskTemplate.GetType() == SidecarTaskType || len(podSpec.Containers) == 1 { objectMeta.Annotations[flytek8s.PrimaryContainerKey] = primaryContainerName } diff --git a/flyteplugins/go/tasks/plugins/k8s/ray/ray.go b/flyteplugins/go/tasks/plugins/k8s/ray/ray.go index cf55e29d07..76e595b006 100644 --- a/flyteplugins/go/tasks/plugins/k8s/ray/ray.go +++ b/flyteplugins/go/tasks/plugins/k8s/ray/ray.go @@ -27,7 +27,8 @@ import ( "github.com/flyteorg/flyte/flyteplugins/go/tasks/pluginmachinery/flytek8s/config" "github.com/flyteorg/flyte/flyteplugins/go/tasks/pluginmachinery/k8s" "github.com/flyteorg/flyte/flyteplugins/go/tasks/pluginmachinery/tasklog" - "github.com/flyteorg/flyte/flyteplugins/go/tasks/pluginmachinery/utils" + pluginsUtils "github.com/flyteorg/flyte/flyteplugins/go/tasks/pluginmachinery/utils" + "github.com/flyteorg/flyte/flytestdlib/utils" ) const ( @@ -66,7 +67,7 @@ func (rayJobResourceHandler) BuildResource(ctx context.Context, taskCtx pluginsC } rayJob := plugins.RayJob{} - err = utils.UnmarshalStruct(taskTemplate.GetCustom(), &rayJob) + err = utils.UnmarshalStructToPb(taskTemplate.GetCustom(), &rayJob) if err != nil { return nil, flyteerr.Errorf(flyteerr.BadTaskSpecification, "invalid TaskSpecification [%v], Err: [%v]", taskTemplate.GetCustom(), err.Error()) } @@ -94,8 +95,8 @@ func (rayJobResourceHandler) BuildResource(ctx context.Context, taskCtx pluginsC cfg := GetConfig() headNodeRayStartParams := make(map[string]string) - if rayJob.RayCluster.HeadGroupSpec != nil && rayJob.RayCluster.HeadGroupSpec.RayStartParams != nil { - headNodeRayStartParams = rayJob.RayCluster.HeadGroupSpec.RayStartParams + if rayJob.GetRayCluster().GetHeadGroupSpec() != nil && 
rayJob.GetRayCluster().GetHeadGroupSpec().GetRayStartParams() != nil { + headNodeRayStartParams = rayJob.GetRayCluster().GetHeadGroupSpec().GetRayStartParams() } else if headNode := cfg.Defaults.HeadNode; len(headNode.StartParameters) > 0 { headNodeRayStartParams = headNode.StartParameters } @@ -133,7 +134,7 @@ func constructRayJob(taskCtx pluginsCore.TaskExecutionContext, rayJob *plugins.R headPodSpec, objectMeta, taskCtx, - rayJob.RayCluster.HeadGroupSpec, + rayJob.GetRayCluster().GetHeadGroupSpec(), ) if err != nil { return nil, err } @@ -150,7 +151,7 @@ func constructRayJob(taskCtx pluginsCore.TaskExecutionContext, rayJob *plugins.R EnableInTreeAutoscaling: &rayJob.RayCluster.EnableAutoscaling, } - for _, spec := range rayJob.RayCluster.WorkerGroupSpec { + for _, spec := range rayJob.GetRayCluster().GetWorkerGroupSpec() { workerPodSpec := taskPodSpec.DeepCopy() workerPodTemplate, err := buildWorkerPodTemplate( &workerPodSpec.Containers[primaryContainerIdx], @@ -165,7 +166,7 @@ func constructRayJob(taskCtx pluginsCore.TaskExecutionContext, rayJob *plugins.R workerNodeRayStartParams := make(map[string]string) if spec.RayStartParams != nil { - workerNodeRayStartParams = spec.RayStartParams + workerNodeRayStartParams = spec.GetRayStartParams() } else if workerNode := cfg.Defaults.WorkerNode; len(workerNode.StartParameters) > 0 { workerNodeRayStartParams = workerNode.StartParameters } @@ -178,17 +179,17 @@ func constructRayJob(taskCtx pluginsCore.TaskExecutionContext, rayJob *plugins.R workerNodeRayStartParams[DisableUsageStatsStartParameter] = DisableUsageStatsStartParameterVal } - minReplicas := spec.MinReplicas - if minReplicas > spec.Replicas { - minReplicas = spec.Replicas + minReplicas := spec.GetMinReplicas() + if minReplicas > spec.GetReplicas() { + minReplicas = spec.GetReplicas() } - maxReplicas := spec.MaxReplicas - if maxReplicas < spec.Replicas { - maxReplicas = spec.Replicas + maxReplicas := spec.GetMaxReplicas() + if maxReplicas < spec.GetReplicas() { + maxReplicas =
spec.GetReplicas() } workerNodeSpec := rayv1.WorkerGroupSpec{ - GroupName: spec.GroupName, + GroupName: spec.GetGroupName(), MinReplicas: &minReplicas, MaxReplicas: &maxReplicas, Replicas: &spec.Replicas, @@ -211,7 +212,7 @@ func constructRayJob(taskCtx pluginsCore.TaskExecutionContext, rayJob *plugins.R shutdownAfterJobFinishes := cfg.ShutdownAfterJobFinishes ttlSecondsAfterFinished := &cfg.TTLSecondsAfterFinished - if rayJob.ShutdownAfterJobFinishes { + if rayJob.GetShutdownAfterJobFinishes() { shutdownAfterJobFinishes = true ttlSecondsAfterFinished = &rayJob.TtlSecondsAfterFinished } @@ -221,10 +222,10 @@ func constructRayJob(taskCtx pluginsCore.TaskExecutionContext, rayJob *plugins.R // TODO: This is for backward compatibility. Remove this block once runtime_env is removed from ray proto. var runtimeEnvYaml string - runtimeEnvYaml = rayJob.RuntimeEnvYaml + runtimeEnvYaml = rayJob.GetRuntimeEnvYaml() // If runtime_env exists but runtime_env_yaml does not, convert runtime_env to runtime_env_yaml - if rayJob.RuntimeEnv != "" && rayJob.RuntimeEnvYaml == "" { - runtimeEnvYaml, err = convertBase64RuntimeEnvToYaml(rayJob.RuntimeEnv) + if rayJob.GetRuntimeEnv() != "" && rayJob.GetRuntimeEnvYaml() == "" { + runtimeEnvYaml, err = convertBase64RuntimeEnvToYaml(rayJob.GetRuntimeEnv()) if err != nil { return nil, err } @@ -369,18 +370,20 @@ func buildHeadPodTemplate(primaryContainer *v1.Container, basePodSpec *v1.PodSpe // Inject a sidecar for capturing and exposing Ray job logs injectLogsSidecar(primaryContainer, basePodSpec) - basePodSpec, err := mergeCustomPodSpec(primaryContainer, basePodSpec, spec.K8SPod) + basePodSpec, err := mergeCustomPodSpec(primaryContainer, basePodSpec, spec.GetK8SPod()) if err != nil { return v1.PodTemplateSpec{}, err } + basePodSpec = flytek8s.AddTolerationsForExtendedResources(basePodSpec) + podTemplateSpec := v1.PodTemplateSpec{ Spec: *basePodSpec, ObjectMeta: *objectMeta, } cfg := config.GetK8sPluginConfig() - 
podTemplateSpec.SetLabels(utils.UnionMaps(cfg.DefaultLabels, podTemplateSpec.GetLabels(), utils.CopyMap(taskCtx.TaskExecutionMetadata().GetLabels()))) - podTemplateSpec.SetAnnotations(utils.UnionMaps(cfg.DefaultAnnotations, podTemplateSpec.GetAnnotations(), utils.CopyMap(taskCtx.TaskExecutionMetadata().GetAnnotations()))) + podTemplateSpec.SetLabels(pluginsUtils.UnionMaps(cfg.DefaultLabels, podTemplateSpec.GetLabels(), pluginsUtils.CopyMap(taskCtx.TaskExecutionMetadata().GetLabels()))) + podTemplateSpec.SetAnnotations(pluginsUtils.UnionMaps(cfg.DefaultAnnotations, podTemplateSpec.GetAnnotations(), pluginsUtils.CopyMap(taskCtx.TaskExecutionMetadata().GetAnnotations()))) return podTemplateSpec, nil } @@ -393,8 +396,8 @@ func buildSubmitterPodTemplate(podSpec *v1.PodSpec, objectMeta *metav1.ObjectMet } cfg := config.GetK8sPluginConfig() - podTemplateSpec.SetLabels(utils.UnionMaps(cfg.DefaultLabels, podTemplateSpec.GetLabels(), utils.CopyMap(taskCtx.TaskExecutionMetadata().GetLabels()))) - podTemplateSpec.SetAnnotations(utils.UnionMaps(cfg.DefaultAnnotations, podTemplateSpec.GetAnnotations(), utils.CopyMap(taskCtx.TaskExecutionMetadata().GetAnnotations()))) + podTemplateSpec.SetLabels(pluginsUtils.UnionMaps(cfg.DefaultLabels, podTemplateSpec.GetLabels(), pluginsUtils.CopyMap(taskCtx.TaskExecutionMetadata().GetLabels()))) + podTemplateSpec.SetAnnotations(pluginsUtils.UnionMaps(cfg.DefaultAnnotations, podTemplateSpec.GetAnnotations(), pluginsUtils.CopyMap(taskCtx.TaskExecutionMetadata().GetAnnotations()))) return podTemplateSpec } @@ -497,17 +500,20 @@ func buildWorkerPodTemplate(primaryContainer *v1.Container, basePodSpec *v1.PodS } primaryContainer.Ports = append(primaryContainer.Ports, ports...) 
- basePodSpec, err := mergeCustomPodSpec(primaryContainer, basePodSpec, spec.K8SPod) + basePodSpec, err := mergeCustomPodSpec(primaryContainer, basePodSpec, spec.GetK8SPod()) if err != nil { return v1.PodTemplateSpec{}, err } + basePodSpec = flytek8s.AddTolerationsForExtendedResources(basePodSpec) + + cfg := config.GetK8sPluginConfig() podTemplateSpec := v1.PodTemplateSpec{ Spec: *basePodSpec, ObjectMeta: *objectMetadata, } - podTemplateSpec.SetLabels(utils.UnionMaps(podTemplateSpec.GetLabels(), utils.CopyMap(taskCtx.TaskExecutionMetadata().GetLabels()))) - podTemplateSpec.SetAnnotations(utils.UnionMaps(podTemplateSpec.GetAnnotations(), utils.CopyMap(taskCtx.TaskExecutionMetadata().GetAnnotations()))) + podTemplateSpec.SetLabels(pluginsUtils.UnionMaps(cfg.DefaultLabels, podTemplateSpec.GetLabels(), pluginsUtils.CopyMap(taskCtx.TaskExecutionMetadata().GetLabels()))) + podTemplateSpec.SetAnnotations(pluginsUtils.UnionMaps(cfg.DefaultAnnotations, podTemplateSpec.GetAnnotations(), pluginsUtils.CopyMap(taskCtx.TaskExecutionMetadata().GetAnnotations()))) return podTemplateSpec, nil } @@ -517,16 +523,16 @@ func mergeCustomPodSpec(primaryContainer *v1.Container, podSpec *v1.PodSpec, k8s return podSpec, nil } - if k8sPod.PodSpec == nil { + if k8sPod.GetPodSpec() == nil { return podSpec, nil } var customPodSpec *v1.PodSpec - err := utils.UnmarshalStructToObj(k8sPod.PodSpec, &customPodSpec) + err := utils.UnmarshalStructToObj(k8sPod.GetPodSpec(), &customPodSpec) if err != nil { return nil, flyteerr.Errorf(flyteerr.BadTaskSpecification, - "Unable to unmarshal pod spec [%v], Err: [%v]", k8sPod.PodSpec, err.Error()) + "Unable to unmarshal pod spec [%v], Err: [%v]", k8sPod.GetPodSpec(), err.Error()) } for _, container := range customPodSpec.Containers { diff --git a/flyteplugins/go/tasks/plugins/k8s/ray/ray_test.go b/flyteplugins/go/tasks/plugins/k8s/ray/ray_test.go index 2cd3eb8893..42978cac81 100644 --- a/flyteplugins/go/tasks/plugins/k8s/ray/ray_test.go +++ 
b/flyteplugins/go/tasks/plugins/k8s/ray/ray_test.go @@ -27,7 +27,7 @@ import ( "github.com/flyteorg/flyte/flyteplugins/go/tasks/pluginmachinery/k8s" mocks2 "github.com/flyteorg/flyte/flyteplugins/go/tasks/pluginmachinery/k8s/mocks" "github.com/flyteorg/flyte/flyteplugins/go/tasks/pluginmachinery/tasklog" - "github.com/flyteorg/flyte/flyteplugins/go/tasks/pluginmachinery/utils" + "github.com/flyteorg/flyte/flytestdlib/utils" ) const ( @@ -280,9 +280,10 @@ func TestBuildResourceRayContainerImage(t *testing.T) { func TestBuildResourceRayExtendedResources(t *testing.T) { assert.NoError(t, config.SetK8sPluginConfig(&config.K8sPluginConfig{ - GpuDeviceNodeLabel: "gpu-node-label", - GpuPartitionSizeNodeLabel: "gpu-partition-size", - GpuResourceName: flytek8s.ResourceNvidiaGPU, + GpuDeviceNodeLabel: "gpu-node-label", + GpuPartitionSizeNodeLabel: "gpu-partition-size", + GpuResourceName: flytek8s.ResourceNvidiaGPU, + AddTolerationsForExtendedResources: []string{"nvidia.com/gpu"}, })) params := []struct { @@ -324,6 +325,11 @@ func TestBuildResourceRayExtendedResources(t *testing.T) { Operator: corev1.TolerationOpEqual, Effect: corev1.TaintEffectNoSchedule, }, + { + Key: "nvidia.com/gpu", + Operator: corev1.TolerationOpExists, + Effect: corev1.TaintEffectNoSchedule, + }, }, }, { @@ -375,6 +381,11 @@ func TestBuildResourceRayExtendedResources(t *testing.T) { Operator: corev1.TolerationOpEqual, Effect: corev1.TaintEffectNoSchedule, }, + { + Key: "nvidia.com/gpu", + Operator: corev1.TolerationOpExists, + Effect: corev1.TaintEffectNoSchedule, + }, }, }, } @@ -500,7 +511,7 @@ func TestBuildResourceRayCustomK8SPod(t *testing.T) { } if p.workerK8SPod != nil { - for _, spec := range rayJobInput.RayCluster.WorkerGroupSpec { + for _, spec := range rayJobInput.GetRayCluster().GetWorkerGroupSpec() { spec.K8SPod = p.workerK8SPod } } diff --git a/flyteplugins/go/tasks/plugins/k8s/spark/spark.go b/flyteplugins/go/tasks/plugins/k8s/spark/spark.go index 8b766a391a..6873fc2257 100644 --- 
a/flyteplugins/go/tasks/plugins/k8s/spark/spark.go +++ b/flyteplugins/go/tasks/plugins/k8s/spark/spark.go @@ -44,7 +44,7 @@ func validateSparkJob(sparkJob *plugins.SparkJob) error { return fmt.Errorf("empty sparkJob") } - if len(sparkJob.MainApplicationFile) == 0 && len(sparkJob.MainClass) == 0 { + if len(sparkJob.GetMainApplicationFile()) == 0 && len(sparkJob.GetMainClass()) == 0 { return fmt.Errorf("either MainApplicationFile or MainClass must be set") } @@ -262,10 +262,10 @@ func createSparkApplication(sparkJob *plugins.SparkJob, sparkConfig map[string]s app.Spec.BatchScheduler = &val } - if sparkJob.MainApplicationFile != "" { + if sparkJob.GetMainApplicationFile() != "" { app.Spec.MainApplicationFile = &sparkJob.MainApplicationFile } - if sparkJob.MainClass != "" { + if sparkJob.GetMainClass() != "" { app.Spec.MainClass = &sparkJob.MainClass } return app diff --git a/flyteplugins/go/tasks/plugins/k8s/spark/spark_test.go b/flyteplugins/go/tasks/plugins/k8s/spark/spark_test.go index d657d4c273..0a6f51d0e2 100644 --- a/flyteplugins/go/tasks/plugins/k8s/spark/spark_test.go +++ b/flyteplugins/go/tasks/plugins/k8s/spark/spark_test.go @@ -101,10 +101,10 @@ func TestGetEventInfo(t *testing.T) { info, err := getEventInfoForSpark(taskCtx, dummySparkApplication(sj.RunningState)) assert.NoError(t, err) assert.Len(t, info.Logs, 6) - assert.Equal(t, "https://spark-ui.flyte", info.CustomInfo.Fields[sparkDriverUI].GetStringValue()) + assert.Equal(t, "https://spark-ui.flyte", info.CustomInfo.GetFields()[sparkDriverUI].GetStringValue()) generatedLinks := make([]string, 0, len(info.Logs)) for _, l := range info.Logs { - generatedLinks = append(generatedLinks, l.Uri) + generatedLinks = append(generatedLinks, l.GetUri()) } expectedLinks := []string{ @@ -121,12 +121,12 @@ func TestGetEventInfo(t *testing.T) { info, err = getEventInfoForSpark(taskCtx, dummySparkApplication(sj.SubmittedState)) generatedLinks = make([]string, 0, len(info.Logs)) for _, l := range info.Logs { - 
generatedLinks = append(generatedLinks, l.Uri) + generatedLinks = append(generatedLinks, l.GetUri()) } assert.NoError(t, err) assert.Len(t, info.Logs, 5) assert.Equal(t, expectedLinks[:5], generatedLinks) // No Spark Driver UI for Submitted state - assert.True(t, info.Logs[4].ShowWhilePending) // All User Logs should be shown while pending + assert.True(t, info.Logs[4].GetShowWhilePending()) // All User Logs should be shown while pending assert.NoError(t, setSparkConfig(&Config{ SparkHistoryServerURL: "spark-history.flyte", @@ -151,10 +151,10 @@ func TestGetEventInfo(t *testing.T) { info, err = getEventInfoForSpark(taskCtx, dummySparkApplication(sj.FailedState)) assert.NoError(t, err) assert.Len(t, info.Logs, 5) - assert.Equal(t, "spark-history.flyte/history/app-id", info.CustomInfo.Fields[sparkHistoryUI].GetStringValue()) + assert.Equal(t, "spark-history.flyte/history/app-id", info.CustomInfo.GetFields()[sparkHistoryUI].GetStringValue()) generatedLinks = make([]string, 0, len(info.Logs)) for _, l := range info.Logs { - generatedLinks = append(generatedLinks, l.Uri) + generatedLinks = append(generatedLinks, l.GetUri()) } expectedLinks = []string{ @@ -853,7 +853,7 @@ func TestBuildResourcePodTemplate(t *testing.T) { assert.Equal(t, defaultConfig.DefaultEnvVars["foo"], findEnvVarByName(sparkApp.Spec.Driver.Env, "foo").Value) assert.Equal(t, defaultConfig.DefaultEnvVars["fooEnv"], findEnvVarByName(sparkApp.Spec.Driver.Env, "fooEnv").Value) assert.Equal(t, findEnvVarByName(dummyEnvVarsWithSecretRef, "SECRET"), findEnvVarByName(sparkApp.Spec.Driver.Env, "SECRET")) - assert.Equal(t, 10, len(sparkApp.Spec.Driver.Env)) + assert.Equal(t, 9, len(sparkApp.Spec.Driver.Env)) assert.Equal(t, testImage, *sparkApp.Spec.Driver.Image) assert.Equal(t, flytek8s.GetServiceAccountNameFromTaskExecutionMetadata(taskCtx.TaskExecutionMetadata()), *sparkApp.Spec.Driver.ServiceAccount) assert.Equal(t, defaultConfig.DefaultPodSecurityContext, sparkApp.Spec.Driver.SecurityContenxt) @@ -890,7 
+890,7 @@ func TestBuildResourcePodTemplate(t *testing.T) { assert.Equal(t, defaultConfig.DefaultEnvVars["foo"], findEnvVarByName(sparkApp.Spec.Executor.Env, "foo").Value) assert.Equal(t, defaultConfig.DefaultEnvVars["fooEnv"], findEnvVarByName(sparkApp.Spec.Executor.Env, "fooEnv").Value) assert.Equal(t, findEnvVarByName(dummyEnvVarsWithSecretRef, "SECRET"), findEnvVarByName(sparkApp.Spec.Executor.Env, "SECRET")) - assert.Equal(t, 10, len(sparkApp.Spec.Executor.Env)) + assert.Equal(t, 9, len(sparkApp.Spec.Executor.Env)) assert.Equal(t, testImage, *sparkApp.Spec.Executor.Image) assert.Equal(t, defaultConfig.DefaultPodSecurityContext, sparkApp.Spec.Executor.SecurityContenxt) assert.Equal(t, defaultConfig.DefaultPodDNSConfig, sparkApp.Spec.Executor.DNSConfig) diff --git a/flyteplugins/go/tasks/plugins/presto/execution_state.go b/flyteplugins/go/tasks/plugins/presto/execution_state.go index 3399c013ae..88edb30cb8 100644 --- a/flyteplugins/go/tasks/plugins/presto/execution_state.go +++ b/flyteplugins/go/tasks/plugins/presto/execution_state.go @@ -217,10 +217,10 @@ func GetQueryInfo(ctx context.Context, tCtx core.TaskExecutionContext) (string, } outputs, err := template.Render(ctx, []string{ - prestoQuery.RoutingGroup, - prestoQuery.Catalog, - prestoQuery.Schema, - prestoQuery.Statement, + prestoQuery.GetRoutingGroup(), + prestoQuery.GetCatalog(), + prestoQuery.GetSchema(), + prestoQuery.GetStatement(), }, template.Parameters{ TaskExecMetadata: tCtx.TaskExecutionMetadata(), Inputs: tCtx.InputReader(), @@ -241,7 +241,7 @@ func GetQueryInfo(ctx context.Context, tCtx core.TaskExecutionContext) (string, } func validatePrestoStatement(prestoJob plugins.PrestoQuery) error { - if prestoJob.Statement == "" { + if prestoJob.GetStatement() == "" { return errors.Errorf(errors.BadTaskSpecification, "Query could not be found. 
Please ensure that you are at least on Flytekit version 0.3.0 or later.") } @@ -440,7 +440,7 @@ func writeOutput(ctx context.Context, tCtx core.TaskExecutionContext, externalLo return err } - results := taskTemplate.Interface.Outputs.Variables["results"] + results := taskTemplate.GetInterface().GetOutputs().GetVariables()["results"] return tCtx.OutputWriter().Put(ctx, ioutils.NewInMemoryOutputReader( &pb.LiteralMap{ @@ -474,13 +474,13 @@ func MapExecutionStateToPhaseInfo(state ExecutionState) core.PhaseInfo { if state.CreationFailureCount > 5 { phaseInfo = core.PhaseInfoRetryableFailure("PrestoFailure", "Too many creation attempts", nil) } else { - phaseInfo = core.PhaseInfoRunning(uint32(3*state.QueryCount+1), ConstructTaskInfo(state)) + phaseInfo = core.PhaseInfoRunning(uint32(3*state.QueryCount+1), ConstructTaskInfo(state)) // #nosec G115 } case PhaseSubmitted: - phaseInfo = core.PhaseInfoRunning(uint32(3*state.QueryCount+2), ConstructTaskInfo(state)) + phaseInfo = core.PhaseInfoRunning(uint32(3*state.QueryCount+2), ConstructTaskInfo(state)) // #nosec G115 case PhaseQuerySucceeded: if state.QueryCount < 5 { - phaseInfo = core.PhaseInfoRunning(uint32(3*state.QueryCount+3), ConstructTaskInfo(state)) + phaseInfo = core.PhaseInfoRunning(uint32(3*state.QueryCount+3), ConstructTaskInfo(state)) // #nosec G115 } else { phaseInfo = core.PhaseInfoSuccess(ConstructTaskInfo(state)) } diff --git a/flyteplugins/go/tasks/plugins/presto/execution_state_test.go b/flyteplugins/go/tasks/plugins/presto/execution_state_test.go index 4d20d64ee6..e89f1af4ae 100644 --- a/flyteplugins/go/tasks/plugins/presto/execution_state_test.go +++ b/flyteplugins/go/tasks/plugins/presto/execution_state_test.go @@ -84,7 +84,7 @@ func TestConstructTaskLog(t *testing.T) { u, err := url.Parse(expected) assert.NoError(t, err) taskLog := ConstructTaskLog(ExecutionState{CommandID: "123", URI: u.String()}) - assert.Equal(t, expected, taskLog.Uri) + assert.Equal(t, expected, taskLog.GetUri()) } func 
TestConstructTaskInfo(t *testing.T) { @@ -103,7 +103,7 @@ func TestConstructTaskInfo(t *testing.T) { } taskInfo := ConstructTaskInfo(e) - assert.Equal(t, "https://prestoproxy-internal.flyteorg.net:443", taskInfo.Logs[0].Uri) + assert.Equal(t, "https://prestoproxy-internal.flyteorg.net:443", taskInfo.Logs[0].GetUri()) assert.Len(t, taskInfo.ExternalResources, 1) assert.Equal(t, taskInfo.ExternalResources[0].ExternalID, "123") } diff --git a/flyteplugins/go/tasks/plugins/presto/executions_cache.go b/flyteplugins/go/tasks/plugins/presto/executions_cache.go index cc5248c0f2..b41df763d0 100644 --- a/flyteplugins/go/tasks/plugins/presto/executions_cache.go +++ b/flyteplugins/go/tasks/plugins/presto/executions_cache.go @@ -36,7 +36,8 @@ func NewPrestoExecutionsCache( scope: scope, cfg: cfg, } - autoRefreshCache, err := cache.NewAutoRefreshCache(cfg.RefreshCacheConfig.Name, q.SyncPrestoQuery, workqueue.DefaultControllerRateLimiter(), cfg.RefreshCacheConfig.SyncPeriod.Duration, cfg.RefreshCacheConfig.Workers, cfg.RefreshCacheConfig.LruCacheSize, scope) + // #nosec G115 + autoRefreshCache, err := cache.NewAutoRefreshCache(cfg.RefreshCacheConfig.Name, q.SyncPrestoQuery, workqueue.DefaultControllerRateLimiter(), cfg.RefreshCacheConfig.SyncPeriod.Duration, uint(cfg.RefreshCacheConfig.Workers), uint(cfg.RefreshCacheConfig.LruCacheSize), scope) if err != nil { logger.Errorf(ctx, "Could not create AutoRefreshCache in Executor. 
[%s]", err) return q, errors.Wrapf(errors.CacheFailed, err, "Error creating AutoRefreshCache") diff --git a/flyteplugins/go/tasks/plugins/testing/echo.go b/flyteplugins/go/tasks/plugins/testing/echo.go index 09c4dc53b1..00ca339f20 100644 --- a/flyteplugins/go/tasks/plugins/testing/echo.go +++ b/flyteplugins/go/tasks/plugins/testing/echo.go @@ -104,7 +104,7 @@ func copyInputsToOutputs(ctx context.Context, tCtx core.TaskExecutionContext) (c outputLiterals := make(map[string]*idlcore.Literal, len(inputToOutputVariableMappings)) for inputVariableName, outputVariableName := range inputToOutputVariableMappings { - outputLiterals[outputVariableName] = inputLiterals.Literals[inputVariableName] + outputLiterals[outputVariableName] = inputLiterals.GetLiterals()[inputVariableName] } outputLiteralMap := &idlcore.LiteralMap{ @@ -132,12 +132,12 @@ func compileInputToOutputVariableMappings(ctx context.Context, tCtx core.TaskExe } var inputs, outputs map[string]*idlcore.Variable - if taskTemplate.Interface != nil { - if taskTemplate.Interface.Inputs != nil { - inputs = taskTemplate.Interface.Inputs.Variables + if taskTemplate.GetInterface() != nil { + if taskTemplate.GetInterface().GetInputs() != nil { + inputs = taskTemplate.GetInterface().GetInputs().GetVariables() } - if taskTemplate.Interface.Outputs != nil { - outputs = taskTemplate.Interface.Outputs.Variables + if taskTemplate.GetInterface().GetOutputs() != nil { + outputs = taskTemplate.GetInterface().GetOutputs().GetVariables() } } diff --git a/flyteplugins/go/tasks/plugins/webapi/agent/client.go b/flyteplugins/go/tasks/plugins/webapi/agent/client.go index 148113fb38..04c464eaa3 100644 --- a/flyteplugins/go/tasks/plugins/webapi/agent/client.go +++ b/flyteplugins/go/tasks/plugins/webapi/agent/client.go @@ -130,16 +130,16 @@ func getAgentRegistry(ctx context.Context, cs *ClientSet) Registry { agentSupportedTaskCategories := make(map[string]struct{}) for _, agent := range res.GetAgents() { - deprecatedSupportedTaskTypes := 
agent.SupportedTaskTypes + deprecatedSupportedTaskTypes := agent.GetSupportedTaskTypes() for _, supportedTaskType := range deprecatedSupportedTaskTypes { - agent := &Agent{AgentDeployment: agentDeployment, IsSync: agent.IsSync} + agent := &Agent{AgentDeployment: agentDeployment, IsSync: agent.GetIsSync()} newAgentRegistry[supportedTaskType] = map[int32]*Agent{defaultTaskTypeVersion: agent} agentSupportedTaskCategories[supportedTaskType] = struct{}{} } - supportedTaskCategories := agent.SupportedTaskCategories + supportedTaskCategories := agent.GetSupportedTaskCategories() for _, supportedCategory := range supportedTaskCategories { - agent := &Agent{AgentDeployment: agentDeployment, IsSync: agent.IsSync} + agent := &Agent{AgentDeployment: agentDeployment, IsSync: agent.GetIsSync()} supportedCategoryName := supportedCategory.GetName() newAgentRegistry[supportedCategoryName] = map[int32]*Agent{supportedCategory.GetVersion(): agent} agentSupportedTaskCategories[supportedCategoryName] = struct{}{} diff --git a/flyteplugins/go/tasks/plugins/webapi/agent/integration_test.go b/flyteplugins/go/tasks/plugins/webapi/agent/integration_test.go index ba74fbf5d2..5348b71ebb 100644 --- a/flyteplugins/go/tasks/plugins/webapi/agent/integration_test.go +++ b/flyteplugins/go/tasks/plugins/webapi/agent/integration_test.go @@ -261,7 +261,7 @@ func newMockAsyncAgentPlugin() webapi.PluginEntry { mockCreateRequestMatcher := mock.MatchedBy(func(request *admin.CreateTaskRequest) bool { expectedArgs := []string{"pyflyte-fast-execute", "--output-prefix", "/tmp/123"} - return slices.Equal(request.Template.GetContainer().Args, expectedArgs) + return slices.Equal(request.GetTemplate().GetContainer().GetArgs(), expectedArgs) }) asyncAgentClient.On("CreateTask", mock.Anything, mockCreateRequestMatcher).Return(&admin.CreateTaskResponse{ ResourceMeta: []byte{1, 2, 3, 4}}, nil) diff --git a/flyteplugins/go/tasks/plugins/webapi/agent/plugin.go b/flyteplugins/go/tasks/plugins/webapi/agent/plugin.go 
index 4fffe2bee5..4f518ced55 100644 --- a/flyteplugins/go/tasks/plugins/webapi/agent/plugin.go +++ b/flyteplugins/go/tasks/plugins/webapi/agent/plugin.go @@ -45,6 +45,7 @@ type ResourceWrapper struct { Message string LogLinks []*flyteIdl.TaskLog CustomInfo *structpb.Struct + AgentError *admin.AgentError } // IsTerminal is used to avoid making network calls to the agent service if the resource is already in a terminal state. @@ -79,11 +80,11 @@ func (p *Plugin) Create(ctx context.Context, taskCtx webapi.TaskExecutionContext webapi.Resource, error) { taskTemplate, err := taskCtx.TaskReader().Read(ctx) if err != nil { - return nil, nil, err + return nil, nil, fmt.Errorf("failed to read task template with error: %v", err) } inputs, err := taskCtx.InputReader().Get(ctx) if err != nil { - return nil, nil, err + return nil, nil, fmt.Errorf("failed to read inputs with error: %v", err) } var argTemplate []string @@ -94,10 +95,10 @@ func (p *Plugin) Create(ctx context.Context, taskCtx webapi.TaskExecutionContext OutputPath: taskCtx.OutputWriter(), Task: taskCtx.TaskReader(), } - argTemplate = taskTemplate.GetContainer().Args - modifiedArgs, err := template.Render(ctx, taskTemplate.GetContainer().Args, templateParameters) + argTemplate = taskTemplate.GetContainer().GetArgs() + modifiedArgs, err := template.Render(ctx, taskTemplate.GetContainer().GetArgs(), templateParameters) if err != nil { - return nil, nil, err + return nil, nil, fmt.Errorf("failed to render args with error: %v", err) } taskTemplate.GetContainer().Args = modifiedArgs defer func() { @@ -107,7 +108,7 @@ func (p *Plugin) Create(ctx context.Context, taskCtx webapi.TaskExecutionContext } outputPrefix := taskCtx.OutputWriter().GetOutputPrefixPath().String() - taskCategory := admin.TaskCategory{Name: taskTemplate.Type, Version: taskTemplate.TaskTypeVersion} + taskCategory := admin.TaskCategory{Name: taskTemplate.GetType(), Version: taskTemplate.GetTaskTypeVersion()} agent, isSync := p.getFinalAgent(&taskCategory, 
p.cfg) taskExecutionMetadata := buildTaskExecutionMetadata(taskCtx.TaskExecutionMetadata()) @@ -134,7 +135,7 @@ func (p *Plugin) Create(ctx context.Context, taskCtx webapi.TaskExecutionContext request := &admin.CreateTaskRequest{Inputs: inputs, Template: taskTemplate, OutputPrefix: outputPrefix, TaskExecutionMetadata: &taskExecutionMetadata} res, err := client.CreateTask(finalCtx, request) if err != nil { - return nil, nil, err + return nil, nil, fmt.Errorf("failed to create task from agent with %v", err) } return ResourceMetaWrapper{ @@ -152,7 +153,8 @@ func (p *Plugin) ExecuteTaskSync( ) (webapi.ResourceMeta, webapi.Resource, error) { stream, err := client.ExecuteTaskSync(ctx) if err != nil { - return nil, nil, err + logger.Errorf(ctx, "failed to execute task from agent with %v", err) + return nil, nil, fmt.Errorf("failed to execute task from agent with %v", err) } headerProto := &admin.ExecuteTaskSyncRequest{ @@ -184,8 +186,8 @@ func (p *Plugin) ExecuteTaskSync( in, err := stream.Recv() if err != nil { - logger.Errorf(ctx, "failed to write output with err %s", err.Error()) - return nil, nil, err + logger.Errorf(ctx, "failed to receive stream from server %s", err.Error()) + return nil, nil, fmt.Errorf("failed to receive stream from server %w", err) } if in.GetHeader() == nil { return nil, nil, fmt.Errorf("expected header in the response, but got none") @@ -195,12 +197,13 @@ func (p *Plugin) ExecuteTaskSync( resource := in.GetHeader().GetResource() return nil, ResourceWrapper{ - Phase: resource.Phase, - Outputs: resource.Outputs, - Message: resource.Message, - LogLinks: resource.LogLinks, - CustomInfo: resource.CustomInfo, - }, err + Phase: resource.GetPhase(), + Outputs: resource.GetOutputs(), + Message: resource.GetMessage(), + LogLinks: resource.GetLogLinks(), + CustomInfo: resource.GetCustomInfo(), + AgentError: resource.GetAgentError(), + }, nil } func (p *Plugin) Get(ctx context.Context, taskCtx webapi.GetContext) (latest webapi.Resource, err error) { @@ 
-215,22 +218,22 @@ func (p *Plugin) Get(ctx context.Context, taskCtx webapi.GetContext) (latest web defer cancel() request := &admin.GetTaskRequest{ - TaskType: metadata.TaskCategory.Name, + TaskType: metadata.TaskCategory.GetName(), TaskCategory: &metadata.TaskCategory, ResourceMeta: metadata.AgentResourceMeta, } res, err := client.GetTask(finalCtx, request) if err != nil { - return nil, err + return nil, fmt.Errorf("failed to get task from agent with %v", err) } return ResourceWrapper{ - Phase: res.Resource.Phase, - State: res.Resource.State, - Outputs: res.Resource.Outputs, - Message: res.Resource.Message, - LogLinks: res.Resource.LogLinks, - CustomInfo: res.Resource.CustomInfo, + Phase: res.GetResource().GetPhase(), + State: res.GetResource().GetState(), + Outputs: res.GetResource().GetOutputs(), + Message: res.GetResource().GetMessage(), + LogLinks: res.GetResource().GetLogLinks(), + CustomInfo: res.GetResource().GetCustomInfo(), }, nil } @@ -249,17 +252,24 @@ func (p *Plugin) Delete(ctx context.Context, taskCtx webapi.DeleteContext) error defer cancel() request := &admin.DeleteTaskRequest{ - TaskType: metadata.TaskCategory.Name, + TaskType: metadata.TaskCategory.GetName(), TaskCategory: &metadata.TaskCategory, ResourceMeta: metadata.AgentResourceMeta, } _, err = client.DeleteTask(finalCtx, request) - return err + if err != nil { + return fmt.Errorf("failed to delete task from agent with %v", err) + } + return nil } func (p *Plugin) Status(ctx context.Context, taskCtx webapi.StatusContext) (phase core.PhaseInfo, err error) { resource := taskCtx.Resource().(ResourceWrapper) taskInfo := &core.TaskInfo{Logs: resource.LogLinks, CustomInfo: resource.CustomInfo} + errorCode := pluginErrors.TaskFailedWithError + if resource.AgentError != nil && resource.AgentError.GetCode() != "" { + errorCode = resource.AgentError.GetCode() + } switch resource.Phase { case flyteIdl.TaskExecution_QUEUED: @@ -278,11 +285,10 @@ func (p *Plugin) Status(ctx context.Context, taskCtx webapi.StatusContext) (phas } return
core.PhaseInfoSuccess(taskInfo), nil case flyteIdl.TaskExecution_ABORTED: - return core.PhaseInfoFailure(pluginErrors.TaskFailedWithError, "failed to run the job with aborted phase.\n"+resource.Message, taskInfo), nil + return core.PhaseInfoFailure(errorCode, "failed to run the job with aborted phase.", taskInfo), nil case flyteIdl.TaskExecution_FAILED: - return core.PhaseInfoFailure(pluginErrors.TaskFailedWithError, "failed to run the job.\n"+resource.Message, taskInfo), nil + return core.PhaseInfoFailure(errorCode, fmt.Sprintf("failed to run the job: %s", resource.Message), taskInfo), nil } - // The default phase is undefined. if resource.Phase != flyteIdl.TaskExecution_UNDEFINED { return core.PhaseInfoUndefined, pluginErrors.Errorf(core.SystemErrorCode, "unknown execution phase [%v].", resource.Phase) @@ -302,7 +308,7 @@ func (p *Plugin) Status(ctx context.Context, taskCtx webapi.StatusContext) (phas err = writeOutput(ctx, taskCtx, resource.Outputs) if err != nil { logger.Errorf(ctx, "failed to write output with err %s", err.Error()) - return core.PhaseInfoUndefined, err + return core.PhaseInfoUndefined, fmt.Errorf("failed to write output with err %s", err.Error()) } return core.PhaseInfoSuccess(taskInfo), nil } @@ -350,7 +356,7 @@ func (p *Plugin) getFinalAgent(taskCategory *admin.TaskCategory, cfg *Config) (* p.mu.RLock() defer p.mu.RUnlock() - if agent, exists := p.registry[taskCategory.Name][taskCategory.Version]; exists { + if agent, exists := p.registry[taskCategory.GetName()][taskCategory.GetVersion()]; exists { return agent.AgentDeployment, agent.IsSync } return &cfg.DefaultAgent, false @@ -362,7 +368,7 @@ func writeOutput(ctx context.Context, taskCtx webapi.StatusContext, outputs *fly return err } - if taskTemplate.Interface == nil || taskTemplate.Interface.Outputs == nil || taskTemplate.Interface.Outputs.Variables == nil { + if taskTemplate.GetInterface() == nil || taskTemplate.GetInterface().GetOutputs() == nil || 
taskTemplate.Interface.Outputs.Variables == nil { logger.Debugf(ctx, "The task declares no outputs. Skipping writing the outputs.") return nil } @@ -388,7 +394,7 @@ func buildTaskExecutionMetadata(taskExecutionMetadata core.TaskExecutionMetadata Annotations: taskExecutionMetadata.GetAnnotations(), K8SServiceAccount: taskExecutionMetadata.GetK8sServiceAccount(), EnvironmentVariables: taskExecutionMetadata.GetEnvironmentVariables(), - Identity: taskExecutionMetadata.GetSecurityContext().RunAs, + Identity: taskExecutionMetadata.GetSecurityContext().RunAs, // nolint:protogetter } } diff --git a/flyteplugins/go/tasks/plugins/webapi/agent/plugin_test.go b/flyteplugins/go/tasks/plugins/webapi/agent/plugin_test.go index 9e8c97903e..546ef59712 100644 --- a/flyteplugins/go/tasks/plugins/webapi/agent/plugin_test.go +++ b/flyteplugins/go/tasks/plugins/webapi/agent/plugin_test.go @@ -12,9 +12,9 @@ import ( agentMocks "github.com/flyteorg/flyte/flyteidl/clients/go/admin/mocks" "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin" - flyteIdl "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core" flyteIdlCore "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core" "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/service" + pluginErrors "github.com/flyteorg/flyte/flyteplugins/go/tasks/errors" pluginsCore "github.com/flyteorg/flyte/flyteplugins/go/tasks/pluginmachinery/core" pluginCoreMocks "github.com/flyteorg/flyte/flyteplugins/go/tasks/pluginmachinery/core/mocks" webapiPlugin "github.com/flyteorg/flyte/flyteplugins/go/tasks/pluginmachinery/webapi/mocks" @@ -180,7 +180,7 @@ func TestPlugin(t *testing.T) { t.Run("test TaskExecution_UNDEFINED Status", func(t *testing.T) { taskContext := new(webapiPlugin.StatusContext) taskContext.On("Resource").Return(ResourceWrapper{ - Phase: flyteIdl.TaskExecution_UNDEFINED, + Phase: flyteIdlCore.TaskExecution_UNDEFINED, Outputs: nil, Message: "", LogLinks: []*flyteIdlCore.TaskLog{{Uri: "http://localhost:3000/log", Name: 
"Log Link"}}, @@ -194,7 +194,7 @@ func TestPlugin(t *testing.T) { t.Run("test TaskExecution_QUEUED Status", func(t *testing.T) { taskContext := new(webapiPlugin.StatusContext) taskContext.On("Resource").Return(ResourceWrapper{ - Phase: flyteIdl.TaskExecution_QUEUED, + Phase: flyteIdlCore.TaskExecution_QUEUED, Outputs: nil, Message: "", LogLinks: []*flyteIdlCore.TaskLog{{Uri: "http://localhost:3000/log", Name: "Log Link"}}, @@ -208,7 +208,7 @@ func TestPlugin(t *testing.T) { t.Run("test TaskExecution_WAITING_FOR_RESOURCES Status", func(t *testing.T) { taskContext := new(webapiPlugin.StatusContext) taskContext.On("Resource").Return(ResourceWrapper{ - Phase: flyteIdl.TaskExecution_WAITING_FOR_RESOURCES, + Phase: flyteIdlCore.TaskExecution_WAITING_FOR_RESOURCES, Outputs: nil, Message: "", LogLinks: []*flyteIdlCore.TaskLog{{Uri: "http://localhost:3000/log", Name: "Log Link"}}, @@ -222,7 +222,7 @@ func TestPlugin(t *testing.T) { t.Run("test TaskExecution_INITIALIZING Status", func(t *testing.T) { taskContext := new(webapiPlugin.StatusContext) taskContext.On("Resource").Return(ResourceWrapper{ - Phase: flyteIdl.TaskExecution_INITIALIZING, + Phase: flyteIdlCore.TaskExecution_INITIALIZING, Outputs: nil, Message: "", LogLinks: []*flyteIdlCore.TaskLog{{Uri: "http://localhost:3000/log", Name: "Log Link"}}, @@ -236,7 +236,7 @@ func TestPlugin(t *testing.T) { t.Run("test TaskExecution_RUNNING Status", func(t *testing.T) { taskContext := new(webapiPlugin.StatusContext) taskContext.On("Resource").Return(ResourceWrapper{ - Phase: flyteIdl.TaskExecution_RUNNING, + Phase: flyteIdlCore.TaskExecution_RUNNING, Outputs: nil, Message: "", LogLinks: []*flyteIdlCore.TaskLog{{Uri: "http://localhost:3000/log", Name: "Log Link"}}, @@ -250,7 +250,7 @@ func TestPlugin(t *testing.T) { t.Run("test TaskExecution_ABORTED Status", func(t *testing.T) { taskContext := new(webapiPlugin.StatusContext) taskContext.On("Resource").Return(ResourceWrapper{ - Phase: flyteIdl.TaskExecution_ABORTED, + Phase: 
flyteIdlCore.TaskExecution_ABORTED, Outputs: nil, Message: "", LogLinks: []*flyteIdlCore.TaskLog{{Uri: "http://localhost:3000/log", Name: "Log Link"}}, @@ -264,7 +264,26 @@ func TestPlugin(t *testing.T) { t.Run("test TaskExecution_FAILED Status", func(t *testing.T) { taskContext := new(webapiPlugin.StatusContext) taskContext.On("Resource").Return(ResourceWrapper{ - Phase: flyteIdl.TaskExecution_FAILED, + Phase: flyteIdlCore.TaskExecution_FAILED, + Outputs: nil, + Message: "boom", + LogLinks: []*flyteIdlCore.TaskLog{{Uri: "http://localhost:3000/log", Name: "Log Link"}}, + AgentError: &admin.AgentError{ + Code: "ERROR: 500", + }, + }) + + phase, err := plugin.Status(context.Background(), taskContext) + assert.NoError(t, err) + assert.Equal(t, pluginsCore.PhasePermanentFailure, phase.Phase()) + assert.Equal(t, "ERROR: 500", phase.Err().GetCode()) + assert.Equal(t, "failed to run the job: boom", phase.Err().GetMessage()) + }) + + t.Run("test TaskExecution_FAILED Status Without Agent Error", func(t *testing.T) { + taskContext := new(webapiPlugin.StatusContext) + taskContext.On("Resource").Return(ResourceWrapper{ + Phase: flyteIdlCore.TaskExecution_FAILED, Outputs: nil, Message: "", LogLinks: []*flyteIdlCore.TaskLog{{Uri: "http://localhost:3000/log", Name: "Log Link"}}, @@ -273,6 +292,7 @@ func TestPlugin(t *testing.T) { phase, err := plugin.Status(context.Background(), taskContext) assert.NoError(t, err) assert.Equal(t, pluginsCore.PhasePermanentFailure, phase.Phase()) + assert.Equal(t, pluginErrors.TaskFailedWithError, phase.Err().GetCode()) }) t.Run("test UNDEFINED Phase", func(t *testing.T) { diff --git a/flyteplugins/go/tasks/plugins/webapi/athena/utils.go b/flyteplugins/go/tasks/plugins/webapi/athena/utils.go index 761e81842a..1ed1fbaea4 100644 --- a/flyteplugins/go/tasks/plugins/webapi/athena/utils.go +++ b/flyteplugins/go/tasks/plugins/webapi/athena/utils.go @@ -19,12 +19,12 @@ func writeOutput(ctx context.Context, tCtx webapi.StatusContext, externalLocatio 
return err } - if taskTemplate.Interface == nil || taskTemplate.Interface.Outputs == nil || taskTemplate.Interface.Outputs.Variables == nil { + if taskTemplate.GetInterface() == nil || taskTemplate.GetInterface().GetOutputs() == nil || taskTemplate.Interface.Outputs.Variables == nil { logger.Infof(ctx, "The task declares no outputs. Skipping writing the outputs.") return nil } - resultsSchema, exists := taskTemplate.Interface.Outputs.Variables["results"] + resultsSchema, exists := taskTemplate.GetInterface().GetOutputs().GetVariables()["results"] if !exists { logger.Infof(ctx, "The task declares no outputs. Skipping writing the outputs.") return nil @@ -56,11 +56,11 @@ type QueryInfo struct { } func validateHiveQuery(hiveQuery pluginsIdl.QuboleHiveJob) error { - if hiveQuery.Query == nil { + if hiveQuery.GetQuery() == nil { return errors.Errorf(errors.BadTaskSpecification, "Query is a required field.") } - if len(hiveQuery.Query.Query) == 0 { + if len(hiveQuery.GetQuery().GetQuery()) == 0 { return errors.Errorf(errors.BadTaskSpecification, "Query statement is a required field.") } @@ -68,7 +68,7 @@ func validateHiveQuery(hiveQuery pluginsIdl.QuboleHiveJob) error { } func validatePrestoQuery(prestoQuery pluginsIdl.PrestoQuery) error { - if len(prestoQuery.Statement) == 0 { + if len(prestoQuery.GetStatement()) == 0 { return errors.Errorf(errors.BadTaskSpecification, "Statement is a required field.") } @@ -81,7 +81,7 @@ func extractQueryInfo(ctx context.Context, tCtx webapi.TaskExecutionContextReade return QueryInfo{}, err } - switch task.Type { + switch task.GetType() { case "hive": custom := task.GetCustom() hiveQuery := pluginsIdl.QuboleHiveJob{} @@ -95,8 +95,8 @@ func extractQueryInfo(ctx context.Context, tCtx webapi.TaskExecutionContextReade } outputs, err := template.Render(ctx, []string{ - hiveQuery.Query.Query, - hiveQuery.ClusterLabel, + hiveQuery.GetQuery().GetQuery(), + hiveQuery.GetClusterLabel(), }, template.Parameters{ TaskExecMetadata: 
tCtx.TaskExecutionMetadata(), Inputs: tCtx.InputReader(), @@ -124,10 +124,10 @@ func extractQueryInfo(ctx context.Context, tCtx webapi.TaskExecutionContextReade } outputs, err := template.Render(ctx, []string{ - prestoQuery.RoutingGroup, - prestoQuery.Catalog, - prestoQuery.Schema, - prestoQuery.Statement, + prestoQuery.GetRoutingGroup(), + prestoQuery.GetCatalog(), + prestoQuery.GetSchema(), + prestoQuery.GetStatement(), }, template.Parameters{ TaskExecMetadata: tCtx.TaskExecutionMetadata(), Inputs: tCtx.InputReader(), @@ -146,5 +146,5 @@ func extractQueryInfo(ctx context.Context, tCtx webapi.TaskExecutionContextReade }, nil } - return QueryInfo{}, errors.Errorf(ErrUser, "Unexpected task type [%v].", task.Type) + return QueryInfo{}, errors.Errorf(ErrUser, "Unexpected task type [%v].", task.GetType()) } diff --git a/flyteplugins/go/tasks/plugins/webapi/bigquery/plugin.go b/flyteplugins/go/tasks/plugins/webapi/bigquery/plugin.go index ad7da5f042..fca1eee954 100644 --- a/flyteplugins/go/tasks/plugins/webapi/bigquery/plugin.go +++ b/flyteplugins/go/tasks/plugins/webapi/bigquery/plugin.go @@ -95,17 +95,17 @@ func (p Plugin) createImpl(ctx context.Context, taskCtx webapi.TaskExecutionCont return nil, nil, pluginErrors.Wrapf(pluginErrors.RuntimeFailure, err, "unable to get bigquery client") } - if taskTemplate.Type == bigqueryQueryJobTask { + if taskTemplate.GetType() == bigqueryQueryJobTask { job, err = createQueryJob(jobID, taskTemplate.GetCustom(), inputs) } else { - err = pluginErrors.Errorf(pluginErrors.BadTaskSpecification, "unexpected task type [%v]", taskTemplate.Type) + err = pluginErrors.Errorf(pluginErrors.BadTaskSpecification, "unexpected task type [%v]", taskTemplate.GetType()) } if err != nil { return nil, nil, err } - job.Configuration.Query.Query = taskTemplate.GetSql().Statement + job.Configuration.Query.Query = taskTemplate.GetSql().GetStatement() job.Configuration.Labels = taskCtx.TaskExecutionMetadata().GetLabels() resp, err := 
client.Jobs.Insert(job.JobReference.ProjectId, job).Do() @@ -317,12 +317,12 @@ func writeOutput(ctx context.Context, tCtx webapi.StatusContext, OutputLocation return err } - if taskTemplate.Interface == nil || taskTemplate.Interface.Outputs == nil || taskTemplate.Interface.Outputs.Variables == nil { + if taskTemplate.GetInterface() == nil || taskTemplate.GetInterface().GetOutputs() == nil || taskTemplate.Interface.Outputs.Variables == nil { logger.Infof(ctx, "The task declares no outputs. Skipping writing the outputs.") return nil } - resultsStructuredDatasetType, exists := taskTemplate.Interface.Outputs.Variables["results"] + resultsStructuredDatasetType, exists := taskTemplate.GetInterface().GetOutputs().GetVariables()["results"] if !exists { logger.Infof(ctx, "The task declares no outputs. Skipping writing the outputs.") return nil diff --git a/flyteplugins/go/tasks/plugins/webapi/bigquery/plugin_test.go b/flyteplugins/go/tasks/plugins/webapi/bigquery/plugin_test.go index 939fe0577a..8682350986 100644 --- a/flyteplugins/go/tasks/plugins/webapi/bigquery/plugin_test.go +++ b/flyteplugins/go/tasks/plugins/webapi/bigquery/plugin_test.go @@ -105,9 +105,9 @@ func TestOutputWriter(t *testing.T) { assert.NoError(t, err) sd := literals.GetLiterals()["results"].GetScalar().GetStructuredDataset() - assert.Equal(t, sd.Uri, outputLocation) - assert.Equal(t, sd.Metadata.GetStructuredDatasetType().Columns[0].Name, "col1") - assert.Equal(t, sd.Metadata.GetStructuredDatasetType().Columns[0].LiteralType.GetSimple(), flyteIdlCore.SimpleType_INTEGER) + assert.Equal(t, sd.GetUri(), outputLocation) + assert.Equal(t, sd.GetMetadata().GetStructuredDatasetType().GetColumns()[0].GetName(), "col1") + assert.Equal(t, sd.GetMetadata().GetStructuredDatasetType().GetColumns()[0].GetLiteralType().GetSimple(), flyteIdlCore.SimpleType_INTEGER) if ee != nil { assert.NoError(t, ds.WriteProtobuf(ctx, outputWriter.GetErrorPath(), storage.Options{}, ee)) @@ -307,9 +307,9 @@ func 
TestHandleErrorResult(t *testing.T) { phaseInfo := handleErrorResult(test.reason, "message", &taskInfo) assert.Equal(t, test.phase, phaseInfo.Phase()) - assert.Equal(t, test.reason, phaseInfo.Err().Code) - assert.Equal(t, test.errorKind, phaseInfo.Err().Kind) - assert.Equal(t, "message", phaseInfo.Err().Message) + assert.Equal(t, test.reason, phaseInfo.Err().GetCode()) + assert.Equal(t, test.errorKind, phaseInfo.Err().GetKind()) + assert.Equal(t, "message", phaseInfo.Err().GetMessage()) }) } } diff --git a/flyteplugins/go/tasks/plugins/webapi/bigquery/query_job.go b/flyteplugins/go/tasks/plugins/webapi/bigquery/query_job.go index 7ce788e0fe..fe558f9d0c 100644 --- a/flyteplugins/go/tasks/plugins/webapi/bigquery/query_job.go +++ b/flyteplugins/go/tasks/plugins/webapi/bigquery/query_job.go @@ -155,7 +155,7 @@ func unmarshalQueryJobConfig(structObj *structpb.Struct) (*QueryJobConfig, error } func getJobConfigurationQuery(custom *QueryJobConfig, inputs *flyteIdlCore.LiteralMap) (*bigquery.JobConfigurationQuery, error) { - queryParameters, err := getQueryParameters(inputs.Literals) + queryParameters, err := getQueryParameters(inputs.GetLiterals()) if err != nil { return nil, pluginErrors.Errorf(pluginErrors.BadTaskSpecification, "unable build query parameters [%v]", err.Error()) @@ -216,7 +216,7 @@ func getQueryParameters(literalMap map[string]*flyteIdlCore.Literal) ([]*bigquer func getQueryParameter(literal *flyteIdlCore.Literal) (*bigquery.QueryParameterType, *bigquery.QueryParameterValue, error) { if scalar := literal.GetScalar(); scalar != nil { if primitive := scalar.GetPrimitive(); primitive != nil { - switch primitive.Value.(type) { + switch primitive.GetValue().(type) { case *flyteIdlCore.Primitive_Integer: integerType := bigquery.QueryParameterType{Type: "INT64"} integerValue := bigquery.QueryParameterValue{ diff --git a/flyteplugins/go/tasks/plugins/webapi/databricks/plugin.go b/flyteplugins/go/tasks/plugins/webapi/databricks/plugin.go index 
6ae9a1dbe5..d889392c59 100644 --- a/flyteplugins/go/tasks/plugins/webapi/databricks/plugin.go +++ b/flyteplugins/go/tasks/plugins/webapi/databricks/plugin.go @@ -96,8 +96,8 @@ func (p Plugin) Create(ctx context.Context, taskCtx webapi.TaskExecutionContextR } // override the default token in propeller - if len(sparkJob.DatabricksToken) != 0 { - token = sparkJob.DatabricksToken + if len(sparkJob.GetDatabricksToken()) != 0 { + token = sparkJob.GetDatabricksToken() } modifiedArgs, err := template.Render(ctx, container.GetArgs(), template.Parameters{ TaskExecMetadata: taskCtx.TaskExecutionMetadata(), @@ -110,20 +110,20 @@ func (p Plugin) Create(ctx context.Context, taskCtx webapi.TaskExecutionContextR } databricksJob := make(map[string]interface{}) - err = utils.UnmarshalStructToObj(sparkJob.DatabricksConf, &databricksJob) + err = utils.UnmarshalStructToObj(sparkJob.GetDatabricksConf(), &databricksJob) if err != nil { - return nil, nil, fmt.Errorf("failed to unmarshal databricksJob: %v: %v", sparkJob.DatabricksConf, err) + return nil, nil, fmt.Errorf("failed to unmarshal databricksJob: %v: %v", sparkJob.GetDatabricksConf(), err) } // If "existing_cluster_id" is in databricks_job, then we don't need to set "new_cluster" // Refer the docs here: https://docs.databricks.com/en/workflows/jobs/jobs-2.0-api.html#request-structure if clusterConfig, ok := databricksJob[newCluster].(map[string]interface{}); ok { if dockerConfig, ok := clusterConfig[dockerImage].(map[string]interface{}); !ok || dockerConfig[url] == nil { - clusterConfig[dockerImage] = map[string]string{url: container.Image} + clusterConfig[dockerImage] = map[string]string{url: container.GetImage()} } - if clusterConfig[sparkConfig] == nil && len(sparkJob.SparkConf) != 0 { - clusterConfig[sparkConfig] = sparkJob.SparkConf + if clusterConfig[sparkConfig] == nil && len(sparkJob.GetSparkConf()) != 0 { + clusterConfig[sparkConfig] = sparkJob.GetSparkConf() } } databricksJob[sparkPythonTask] = 
map[string]interface{}{pythonFile: p.cfg.EntrypointFile, parameters: modifiedArgs} @@ -299,7 +299,7 @@ func writeOutput(ctx context.Context, taskCtx webapi.StatusContext) error { if err != nil { return err } - if taskTemplate.Interface == nil || taskTemplate.Interface.Outputs == nil || taskTemplate.Interface.Outputs.Variables == nil { + if taskTemplate.GetInterface() == nil || taskTemplate.GetInterface().GetOutputs() == nil || taskTemplate.Interface.Outputs.Variables == nil { logger.Infof(ctx, "The task declares no outputs. Skipping writing the outputs.") return nil } diff --git a/flyteplugins/go/tasks/plugins/webapi/databricks/plugin_test.go b/flyteplugins/go/tasks/plugins/webapi/databricks/plugin_test.go index 228914af93..3cdecf6872 100644 --- a/flyteplugins/go/tasks/plugins/webapi/databricks/plugin_test.go +++ b/flyteplugins/go/tasks/plugins/webapi/databricks/plugin_test.go @@ -148,7 +148,7 @@ func TestCreateTaskInfo(t *testing.T) { taskInfo := createTaskInfo("run-id", "job-id", testInstance) assert.Equal(t, 1, len(taskInfo.Logs)) - assert.Equal(t, taskInfo.Logs[0].Uri, "https://test-account.cloud.databricks.com/#job/job-id/run/run-id") - assert.Equal(t, taskInfo.Logs[0].Name, "Databricks Console") + assert.Equal(t, taskInfo.Logs[0].GetUri(), "https://test-account.cloud.databricks.com/#job/job-id/run/run-id") + assert.Equal(t, taskInfo.Logs[0].GetName(), "Databricks Console") }) } diff --git a/flyteplugins/go/tasks/plugins/webapi/snowflake/plugin.go b/flyteplugins/go/tasks/plugins/webapi/snowflake/plugin.go index 02bf947fd4..c0728a79a7 100644 --- a/flyteplugins/go/tasks/plugins/webapi/snowflake/plugin.go +++ b/flyteplugins/go/tasks/plugins/webapi/snowflake/plugin.go @@ -84,7 +84,7 @@ func (p Plugin) Create(ctx context.Context, taskCtx webapi.TaskExecutionContextR config := task.GetConfig() outputs, err := template.Render(ctx, []string{ - task.GetSql().Statement, + task.GetSql().GetStatement(), }, template.Parameters{ TaskExecMetadata: 
taskCtx.TaskExecutionMetadata(), Inputs: taskCtx.InputReader(), diff --git a/flyteplugins/go/tasks/plugins/webapi/snowflake/plugin_test.go b/flyteplugins/go/tasks/plugins/webapi/snowflake/plugin_test.go index 7657a9e315..3de8f8a6b8 100644 --- a/flyteplugins/go/tasks/plugins/webapi/snowflake/plugin_test.go +++ b/flyteplugins/go/tasks/plugins/webapi/snowflake/plugin_test.go @@ -57,8 +57,8 @@ func TestCreateTaskInfo(t *testing.T) { taskInfo := createTaskInfo("d5493e36", "test-account") assert.Equal(t, 1, len(taskInfo.Logs)) - assert.Equal(t, taskInfo.Logs[0].Uri, "https://test-account.snowflakecomputing.com/console#/monitoring/queries/detail?queryId=d5493e36") - assert.Equal(t, taskInfo.Logs[0].Name, "Snowflake Console") + assert.Equal(t, taskInfo.Logs[0].GetUri(), "https://test-account.snowflakecomputing.com/console#/monitoring/queries/detail?queryId=d5493e36") + assert.Equal(t, taskInfo.Logs[0].GetName(), "Snowflake Console") }) } diff --git a/flytepropeller/.golangci.yml b/flytepropeller/.golangci.yml index 6d13f4a3b6..77107079d0 100644 --- a/flytepropeller/.golangci.yml +++ b/flytepropeller/.golangci.yml @@ -1,35 +1,25 @@ -# WARNING: THIS FILE IS MANAGED IN THE 'BOILERPLATE' REPO AND COPIED TO OTHER REPOSITORIES. 
-# ONLY EDIT THIS FILE FROM WITHIN THE 'FLYTEORG/BOILERPLATE' REPOSITORY: -# -# TO OPT OUT OF UPDATES, SEE https://github.com/flyteorg/boilerplate/blob/master/Readme.rst - run: skip-dirs: - pkg/client - linters: disable-all: true enable: - - deadcode - errcheck - - gas + - gosec - gci - goconst - goimports - - golint - gosimple - govet - ineffassign - misspell - nakedret - staticcheck - - structcheck - typecheck - unconvert - unparam - unused - - varcheck - + - protogetter linters-settings: gci: custom-order: true @@ -38,6 +28,8 @@ linters-settings: - default - prefix(github.com/flyteorg) skip-generated: true + goconst: + ignore-tests: true issues: exclude: - copylocks diff --git a/flytepropeller/cmd/kubectl-flyte/cmd/compile.go b/flytepropeller/cmd/kubectl-flyte/cmd/compile.go index 056b546849..c91e10c183 100644 --- a/flytepropeller/cmd/kubectl-flyte/cmd/compile.go +++ b/flytepropeller/cmd/kubectl-flyte/cmd/compile.go @@ -76,18 +76,18 @@ func (c *CompileOpts) compileWorkflowCmd() error { if err != nil { return err } - err = ioutil.WriteFile(c.protoFile+".yaml", b, os.ModePerm) + err = os.WriteFile(c.protoFile+".yaml", b, os.ModePerm) // #nosec G306 if err != nil { return err } } - compiledTasks, err := compileTasks(wfClosure.Tasks) + compiledTasks, err := compileTasks(wfClosure.GetTasks()) if err != nil { return err } - compileWfClosure, err := compiler.CompileWorkflow(wfClosure.Workflow, []*core.WorkflowTemplate{}, compiledTasks, []common.InterfaceProvider{}) + compileWfClosure, err := compiler.CompileWorkflow(wfClosure.GetWorkflow(), []*core.WorkflowTemplate{}, compiledTasks, []common.InterfaceProvider{}) if err != nil { return err } @@ -100,7 +100,7 @@ func (c *CompileOpts) compileWorkflowCmd() error { } if c.outputPath != "" { - return ioutil.WriteFile(c.outputPath, o, os.ModePerm) + return os.WriteFile(c.outputPath, o, os.ModePerm) // #nosec G306 } fmt.Printf("%v", string(o)) return nil diff --git a/flytepropeller/cmd/kubectl-flyte/cmd/create.go 
b/flytepropeller/cmd/kubectl-flyte/cmd/create.go index 2feeb8ec8e..3cf463b604 100644 --- a/flytepropeller/cmd/kubectl-flyte/cmd/create.go +++ b/flytepropeller/cmd/kubectl-flyte/cmd/create.go @@ -160,12 +160,12 @@ func (c *CreateOpts) createWorkflowFromProto() error { return err } - compiledTasks, err := compileTasks(wfClosure.Tasks) + compiledTasks, err := compileTasks(wfClosure.GetTasks()) if err != nil { return err } - wf, err := compiler.CompileWorkflow(wfClosure.Workflow, []*core.WorkflowTemplate{}, compiledTasks, []common.InterfaceProvider{}) + wf, err := compiler.CompileWorkflow(wfClosure.GetWorkflow(), []*core.WorkflowTemplate{}, compiledTasks, []common.InterfaceProvider{}) if err != nil { return err } @@ -182,8 +182,8 @@ func (c *CreateOpts) createWorkflowFromProto() error { if len(c.execID) > 0 { executionID = &core.WorkflowExecutionIdentifier{ Name: c.execID, - Domain: wfClosure.Workflow.Id.Domain, - Project: wfClosure.Workflow.Id.Project, + Domain: wfClosure.GetWorkflow().GetId().GetDomain(), + Project: wfClosure.GetWorkflow().GetId().GetProject(), } } diff --git a/flytepropeller/cmd/kubectl-flyte/cmd/create_test.go b/flytepropeller/cmd/kubectl-flyte/cmd/create_test.go index 5036201482..65bb2ecae1 100644 --- a/flytepropeller/cmd/kubectl-flyte/cmd/create_test.go +++ b/flytepropeller/cmd/kubectl-flyte/cmd/create_test.go @@ -3,7 +3,6 @@ package cmd import ( "encoding/json" "flag" - "io/ioutil" "os" "path/filepath" "testing" @@ -113,7 +112,7 @@ func generateSimpleWorkflow(t *testing.T) { marshaller := &jsonpb.Marshaler{} s, err := marshaller.MarshalToString(&closure) assert.NoError(t, err) - assert.NoError(t, ioutil.WriteFile(filepath.Join("testdata", "workflow.json.golden"), []byte(s), os.ModePerm)) + assert.NoError(t, os.WriteFile(filepath.Join("testdata", "workflow.json.golden"), []byte(s), os.ModePerm)) // #nosec G306 m := map[string]interface{}{} err = json.Unmarshal([]byte(s), &m) @@ -121,11 +120,11 @@ func generateSimpleWorkflow(t *testing.T) { b, err 
:= yaml.Marshal(m) assert.NoError(t, err) - assert.NoError(t, ioutil.WriteFile(filepath.Join("testdata", "workflow.yaml.golden"), b, os.ModePerm)) + assert.NoError(t, os.WriteFile(filepath.Join("testdata", "workflow.yaml.golden"), b, os.ModePerm)) // #nosec G306 raw, err := proto.Marshal(&closure) assert.NoError(t, err) - assert.NoError(t, ioutil.WriteFile(filepath.Join("testdata", "workflow.pb.golden"), raw, os.ModePerm)) + assert.NoError(t, os.WriteFile(filepath.Join("testdata", "workflow.pb.golden"), raw, os.ModePerm)) // #nosec G306 } func generateWorkflowWithInputs(t *testing.T) { @@ -242,7 +241,7 @@ func marshalGolden(t *testing.T, message proto.Message, filename string) { marshaller := &jsonpb.Marshaler{} s, err := marshaller.MarshalToString(message) assert.NoError(t, err) - assert.NoError(t, ioutil.WriteFile(filepath.Join("testdata", filename+".json.golden"), []byte(s), os.ModePerm)) + assert.NoError(t, os.WriteFile(filepath.Join("testdata", filename+".json.golden"), []byte(s), os.ModePerm)) // #nosec G306 m := map[string]interface{}{} err = json.Unmarshal([]byte(s), &m) @@ -250,28 +249,28 @@ func marshalGolden(t *testing.T, message proto.Message, filename string) { b, err := yaml.Marshal(m) assert.NoError(t, err) - assert.NoError(t, ioutil.WriteFile(filepath.Join("testdata", filename+".yaml.golden"), b, os.ModePerm)) + assert.NoError(t, os.WriteFile(filepath.Join("testdata", filename+".yaml.golden"), b, os.ModePerm)) // #nosec G306 raw, err := proto.Marshal(message) assert.NoError(t, err) - assert.NoError(t, ioutil.WriteFile(filepath.Join("testdata", filename+".pb.golden"), raw, os.ModePerm)) + assert.NoError(t, os.WriteFile(filepath.Join("testdata", filename+".pb.golden"), raw, os.ModePerm)) // #nosec G306 } func testCompile(t *testing.T) { f := func(t *testing.T, filePath, format string) { - raw, err := ioutil.ReadFile(filepath.Join("testdata", filePath)) + raw, err := os.ReadFile(filepath.Join("testdata", filePath)) assert.NoError(t, err) wf := 
&core.WorkflowClosure{} err = unmarshal(raw, format, wf) assert.NoError(t, err) assert.NotNil(t, wf) - assert.Equal(t, 2, len(wf.Tasks)) - if len(wf.Tasks) == 2 { - c := wf.Tasks[0].GetContainer() + assert.Equal(t, 2, len(wf.GetTasks())) + if len(wf.GetTasks()) == 2 { + c := wf.GetTasks()[0].GetContainer() assert.NotNil(t, c) - compiledTasks, err := compileTasks(wf.Tasks) + compiledTasks, err := compileTasks(wf.GetTasks()) assert.NoError(t, err) - compiledWf, err := compiler.CompileWorkflow(wf.Workflow, []*core.WorkflowTemplate{}, compiledTasks, []common.InterfaceProvider{}) + compiledWf, err := compiler.CompileWorkflow(wf.GetWorkflow(), []*core.WorkflowTemplate{}, compiledTasks, []common.InterfaceProvider{}) assert.NoError(t, err) _, err = k8s.BuildFlyteWorkflow(compiledWf, nil, nil, "") assert.NoError(t, err) diff --git a/flytepropeller/events/admin_eventsink.go b/flytepropeller/events/admin_eventsink.go index 3da6cca421..cc9c57661c 100644 --- a/flytepropeller/events/admin_eventsink.go +++ b/flytepropeller/events/admin_eventsink.go @@ -116,17 +116,17 @@ func IDFromMessage(message proto.Message) ([]byte, error) { var id string switch eventMessage := message.(type) { case *event.WorkflowExecutionEvent: - wid := eventMessage.ExecutionId - id = fmt.Sprintf("%s:%s:%s:%d", wid.Project, wid.Domain, wid.Name, eventMessage.Phase) + wid := eventMessage.GetExecutionId() + id = fmt.Sprintf("%s:%s:%s:%d", wid.GetProject(), wid.GetDomain(), wid.GetName(), eventMessage.GetPhase()) case *event.NodeExecutionEvent: - nid := eventMessage.Id - wid := nid.ExecutionId - id = fmt.Sprintf("%s:%s:%s:%s:%s:%d", wid.Project, wid.Domain, wid.Name, nid.NodeId, eventMessage.RetryGroup, eventMessage.Phase) + nid := eventMessage.GetId() + wid := nid.GetExecutionId() + id = fmt.Sprintf("%s:%s:%s:%s:%s:%d", wid.GetProject(), wid.GetDomain(), wid.GetName(), nid.GetNodeId(), eventMessage.GetRetryGroup(), eventMessage.GetPhase()) case *event.TaskExecutionEvent: - tid := eventMessage.TaskId - nid := 
eventMessage.ParentNodeExecutionId - wid := nid.ExecutionId - id = fmt.Sprintf("%s:%s:%s:%s:%s:%s:%d:%d:%d", wid.Project, wid.Domain, wid.Name, nid.NodeId, tid.Name, tid.Version, eventMessage.RetryAttempt, eventMessage.Phase, eventMessage.PhaseVersion) + tid := eventMessage.GetTaskId() + nid := eventMessage.GetParentNodeExecutionId() + wid := nid.GetExecutionId() + id = fmt.Sprintf("%s:%s:%s:%s:%s:%s:%d:%d:%d", wid.GetProject(), wid.GetDomain(), wid.GetName(), nid.GetNodeId(), tid.GetName(), tid.GetVersion(), eventMessage.GetRetryAttempt(), eventMessage.GetPhase(), eventMessage.GetPhaseVersion()) default: return nil, fmt.Errorf("unknown event type [%s]", eventMessage.String()) } @@ -140,7 +140,7 @@ func initializeAdminClientFromConfig(ctx context.Context, config *Config) (clien grpcOptions := []grpcRetry.CallOption{ grpcRetry.WithBackoff(grpcRetry.BackoffExponentialWithJitter(time.Duration(config.BackoffScalar)*time.Millisecond, config.GetBackoffJitter(ctx))), - grpcRetry.WithMax(uint(config.MaxRetries)), + grpcRetry.WithMax(uint(config.MaxRetries)), // #nosec G115 } opt := grpc.WithChainUnaryInterceptor( diff --git a/flytepropeller/events/admin_eventsink_test.go b/flytepropeller/events/admin_eventsink_test.go index 510371d056..e3a0d57dba 100644 --- a/flytepropeller/events/admin_eventsink_test.go +++ b/flytepropeller/events/admin_eventsink_test.go @@ -86,7 +86,7 @@ func TestAdminWorkflowEvent(t *testing.T) { "CreateWorkflowEvent", ctx, mock.MatchedBy(func(req *admin.WorkflowExecutionEventRequest) bool { - return req.Event == wfEvent + return req.GetEvent() == wfEvent }, )).Return(&admin.WorkflowExecutionEventResponse{}, nil) @@ -104,7 +104,7 @@ func TestAdminNodeEvent(t *testing.T) { "CreateNodeEvent", ctx, mock.MatchedBy(func(req *admin.NodeExecutionEventRequest) bool { - return req.Event == nodeEvent + return req.GetEvent() == nodeEvent }), ).Return(&admin.NodeExecutionEventResponse{}, nil) @@ -122,7 +122,7 @@ func TestAdminTaskEvent(t *testing.T) { 
"CreateTaskEvent", ctx, mock.MatchedBy(func(req *admin.TaskExecutionEventRequest) bool { - return req.Event == taskEvent + return req.GetEvent() == taskEvent }), ).Return(&admin.TaskExecutionEventResponse{}, nil) @@ -159,7 +159,7 @@ func TestAdminRateLimitError(t *testing.T) { "CreateTaskEvent", ctx, mock.MatchedBy(func(req *admin.TaskExecutionEventRequest) bool { - return req.Event == taskEvent + return req.GetEvent() == taskEvent }), ).Return(&admin.TaskExecutionEventResponse{}, nil) diff --git a/flytepropeller/events/errors/errors.go b/flytepropeller/events/errors/errors.go index 2d3e02e0df..11c603bad8 100644 --- a/flytepropeller/events/errors/errors.go +++ b/flytepropeller/events/errors/errors.go @@ -74,7 +74,7 @@ func WrapError(err error) error { phase := reason.AlreadyInTerminalState.GetCurrentPhase() return wrapf(EventAlreadyInTerminalStateError, err, fmt.Sprintf("conflicting events; destination: %v", phase)) case *admin.EventFailureReason_IncompatibleCluster: - return wrapf(EventIncompatibleCusterError, err, fmt.Sprintf("conflicting execution cluster; expected: %v", reason.IncompatibleCluster.Cluster)) + return wrapf(EventIncompatibleCusterError, err, fmt.Sprintf("conflicting execution cluster; expected: %v", reason.IncompatibleCluster.GetCluster())) default: logger.Warnf(context.Background(), "found unexpected type in details of grpc status: %v", reason) } diff --git a/flytepropeller/events/event_recorder.go b/flytepropeller/events/event_recorder.go index 310797f081..9390e04bf4 100644 --- a/flytepropeller/events/event_recorder.go +++ b/flytepropeller/events/event_recorder.go @@ -86,8 +86,8 @@ func (r *eventRecorder) RecordWorkflowEvent(ctx context.Context, e *event.Workfl // If error message too large, truncate to mitigate grpc message size limit. Split the truncated size equally between // the beginning and the end of the message to capture the most relevant information. 
func truncateErrorMessage(err *core.ExecutionError, length int) { - if len(err.Message) > length { - err.Message = fmt.Sprintf("%s\n%s\n%s", err.Message[:length/2], truncationIndicator, err.Message[(len(err.Message)-length/2):]) + if len(err.GetMessage()) > length { + err.Message = fmt.Sprintf("%s\n%s\n%s", err.GetMessage()[:length/2], truncationIndicator, err.GetMessage()[(len(err.GetMessage())-length/2):]) } } diff --git a/flytepropeller/events/event_recorder_test.go b/flytepropeller/events/event_recorder_test.go index 2b633b72ff..32c1193361 100644 --- a/flytepropeller/events/event_recorder_test.go +++ b/flytepropeller/events/event_recorder_test.go @@ -96,6 +96,6 @@ func TestTruncateErrorMessage(t *testing.T) { } truncateErrorMessage(&executionError, length) - assert.True(t, len(executionError.Message) <= length+len(truncationIndicator)+2) + assert.True(t, len(executionError.GetMessage()) <= length+len(truncationIndicator)+2) } } diff --git a/flytepropeller/events/eventsink_test.go b/flytepropeller/events/eventsink_test.go index 46aa5e46db..d488398d7b 100644 --- a/flytepropeller/events/eventsink_test.go +++ b/flytepropeller/events/eventsink_test.go @@ -62,11 +62,11 @@ func TestFileEvent(t *testing.T) { taskEvent := &event.TaskExecutionEvent{ TaskId: &core.Identifier{ ResourceType: core.ResourceType_TASK, - Project: executionID.Project, - Domain: executionID.Domain, - Name: executionID.Name, + Project: executionID.GetProject(), + Domain: executionID.GetDomain(), + Name: executionID.GetName(), }, - ParentNodeExecutionId: nodeEvent.Id, + ParentNodeExecutionId: nodeEvent.GetId(), Phase: core.TaskExecution_FAILED, OccurredAt: now, } diff --git a/flytepropeller/events/local_eventsink.go b/flytepropeller/events/local_eventsink.go index fdcd5408a4..2300942bab 100644 --- a/flytepropeller/events/local_eventsink.go +++ b/flytepropeller/events/local_eventsink.go @@ -26,13 +26,13 @@ func (s *localSink) Sink(ctx context.Context, message proto.Message) error { switch e := 
message.(type) { case *event.WorkflowExecutionEvent: eventOutput = fmt.Sprintf("[--WF EVENT--] %s, Phase: %s, OccuredAt: %s\n", - e.ExecutionId, e.Phase, e.OccurredAt.AsTime().String()) + e.GetExecutionId(), e.GetPhase(), e.GetOccurredAt().AsTime().String()) case *event.NodeExecutionEvent: eventOutput = fmt.Sprintf("[--NODE EVENT--] %s, Phase: %s, OccuredAt: %s\n", - e.Id, e.Phase, e.OccurredAt.AsTime().String()) + e.GetId(), e.GetPhase(), e.GetOccurredAt().AsTime().String()) case *event.TaskExecutionEvent: eventOutput = fmt.Sprintf("[--TASK EVENT--] %s,%s, Phase: %s, OccuredAt: %s\n", - e.TaskId, e.ParentNodeExecutionId, e.Phase, e.OccurredAt.AsTime().String()) + e.GetTaskId(), e.GetParentNodeExecutionId(), e.GetPhase(), e.GetOccurredAt().AsTime().String()) } return s.writer.Write(ctx, eventOutput) diff --git a/flytepropeller/events/node_event_recorder.go b/flytepropeller/events/node_event_recorder.go index 8beb488ce6..c283401614 100644 --- a/flytepropeller/events/node_event_recorder.go +++ b/flytepropeller/events/node_event_recorder.go @@ -69,7 +69,7 @@ func (r *nodeEventRecorder) RecordNodeEvent(ctx context.Context, ev *event.NodeE logger.Infof(ctx, "Failed to record node event [%+v] with err: %v", ev, err) // Only attempt to retry sending an event in the case we tried to send raw output data inline if eventConfig.FallbackToOutputReference && rawOutputPolicy == config.RawOutputPolicyInline { - logger.Infof(ctx, "Falling back to sending node event outputs by reference for [%+v]", ev.Id) + logger.Infof(ctx, "Falling back to sending node event outputs by reference for [%+v]", ev.GetId()) return r.handleFailure(ctx, origEvent, err) } return err diff --git a/flytepropeller/events/node_event_recorder_test.go b/flytepropeller/events/node_event_recorder_test.go index 5d2025b525..d3321d649e 100644 --- a/flytepropeller/events/node_event_recorder_test.go +++ b/flytepropeller/events/node_event_recorder_test.go @@ -82,7 +82,7 @@ func 
TestRecordNodeEvent_Success_InlineOutputs(t *testing.T) { store: mockStore, } err := recorder.RecordNodeEvent(ctx, getReferenceNodeEv(), inlineEventConfig) - assert.Equal(t, deckURI, nodeEvent.DeckUri) + assert.Equal(t, deckURI, nodeEvent.GetDeckUri()) assert.NoError(t, err) } diff --git a/flytepropeller/events/task_event_recorder.go b/flytepropeller/events/task_event_recorder.go index 8b531ae85f..3882802dd4 100644 --- a/flytepropeller/events/task_event_recorder.go +++ b/flytepropeller/events/task_event_recorder.go @@ -69,7 +69,7 @@ func (r *taskEventRecorder) RecordTaskEvent(ctx context.Context, ev *event.TaskE logger.Infof(ctx, "Failed to record task event [%+v] with err: %v", ev, err) // Only attempt to retry sending an event in the case we tried to send raw output data inline if eventConfig.FallbackToOutputReference && rawOutputPolicy == config.RawOutputPolicyInline { - logger.Infof(ctx, "Falling back to sending task event outputs by reference for [%+v]", ev.TaskId) + logger.Infof(ctx, "Falling back to sending task event outputs by reference for [%+v]", ev.GetTaskId()) return r.handleFailure(ctx, origEvent, err) } return err diff --git a/flytepropeller/events/workflow_event_recorder.go b/flytepropeller/events/workflow_event_recorder.go index f0f48a7f9d..5e56799925 100644 --- a/flytepropeller/events/workflow_event_recorder.go +++ b/flytepropeller/events/workflow_event_recorder.go @@ -69,7 +69,7 @@ func (r *workflowEventRecorder) RecordWorkflowEvent(ctx context.Context, ev *eve logger.Infof(ctx, "Failed to record workflow event [%+v] with err: %v", ev, err) // Only attempt to retry sending an event in the case we tried to send raw output data inline if eventConfig.FallbackToOutputReference && rawOutputPolicy == config.RawOutputPolicyInline { - logger.Infof(ctx, "Falling back to sending workflow event outputs by reference for [%+v]", ev.ExecutionId) + logger.Infof(ctx, "Falling back to sending workflow event outputs by reference for [%+v]", ev.GetExecutionId()) 
return r.handleFailure(ctx, origEvent, err) } return err diff --git a/flytepropeller/go.mod b/flytepropeller/go.mod index 7e0a312abf..94679ecdfb 100644 --- a/flytepropeller/go.mod +++ b/flytepropeller/go.mod @@ -22,11 +22,14 @@ require ( github.com/mitchellh/mapstructure v1.5.0 github.com/pkg/errors v0.9.1 github.com/prometheus/client_golang v1.19.1 + github.com/santhosh-tekuri/jsonschema v1.2.4 github.com/shamaton/msgpack/v2 v2.2.2 github.com/sirupsen/logrus v1.9.3 github.com/spf13/cobra v1.7.0 github.com/spf13/pflag v1.0.5 github.com/stretchr/testify v1.9.0 + github.com/wI2L/jsondiff v0.6.0 + gitlab.com/yvesf/json-schema-compare v0.0.0-20190604192943-a900c04201f7 go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.47.0 go.opentelemetry.io/otel v1.24.0 go.opentelemetry.io/otel/trace v1.24.0 @@ -123,6 +126,10 @@ require ( github.com/spf13/viper v1.11.0 // indirect github.com/stretchr/objx v0.5.2 // indirect github.com/subosito/gotenv v1.2.0 // indirect + github.com/tidwall/gjson v1.17.1 // indirect + github.com/tidwall/match v1.1.1 // indirect + github.com/tidwall/pretty v1.2.1 // indirect + github.com/tidwall/sjson v1.2.5 // indirect go.opencensus.io v0.24.0 // indirect go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.46.1 // indirect go.opentelemetry.io/otel/exporters/jaeger v1.17.0 // indirect diff --git a/flytepropeller/go.sum b/flytepropeller/go.sum index 37a8766913..63c498dc77 100644 --- a/flytepropeller/go.sum +++ b/flytepropeller/go.sum @@ -373,6 +373,8 @@ github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFR github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU5NdKM8= github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/santhosh-tekuri/jsonschema v1.2.4 h1:hNhW8e7t+H1vgY+1QeEQpveR6D4+OwKPXCfD2aieJis= 
+github.com/santhosh-tekuri/jsonschema v1.2.4/go.mod h1:TEAUOeZSmIxTTuHatJzrvARHiuO9LYd+cIxzgEHCQI4= github.com/shamaton/msgpack/v2 v2.2.2 h1:GOIg0c9LV04VwzOOqZSrmsv/JzjNOOMxnS/HvOHGdgs= github.com/shamaton/msgpack/v2 v2.2.2/go.mod h1:6khjYnkx73f7VQU7wjcFS9DFjs+59naVWJv1TB7qdOI= github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= @@ -410,11 +412,25 @@ github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsT github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/subosito/gotenv v1.2.0 h1:Slr1R9HxAlEKefgq5jn9U+DnETlIUa6HfgEzj0g5d7s= github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw= +github.com/tidwall/gjson v1.14.2/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= +github.com/tidwall/gjson v1.17.1 h1:wlYEnwqAHgzmhNUFfw7Xalt2JzQvsMx2Se4PcoFCT/U= +github.com/tidwall/gjson v1.17.1/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= +github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA= +github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM= +github.com/tidwall/pretty v1.2.0/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU= +github.com/tidwall/pretty v1.2.1 h1:qjsOFOWWQl+N3RsoF5/ssm1pHmJJwhjlSbZ51I6wMl4= +github.com/tidwall/pretty v1.2.1/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU= +github.com/tidwall/sjson v1.2.5 h1:kLy8mja+1c9jlljvWTlSazM7cKDRfJuR/bOJhcY5NcY= +github.com/tidwall/sjson v1.2.5/go.mod h1:Fvgq9kS/6ociJEDnK0Fk1cpYF4FIW6ZF7LAe+6jwd28= +github.com/wI2L/jsondiff v0.6.0 h1:zrsH3FbfVa3JO9llxrcDy/XLkYPLgoMX6Mz3T2PP2AI= +github.com/wI2L/jsondiff v0.6.0/go.mod h1:D6aQ5gKgPF9g17j+E9N7aasmU1O+XvfmWm1y8UMmNpw= github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.32/go.mod 
h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= +gitlab.com/yvesf/json-schema-compare v0.0.0-20190604192943-a900c04201f7 h1:BAkxmYRc1ZPl6Gap4HWqwPT8yLZMrgaAwx12Ft408sg= +gitlab.com/yvesf/json-schema-compare v0.0.0-20190604192943-a900c04201f7/go.mod h1:X40Z1OU8o1oiXWzBmkuYOaruzYGv60l0AxGiB0E9keI= go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= diff --git a/flytepropeller/pkg/apis/flyteworkflow/v1alpha1/branch_test.go b/flytepropeller/pkg/apis/flyteworkflow/v1alpha1/branch_test.go index 5fd2a14218..b10c704409 100644 --- a/flytepropeller/pkg/apis/flyteworkflow/v1alpha1/branch_test.go +++ b/flytepropeller/pkg/apis/flyteworkflow/v1alpha1/branch_test.go @@ -17,7 +17,7 @@ func TestMarshalUnMarshal_BranchTask(t *testing.T) { err = json.Unmarshal(r, &o) assert.NoError(t, err) assert.NotNil(t, o.BranchNode.If) - assert.Equal(t, core.ComparisonExpression_GT, o.BranchNode.If.Condition.BooleanExpression.GetComparison().Operator) + assert.Equal(t, core.ComparisonExpression_GT, o.GetBranchNode().GetIf().GetCondition().GetComparison().GetOperator()) assert.Equal(t, 1, len(o.InputBindings)) raw, err := json.Marshal(o) if assert.NoError(t, err) { diff --git a/flytepropeller/pkg/apis/flyteworkflow/v1alpha1/identifier_test.go b/flytepropeller/pkg/apis/flyteworkflow/v1alpha1/identifier_test.go index 1267aec09b..b7bafaacb3 100644 --- a/flytepropeller/pkg/apis/flyteworkflow/v1alpha1/identifier_test.go +++ b/flytepropeller/pkg/apis/flyteworkflow/v1alpha1/identifier_test.go @@ -100,16 +100,16 @@ func TestTaskExecutionIdentifier_DeepCopyInto(t *testing.T) { teIdentifierCopy := TaskExecutionIdentifier{} 
teIdentifier.DeepCopyInto(&teIdentifierCopy) - assert.Equal(t, teIdentifier.TaskId.ResourceType, teIdentifierCopy.TaskId.ResourceType) - assert.Equal(t, teIdentifier.TaskId.Project, teIdentifierCopy.TaskId.Project) - assert.Equal(t, teIdentifier.TaskId.Domain, teIdentifierCopy.TaskId.Domain) - assert.Equal(t, teIdentifier.TaskId.Name, teIdentifierCopy.TaskId.Name) - assert.Equal(t, teIdentifier.TaskId.Version, teIdentifierCopy.TaskId.Version) - assert.Equal(t, teIdentifier.TaskId.Org, teIdentifierCopy.TaskId.Org) - assert.Equal(t, teIdentifier.NodeExecutionId.ExecutionId.Project, teIdentifierCopy.NodeExecutionId.ExecutionId.Project) - assert.Equal(t, teIdentifier.NodeExecutionId.ExecutionId.Domain, teIdentifierCopy.NodeExecutionId.ExecutionId.Domain) - assert.Equal(t, teIdentifier.NodeExecutionId.ExecutionId.Name, teIdentifierCopy.NodeExecutionId.ExecutionId.Name) - assert.Equal(t, teIdentifier.NodeExecutionId.ExecutionId.Org, teIdentifierCopy.NodeExecutionId.ExecutionId.Org) - assert.Equal(t, teIdentifier.NodeExecutionId.NodeId, teIdentifierCopy.NodeExecutionId.NodeId) + assert.Equal(t, teIdentifier.TaskId.GetResourceType(), teIdentifierCopy.TaskId.GetResourceType()) + assert.Equal(t, teIdentifier.TaskId.GetProject(), teIdentifierCopy.TaskId.GetProject()) + assert.Equal(t, teIdentifier.TaskId.GetDomain(), teIdentifierCopy.TaskId.GetDomain()) + assert.Equal(t, teIdentifier.TaskId.GetName(), teIdentifierCopy.TaskId.GetName()) + assert.Equal(t, teIdentifier.TaskId.GetVersion(), teIdentifierCopy.TaskId.GetVersion()) + assert.Equal(t, teIdentifier.TaskId.GetOrg(), teIdentifierCopy.TaskId.GetOrg()) + assert.Equal(t, teIdentifier.NodeExecutionId.GetExecutionId().GetProject(), teIdentifierCopy.NodeExecutionId.GetExecutionId().GetProject()) + assert.Equal(t, teIdentifier.NodeExecutionId.GetExecutionId().GetDomain(), teIdentifierCopy.NodeExecutionId.GetExecutionId().GetDomain()) + assert.Equal(t, teIdentifier.NodeExecutionId.GetExecutionId().GetName(), 
teIdentifierCopy.NodeExecutionId.GetExecutionId().GetName()) + assert.Equal(t, teIdentifier.NodeExecutionId.GetExecutionId().GetOrg(), teIdentifierCopy.NodeExecutionId.GetExecutionId().GetOrg()) + assert.Equal(t, teIdentifier.NodeExecutionId.GetNodeId(), teIdentifierCopy.NodeExecutionId.GetNodeId()) assert.Equal(t, teIdentifier.RetryAttempt, teIdentifierCopy.RetryAttempt) } diff --git a/flytepropeller/pkg/apis/flyteworkflow/v1alpha1/iface.go b/flytepropeller/pkg/apis/flyteworkflow/v1alpha1/iface.go index 486ac35a16..c2022dea25 100644 --- a/flytepropeller/pkg/apis/flyteworkflow/v1alpha1/iface.go +++ b/flytepropeller/pkg/apis/flyteworkflow/v1alpha1/iface.go @@ -290,6 +290,7 @@ type ExecutableArrayNodeStatus interface { GetSubNodeTaskPhases() bitarray.CompactArray GetSubNodeRetryAttempts() bitarray.CompactArray GetSubNodeSystemFailures() bitarray.CompactArray + GetSubNodeDeltaTimestamps() bitarray.CompactArray GetTaskPhaseVersion() uint32 } @@ -302,6 +303,7 @@ type MutableArrayNodeStatus interface { SetSubNodeTaskPhases(subNodeTaskPhases bitarray.CompactArray) SetSubNodeRetryAttempts(subNodeRetryAttempts bitarray.CompactArray) SetSubNodeSystemFailures(subNodeSystemFailures bitarray.CompactArray) + SetSubNodeDeltaTimestamps(subNodeDeltaTimestamps bitarray.CompactArray) SetTaskPhaseVersion(taskPhaseVersion uint32) } diff --git a/flytepropeller/pkg/apis/flyteworkflow/v1alpha1/mocks/ExecutableArrayNodeStatus.go b/flytepropeller/pkg/apis/flyteworkflow/v1alpha1/mocks/ExecutableArrayNodeStatus.go index f4cce3e643..4aee51f044 100644 --- a/flytepropeller/pkg/apis/flyteworkflow/v1alpha1/mocks/ExecutableArrayNodeStatus.go +++ b/flytepropeller/pkg/apis/flyteworkflow/v1alpha1/mocks/ExecutableArrayNodeStatus.go @@ -82,6 +82,38 @@ func (_m *ExecutableArrayNodeStatus) GetExecutionError() *core.ExecutionError { return r0 } +type ExecutableArrayNodeStatus_GetSubNodeDeltaTimestamps struct { + *mock.Call +} + +func (_m ExecutableArrayNodeStatus_GetSubNodeDeltaTimestamps) Return(_a0 
bitarray.CompactArray) *ExecutableArrayNodeStatus_GetSubNodeDeltaTimestamps { + return &ExecutableArrayNodeStatus_GetSubNodeDeltaTimestamps{Call: _m.Call.Return(_a0)} +} + +func (_m *ExecutableArrayNodeStatus) OnGetSubNodeDeltaTimestamps() *ExecutableArrayNodeStatus_GetSubNodeDeltaTimestamps { + c_call := _m.On("GetSubNodeDeltaTimestamps") + return &ExecutableArrayNodeStatus_GetSubNodeDeltaTimestamps{Call: c_call} +} + +func (_m *ExecutableArrayNodeStatus) OnGetSubNodeDeltaTimestampsMatch(matchers ...interface{}) *ExecutableArrayNodeStatus_GetSubNodeDeltaTimestamps { + c_call := _m.On("GetSubNodeDeltaTimestamps", matchers...) + return &ExecutableArrayNodeStatus_GetSubNodeDeltaTimestamps{Call: c_call} +} + +// GetSubNodeDeltaTimestamps provides a mock function with given fields: +func (_m *ExecutableArrayNodeStatus) GetSubNodeDeltaTimestamps() bitarray.CompactArray { + ret := _m.Called() + + var r0 bitarray.CompactArray + if rf, ok := ret.Get(0).(func() bitarray.CompactArray); ok { + r0 = rf() + } else { + r0 = ret.Get(0).(bitarray.CompactArray) + } + + return r0 +} + type ExecutableArrayNodeStatus_GetSubNodePhases struct { *mock.Call } diff --git a/flytepropeller/pkg/apis/flyteworkflow/v1alpha1/mocks/MutableArrayNodeStatus.go b/flytepropeller/pkg/apis/flyteworkflow/v1alpha1/mocks/MutableArrayNodeStatus.go index c20f80e349..1e081e20ba 100644 --- a/flytepropeller/pkg/apis/flyteworkflow/v1alpha1/mocks/MutableArrayNodeStatus.go +++ b/flytepropeller/pkg/apis/flyteworkflow/v1alpha1/mocks/MutableArrayNodeStatus.go @@ -82,6 +82,38 @@ func (_m *MutableArrayNodeStatus) GetExecutionError() *core.ExecutionError { return r0 } +type MutableArrayNodeStatus_GetSubNodeDeltaTimestamps struct { + *mock.Call +} + +func (_m MutableArrayNodeStatus_GetSubNodeDeltaTimestamps) Return(_a0 bitarray.CompactArray) *MutableArrayNodeStatus_GetSubNodeDeltaTimestamps { + return &MutableArrayNodeStatus_GetSubNodeDeltaTimestamps{Call: _m.Call.Return(_a0)} +} + +func (_m *MutableArrayNodeStatus) 
OnGetSubNodeDeltaTimestamps() *MutableArrayNodeStatus_GetSubNodeDeltaTimestamps { + c_call := _m.On("GetSubNodeDeltaTimestamps") + return &MutableArrayNodeStatus_GetSubNodeDeltaTimestamps{Call: c_call} +} + +func (_m *MutableArrayNodeStatus) OnGetSubNodeDeltaTimestampsMatch(matchers ...interface{}) *MutableArrayNodeStatus_GetSubNodeDeltaTimestamps { + c_call := _m.On("GetSubNodeDeltaTimestamps", matchers...) + return &MutableArrayNodeStatus_GetSubNodeDeltaTimestamps{Call: c_call} +} + +// GetSubNodeDeltaTimestamps provides a mock function with given fields: +func (_m *MutableArrayNodeStatus) GetSubNodeDeltaTimestamps() bitarray.CompactArray { + ret := _m.Called() + + var r0 bitarray.CompactArray + if rf, ok := ret.Get(0).(func() bitarray.CompactArray); ok { + r0 = rf() + } else { + r0 = ret.Get(0).(bitarray.CompactArray) + } + + return r0 +} + type MutableArrayNodeStatus_GetSubNodePhases struct { *mock.Call } @@ -284,6 +316,11 @@ func (_m *MutableArrayNodeStatus) SetExecutionError(executionError *core.Executi _m.Called(executionError) } +// SetSubNodeDeltaTimestamps provides a mock function with given fields: subNodeDeltaTimestamps +func (_m *MutableArrayNodeStatus) SetSubNodeDeltaTimestamps(subNodeDeltaTimestamps bitarray.CompactArray) { + _m.Called(subNodeDeltaTimestamps) +} + // SetSubNodePhases provides a mock function with given fields: subNodePhases func (_m *MutableArrayNodeStatus) SetSubNodePhases(subNodePhases bitarray.CompactArray) { _m.Called(subNodePhases) diff --git a/flytepropeller/pkg/apis/flyteworkflow/v1alpha1/node_status.go b/flytepropeller/pkg/apis/flyteworkflow/v1alpha1/node_status.go index 218b045588..c27a8560fc 100644 --- a/flytepropeller/pkg/apis/flyteworkflow/v1alpha1/node_status.go +++ b/flytepropeller/pkg/apis/flyteworkflow/v1alpha1/node_status.go @@ -230,13 +230,14 @@ const ( type ArrayNodeStatus struct { MutableStruct - Phase ArrayNodePhase `json:"phase,omitempty"` - ExecutionError *core.ExecutionError `json:"executionError,omitempty"` - 
SubNodePhases bitarray.CompactArray `json:"subphase,omitempty"` - SubNodeTaskPhases bitarray.CompactArray `json:"subtphase,omitempty"` - SubNodeRetryAttempts bitarray.CompactArray `json:"subattempts,omitempty"` - SubNodeSystemFailures bitarray.CompactArray `json:"subsysfailures,omitempty"` - TaskPhaseVersion uint32 `json:"taskPhaseVersion,omitempty"` + Phase ArrayNodePhase `json:"phase,omitempty"` + ExecutionError *core.ExecutionError `json:"executionError,omitempty"` + SubNodePhases bitarray.CompactArray `json:"subphase,omitempty"` + SubNodeTaskPhases bitarray.CompactArray `json:"subtphase,omitempty"` + SubNodeRetryAttempts bitarray.CompactArray `json:"subattempts,omitempty"` + SubNodeSystemFailures bitarray.CompactArray `json:"subsysfailures,omitempty"` + SubNodeDeltaTimestamps bitarray.CompactArray `json:"subtimestamps,omitempty"` + TaskPhaseVersion uint32 `json:"taskPhaseVersion,omitempty"` } func (in *ArrayNodeStatus) GetArrayNodePhase() ArrayNodePhase { @@ -305,6 +306,17 @@ func (in *ArrayNodeStatus) SetSubNodeSystemFailures(subNodeSystemFailures bitarr } } +func (in *ArrayNodeStatus) GetSubNodeDeltaTimestamps() bitarray.CompactArray { + return in.SubNodeDeltaTimestamps +} + +func (in *ArrayNodeStatus) SetSubNodeDeltaTimestamps(subNodeDeltaTimestamps bitarray.CompactArray) { + if in.SubNodeDeltaTimestamps != subNodeDeltaTimestamps { + in.SetDirty() + in.SubNodeDeltaTimestamps = subNodeDeltaTimestamps + } +} + func (in *ArrayNodeStatus) GetTaskPhaseVersion() uint32 { return in.TaskPhaseVersion } diff --git a/flytepropeller/pkg/compiler/admin.go b/flytepropeller/pkg/compiler/admin.go index cceab67f67..94646c95d2 100644 --- a/flytepropeller/pkg/compiler/admin.go +++ b/flytepropeller/pkg/compiler/admin.go @@ -29,8 +29,8 @@ func (p *LaunchPlanInterfaceProvider) GetExpectedOutputs() *core.VariableMap { func NewLaunchPlanInterfaceProvider(launchPlan *admin.LaunchPlan) *LaunchPlanInterfaceProvider { return &LaunchPlanInterfaceProvider{ - expectedInputs: 
launchPlan.Closure.ExpectedInputs, - expectedOutputs: launchPlan.Closure.ExpectedOutputs, - identifier: launchPlan.Id, + expectedInputs: launchPlan.GetClosure().GetExpectedInputs(), + expectedOutputs: launchPlan.GetClosure().GetExpectedOutputs(), + identifier: launchPlan.GetId(), } } diff --git a/flytepropeller/pkg/compiler/admin_test.go b/flytepropeller/pkg/compiler/admin_test.go index 8e5447bfb1..a4a0891d51 100644 --- a/flytepropeller/pkg/compiler/admin_test.go +++ b/flytepropeller/pkg/compiler/admin_test.go @@ -59,15 +59,15 @@ func TestGetId(t *testing.T) { func TestGetExpectedInputs(t *testing.T) { launchPlan := getDummyLaunchPlan() provider := NewLaunchPlanInterfaceProvider(launchPlan) - assert.Contains(t, (*provider.GetExpectedInputs()).Parameters, "foo") - assert.NotNil(t, (*provider.GetExpectedInputs()).Parameters["foo"].Var.Type.GetSimple()) - assert.EqualValues(t, "STRING", (*provider.GetExpectedInputs()).Parameters["foo"].Var.Type.GetSimple().String()) - assert.NotNil(t, (*provider.GetExpectedInputs()).Parameters["foo"].GetDefault()) + assert.Contains(t, (*provider.GetExpectedInputs()).GetParameters(), "foo") + assert.NotNil(t, (*provider.GetExpectedInputs()).GetParameters()["foo"].GetVar().GetType().GetSimple()) + assert.EqualValues(t, "STRING", (*provider.GetExpectedInputs()).GetParameters()["foo"].GetVar().GetType().GetSimple().String()) + assert.NotNil(t, (*provider.GetExpectedInputs()).GetParameters()["foo"].GetDefault()) } func TestGetExpectedOutputs(t *testing.T) { launchPlan := getDummyLaunchPlan() provider := NewLaunchPlanInterfaceProvider(launchPlan) - assert.EqualValues(t, outputs.Variables["foo"].GetType().GetType(), - provider.GetExpectedOutputs().Variables["foo"].GetType().GetType()) + assert.EqualValues(t, outputs.GetVariables()["foo"].GetType().GetType(), + provider.GetExpectedOutputs().GetVariables()["foo"].GetType().GetType()) } diff --git a/flytepropeller/pkg/compiler/builders.go b/flytepropeller/pkg/compiler/builders.go index 
908a4b91cb..6e875abc40 100644 --- a/flytepropeller/pkg/compiler/builders.go +++ b/flytepropeller/pkg/compiler/builders.go @@ -32,8 +32,8 @@ type workflowBuilder struct { } func (w workflowBuilder) GetFailureNode() c.Node { - if w.GetCoreWorkflow() != nil && w.GetCoreWorkflow().GetTemplate() != nil && w.GetCoreWorkflow().GetTemplate().FailureNode != nil { - return w.GetOrCreateNodeBuilder(w.GetCoreWorkflow().GetTemplate().FailureNode) + if w.GetCoreWorkflow() != nil && w.GetCoreWorkflow().GetTemplate() != nil && w.GetCoreWorkflow().GetTemplate().GetFailureNode() != nil { + return w.GetOrCreateNodeBuilder(w.GetCoreWorkflow().GetTemplate().GetFailureNode()) } return nil @@ -152,8 +152,8 @@ func (t taskBuilder) GetCoreTask() *core.TaskTemplate { } func (t taskBuilder) GetID() c.Identifier { - if t.flyteTask.Id != nil { - return t.flyteTask.Id + if t.flyteTask.GetId() != nil { + return t.flyteTask.GetId() } return &core.Identifier{} diff --git a/flytepropeller/pkg/compiler/common/id_set.go b/flytepropeller/pkg/compiler/common/id_set.go index eb118fae64..a5cf98dd8d 100644 --- a/flytepropeller/pkg/compiler/common/id_set.go +++ b/flytepropeller/pkg/compiler/common/id_set.go @@ -62,24 +62,24 @@ type sortableSliceOfString []Identifier func (s sortableSliceOfString) Len() int { return len(s) } func (s sortableSliceOfString) Less(i, j int) bool { first, second := s[i], s[j] - if first.ResourceType != second.ResourceType { - return first.ResourceType < second.ResourceType + if first.GetResourceType() != second.GetResourceType() { + return first.GetResourceType() < second.GetResourceType() } - if first.Project != second.Project { - return first.Project < second.Project + if first.GetProject() != second.GetProject() { + return first.GetProject() < second.GetProject() } - if first.Domain != second.Domain { - return first.Domain < second.Domain + if first.GetDomain() != second.GetDomain() { + return first.GetDomain() < second.GetDomain() } - if first.Name != second.Name { - return 
first.Name < second.Name + if first.GetName() != second.GetName() { + return first.GetName() < second.GetName() } - if first.Version != second.Version { - return first.Version < second.Version + if first.GetVersion() != second.GetVersion() { + return first.GetVersion() < second.GetVersion() } return false diff --git a/flytepropeller/pkg/compiler/common/index.go b/flytepropeller/pkg/compiler/common/index.go index 365a3356c1..d244103e35 100644 --- a/flytepropeller/pkg/compiler/common/index.go +++ b/flytepropeller/pkg/compiler/common/index.go @@ -55,16 +55,16 @@ func NewWorkflowIndex(workflows []*core.CompiledWorkflow, errs errors.CompileErr ok = true index = make(WorkflowIndex, len(workflows)) for _, wf := range workflows { - if wf.Template.Id == nil { + if wf.GetTemplate().GetId() == nil { // TODO: Log/Return error return nil, false } - if _, found := index[wf.Template.Id.String()]; found { - errs.Collect(errors.NewDuplicateIDFoundErr(wf.Template.Id.String())) + if _, found := index[wf.GetTemplate().GetId().String()]; found { + errs.Collect(errors.NewDuplicateIDFoundErr(wf.GetTemplate().GetId().String())) ok = false } else { - index[wf.Template.Id.String()] = wf + index[wf.GetTemplate().GetId().String()] = wf } } diff --git a/flytepropeller/pkg/compiler/requirements.go b/flytepropeller/pkg/compiler/requirements.go index b3b01823a6..69265b64a1 100644 --- a/flytepropeller/pkg/compiler/requirements.go +++ b/flytepropeller/pkg/compiler/requirements.go @@ -57,11 +57,11 @@ func getRequirements(fg *core.WorkflowTemplate, subWfs common.WorkflowIndex, fol func updateWorkflowRequirements(workflow *core.WorkflowTemplate, subWfs common.WorkflowIndex, taskIds, workflowIds common.IdentifierSet, followSubworkflows bool, errs errors.CompileErrors) { - for _, node := range workflow.Nodes { + for _, node := range workflow.GetNodes() { updateNodeRequirements(node, subWfs, taskIds, workflowIds, followSubworkflows, errs) } - if workflow.FailureNode != nil { - 
updateNodeRequirements(workflow.FailureNode, subWfs, taskIds, workflowIds, followSubworkflows, errs) + if workflow.GetFailureNode() != nil { + updateNodeRequirements(workflow.GetFailureNode(), subWfs, taskIds, workflowIds, followSubworkflows, errs) } } @@ -75,21 +75,21 @@ func updateNodeRequirements(node *flyteNode, subWfs common.WorkflowIndex, taskId workflowIds.Insert(workflowNode.GetLaunchplanRef()) } else if workflowNode.GetSubWorkflowRef() != nil && followSubworkflows { if subWf, found := subWfs[workflowNode.GetSubWorkflowRef().String()]; !found { - errs.Collect(errors.NewWorkflowReferenceNotFoundErr(node.Id, workflowNode.GetSubWorkflowRef().String())) + errs.Collect(errors.NewWorkflowReferenceNotFoundErr(node.GetId(), workflowNode.GetSubWorkflowRef().String())) } else { - updateWorkflowRequirements(subWf.Template, subWfs, taskIds, workflowIds, followSubworkflows, errs) + updateWorkflowRequirements(subWf.GetTemplate(), subWfs, taskIds, workflowIds, followSubworkflows, errs) } } } else if branchN := node.GetBranchNode(); branchN != nil { - updateNodeRequirements(branchN.IfElse.Case.ThenNode, subWfs, taskIds, workflowIds, followSubworkflows, errs) - for _, otherCase := range branchN.IfElse.Other { - updateNodeRequirements(otherCase.ThenNode, subWfs, taskIds, workflowIds, followSubworkflows, errs) + updateNodeRequirements(branchN.GetIfElse().GetCase().GetThenNode(), subWfs, taskIds, workflowIds, followSubworkflows, errs) + for _, otherCase := range branchN.GetIfElse().GetOther() { + updateNodeRequirements(otherCase.GetThenNode(), subWfs, taskIds, workflowIds, followSubworkflows, errs) } - if elseNode := branchN.IfElse.GetElseNode(); elseNode != nil { + if elseNode := branchN.GetIfElse().GetElseNode(); elseNode != nil { updateNodeRequirements(elseNode, subWfs, taskIds, workflowIds, followSubworkflows, errs) } } else if arrayNode := node.GetArrayNode(); arrayNode != nil { - updateNodeRequirements(arrayNode.Node, subWfs, taskIds, workflowIds, followSubworkflows, 
errs) + updateNodeRequirements(arrayNode.GetNode(), subWfs, taskIds, workflowIds, followSubworkflows, errs) } } diff --git a/flytepropeller/pkg/compiler/task_compiler.go b/flytepropeller/pkg/compiler/task_compiler.go index 4d8fea46db..ea6e4efef2 100644 --- a/flytepropeller/pkg/compiler/task_compiler.go +++ b/flytepropeller/pkg/compiler/task_compiler.go @@ -23,25 +23,25 @@ func validateResource(resourceName core.Resources_ResourceName, resourceVal stri func validateKnownResources(resources []*core.Resources_ResourceEntry, errs errors.CompileErrors) { for _, r := range resources { - validateResource(r.Name, r.Value, errs.NewScope()) + validateResource(r.GetName(), r.GetValue(), errs.NewScope()) } } func validateResources(resources *core.Resources, errs errors.CompileErrors) (ok bool) { // Validate known resource keys. - validateKnownResources(resources.Requests, errs.NewScope()) - validateKnownResources(resources.Limits, errs.NewScope()) + validateKnownResources(resources.GetRequests(), errs.NewScope()) + validateKnownResources(resources.GetLimits(), errs.NewScope()) return !errs.HasErrors() } func validateContainerCommand(task *core.TaskTemplate, errs errors.CompileErrors) (ok bool) { - if task.Interface == nil { + if task.GetInterface() == nil { // Nothing to validate. return } - hasInputs := task.Interface.Inputs != nil && len(task.Interface.GetInputs().Variables) > 0 - hasOutputs := task.Interface.Outputs != nil && len(task.Interface.GetOutputs().Variables) > 0 + hasInputs := task.GetInterface().GetInputs() != nil && len(task.GetInterface().GetInputs().GetVariables()) > 0 + hasOutputs := task.GetInterface().GetOutputs() != nil && len(task.GetInterface().GetOutputs().GetVariables()) > 0 if !(hasInputs || hasOutputs) { // Nothing to validate. 
return @@ -63,12 +63,12 @@ func validateContainer(task *core.TaskTemplate, errs errors.CompileErrors) (ok b validateContainerCommand(task, errs) container := task.GetContainer() - if container.Image == "" { + if container.GetImage() == "" { errs.Collect(errors.NewValueRequiredErr("container", "image")) } - if container.Resources != nil { - validateResources(container.Resources, errs.NewScope()) + if container.GetResources() != nil { + validateResources(container.GetResources(), errs.NewScope()) } return !errs.HasErrors() @@ -80,7 +80,7 @@ func validateK8sPod(task *core.TaskTemplate, errs errors.CompileErrors) (ok bool return } var podSpec v1.PodSpec - if err := utils.UnmarshalStructToObj(task.GetK8SPod().PodSpec, &podSpec); err != nil { + if err := utils.UnmarshalStructToObj(task.GetK8SPod().GetPodSpec(), &podSpec); err != nil { errs.Collect(errors.NewInvalidValueErr("root", "k8s pod spec")) return } @@ -93,7 +93,7 @@ func validateK8sPod(task *core.TaskTemplate, errs errors.CompileErrors) (ok bool } func compileTaskInternal(task *core.TaskTemplate, errs errors.CompileErrors) common.Task { - if task.Id == nil { + if task.GetId() == nil { errs.Collect(errors.NewValueRequiredErr("root", "Id")) } diff --git a/flytepropeller/pkg/compiler/test/compiler_test.go b/flytepropeller/pkg/compiler/test/compiler_test.go index 355fc4a15b..a6925dc3de 100644 --- a/flytepropeller/pkg/compiler/test/compiler_test.go +++ b/flytepropeller/pkg/compiler/test/compiler_test.go @@ -3,7 +3,6 @@ package test import ( "encoding/json" "flag" - "io/ioutil" "os" "path/filepath" "reflect" @@ -36,27 +35,27 @@ func makeDefaultInputs(iface *core.TypedInterface) *core.LiteralMap { return nil } - res := make(map[string]*core.Literal, len(iface.GetInputs().Variables)) - for inputName, inputVar := range iface.GetInputs().Variables { + res := make(map[string]*core.Literal, len(iface.GetInputs().GetVariables())) + for inputName, inputVar := range iface.GetInputs().GetVariables() { // A workaround because the 
coreutils don't support the "StructuredDataSet" type - if reflect.TypeOf(inputVar.Type.Type) == reflect.TypeOf(&core.LiteralType_StructuredDatasetType{}) { + if reflect.TypeOf(inputVar.GetType().GetType()) == reflect.TypeOf(&core.LiteralType_StructuredDatasetType{}) { res[inputName] = &core.Literal{ Value: &core.Literal_Scalar{ Scalar: &core.Scalar{ Value: &core.Scalar_StructuredDataset{ StructuredDataset: &core.StructuredDataset{ Metadata: &core.StructuredDatasetMetadata{ - StructuredDatasetType: inputVar.Type.Type.(*core.LiteralType_StructuredDatasetType).StructuredDatasetType, + StructuredDatasetType: inputVar.GetType().GetType().(*core.LiteralType_StructuredDatasetType).StructuredDatasetType, }, }, }, }, }, } - } else if reflect.TypeOf(inputVar.Type.Type) == reflect.TypeOf(&core.LiteralType_Simple{}) && inputVar.Type.GetSimple() == core.SimpleType_DATETIME { + } else if reflect.TypeOf(inputVar.GetType().GetType()) == reflect.TypeOf(&core.LiteralType_Simple{}) && inputVar.GetType().GetSimple() == core.SimpleType_DATETIME { res[inputName] = coreutils.MustMakeLiteral(time.UnixMicro(10)) } else { - res[inputName] = coreutils.MustMakeDefaultLiteralForType(inputVar.Type) + res[inputName] = coreutils.MustMakeDefaultLiteralForType(inputVar.GetType()) } } @@ -114,7 +113,7 @@ func TestDynamic(t *testing.T) { // t.SkipNow() //} - raw, err := ioutil.ReadFile(path) + raw, err := os.ReadFile(path) assert.NoError(t, err) wf := &core.DynamicJobSpec{} err = utils.UnmarshalBytesToPb(raw, wf) @@ -123,7 +122,7 @@ func TestDynamic(t *testing.T) { } t.Log("Compiling Workflow") - compiledTasks := mustCompileTasks(t, wf.Tasks) + compiledTasks := mustCompileTasks(t, wf.GetTasks()) wfTemplate := &core.WorkflowTemplate{ Id: &core.Identifier{ Domain: "domain", @@ -146,16 +145,16 @@ func TestDynamic(t *testing.T) { }, }}, }, - Nodes: wf.Nodes, - Outputs: wf.Outputs, + Nodes: wf.GetNodes(), + Outputs: wf.GetOutputs(), } - compiledWfc, err := compiler.CompileWorkflow(wfTemplate, 
wf.Subworkflows, compiledTasks, + compiledWfc, err := compiler.CompileWorkflow(wfTemplate, wf.GetSubworkflows(), compiledTasks, []common.InterfaceProvider{}) if !assert.NoError(t, err) { t.FailNow() } - inputs := makeDefaultInputs(compiledWfc.Primary.Template.Interface) + inputs := makeDefaultInputs(compiledWfc.GetPrimary().GetTemplate().GetInterface()) flyteWf, err := k8s.BuildFlyteWorkflow(compiledWfc, inputs, @@ -180,22 +179,22 @@ func TestDynamic(t *testing.T) { func getAllSubNodeIDs(n *core.Node) sets.String { res := sets.NewString() if branchNode := n.GetBranchNode(); branchNode != nil { - thenNode := branchNode.IfElse.Case.ThenNode + thenNode := branchNode.GetIfElse().GetCase().GetThenNode() if hasPromiseInputs(thenNode.GetInputs()) { res.Insert(thenNode.GetId()) } res = res.Union(getAllSubNodeIDs(thenNode)) - for _, other := range branchNode.IfElse.Other { - if hasPromiseInputs(other.ThenNode.GetInputs()) { - res.Insert(other.ThenNode.GetId()) + for _, other := range branchNode.GetIfElse().GetOther() { + if hasPromiseInputs(other.GetThenNode().GetInputs()) { + res.Insert(other.GetThenNode().GetId()) } - res = res.Union(getAllSubNodeIDs(other.ThenNode)) + res = res.Union(getAllSubNodeIDs(other.GetThenNode())) } - if elseNode := branchNode.IfElse.GetElseNode(); elseNode != nil { + if elseNode := branchNode.GetIfElse().GetElseNode(); elseNode != nil { if hasPromiseInputs(elseNode.GetInputs()) { res.Insert(elseNode.GetId()) } @@ -221,7 +220,7 @@ var allNodesPredicate = func(n *core.Node) bool { func getAllMatchingNodes(wf *core.CompiledWorkflow, predicate nodePredicate) sets.String { s := sets.NewString() - for _, n := range wf.Template.Nodes { + for _, n := range wf.GetTemplate().GetNodes() { if predicate(n) { s.Insert(n.GetId()) } @@ -235,13 +234,13 @@ func getAllMatchingNodes(wf *core.CompiledWorkflow, predicate nodePredicate) set func bindingHasPromiseInputs(binding *core.BindingData) bool { switch v := binding.GetValue().(type) { case 
*core.BindingData_Collection: - for _, d := range v.Collection.Bindings { + for _, d := range v.Collection.GetBindings() { if bindingHasPromiseInputs(d) { return true } } case *core.BindingData_Map: - for _, d := range v.Map.Bindings { + for _, d := range v.Map.GetBindings() { if bindingHasPromiseInputs(d) { return true } @@ -255,7 +254,7 @@ func bindingHasPromiseInputs(binding *core.BindingData) bool { func hasPromiseInputs(bindings []*core.Binding) bool { for _, b := range bindings { - if bindingHasPromiseInputs(b.Binding) { + if bindingHasPromiseInputs(b.GetBinding()) { return true } } @@ -265,14 +264,14 @@ func hasPromiseInputs(bindings []*core.Binding) bool { func assertNodeIDsInConnections(t testing.TB, nodeIDsWithDeps, allNodeIDs sets.String, connections *core.ConnectionSet) bool { actualNodeIDs := sets.NewString() - for id, lst := range connections.Downstream { + for id, lst := range connections.GetDownstream() { actualNodeIDs.Insert(id) - actualNodeIDs.Insert(lst.Ids...) + actualNodeIDs.Insert(lst.GetIds()...) } - for id, lst := range connections.Upstream { + for id, lst := range connections.GetUpstream() { actualNodeIDs.Insert(id) - actualNodeIDs.Insert(lst.Ids...) + actualNodeIDs.Insert(lst.GetIds()...) 
} notFoundInConnections := nodeIDsWithDeps.Difference(actualNodeIDs) @@ -305,13 +304,13 @@ func storeOrDiff(t testing.TB, f func(obj any) ([]byte, error), obj any, path st } if *update { - err = ioutil.WriteFile(path, raw, os.ModePerm) + err = os.WriteFile(path, raw, os.ModePerm) // #nosec G306 if !assert.NoError(t, err) { return false } } else { - goldenRaw, err := ioutil.ReadFile(path) + goldenRaw, err := os.ReadFile(path) if !assert.NoError(t, err) { return false } @@ -339,7 +338,7 @@ func runCompileTest(t *testing.T, dirName string) { } for _, p := range paths { - raw, err := ioutil.ReadFile(p) + raw, err := os.ReadFile(p) assert.NoError(t, err) tsk := &admin.TaskSpec{} err = proto.Unmarshal(raw, tsk) @@ -349,13 +348,13 @@ func runCompileTest(t *testing.T, dirName string) { } t.Run(p, func(t *testing.T) { - inputTask := tsk.Template + inputTask := tsk.GetTemplate() setDefaultFields(inputTask) task, err := compiler.CompileTask(inputTask) if !assert.NoError(t, err) { t.FailNow() } - compiledTasks[tsk.Template.Id.String()] = task + compiledTasks[tsk.GetTemplate().GetId().String()] = task // unmarshal from json file to compare rather than yaml taskFile := filepath.Join(filepath.Dir(p), "compiled", strings.TrimRight(filepath.Base(p), filepath.Ext(p))+"_task.json") @@ -387,7 +386,7 @@ func runCompileTest(t *testing.T, dirName string) { } t.Run(p, func(t *testing.T) { - inputWf := wf.Workflow + inputWf := wf.GetWorkflow() reqs, err := compiler.GetRequirements(inputWf, nil) if !assert.NoError(t, err) { @@ -411,9 +410,9 @@ func runCompileTest(t *testing.T, dirName string) { t.FailNow() } - allNodeIDs := getAllMatchingNodes(compiledWfc.Primary, allNodesPredicate) - nodeIDsWithDeps := getAllMatchingNodes(compiledWfc.Primary, hasPromiseNodePredicate) - if !assertNodeIDsInConnections(t, nodeIDsWithDeps, allNodeIDs, compiledWfc.Primary.Connections) { + allNodeIDs := getAllMatchingNodes(compiledWfc.GetPrimary(), allNodesPredicate) + nodeIDsWithDeps := 
getAllMatchingNodes(compiledWfc.GetPrimary(), hasPromiseNodePredicate) + if !assertNodeIDsInConnections(t, nodeIDsWithDeps, allNodeIDs, compiledWfc.GetPrimary().GetConnections()) { t.FailNow() } @@ -433,7 +432,7 @@ func runCompileTest(t *testing.T, dirName string) { for _, p := range paths { t.Run(p, func(t *testing.T) { - raw, err := ioutil.ReadFile(p) + raw, err := os.ReadFile(p) if !assert.NoError(t, err) { t.FailNow() } @@ -443,9 +442,9 @@ func runCompileTest(t *testing.T, dirName string) { t.FailNow() } - inputs := makeDefaultInputs(compiledWfc.Primary.Template.Interface) + inputs := makeDefaultInputs(compiledWfc.GetPrimary().GetTemplate().GetInterface()) - dotFormat := visualize.ToGraphViz(compiledWfc.Primary) + dotFormat := visualize.ToGraphViz(compiledWfc.GetPrimary()) t.Logf("GraphViz Dot: %v\n", dotFormat) flyteWf, err := k8s.BuildFlyteWorkflow(compiledWfc, diff --git a/flytepropeller/pkg/compiler/transformers/k8s/inputs.go b/flytepropeller/pkg/compiler/transformers/k8s/inputs.go index a49d92c21b..75b2d42c26 100644 --- a/flytepropeller/pkg/compiler/transformers/k8s/inputs.go +++ b/flytepropeller/pkg/compiler/transformers/k8s/inputs.go @@ -15,28 +15,28 @@ func validateInputs(nodeID common.NodeID, iface *core.TypedInterface, inputs cor return false } - if iface.Inputs == nil { + if iface.GetInputs() == nil { errs.Collect(errors.NewValueRequiredErr(nodeID, "interface.InputsRef")) return false } - varMap := make(map[string]*core.Variable, len(iface.Inputs.Variables)) + varMap := make(map[string]*core.Variable, len(iface.GetInputs().GetVariables())) requiredInputsSet := sets.String{} - for name, v := range iface.Inputs.Variables { + for name, v := range iface.GetInputs().GetVariables() { varMap[name] = v requiredInputsSet.Insert(name) } boundInputsSet := sets.String{} - for inputVar, inputVal := range inputs.Literals { + for inputVar, inputVal := range inputs.GetLiterals() { v, exists := varMap[inputVar] if !exists { 
errs.Collect(errors.NewVariableNameNotFoundErr(nodeID, "", inputVar)) continue } - if !validators.IsInstance(inputVal, v.Type) { - errs.Collect(errors.NewMismatchingInstanceErr(nodeID, inputVar, common.LiteralTypeToStr(v.Type), inputVal.String())) + if !validators.IsInstance(inputVal, v.GetType()) { + errs.Collect(errors.NewMismatchingInstanceErr(nodeID, inputVar, common.LiteralTypeToStr(v.GetType()), inputVal.String())) continue } diff --git a/flytepropeller/pkg/compiler/transformers/k8s/node.go b/flytepropeller/pkg/compiler/transformers/k8s/node.go index 8a4c9248ec..18ec1ba02f 100644 --- a/flytepropeller/pkg/compiler/transformers/k8s/node.go +++ b/flytepropeller/pkg/compiler/transformers/k8s/node.go @@ -35,8 +35,8 @@ func buildNodeSpec(n *core.Node, tasks []*core.CompiledTask, errs errors.Compile taskID := n.GetTaskNode().GetReferenceId().String() // TODO: Use task index for quick lookup for _, t := range tasks { - if t.Template.Id.String() == taskID { - task = t.Template + if t.GetTemplate().GetId().String() == taskID { + task = t.GetTemplate() break } } @@ -46,7 +46,7 @@ func buildNodeSpec(n *core.Node, tasks []*core.CompiledTask, errs errors.Compile return nil, !errs.HasErrors() } - if overrides := n.GetTaskNode().Overrides; overrides != nil { + if overrides := n.GetTaskNode().GetOverrides(); overrides != nil { if overrides.GetResources() != nil { resources = overrides.GetResources() } @@ -87,7 +87,7 @@ func buildNodeSpec(n *core.Node, tasks []*core.CompiledTask, errs errors.Compile interruptVal := n.GetMetadata().GetInterruptible() interruptible = &interruptVal } - name = n.GetMetadata().Name + name = n.GetMetadata().GetName() } nodeSpec := &v1alpha1.NodeSpec{ @@ -114,7 +114,7 @@ func buildNodeSpec(n *core.Node, tasks []*core.CompiledTask, errs errors.Compile return nil, !errs.HasErrors() } - switch n.GetWorkflowNode().Reference.(type) { + switch n.GetWorkflowNode().GetReference().(type) { case *core.WorkflowNode_LaunchplanRef: nodeSpec.Kind = 
v1alpha1.NodeKindWorkflow nodeSpec.WorkflowNode = &v1alpha1.WorkflowNodeSpec{ @@ -146,7 +146,7 @@ func buildNodeSpec(n *core.Node, tasks []*core.CompiledTask, errs errors.Compile case *core.Node_GateNode: nodeSpec.Kind = v1alpha1.NodeKindGate gateNode := n.GetGateNode() - switch gateNode.Condition.(type) { + switch gateNode.GetCondition().(type) { case *core.GateNode_Approve: nodeSpec.GateNode = &v1alpha1.GateNodeSpec{ Kind: v1alpha1.ConditionKindApprove, @@ -173,7 +173,7 @@ func buildNodeSpec(n *core.Node, tasks []*core.CompiledTask, errs errors.Compile arrayNode := n.GetArrayNode() // build subNodeSpecs - subNodeSpecs, ok := buildNodeSpec(arrayNode.Node, tasks, errs) + subNodeSpecs, ok := buildNodeSpec(arrayNode.GetNode(), tasks, errs) if !ok { return nil, ok } @@ -191,7 +191,7 @@ func buildNodeSpec(n *core.Node, tasks []*core.CompiledTask, errs errors.Compile Parallelism: parallelism, } - switch successCriteria := arrayNode.SuccessCriteria.(type) { + switch successCriteria := arrayNode.GetSuccessCriteria().(type) { case *core.ArrayNode_MinSuccesses: nodeSpec.ArrayNode.MinSuccesses = &successCriteria.MinSuccesses case *core.ArrayNode_MinSuccessRatio: @@ -209,13 +209,13 @@ func buildNodeSpec(n *core.Node, tasks []*core.CompiledTask, errs errors.Compile } func buildIfBlockSpec(block *core.IfBlock, tasks []*core.CompiledTask, errs errors.CompileErrors) (*v1alpha1.IfBlock, []*v1alpha1.NodeSpec) { - nodeSpecs, ok := buildNodeSpec(block.ThenNode, tasks, errs) + nodeSpecs, ok := buildNodeSpec(block.GetThenNode(), tasks, errs) if !ok { return nil, []*v1alpha1.NodeSpec{} } return &v1alpha1.IfBlock{ - Condition: v1alpha1.BooleanExpression{BooleanExpression: block.Condition}, - ThenNode: refStr(block.ThenNode.Id), + Condition: v1alpha1.BooleanExpression{BooleanExpression: block.GetCondition()}, + ThenNode: refStr(block.GetThenNode().GetId()), }, nodeSpecs } @@ -226,26 +226,26 @@ func buildBranchNodeSpec(branch *core.BranchNode, tasks []*core.CompiledTask, er var childNodes 
[]*v1alpha1.NodeSpec - branchNode, nodeSpecs := buildIfBlockSpec(branch.IfElse.Case, tasks, errs.NewScope()) + branchNode, nodeSpecs := buildIfBlockSpec(branch.GetIfElse().GetCase(), tasks, errs.NewScope()) res := &v1alpha1.BranchNodeSpec{ If: *branchNode, } childNodes = append(childNodes, nodeSpecs...) - switch branch.IfElse.GetDefault().(type) { + switch branch.GetIfElse().GetDefault().(type) { case *core.IfElseBlock_ElseNode: - ns, ok := buildNodeSpec(branch.IfElse.GetElseNode(), tasks, errs) + ns, ok := buildNodeSpec(branch.GetIfElse().GetElseNode(), tasks, errs) if !ok { return nil, []*v1alpha1.NodeSpec{} } childNodes = append(childNodes, ns...) - res.Else = refStr(branch.IfElse.GetElseNode().Id) + res.Else = refStr(branch.GetIfElse().GetElseNode().GetId()) case *core.IfElseBlock_Error: - res.ElseFail = branch.IfElse.GetError() + res.ElseFail = branch.GetIfElse().GetError() } - other := make([]*v1alpha1.IfBlock, 0, len(branch.IfElse.Other)) - for _, block := range branch.IfElse.Other { + other := make([]*v1alpha1.IfBlock, 0, len(branch.GetIfElse().GetOther())) + for _, block := range branch.GetIfElse().GetOther() { b, ns := buildIfBlockSpec(block, tasks, errs.NewScope()) other = append(other, b) childNodes = append(childNodes, ns...) 
@@ -285,12 +285,12 @@ func buildTasks(tasks []*core.CompiledTask, errs errors.CompileErrors) map[commo if flyteTask == nil { errs.Collect(errors.NewValueRequiredErr("root", "coreTask")) } else { - taskID := flyteTask.Template.Id.String() + taskID := flyteTask.GetTemplate().GetId().String() if _, exists := res[taskID]; exists { errs.Collect(errors.NewValueCollisionError(taskID, "Id", taskID)) } - res[taskID] = &v1alpha1.TaskSpec{TaskTemplate: flyteTask.Template} + res[taskID] = &v1alpha1.TaskSpec{TaskTemplate: flyteTask.GetTemplate()} } } diff --git a/flytepropeller/pkg/compiler/transformers/k8s/node_test.go b/flytepropeller/pkg/compiler/transformers/k8s/node_test.go index c6a08b5991..28fbb2bf55 100644 --- a/flytepropeller/pkg/compiler/transformers/k8s/node_test.go +++ b/flytepropeller/pkg/compiler/transformers/k8s/node_test.go @@ -175,7 +175,7 @@ func TestBuildNodeSpec(t *testing.T) { n.Node.Target = &core.Node_WorkflowNode{ WorkflowNode: &core.WorkflowNode{ Reference: &core.WorkflowNode_SubWorkflowRef{ - SubWorkflowRef: n.subWF.GetCoreWorkflow().Template.Id, + SubWorkflowRef: n.subWF.GetCoreWorkflow().GetTemplate().GetId(), }, }, } @@ -394,15 +394,15 @@ func TestBuildTasks(t *testing.T) { taskMap := buildTasks(tasks, errs) annInputTask := taskMap[(&core.Identifier{Name: "annotatedInput"}).String()] - assert.Nil(t, annInputTask.Interface.Inputs.Variables["a"].Type.Annotation) + assert.Nil(t, annInputTask.Interface.GetInputs().GetVariables()["a"].GetType().GetAnnotation()) unAnnInputTask := taskMap[(&core.Identifier{Name: "unannotatedInput"}).String()] - assert.Nil(t, unAnnInputTask.Interface.Inputs.Variables["a"].Type.Annotation) + assert.Nil(t, unAnnInputTask.Interface.GetInputs().GetVariables()["a"].GetType().GetAnnotation()) annOutputTask := taskMap[(&core.Identifier{Name: "annotatedOutput"}).String()] - assert.Nil(t, annOutputTask.Interface.Outputs.Variables["a"].Type.Annotation) + assert.Nil(t, 
annOutputTask.Interface.GetOutputs().GetVariables()["a"].GetType().GetAnnotation()) unAnnOutputTask := taskMap[(&core.Identifier{Name: "unannotatedOutput"}).String()] - assert.Nil(t, unAnnOutputTask.Interface.Outputs.Variables["a"].Type.Annotation) + assert.Nil(t, unAnnOutputTask.Interface.GetOutputs().GetVariables()["a"].GetType().GetAnnotation()) }) } diff --git a/flytepropeller/pkg/compiler/transformers/k8s/utils.go b/flytepropeller/pkg/compiler/transformers/k8s/utils.go index 06884f4b75..b8e15e176b 100644 --- a/flytepropeller/pkg/compiler/transformers/k8s/utils.go +++ b/flytepropeller/pkg/compiler/transformers/k8s/utils.go @@ -19,15 +19,15 @@ func refStr(s string) *string { } func computeRetryStrategy(n *core.Node, t *core.TaskTemplate) *v1alpha1.RetryStrategy { - if n.GetMetadata() != nil && n.GetMetadata().GetRetries() != nil && n.GetMetadata().GetRetries().Retries != 0 { + if n.GetMetadata() != nil && n.GetMetadata().GetRetries() != nil && n.GetMetadata().GetRetries().GetRetries() != 0 { return &v1alpha1.RetryStrategy{ - MinAttempts: refInt(int(n.GetMetadata().GetRetries().Retries + 1)), + MinAttempts: refInt(int(n.GetMetadata().GetRetries().GetRetries() + 1)), } } - if t != nil && t.GetMetadata() != nil && t.GetMetadata().GetRetries() != nil && t.GetMetadata().GetRetries().Retries != 0 { + if t != nil && t.GetMetadata() != nil && t.GetMetadata().GetRetries() != nil && t.GetMetadata().GetRetries().GetRetries() != 0 { return &v1alpha1.RetryStrategy{ - MinAttempts: refInt(int(t.GetMetadata().GetRetries().Retries + 1)), + MinAttempts: refInt(int(t.GetMetadata().GetRetries().GetRetries() + 1)), } } @@ -86,15 +86,25 @@ func StripTypeMetadata(t *core.LiteralType) *core.LiteralType { c := *t c.Metadata = nil - c.Annotation = nil + + // Special-case the presence of cache-key-metadata. This is a special field that is used to store metadata about + // used in the cache key generation. This does not affect compatibility and it is purely used for cache key calculations. 
+ if c.GetAnnotation() != nil { + annotations := c.GetAnnotation().GetAnnotations().GetFields() + // The presence of the key `cache-key-metadata` indicates that we should leave the metadata intact. + if _, ok := annotations["cache-key-metadata"]; !ok { + c.Annotation = nil + } + } + // Note that we cannot strip `Structure` from the type because the dynamic node output type is used to validate the // interface of the dynamically compiled workflow. `Structure` is used to extend type checking information on // different Flyte types and is therefore required to ensure correct type validation. - switch underlyingType := c.Type.(type) { + switch underlyingType := c.GetType().(type) { case *core.LiteralType_UnionType: - variants := make([]*core.LiteralType, 0, len(c.GetUnionType().Variants)) - for _, variant := range c.GetUnionType().Variants { + variants := make([]*core.LiteralType, 0, len(c.GetUnionType().GetVariants())) + for _, variant := range c.GetUnionType().GetVariants() { variants = append(variants, StripTypeMetadata(variant)) } @@ -104,11 +114,11 @@ func StripTypeMetadata(t *core.LiteralType) *core.LiteralType { case *core.LiteralType_CollectionType: underlyingType.CollectionType = StripTypeMetadata(c.GetCollectionType()) case *core.LiteralType_StructuredDatasetType: - columns := make([]*core.StructuredDatasetType_DatasetColumn, 0, len(c.GetStructuredDatasetType().Columns)) - for _, column := range c.GetStructuredDatasetType().Columns { + columns := make([]*core.StructuredDatasetType_DatasetColumn, 0, len(c.GetStructuredDatasetType().GetColumns())) + for _, column := range c.GetStructuredDatasetType().GetColumns() { columns = append(columns, &core.StructuredDatasetType_DatasetColumn{ - Name: column.Name, - LiteralType: StripTypeMetadata(column.LiteralType), + Name: column.GetName(), + LiteralType: StripTypeMetadata(column.GetLiteralType()), }) } @@ -125,17 +135,17 @@ func StripInterfaceTypeMetadata(iface *core.TypedInterface) *core.TypedInterface newIface := 
*iface - if iface.Inputs != nil { - for name, i := range iface.Inputs.Variables { - i.Type = StripTypeMetadata(i.Type) + if iface.GetInputs() != nil { + for name, i := range iface.GetInputs().GetVariables() { + i.Type = StripTypeMetadata(i.GetType()) i.Description = "" newIface.Inputs.Variables[name] = i } } - if iface.Outputs != nil { - for name, i := range iface.Outputs.Variables { - i.Type = StripTypeMetadata(i.Type) + if iface.GetOutputs() != nil { + for name, i := range iface.GetOutputs().GetVariables() { + i.Type = StripTypeMetadata(i.GetType()) i.Description = "" iface.Outputs.Variables[name] = i } diff --git a/flytepropeller/pkg/compiler/transformers/k8s/utils_test.go b/flytepropeller/pkg/compiler/transformers/k8s/utils_test.go index d2d9b10866..0a8866ff8d 100644 --- a/flytepropeller/pkg/compiler/transformers/k8s/utils_test.go +++ b/flytepropeller/pkg/compiler/transformers/k8s/utils_test.go @@ -6,6 +6,7 @@ import ( "github.com/go-test/deep" _struct "github.com/golang/protobuf/ptypes/struct" "github.com/stretchr/testify/assert" + "google.golang.org/protobuf/types/known/structpb" "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core" ) @@ -14,9 +15,9 @@ func TestComputeRetryStrategy(t *testing.T) { tests := []struct { name string - nodeRetries int - taskRetries int - expectedRetries int + nodeRetries uint32 + taskRetries uint32 + expectedRetries uint32 }{ {"node-only", 1, 0, 2}, {"task-only", 0, 1, 2}, @@ -31,7 +32,7 @@ func TestComputeRetryStrategy(t *testing.T) { node = &core.Node{ Metadata: &core.NodeMetadata{ Retries: &core.RetryStrategy{ - Retries: uint32(test.nodeRetries), + Retries: test.nodeRetries, }, }, } @@ -42,7 +43,7 @@ func TestComputeRetryStrategy(t *testing.T) { tmpl = &core.TaskTemplate{ Metadata: &core.TaskMetadata{ Retries: &core.RetryStrategy{ - Retries: uint32(test.taskRetries), + Retries: test.taskRetries, }, }, } @@ -51,7 +52,7 @@ func TestComputeRetryStrategy(t *testing.T) { r := computeRetryStrategy(node, tmpl) if 
test.expectedRetries != 0 { assert.NotNil(t, r) - assert.Equal(t, test.expectedRetries, *r.MinAttempts) + assert.Equal(t, int(test.expectedRetries), *r.MinAttempts) // #nosec G115 } else { assert.Nil(t, r) } @@ -248,6 +249,99 @@ func TestStripTypeMetadata(t *testing.T) { }, }, }, + { + name: "cache-key-metadata set", + args: &core.LiteralType{ + Type: &core.LiteralType_Simple{ + Simple: core.SimpleType_INTEGER, + }, + Annotation: &core.TypeAnnotation{ + Annotations: &structpb.Struct{ + Fields: map[string]*structpb.Value{ + "cache-key-metadata": { + Kind: &_struct.Value_StructValue{ + StructValue: &structpb.Struct{ + Fields: map[string]*structpb.Value{ + "foo": { + Kind: &_struct.Value_StringValue{ + StringValue: "bar", + }, + }, + }, + }, + }, + }, + }, + }, + }, + Metadata: &_struct.Struct{ + Fields: map[string]*_struct.Value{ + "foo": { + Kind: &_struct.Value_StringValue{ + StringValue: "bar", + }, + }, + }, + }, + }, + want: &core.LiteralType{ + Type: &core.LiteralType_Simple{ + Simple: core.SimpleType_INTEGER, + }, + Annotation: &core.TypeAnnotation{ + Annotations: &structpb.Struct{ + Fields: map[string]*structpb.Value{ + "cache-key-metadata": { + Kind: &_struct.Value_StructValue{ + StructValue: &structpb.Struct{ + Fields: map[string]*structpb.Value{ + "foo": { + Kind: &_struct.Value_StringValue{ + StringValue: "bar", + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + { + name: "cache-key-metadata not present in annotation", + args: &core.LiteralType{ + Type: &core.LiteralType_Simple{ + Simple: core.SimpleType_INTEGER, + }, + Annotation: &core.TypeAnnotation{ + Annotations: &structpb.Struct{ + Fields: map[string]*structpb.Value{ + "some-key": { + Kind: &_struct.Value_StringValue{ + StringValue: "some-value", + }, + }, + }, + }, + }, + Metadata: &_struct.Struct{ + Fields: map[string]*_struct.Value{ + "foo": { + Kind: &_struct.Value_StringValue{ + StringValue: "bar", + }, + }, + }, + }, + }, + want: &core.LiteralType{ + Type: &core.LiteralType_Simple{ + 
Simple: core.SimpleType_INTEGER, + }, + }, + }, } for _, tt := range tests { @@ -292,7 +386,7 @@ func TestStripInterfaceTypeMetadata(t *testing.T) { } stripped := StripInterfaceTypeMetadata(i) - assert.Nil(t, stripped.Inputs.Variables["a"].Type.Metadata) - assert.Nil(t, stripped.Outputs.Variables["a"].Type.Metadata) + assert.Nil(t, stripped.GetInputs().GetVariables()["a"].GetType().GetMetadata()) + assert.Nil(t, stripped.GetOutputs().GetVariables()["a"].GetType().GetMetadata()) }) } diff --git a/flytepropeller/pkg/compiler/transformers/k8s/workflow.go b/flytepropeller/pkg/compiler/transformers/k8s/workflow.go index 2421ddf9bb..eb9023bfa2 100644 --- a/flytepropeller/pkg/compiler/transformers/k8s/workflow.go +++ b/flytepropeller/pkg/compiler/transformers/k8s/workflow.go @@ -39,13 +39,13 @@ func requiresInputs(w *core.WorkflowTemplate) bool { return false } - return len(w.GetInterface().GetInputs().Variables) > 0 + return len(w.GetInterface().GetInputs().GetVariables()) > 0 } // Note: Update WorkflowNameFromID for any change made to WorkflowIDAsString func WorkflowIDAsString(id *core.Identifier) string { b := strings.Builder{} - _, err := b.WriteString(id.Project) + _, err := b.WriteString(id.GetProject()) if err != nil { return "" } @@ -55,7 +55,7 @@ func WorkflowIDAsString(id *core.Identifier) string { return "" } - _, err = b.WriteString(id.Domain) + _, err = b.WriteString(id.GetDomain()) if err != nil { return "" } @@ -65,7 +65,7 @@ func WorkflowIDAsString(id *core.Identifier) string { return "" } - _, err = b.WriteString(id.Name) + _, err = b.WriteString(id.GetName()) if err != nil { return "" } @@ -83,10 +83,10 @@ func WorkflowNameFromID(id string) string { func buildFlyteWorkflowSpec(wf *core.CompiledWorkflow, tasks []*core.CompiledTask, errs errors.CompileErrors) ( spec *v1alpha1.WorkflowSpec, err error) { - wf.Template.Interface = StripInterfaceTypeMetadata(wf.Template.Interface) + wf.Template.Interface = 
StripInterfaceTypeMetadata(wf.GetTemplate().GetInterface()) var failureN *v1alpha1.NodeSpec - if n := wf.Template.GetFailureNode(); n != nil { + if n := wf.GetTemplate().GetFailureNode(); n != nil { nodes, ok := buildNodeSpec(n, tasks, errs.NewScope()) if !ok { return nil, errs @@ -94,34 +94,34 @@ func buildFlyteWorkflowSpec(wf *core.CompiledWorkflow, tasks []*core.CompiledTas failureN = nodes[0] } - nodes, _ := buildNodes(wf.Template.GetNodes(), tasks, errs.NewScope()) + nodes, _ := buildNodes(wf.GetTemplate().GetNodes(), tasks, errs.NewScope()) if errs.HasErrors() { return nil, errs } - outputBindings := make([]*v1alpha1.Binding, 0, len(wf.Template.Outputs)) - for _, b := range wf.Template.Outputs { + outputBindings := make([]*v1alpha1.Binding, 0, len(wf.GetTemplate().GetOutputs())) + for _, b := range wf.GetTemplate().GetOutputs() { outputBindings = append(outputBindings, &v1alpha1.Binding{ Binding: b, }) } var outputs *v1alpha1.OutputVarMap - if wf.Template.GetInterface() != nil { - outputs = &v1alpha1.OutputVarMap{VariableMap: wf.Template.GetInterface().Outputs} + if wf.GetTemplate().GetInterface() != nil { + outputs = &v1alpha1.OutputVarMap{VariableMap: wf.GetTemplate().GetInterface().GetOutputs()} } else { outputs = &v1alpha1.OutputVarMap{VariableMap: &core.VariableMap{}} } failurePolicy := v1alpha1.WorkflowOnFailurePolicy(core.WorkflowMetadata_FAIL_IMMEDIATELY) - if wf.Template != nil && wf.Template.Metadata != nil { - failurePolicy = v1alpha1.WorkflowOnFailurePolicy(wf.Template.Metadata.OnFailure) + if wf.GetTemplate() != nil && wf.GetTemplate().GetMetadata() != nil { + failurePolicy = v1alpha1.WorkflowOnFailurePolicy(wf.GetTemplate().GetMetadata().GetOnFailure()) } connections := buildConnections(wf) return &v1alpha1.WorkflowSpec{ - ID: WorkflowIDAsString(wf.Template.Id), + ID: WorkflowIDAsString(wf.GetTemplate().GetId()), OnFailure: failureN, Nodes: nodes, Outputs: outputs, @@ -147,13 +147,13 @@ func generateName(wfID *core.Identifier, execID 
*core.WorkflowExecutionIdentifie name string, generateName string, label string, project string, domain string, err error) { if execID != nil { - return execID.Name, "", execID.Name, execID.Project, execID.Domain, nil + return execID.GetName(), "", execID.GetName(), execID.GetProject(), execID.GetDomain(), nil } else if wfID != nil { - wid := fmt.Sprintf("%v%v%v", withSeparatorIfNotEmpty(wfID.Project), withSeparatorIfNotEmpty(wfID.Domain), wfID.Name) + wid := fmt.Sprintf("%v%v%v", withSeparatorIfNotEmpty(wfID.GetProject()), withSeparatorIfNotEmpty(wfID.GetDomain()), wfID.GetName()) // TODO: this is a hack until we figure out how to restrict generated names. K8s has a limitation of 63 chars wid = wid[:minInt(32, len(wid))] - return "", fmt.Sprintf("%v-", wid), wid, wfID.Project, wfID.Domain, nil + return "", fmt.Sprintf("%v-", wid), wid, wfID.GetProject(), wfID.GetDomain(), nil } else { return "", "", "", "", "", fmt.Errorf("expected param not set. wfID or execID must be non-nil values") } @@ -169,8 +169,8 @@ func BuildFlyteWorkflow(wfClosure *core.CompiledWorkflowClosure, inputs *core.Li return nil, errs } - wf := wfClosure.Primary.Template - tasks := wfClosure.Tasks + wf := wfClosure.GetPrimary().GetTemplate() + tasks := wfClosure.GetTasks() // Fill in inputs in the start node. 
if inputs != nil { if ok := validateInputs(common.StartNodeID, wf.GetInterface(), *inputs, errs.NewScope()); !ok { @@ -182,22 +182,22 @@ func BuildFlyteWorkflow(wfClosure *core.CompiledWorkflowClosure, inputs *core.Li } for _, t := range tasks { - t.Template.Interface = StripInterfaceTypeMetadata(t.Template.Interface) + t.Template.Interface = StripInterfaceTypeMetadata(t.GetTemplate().GetInterface()) } - primarySpec, err := buildFlyteWorkflowSpec(wfClosure.Primary, tasks, errs.NewScope()) + primarySpec, err := buildFlyteWorkflowSpec(wfClosure.GetPrimary(), tasks, errs.NewScope()) if err != nil { errs.Collect(errors.NewWorkflowBuildError(err)) return nil, errs } - subwfs := make(map[v1alpha1.WorkflowID]*v1alpha1.WorkflowSpec, len(wfClosure.SubWorkflows)) - for _, subWf := range wfClosure.SubWorkflows { + subwfs := make(map[v1alpha1.WorkflowID]*v1alpha1.WorkflowSpec, len(wfClosure.GetSubWorkflows())) + for _, subWf := range wfClosure.GetSubWorkflows() { spec, err := buildFlyteWorkflowSpec(subWf, tasks, errs.NewScope()) if err != nil { errs.Collect(errors.NewWorkflowBuildError(err)) } else { - subwfs[subWf.Template.Id.String()] = spec + subwfs[subWf.GetTemplate().GetId().String()] = spec } } @@ -266,7 +266,7 @@ func BuildFlyteWorkflow(wfClosure *core.CompiledWorkflowClosure, inputs *core.Li func toMapOfLists(connections map[string]*core.ConnectionSet_IdList) map[string][]string { res := make(map[string][]string, len(connections)) for key, val := range connections { - res[key] = val.Ids + res[key] = val.GetIds() } return res @@ -292,24 +292,24 @@ func BuildWfClosureCrdFields(wfClosure *core.CompiledWorkflowClosure) (*WfClosur return nil, errs } - primarySpec, err := buildFlyteWorkflowSpec(wfClosure.Primary, wfClosure.Tasks, errs.NewScope()) + primarySpec, err := buildFlyteWorkflowSpec(wfClosure.GetPrimary(), wfClosure.GetTasks(), errs.NewScope()) if err != nil { errs.Collect(errors.NewWorkflowBuildError(err)) return nil, errs } - for _, t := range wfClosure.Tasks { - 
t.Template.Interface = StripInterfaceTypeMetadata(t.Template.Interface) + for _, t := range wfClosure.GetTasks() { + t.Template.Interface = StripInterfaceTypeMetadata(t.GetTemplate().GetInterface()) } - tasks := buildTasks(wfClosure.Tasks, errs.NewScope()) + tasks := buildTasks(wfClosure.GetTasks(), errs.NewScope()) - subwfs := make(map[v1alpha1.WorkflowID]*v1alpha1.WorkflowSpec, len(wfClosure.SubWorkflows)) - for _, subWf := range wfClosure.SubWorkflows { - spec, err := buildFlyteWorkflowSpec(subWf, wfClosure.Tasks, errs.NewScope()) + subwfs := make(map[v1alpha1.WorkflowID]*v1alpha1.WorkflowSpec, len(wfClosure.GetSubWorkflows())) + for _, subWf := range wfClosure.GetSubWorkflows() { + spec, err := buildFlyteWorkflowSpec(subWf, wfClosure.GetTasks(), errs.NewScope()) if err != nil { errs.Collect(errors.NewWorkflowBuildError(err)) } else { - subwfs[subWf.Template.Id.String()] = spec + subwfs[subWf.GetTemplate().GetId().String()] = spec } } diff --git a/flytepropeller/pkg/compiler/transformers/k8s/workflow_test.go b/flytepropeller/pkg/compiler/transformers/k8s/workflow_test.go index dbb51e25eb..378343ec20 100644 --- a/flytepropeller/pkg/compiler/transformers/k8s/workflow_test.go +++ b/flytepropeller/pkg/compiler/transformers/k8s/workflow_test.go @@ -331,10 +331,10 @@ func TestBuildFlyteWorkflow_withBranch(t *testing.T) { w := &core.CompiledWorkflowClosure{} assert.NoError(t, utils.UnmarshalBytesToPb(c, w)) - assert.Len(t, w.Primary.Connections.Downstream, 2) - ids := w.Primary.Connections.Downstream["start-node"] - assert.Len(t, ids.Ids, 1) - assert.Equal(t, ids.Ids[0], "n0") + assert.Len(t, w.GetPrimary().GetConnections().GetDownstream(), 2) + ids := w.GetPrimary().GetConnections().GetDownstream()["start-node"] + assert.Len(t, ids.GetIds(), 1) + assert.Equal(t, ids.GetIds()[0], "n0") wf, err := BuildFlyteWorkflow( w, diff --git a/flytepropeller/pkg/compiler/validators/bindings.go b/flytepropeller/pkg/compiler/validators/bindings.go index b69dda529f..fd317036fa 100644 
--- a/flytepropeller/pkg/compiler/validators/bindings.go +++ b/flytepropeller/pkg/compiler/validators/bindings.go @@ -109,7 +109,7 @@ func validateBinding(w c.WorkflowBuilder, node c.Node, nodeParam string, binding return nil, nil, !errs.HasErrors() } - if upNode, found := validateNodeID(w, val.Promise.NodeId, errs.NewScope()); found { + if upNode, found := validateNodeID(w, val.Promise.GetNodeId(), errs.NewScope()); found { v, err := typing.ParseVarName(val.Promise.GetVar()) if err != nil { errs.Collect(errors.NewSyntaxError(nodeID, val.Promise.GetVar(), err)) @@ -117,28 +117,28 @@ func validateBinding(w c.WorkflowBuilder, node c.Node, nodeParam string, binding } inputVar := nodeParam - outputVar := val.Promise.Var + outputVar := val.Promise.GetVar() if node.GetMetadata() != nil { - inputVar = fmt.Sprintf("%s.%s", node.GetMetadata().Name, nodeParam) + inputVar = fmt.Sprintf("%s.%s", node.GetMetadata().GetName(), nodeParam) } if upNode.GetMetadata() != nil { - outputVar = fmt.Sprintf("%s.%s", upNode.GetMetadata().Name, val.Promise.Var) + outputVar = fmt.Sprintf("%s.%s", upNode.GetMetadata().GetName(), val.Promise.GetVar()) } if param, paramFound := validateOutputVar(upNode, v.Name, errs.NewScope()); paramFound { - sourceType := param.Type + sourceType := param.GetType() // If the variable has an index. We expect param to be a collection. if v.Index != nil { if cType := param.GetType().GetCollectionType(); cType == nil { - errs.Collect(errors.NewMismatchingVariablesErr(nodeID, outputVar, c.LiteralTypeToStr(param.Type), inputVar, c.LiteralTypeToStr(expectedType))) + errs.Collect(errors.NewMismatchingVariablesErr(nodeID, outputVar, c.LiteralTypeToStr(param.GetType()), inputVar, c.LiteralTypeToStr(expectedType))) } else { sourceType = cType } } // If the variable has an attribute path. Extract the type of the last attribute. 
- for _, attr := range val.Promise.AttrPath { + for _, attr := range val.Promise.GetAttrPath() { var tmpType *flyte.LiteralType var exist bool @@ -152,7 +152,7 @@ func validateBinding(w c.WorkflowBuilder, node c.Node, nodeParam string, binding if !exist { // the error should output the sourceType instead of tmpType because tmpType is nil - errs.Collect(errors.NewFieldNotFoundErr(nodeID, val.Promise.Var, sourceType.String(), attr.GetStringValue())) + errs.Collect(errors.NewFieldNotFoundErr(nodeID, val.Promise.GetVar(), sourceType.String(), attr.GetStringValue())) return nil, nil, !errs.HasErrors() } sourceType = tmpType @@ -161,7 +161,7 @@ func validateBinding(w c.WorkflowBuilder, node c.Node, nodeParam string, binding if !validateParamTypes || AreTypesCastable(sourceType, expectedType) { val.Promise.NodeId = upNode.GetId() - return param.GetType(), []c.NodeID{val.Promise.NodeId}, true + return param.GetType(), []c.NodeID{val.Promise.GetNodeId()}, true } errs.Collect(errors.NewMismatchingVariablesErr(node.GetId(), outputVar, c.LiteralTypeToStr(sourceType), inputVar, c.LiteralTypeToStr(expectedType))) @@ -187,14 +187,14 @@ func validateBinding(w c.WorkflowBuilder, node c.Node, nodeParam string, binding v := val.Scalar.GetPrimitive().GetStringValue() // Let us assert that the bound value is a correct enum Value found := false - for _, ev := range expectedType.GetEnumType().Values { + for _, ev := range expectedType.GetEnumType().GetValues() { if ev == v { found = true break } } if !found { - errs.Collect(errors.NewIllegalEnumValueError(nodeID, nodeParam, v, expectedType.GetEnumType().Values)) + errs.Collect(errors.NewIllegalEnumValueError(nodeID, nodeParam, v, expectedType.GetEnumType().GetValues())) } } @@ -237,7 +237,7 @@ func ValidateBindings(w c.WorkflowBuilder, node c.Node, bindings []*flyte.Bindin providedBindings.Insert(binding.GetVar()) if resolvedType, upstreamNodes, bindingOk := validateBinding(w, node, binding.GetVar(), binding.GetBinding(), - param.Type, 
errs.NewScope(), validateParamTypes); bindingOk { + param.GetType(), errs.NewScope(), validateParamTypes); bindingOk { for _, upNode := range upstreamNodes { // Add implicit Edges switch edgeDirection { @@ -259,7 +259,7 @@ func ValidateBindings(w c.WorkflowBuilder, node c.Node, bindings []*flyte.Bindin // If we missed binding some params, add errors if params != nil { - for paramName, Variable := range params.Variables { + for paramName, Variable := range params.GetVariables() { if !providedBindings.Has(paramName) && !IsOptionalType(*Variable) { errs.Collect(errors.NewParameterNotBoundErr(node.GetId(), paramName)) } @@ -271,10 +271,10 @@ func ValidateBindings(w c.WorkflowBuilder, node c.Node, bindings []*flyte.Bindin // IsOptionalType Return true if there is a None type in Union Type func IsOptionalType(variable flyte.Variable) bool { - if variable.Type.GetUnionType() == nil { + if variable.GetType().GetUnionType() == nil { return false } - for _, variant := range variable.Type.GetUnionType().Variants { + for _, variant := range variable.GetType().GetUnionType().GetVariants() { if flyte.SimpleType_NONE == variant.GetSimple() { return true } diff --git a/flytepropeller/pkg/compiler/validators/bindings_test.go b/flytepropeller/pkg/compiler/validators/bindings_test.go index e209023b5c..2975b340b9 100644 --- a/flytepropeller/pkg/compiler/validators/bindings_test.go +++ b/flytepropeller/pkg/compiler/validators/bindings_test.go @@ -23,8 +23,8 @@ func LiteralToBinding(l *core.Literal) *core.BindingData { }, } case *core.Literal_Collection: - x := make([]*core.BindingData, 0, len(l.GetCollection().Literals)) - for _, sub := range l.GetCollection().Literals { + x := make([]*core.BindingData, 0, len(l.GetCollection().GetLiterals())) + for _, sub := range l.GetCollection().GetLiterals() { x = append(x, LiteralToBinding(sub)) } @@ -36,8 +36,8 @@ func LiteralToBinding(l *core.Literal) *core.BindingData { }, } case *core.Literal_Map: - x := make(map[string]*core.BindingData, 
len(l.GetMap().Literals)) - for key, val := range l.GetMap().Literals { + x := make(map[string]*core.BindingData, len(l.GetMap().GetLiterals())) + for key, val := range l.GetMap().GetLiterals() { x[key] = LiteralToBinding(val) } @@ -62,7 +62,7 @@ func TestValidateBindings(t *testing.T) { compileErrors := compilerErrors.NewCompileErrors() resolved, ok := ValidateBindings(wf, n, bindings, vars, true, c.EdgeDirectionBidirectional, compileErrors) assert.True(t, ok) - assert.Empty(t, resolved.Variables) + assert.Empty(t, resolved.GetVariables()) }) t.Run("Variable not in inputs", func(t *testing.T) { diff --git a/flytepropeller/pkg/compiler/validators/branch.go b/flytepropeller/pkg/compiler/validators/branch.go index 386f1cecda..94e4bea7ad 100644 --- a/flytepropeller/pkg/compiler/validators/branch.go +++ b/flytepropeller/pkg/compiler/validators/branch.go @@ -18,17 +18,17 @@ func validateBranchInterface(w c.WorkflowBuilder, node c.NodeBuilder, errs error return nil, false } - if ifBlock := node.GetBranchNode().IfElse; ifBlock == nil { + if ifBlock := node.GetBranchNode().GetIfElse(); ifBlock == nil { errs.Collect(errors.NewValueRequiredErr(node.GetId(), "Branch.IfElse")) return nil, false } - if ifCase := node.GetBranchNode().IfElse.Case; ifCase == nil { + if ifCase := node.GetBranchNode().GetIfElse().GetCase(); ifCase == nil { errs.Collect(errors.NewValueRequiredErr(node.GetId(), "Branch.IfElse.Case")) return nil, false } - if thenNode := node.GetBranchNode().IfElse.Case.ThenNode; thenNode == nil { + if thenNode := node.GetBranchNode().GetIfElse().GetCase().GetThenNode(); thenNode == nil { errs.Collect(errors.NewValueRequiredErr(node.GetId(), "Branch.IfElse.Case.ThenNode")) return nil, false } @@ -37,33 +37,33 @@ func validateBranchInterface(w c.WorkflowBuilder, node c.NodeBuilder, errs error finalOutputParameterNames := sets.NewString() validateIfaceMatch := func(nodeId string, iface2 *flyte.TypedInterface, errsScope errors.CompileErrors) (match bool) { - outputs2, 
outputs2Set := buildVariablesIndex(iface2.Outputs) + outputs2, outputs2Set := buildVariablesIndex(iface2.GetOutputs()) // Validate that parameters that exist in both interfaces have compatible types. finalOutputParameterNames = finalOutputParameterNames.Intersection(outputs2Set) for paramName := range finalOutputParameterNames { - if validateVarType(nodeId, paramName, outputs[paramName], outputs2[paramName].Type, errs.NewScope()) { - validateVarType(nodeId, paramName, outputs2[paramName], outputs[paramName].Type, errs.NewScope()) + if validateVarType(nodeId, paramName, outputs[paramName], outputs2[paramName].GetType(), errs.NewScope()) { + validateVarType(nodeId, paramName, outputs2[paramName], outputs[paramName].GetType(), errs.NewScope()) } } return !errsScope.HasErrors() } - cases := make([]*flyte.Node, 0, len(node.GetBranchNode().IfElse.Other)+1) - caseBlock := node.GetBranchNode().IfElse.Case - cases = append(cases, caseBlock.ThenNode) + cases := make([]*flyte.Node, 0, len(node.GetBranchNode().GetIfElse().GetOther())+1) + caseBlock := node.GetBranchNode().GetIfElse().GetCase() + cases = append(cases, caseBlock.GetThenNode()) - otherCases := node.GetBranchNode().IfElse.Other + otherCases := node.GetBranchNode().GetIfElse().GetOther() for _, otherCase := range otherCases { - if otherCase.ThenNode == nil { + if otherCase.GetThenNode() == nil { errs.Collect(errors.NewValueRequiredErr(node.GetId(), "IfElse.Case.ThenNode")) continue } - cases = append(cases, otherCase.ThenNode) + cases = append(cases, otherCase.GetThenNode()) } - if elseNode := node.GetBranchNode().IfElse.GetElseNode(); elseNode != nil { + if elseNode := node.GetBranchNode().GetIfElse().GetElseNode(); elseNode != nil { cases = append(cases, elseNode) } @@ -79,12 +79,12 @@ func validateBranchInterface(w c.WorkflowBuilder, node c.NodeBuilder, errs error // match. We will pull the inputs needed for the underlying branch node at runtime. 
iface2 = &flyte.TypedInterface{ Inputs: &flyte.VariableMap{Variables: map[string]*flyte.Variable{}}, - Outputs: iface2.Outputs, + Outputs: iface2.GetOutputs(), } if iface == nil { iface = iface2 - outputs, finalOutputParameterNames = buildVariablesIndex(iface.Outputs) + outputs, finalOutputParameterNames = buildVariablesIndex(iface.GetOutputs()) } else { validateIfaceMatch(n.GetId(), iface2, errs.NewScope()) } @@ -99,7 +99,7 @@ func validateBranchInterface(w c.WorkflowBuilder, node c.NodeBuilder, errs error if !errs.HasErrors() && iface != nil { iface = &flyte.TypedInterface{ Inputs: inputVarsFromBindings, - Outputs: filterVariables(iface.Outputs, finalOutputParameterNames), + Outputs: filterVariables(iface.GetOutputs(), finalOutputParameterNames), } } else { iface = nil diff --git a/flytepropeller/pkg/compiler/validators/condition.go b/flytepropeller/pkg/compiler/validators/condition.go index 70b72cde8a..c402040135 100644 --- a/flytepropeller/pkg/compiler/validators/condition.go +++ b/flytepropeller/pkg/compiler/validators/condition.go @@ -48,8 +48,8 @@ func ValidateBooleanExpression(w c.WorkflowBuilder, node c.NodeBuilder, expr *fl } } } else if expr.GetConjunction() != nil { - ValidateBooleanExpression(w, node, expr.GetConjunction().LeftExpression, requireParamType, errs.NewScope()) - ValidateBooleanExpression(w, node, expr.GetConjunction().RightExpression, requireParamType, errs.NewScope()) + ValidateBooleanExpression(w, node, expr.GetConjunction().GetLeftExpression(), requireParamType, errs.NewScope()) + ValidateBooleanExpression(w, node, expr.GetConjunction().GetRightExpression(), requireParamType, errs.NewScope()) } else { errs.Collect(errors.NewValueRequiredErr(node.GetId(), "Expr")) } diff --git a/flytepropeller/pkg/compiler/validators/interface.go b/flytepropeller/pkg/compiler/validators/interface.go index a71c52e49a..fe22a9fb48 100644 --- a/flytepropeller/pkg/compiler/validators/interface.go +++ b/flytepropeller/pkg/compiler/validators/interface.go @@ 
-17,14 +17,14 @@ func ValidateInterface(nodeID c.NodeID, iface *core.TypedInterface, errs errors. } // validate InputsRef/OutputsRef parameters required attributes are set - if iface.Inputs != nil && iface.Inputs.Variables != nil { - validateVariables(nodeID, iface.Inputs, errs.NewScope()) + if iface.GetInputs() != nil && iface.Inputs.Variables != nil { + validateVariables(nodeID, iface.GetInputs(), errs.NewScope()) } else { iface.Inputs = &core.VariableMap{Variables: map[string]*core.Variable{}} } - if iface.Outputs != nil && iface.Outputs.Variables != nil { - validateVariables(nodeID, iface.Outputs, errs.NewScope()) + if iface.GetOutputs() != nil && iface.Outputs.Variables != nil { + validateVariables(nodeID, iface.GetOutputs(), errs.NewScope()) } else { iface.Outputs = &core.VariableMap{Variables: map[string]*core.Variable{}} } @@ -55,8 +55,8 @@ func ValidateUnderlyingInterface(w c.WorkflowBuilder, node c.NodeBuilder, errs e errs.Collect(errors.NewTaskReferenceNotFoundErr(node.GetId(), node.GetTaskNode().GetReferenceId().String())) } case *core.Node_WorkflowNode: - if node.GetWorkflowNode().GetLaunchplanRef().String() == w.GetCoreWorkflow().Template.Id.String() { - iface = w.GetCoreWorkflow().Template.Interface + if node.GetWorkflowNode().GetLaunchplanRef().String() == w.GetCoreWorkflow().GetTemplate().GetId().String() { + iface = w.GetCoreWorkflow().GetTemplate().GetInterface() if iface == nil { errs.Collect(errors.NewValueRequiredErr(node.GetId(), "WorkflowNode.Interface")) } @@ -75,11 +75,11 @@ func ValidateUnderlyingInterface(w c.WorkflowBuilder, node c.NodeBuilder, errs e // Compute exposed inputs as the union of all required inputs and any input overwritten by the node. 
exposedInputs := map[string]*core.Variable{} if inputs != nil && inputs.Parameters != nil { - for name, p := range inputs.Parameters { + for name, p := range inputs.GetParameters() { if p.GetRequired() { - exposedInputs[name] = p.Var + exposedInputs[name] = p.GetVar() } else if containsBindingByVariableName(node.GetInputs(), name) { - exposedInputs[name] = p.Var + exposedInputs[name] = p.GetVar() } // else, the param has a default value and is not being overwritten by the node } @@ -98,10 +98,10 @@ func ValidateUnderlyingInterface(w c.WorkflowBuilder, node c.NodeBuilder, errs e } } else if node.GetWorkflowNode().GetSubWorkflowRef() != nil { if wf, wfOk := w.GetSubWorkflow(node.GetWorkflowNode().GetSubWorkflowRef()); wfOk { - if wf.Template == nil { + if wf.GetTemplate() == nil { errs.Collect(errors.NewValueRequiredErr(node.GetId(), "WorkflowNode.Template")) } else { - iface = wf.Template.Interface + iface = wf.GetTemplate().GetInterface() if iface == nil { errs.Collect(errors.NewValueRequiredErr(node.GetId(), "WorkflowNode.Template.Interface")) } @@ -155,7 +155,7 @@ func ValidateUnderlyingInterface(w c.WorkflowBuilder, node c.NodeBuilder, errs e } case *core.Node_ArrayNode: arrayNode := node.GetArrayNode() - underlyingNodeBuilder := w.GetOrCreateNodeBuilder(arrayNode.Node) + underlyingNodeBuilder := w.GetOrCreateNodeBuilder(arrayNode.GetNode()) if underlyingIface, ok := ValidateUnderlyingInterface(w, underlyingNodeBuilder, errs.NewScope()); ok { // ArrayNode interface should be inferred from the underlying node interface. flytekit // will correct wrap variables in collections as needed, leaving partials as is. 
diff --git a/flytepropeller/pkg/compiler/validators/interface_test.go b/flytepropeller/pkg/compiler/validators/interface_test.go index 85c031c0a7..ba987bda62 100644 --- a/flytepropeller/pkg/compiler/validators/interface_test.go +++ b/flytepropeller/pkg/compiler/validators/interface_test.go @@ -66,10 +66,10 @@ func assertNonEmptyInterface(t testing.TB, iface *core.TypedInterface, ifaceOk b t.Fatal(errs) } - assert.NotNil(t, iface.Inputs) - assert.NotNil(t, iface.Inputs.Variables) - assert.NotNil(t, iface.Outputs) - assert.NotNil(t, iface.Outputs.Variables) + assert.NotNil(t, iface.GetInputs()) + assert.NotNil(t, iface.GetInputs().GetVariables()) + assert.NotNil(t, iface.GetOutputs()) + assert.NotNil(t, iface.GetOutputs().GetVariables()) } func TestValidateUnderlyingInterface(t *testing.T) { @@ -419,8 +419,8 @@ func TestValidateUnderlyingInterface(t *testing.T) { taskNodeBuilder := &mocks.NodeBuilder{} taskNodeBuilder.On("GetCoreNode").Return(taskNode) - taskNodeBuilder.On("GetId").Return(taskNode.Id) - taskNodeBuilder.On("GetTaskNode").Return(taskNode.Target.(*core.Node_TaskNode).TaskNode) + taskNodeBuilder.On("GetId").Return(taskNode.GetId()) + taskNodeBuilder.On("GetTaskNode").Return(taskNode.GetTarget().(*core.Node_TaskNode).TaskNode) taskNodeBuilder.On("GetInterface").Return(nil) taskNodeBuilder.On("SetInterface", mock.AnythingOfType("*core.TypedInterface")).Return(nil) @@ -431,7 +431,7 @@ func TestValidateUnderlyingInterface(t *testing.T) { }).String() })).Return(&task, true) wfBuilder.On("GetOrCreateNodeBuilder", mock.MatchedBy(func(node *core.Node) bool { - return node.Id == "node_1" + return node.GetId() == "node_1" })).Return(taskNodeBuilder) // mock array node @@ -445,9 +445,9 @@ func TestValidateUnderlyingInterface(t *testing.T) { } nodeBuilder := mocks.NodeBuilder{} - nodeBuilder.On("GetArrayNode").Return(arrayNode.Target.(*core.Node_ArrayNode).ArrayNode) + nodeBuilder.On("GetArrayNode").Return(arrayNode.GetTarget().(*core.Node_ArrayNode).ArrayNode) 
nodeBuilder.On("GetCoreNode").Return(arrayNode) - nodeBuilder.On("GetId").Return(arrayNode.Id) + nodeBuilder.On("GetId").Return(arrayNode.GetId()) nodeBuilder.On("GetInterface").Return(nil) nodeBuilder.On("SetInterface", mock.Anything).Return() diff --git a/flytepropeller/pkg/compiler/validators/node.go b/flytepropeller/pkg/compiler/validators/node.go index ad43abdce3..1b8b97ae12 100644 --- a/flytepropeller/pkg/compiler/validators/node.go +++ b/flytepropeller/pkg/compiler/validators/node.go @@ -15,19 +15,19 @@ func validateEffectiveOutputParameters(n c.NodeBuilder, errs errors.CompileError params *flyte.VariableMap, ok bool) { aliases := make(map[string]string, len(n.GetOutputAliases())) for _, alias := range n.GetOutputAliases() { - if _, found := aliases[alias.Var]; found { - errs.Collect(errors.NewDuplicateAliasErr(n.GetId(), alias.Alias)) + if _, found := aliases[alias.GetVar()]; found { + errs.Collect(errors.NewDuplicateAliasErr(n.GetId(), alias.GetAlias())) } else { - aliases[alias.Var] = alias.Alias + aliases[alias.GetVar()] = alias.GetAlias() } } if n.GetInterface() != nil { params = &flyte.VariableMap{ - Variables: make(map[string]*flyte.Variable, len(n.GetInterface().GetOutputs().Variables)), + Variables: make(map[string]*flyte.Variable, len(n.GetInterface().GetOutputs().GetVariables())), } - for paramName, param := range n.GetInterface().GetOutputs().Variables { + for paramName, param := range n.GetInterface().GetOutputs().GetVariables() { if alias, found := aliases[paramName]; found { if newParam, paramOk := withVariableName(param); paramOk { params.Variables[alias] = newParam @@ -57,19 +57,19 @@ func branchNodeIDFormatter(parentNodeID, thenNodeID string) string { func ValidateBranchNode(w c.WorkflowBuilder, n c.NodeBuilder, requireParamType bool, errs errors.CompileErrors) ( discoveredNodes []c.NodeBuilder, ok bool) { - cases := make([]*flyte.IfBlock, 0, len(n.GetBranchNode().IfElse.Other)+1) - if n.GetBranchNode().IfElse.Case == nil { + cases := 
make([]*flyte.IfBlock, 0, len(n.GetBranchNode().GetIfElse().GetOther())+1) + if n.GetBranchNode().GetIfElse().GetCase() == nil { errs.Collect(errors.NewBranchNodeHasNoCondition(n.GetId())) } else { - cases = append(cases, n.GetBranchNode().IfElse.Case) + cases = append(cases, n.GetBranchNode().GetIfElse().GetCase()) } - cases = append(cases, n.GetBranchNode().IfElse.Other...) + cases = append(cases, n.GetBranchNode().GetIfElse().GetOther()...) discoveredNodes = make([]c.NodeBuilder, 0, len(cases)) subNodes := make([]c.NodeBuilder, 0, len(cases)+1) for _, block := range cases { // Validate condition - ValidateBooleanExpression(w, n, block.Condition, requireParamType, errs.NewScope()) + ValidateBooleanExpression(w, n, block.GetCondition(), requireParamType, errs.NewScope()) if block.GetThenNode() == nil { errs.Collect(errors.NewBranchNodeNotSpecified(n.GetId())) @@ -79,10 +79,10 @@ func ValidateBranchNode(w c.WorkflowBuilder, n c.NodeBuilder, requireParamType b } } - if elseNode := n.GetBranchNode().IfElse.GetElseNode(); elseNode != nil { + if elseNode := n.GetBranchNode().GetIfElse().GetElseNode(); elseNode != nil { wrapperNode := w.GetOrCreateNodeBuilder(elseNode) subNodes = append(subNodes, wrapperNode) - } else if defaultElse := n.GetBranchNode().IfElse.GetDefault(); defaultElse == nil { + } else if defaultElse := n.GetBranchNode().GetIfElse().GetDefault(); defaultElse == nil { errs.Collect(errors.NewBranchNodeHasNoDefault(n.GetId())) } @@ -126,7 +126,7 @@ func ValidateNode(w c.WorkflowBuilder, n c.NodeBuilder, validateConditionTypes b } // Order upstream node ids to ensure consistent output of the compiler even if client ordering changes. - sort.Strings(n.GetCoreNode().UpstreamNodeIds) + sort.Strings(n.GetCoreNode().GetUpstreamNodeIds()) // Validate branch node conditions and inner nodes. 
if n.GetBranchNode() != nil { diff --git a/flytepropeller/pkg/compiler/validators/node_test.go b/flytepropeller/pkg/compiler/validators/node_test.go index 3982b71344..642f568593 100644 --- a/flytepropeller/pkg/compiler/validators/node_test.go +++ b/flytepropeller/pkg/compiler/validators/node_test.go @@ -64,7 +64,7 @@ func TestValidateNode(t *testing.T) { coreN.UpstreamNodeIds = []string{"n1", "n0"} n.OnGetCoreNode().Return(coreN) n.On("GetUpstreamNodeIds").Return(func() []string { - return coreN.UpstreamNodeIds + return coreN.GetUpstreamNodeIds() }) wf := &mocks.WorkflowBuilder{} diff --git a/flytepropeller/pkg/compiler/validators/typing.go b/flytepropeller/pkg/compiler/validators/typing.go index 2bde60b47b..ca1ca03148 100644 --- a/flytepropeller/pkg/compiler/validators/typing.go +++ b/flytepropeller/pkg/compiler/validators/typing.go @@ -1,11 +1,18 @@ package validators import ( + "bytes" + "context" + "encoding/json" "strings" structpb "github.com/golang/protobuf/ptypes/struct" + "github.com/santhosh-tekuri/jsonschema" + "github.com/wI2L/jsondiff" + jscmp "gitlab.com/yvesf/json-schema-compare" flyte "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core" + "github.com/flyteorg/flyte/flytestdlib/logger" ) type typeChecker interface { @@ -16,6 +23,85 @@ type trivialChecker struct { literalType *flyte.LiteralType } +func isSuperTypeInJSON(sourceMetaData, targetMetaData *structpb.Struct) bool { + // Check if the source schema is a supertype of the target schema, beyond simple inheritance. + // For custom types, we expect the JSON schemas in the metadata to come from the same JSON schema package, + // specifically draft 2020-12 from Mashumaro. 
+ + srcSchemaBytes, err := json.Marshal(sourceMetaData.GetFields()) + if err != nil { + logger.Infof(context.Background(), "Failed to marshal source metadata: [%v]", err) + return false + } + tgtSchemaBytes, err := json.Marshal(targetMetaData.GetFields()) + if err != nil { + logger.Infof(context.Background(), "Failed to marshal target metadata: [%v]", err) + return false + } + + compiler := jsonschema.NewCompiler() + + err = compiler.AddResource("src", bytes.NewReader(srcSchemaBytes)) + if err != nil { + logger.Infof(context.Background(), "Failed to add resource to compiler: [%v]", err) + return false + } + err = compiler.AddResource("tgt", bytes.NewReader(tgtSchemaBytes)) + if err != nil { + logger.Infof(context.Background(), "Failed to add resource to compiler: [%v]", err) + return false + } + + srcSchema, err := compiler.Compile("src") + if err != nil { + logger.Infof(context.Background(), "Failed to compile source schema: [%v]", err) + return false + } + tgtSchema, err := compiler.Compile("tgt") + if err != nil { + logger.Infof(context.Background(), "Failed to compile target schema: [%v]", err) + return false + } + + // Compare the two schemas + errs := jscmp.Compare(tgtSchema, srcSchema) + + // Ignore the "not implemented" errors from json-schema-compare (additionalProperties, additionalItems, etc.) + // While handling nested structures, we might have multiple "not implemented" errors for a single field as well. + // If all the errors are "not implemented", we can consider the source schema as a supertype of the target schema. 
+ for _, err := range errs { + if !strings.Contains(err.Error(), "not implemented") { + return false + } + } + + return true +} + +func isSameTypeInJSON(sourceMetaData, targetMetaData *structpb.Struct) bool { + srcSchemaBytes, err := json.Marshal(sourceMetaData.GetFields()) + if err != nil { + logger.Infof(context.Background(), "Failed to marshal source metadata: [%v]", err) + return false + } + + tgtSchemaBytes, err := json.Marshal(targetMetaData.GetFields()) + if err != nil { + logger.Infof(context.Background(), "Failed to marshal target metadata: [%v]", err) + return false + } + + // Use jsondiff to compare the two schemas + patch, err := jsondiff.CompareJSON(srcSchemaBytes, tgtSchemaBytes) + if err != nil { + logger.Infof(context.Background(), "Failed to compare JSON schemas: [%v]", err) + return false + } + + // If the length of the patch is zero, the two JSON structures are identical + return len(patch) == 0 +} + // CastsFrom is a trivial type checker merely checks if types match exactly. func (t trivialChecker) CastsFrom(upstreamType *flyte.LiteralType) bool { // If upstream is an enum, it can be consumed as a string downstream @@ -35,6 +121,23 @@ func (t trivialChecker) CastsFrom(upstreamType *flyte.LiteralType) bool { return false } + // Related Issue: https://github.com/flyteorg/flyte/issues/5489 + // RFC: https://github.com/flyteorg/flyte/blob/master/rfc/system/5741-binary-idl-with-message-pack.md#flytepropeller + if upstreamType.GetSimple() == flyte.SimpleType_STRUCT && t.literalType.GetSimple() == flyte.SimpleType_STRUCT { + // Json Schema is stored in Metadata + upstreamMetaData := upstreamType.GetMetadata() + downstreamMetaData := t.literalType.GetMetadata() + + // There's bug in flytekit's dataclass Transformer to generate JSON Scheam before, + // in some case, we the JSON Schema will be nil, so we can only pass it to support + // backward compatible. (reference task should be supported.) 
+ if upstreamMetaData == nil || downstreamMetaData == nil { + return true + } + + return isSameTypeInJSON(upstreamMetaData, downstreamMetaData) || isSuperTypeInJSON(upstreamMetaData, downstreamMetaData) + } + // Ignore metadata when comparing types. upstreamTypeCopy := *upstreamType downstreamTypeCopy := *t.literalType @@ -136,7 +239,7 @@ func (t schemaTypeChecker) CastsFrom(upstreamType *flyte.LiteralType) bool { } // Flyte Schema can only be serialized to parquet - if len(structuredDatasetType.Format) != 0 && !strings.EqualFold(structuredDatasetType.Format, "parquet") { + if len(structuredDatasetType.GetFormat()) != 0 && !strings.EqualFold(structuredDatasetType.GetFormat(), "parquet") { return false } @@ -168,7 +271,7 @@ func (t structuredDatasetChecker) CastsFrom(upstreamType *flyte.LiteralType) boo } if schemaType != nil { // Flyte Schema can only be serialized to parquet - format := t.literalType.GetStructuredDatasetType().Format + format := t.literalType.GetStructuredDatasetType().GetFormat() if len(format) != 0 && !strings.EqualFold(format, "parquet") { return false } @@ -179,22 +282,22 @@ func (t structuredDatasetChecker) CastsFrom(upstreamType *flyte.LiteralType) boo // Upstream (schema) -> downstream (schema) func schemaCastFromSchema(upstream *flyte.SchemaType, downstream *flyte.SchemaType) bool { - if len(upstream.Columns) == 0 || len(downstream.Columns) == 0 { + if len(upstream.GetColumns()) == 0 || len(downstream.GetColumns()) == 0 { return true } nameToTypeMap := make(map[string]flyte.SchemaType_SchemaColumn_SchemaColumnType) - for _, column := range upstream.Columns { - nameToTypeMap[column.Name] = column.Type + for _, column := range upstream.GetColumns() { + nameToTypeMap[column.GetName()] = column.GetType() } // Check that the downstream schema is a strict sub-set of the upstream schema. 
- for _, column := range downstream.Columns { - upstreamType, ok := nameToTypeMap[column.Name] + for _, column := range downstream.GetColumns() { + upstreamType, ok := nameToTypeMap[column.GetName()] if !ok { return false } - if upstreamType != column.Type { + if upstreamType != column.GetType() { return false } } @@ -244,26 +347,26 @@ func (t unionTypeChecker) CastsFrom(upstreamType *flyte.LiteralType) bool { // Upstream (structuredDatasetType) -> downstream (structuredDatasetType) func structuredDatasetCastFromStructuredDataset(upstream *flyte.StructuredDatasetType, downstream *flyte.StructuredDatasetType) bool { // Skip the format check here when format is empty. https://github.com/flyteorg/flyte/issues/2864 - if len(upstream.Format) != 0 && len(downstream.Format) != 0 && !strings.EqualFold(upstream.Format, downstream.Format) { + if len(upstream.GetFormat()) != 0 && len(downstream.GetFormat()) != 0 && !strings.EqualFold(upstream.GetFormat(), downstream.GetFormat()) { return false } - if len(upstream.Columns) == 0 || len(downstream.Columns) == 0 { + if len(upstream.GetColumns()) == 0 || len(downstream.GetColumns()) == 0 { return true } nameToTypeMap := make(map[string]*flyte.LiteralType) - for _, column := range upstream.Columns { - nameToTypeMap[column.Name] = column.LiteralType + for _, column := range upstream.GetColumns() { + nameToTypeMap[column.GetName()] = column.GetLiteralType() } // Check that the downstream structured dataset is a strict sub-set of the upstream structured dataset. 
- for _, column := range downstream.Columns { - upstreamType, ok := nameToTypeMap[column.Name] + for _, column := range downstream.GetColumns() { + upstreamType, ok := nameToTypeMap[column.GetName()] if !ok { return false } - if !getTypeChecker(column.LiteralType).CastsFrom(upstreamType) { + if !getTypeChecker(column.GetLiteralType()).CastsFrom(upstreamType) { return false } } @@ -272,21 +375,21 @@ func structuredDatasetCastFromStructuredDataset(upstream *flyte.StructuredDatase // Upstream (schemaType) -> downstream (structuredDatasetType) func structuredDatasetCastFromSchema(upstream *flyte.SchemaType, downstream *flyte.StructuredDatasetType) bool { - if len(upstream.Columns) == 0 || len(downstream.Columns) == 0 { + if len(upstream.GetColumns()) == 0 || len(downstream.GetColumns()) == 0 { return true } nameToTypeMap := make(map[string]flyte.SchemaType_SchemaColumn_SchemaColumnType) - for _, column := range upstream.Columns { - nameToTypeMap[column.Name] = column.GetType() + for _, column := range upstream.GetColumns() { + nameToTypeMap[column.GetName()] = column.GetType() } // Check that the downstream structuredDataset is a strict sub-set of the upstream schema. 
- for _, column := range downstream.Columns { - upstreamType, ok := nameToTypeMap[column.Name] + for _, column := range downstream.GetColumns() { + upstreamType, ok := nameToTypeMap[column.GetName()] if !ok { return false } - if !schemaTypeIsMatchStructuredDatasetType(upstreamType, column.LiteralType.GetSimple()) { + if !schemaTypeIsMatchStructuredDatasetType(upstreamType, column.GetLiteralType().GetSimple()) { return false } } @@ -295,17 +398,17 @@ func structuredDatasetCastFromSchema(upstream *flyte.SchemaType, downstream *fly // Upstream (structuredDatasetType) -> downstream (schemaType) func schemaCastFromStructuredDataset(upstream *flyte.StructuredDatasetType, downstream *flyte.SchemaType) bool { - if len(upstream.Columns) == 0 || len(downstream.Columns) == 0 { + if len(upstream.GetColumns()) == 0 || len(downstream.GetColumns()) == 0 { return true } nameToTypeMap := make(map[string]flyte.SimpleType) - for _, column := range upstream.Columns { - nameToTypeMap[column.Name] = column.LiteralType.GetSimple() + for _, column := range upstream.GetColumns() { + nameToTypeMap[column.GetName()] = column.GetLiteralType().GetSimple() } // Check that the downstream schema is a strict sub-set of the upstream structuredDataset. 
- for _, column := range downstream.Columns { - upstreamType, ok := nameToTypeMap[column.Name] + for _, column := range downstream.GetColumns() { + upstreamType, ok := nameToTypeMap[column.GetName()] if !ok { return false } diff --git a/flytepropeller/pkg/compiler/validators/typing_test.go b/flytepropeller/pkg/compiler/validators/typing_test.go index f2e407b986..2f5bc5531d 100644 --- a/flytepropeller/pkg/compiler/validators/typing_test.go +++ b/flytepropeller/pkg/compiler/validators/typing_test.go @@ -5,6 +5,7 @@ import ( structpb "github.com/golang/protobuf/ptypes/struct" "github.com/stretchr/testify/assert" + structpb2 "google.golang.org/protobuf/types/known/structpb" "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core" ) @@ -557,6 +558,837 @@ func TestMapCasting(t *testing.T) { assert.True(t, castable, "castable from Struct to struct") }) + t.Run("SameDataclassOneLevel(draft 2020-12)", func(t *testing.T) { + /* + @dataclass + class A: + a: int + */ + castable := AreTypesCastable( + &core.LiteralType{ + Type: &core.LiteralType_Simple{ + Simple: core.SimpleType_STRUCT, + }, + Metadata: &structpb.Struct{ + Fields: map[string]*structpb2.Value{ + "required": &structpb.Value{ + Kind: &structpb.Value_ListValue{ + ListValue: &structpb.ListValue{ + Values: []*structpb.Value{{Kind: &structpb.Value_StringValue{StringValue: "a"}}}, + }, + }, + }, + "title": &structpb.Value{ + Kind: &structpb.Value_StringValue{StringValue: "A"}, + }, + "properties": &structpb.Value{ + Kind: &structpb.Value_StructValue{ + StructValue: &structpb.Struct{ + Fields: map[string]*structpb.Value{ + "a": { + Kind: &structpb.Value_StructValue{ + StructValue: &structpb.Struct{ + Fields: map[string]*structpb.Value{ + "type": { + Kind: &structpb.Value_StringValue{StringValue: "integer"}, + }, + }, + }, + }, + }, + }, + }, + }, + }, + "type": { + Kind: &structpb.Value_StringValue{StringValue: "object"}, + }, + "AdditionalProperties": { + Kind: &structpb.Value_BoolValue{BoolValue: false}, + }, + }, 
+ }, + }, + &core.LiteralType{ + Type: &core.LiteralType_Simple{ + Simple: core.SimpleType_STRUCT, + }, + Metadata: &structpb.Struct{ + Fields: map[string]*structpb2.Value{ + "required": &structpb.Value{ + Kind: &structpb.Value_ListValue{ + ListValue: &structpb.ListValue{ + Values: []*structpb.Value{{Kind: &structpb.Value_StringValue{StringValue: "a"}}}, + }, + }, + }, + "title": &structpb.Value{ + Kind: &structpb.Value_StringValue{StringValue: "A"}, + }, + "properties": &structpb.Value{ + Kind: &structpb.Value_StructValue{ + StructValue: &structpb.Struct{ + Fields: map[string]*structpb.Value{ + "a": { + Kind: &structpb.Value_StructValue{ + StructValue: &structpb.Struct{ + Fields: map[string]*structpb.Value{ + "type": { + Kind: &structpb.Value_StringValue{StringValue: "integer"}, + }, + }, + }, + }, + }, + }, + }, + }, + }, + "type": { + Kind: &structpb.Value_StringValue{StringValue: "object"}, + }, + "AdditionalProperties": { + Kind: &structpb.Value_BoolValue{BoolValue: false}, + }, + }, + }, + }, + ) + assert.True(t, castable, "same dataclass castable with one level properties") + }) + + t.Run("SameDataclassTwoLevel(draft 2020-12)", func(t *testing.T) { + /* + @dataclass + class A: + a: int + + @dataclass + class B: + b: A + */ + + castable := AreTypesCastable( + &core.LiteralType{ + Type: &core.LiteralType_Simple{ + Simple: core.SimpleType_STRUCT, + }, + Metadata: &structpb.Struct{ + Fields: map[string]*structpb2.Value{ + "required": &structpb.Value{ + Kind: &structpb.Value_ListValue{ + ListValue: &structpb.ListValue{ + Values: []*structpb.Value{{Kind: &structpb.Value_StringValue{StringValue: "b"}}}, + }, + }, + }, + "title": &structpb.Value{ + Kind: &structpb.Value_StringValue{StringValue: "B"}, + }, + "properties": &structpb.Value{ + Kind: &structpb.Value_StructValue{ + StructValue: &structpb.Struct{ + Fields: map[string]*structpb.Value{ + "b": { + Kind: &structpb.Value_StructValue{ + StructValue: &structpb.Struct{ + Fields: map[string]*structpb.Value{ + 
"required": { + Kind: &structpb.Value_ListValue{ + ListValue: &structpb.ListValue{ + Values: []*structpb.Value{{Kind: &structpb.Value_StringValue{StringValue: "a"}}}, + }, + }, + }, + "title": { + Kind: &structpb.Value_StringValue{StringValue: "A"}, + }, + "properties": { + Kind: &structpb.Value_StructValue{ + StructValue: &structpb.Struct{ + Fields: map[string]*structpb.Value{ + "a": { + Kind: &structpb.Value_StructValue{ + StructValue: &structpb.Struct{ + Fields: map[string]*structpb.Value{ + "type": { + Kind: &structpb.Value_StringValue{StringValue: "integer"}, + }, + }, + }, + }, + }, + }, + }, + }, + }, + "type": { + Kind: &structpb.Value_StringValue{StringValue: "object"}, + }, + "AdditionalProperties": { + Kind: &structpb.Value_BoolValue{BoolValue: false}, + }, + }, + }, + }, + }, + }, + }, + }, + }, + "type": { + Kind: &structpb.Value_StringValue{StringValue: "object"}, + }, + "AdditionalProperties": { + Kind: &structpb.Value_BoolValue{BoolValue: false}, + }, + }, + }, + }, + &core.LiteralType{ + Type: &core.LiteralType_Simple{ + Simple: core.SimpleType_STRUCT, + }, + Metadata: &structpb.Struct{ + Fields: map[string]*structpb2.Value{ + "required": &structpb.Value{ + Kind: &structpb.Value_ListValue{ + ListValue: &structpb.ListValue{ + Values: []*structpb.Value{{Kind: &structpb.Value_StringValue{StringValue: "b"}}}, + }, + }, + }, + "title": &structpb.Value{ + Kind: &structpb.Value_StringValue{StringValue: "B"}, + }, + "properties": &structpb.Value{ + Kind: &structpb.Value_StructValue{ + StructValue: &structpb.Struct{ + Fields: map[string]*structpb.Value{ + "b": { + Kind: &structpb.Value_StructValue{ + StructValue: &structpb.Struct{ + Fields: map[string]*structpb.Value{ + "required": { + Kind: &structpb.Value_ListValue{ + ListValue: &structpb.ListValue{ + Values: []*structpb.Value{{Kind: &structpb.Value_StringValue{StringValue: "a"}}}, + }, + }, + }, + "title": { + Kind: &structpb.Value_StringValue{StringValue: "A"}, + }, + "properties": { + Kind: 
&structpb.Value_StructValue{ + StructValue: &structpb.Struct{ + Fields: map[string]*structpb.Value{ + "a": { + Kind: &structpb.Value_StructValue{ + StructValue: &structpb.Struct{ + Fields: map[string]*structpb.Value{ + "type": { + Kind: &structpb.Value_StringValue{StringValue: "integer"}, + }, + }, + }, + }, + }, + }, + }, + }, + }, + "type": { + Kind: &structpb.Value_StringValue{StringValue: "object"}, + }, + "AdditionalProperties": { + Kind: &structpb.Value_BoolValue{BoolValue: false}, + }, + }, + }, + }, + }, + }, + }, + }, + }, + "type": { + Kind: &structpb.Value_StringValue{StringValue: "object"}, + }, + "AdditionalProperties": { + Kind: &structpb.Value_BoolValue{BoolValue: false}, + }, + }, + }, + }, + ) + assert.True(t, castable, "same dataclass castable with two level properties") + }) + + t.Run("DiffDataclassTwoLevel(draft 2020-12)", func(t *testing.T) { + /* + @dataclass + class A: + a: int + + @dataclass + class B: + b: A + + @dataclass + class C: + c: str + + @dataclass + class D: + d: C + + Compare B and D + */ + + castable := AreTypesCastable( + &core.LiteralType{ + Type: &core.LiteralType_Simple{ + Simple: core.SimpleType_STRUCT, + }, + Metadata: &structpb.Struct{ + Fields: map[string]*structpb2.Value{ + "required": &structpb.Value{ + Kind: &structpb.Value_ListValue{ + ListValue: &structpb.ListValue{ + Values: []*structpb.Value{{Kind: &structpb.Value_StringValue{StringValue: "b"}}}, + }, + }, + }, + "title": &structpb.Value{ + Kind: &structpb.Value_StringValue{StringValue: "B"}, + }, + "properties": &structpb.Value{ + Kind: &structpb.Value_StructValue{ + StructValue: &structpb.Struct{ + Fields: map[string]*structpb.Value{ + "b": { + Kind: &structpb.Value_StructValue{ + StructValue: &structpb.Struct{ + Fields: map[string]*structpb.Value{ + "required": { + Kind: &structpb.Value_ListValue{ + ListValue: &structpb.ListValue{ + Values: []*structpb.Value{{Kind: &structpb.Value_StringValue{StringValue: "a"}}}, + }, + }, + }, + "title": { + Kind: 
&structpb.Value_StringValue{StringValue: "A"}, + }, + "properties": { + Kind: &structpb.Value_StructValue{ + StructValue: &structpb.Struct{ + Fields: map[string]*structpb.Value{ + "a": { + Kind: &structpb.Value_StructValue{ + StructValue: &structpb.Struct{ + Fields: map[string]*structpb.Value{ + "type": { + Kind: &structpb.Value_StringValue{StringValue: "integer"}, + }, + }, + }, + }, + }, + }, + }, + }, + }, + "type": { + Kind: &structpb.Value_StringValue{StringValue: "object"}, + }, + "AdditionalProperties": { + Kind: &structpb.Value_BoolValue{BoolValue: false}, + }, + }, + }, + }, + }, + }, + }, + }, + }, + "type": { + Kind: &structpb.Value_StringValue{StringValue: "object"}, + }, + "AdditionalProperties": { + Kind: &structpb.Value_BoolValue{BoolValue: false}, + }, + }, + }, + }, + &core.LiteralType{ + Type: &core.LiteralType_Simple{ + Simple: core.SimpleType_STRUCT, + }, + Metadata: &structpb.Struct{ + Fields: map[string]*structpb2.Value{ + "required": &structpb.Value{ + Kind: &structpb.Value_ListValue{ + ListValue: &structpb.ListValue{ + Values: []*structpb.Value{{Kind: &structpb.Value_StringValue{StringValue: "d"}}}, + }, + }, + }, + "title": &structpb.Value{ + Kind: &structpb.Value_StringValue{StringValue: "D"}, + }, + "properties": &structpb.Value{ + Kind: &structpb.Value_StructValue{ + StructValue: &structpb.Struct{ + Fields: map[string]*structpb.Value{ + "d": { + Kind: &structpb.Value_StructValue{ + StructValue: &structpb.Struct{ + Fields: map[string]*structpb.Value{ + "required": { + Kind: &structpb.Value_ListValue{ + ListValue: &structpb.ListValue{ + Values: []*structpb.Value{{Kind: &structpb.Value_StringValue{StringValue: "c"}}}, + }, + }, + }, + "title": { + Kind: &structpb.Value_StringValue{StringValue: "C"}, + }, + "properties": { + Kind: &structpb.Value_StructValue{ + StructValue: &structpb.Struct{ + Fields: map[string]*structpb.Value{ + "c": { + Kind: &structpb.Value_StructValue{ + StructValue: &structpb.Struct{ + Fields: 
map[string]*structpb.Value{ + "type": { + Kind: &structpb.Value_StringValue{StringValue: "string"}, + }, + }, + }, + }, + }, + }, + }, + }, + }, + "type": { + Kind: &structpb.Value_StringValue{StringValue: "object"}, + }, + "AdditionalProperties": { + Kind: &structpb.Value_BoolValue{BoolValue: false}, + }, + }, + }, + }, + }, + }, + }, + }, + }, + "type": { + Kind: &structpb.Value_StringValue{StringValue: "object"}, + }, + "AdditionalProperties": { + Kind: &structpb.Value_BoolValue{BoolValue: false}, + }, + }, + }, + }, + ) + assert.False(t, castable, "different dataclass with two level properties not castable") + }) + + t.Run("SameBaseModelOneLevel(draft 2020-12)", func(t *testing.T) { + /* + class A(BaseModel): + a: int + */ + castable := AreTypesCastable( + &core.LiteralType{ + Type: &core.LiteralType_Simple{ + Simple: core.SimpleType_STRUCT, + }, + Metadata: &structpb.Struct{ + Fields: map[string]*structpb2.Value{ + "required": &structpb.Value{ + Kind: &structpb.Value_ListValue{ + ListValue: &structpb.ListValue{ + Values: []*structpb.Value{{Kind: &structpb.Value_StringValue{StringValue: "a"}}}, + }, + }, + }, + "title": &structpb.Value{ + Kind: &structpb.Value_StringValue{StringValue: "A"}, + }, + "properties": &structpb.Value{ + Kind: &structpb.Value_StructValue{ + StructValue: &structpb.Struct{ + Fields: map[string]*structpb.Value{ + "a": { + Kind: &structpb.Value_StructValue{ + StructValue: &structpb.Struct{ + Fields: map[string]*structpb.Value{ + "type": { + Kind: &structpb.Value_StringValue{StringValue: "integer"}, + }, + }, + }, + }, + }, + }, + }, + }, + }, + "type": { + Kind: &structpb.Value_StringValue{StringValue: "object"}, + }, + }, + }, + }, + &core.LiteralType{ + Type: &core.LiteralType_Simple{ + Simple: core.SimpleType_STRUCT, + }, + Metadata: &structpb.Struct{ + Fields: map[string]*structpb2.Value{ + "required": &structpb.Value{ + Kind: &structpb.Value_ListValue{ + ListValue: &structpb.ListValue{ + Values: []*structpb.Value{{Kind: 
&structpb.Value_StringValue{StringValue: "a"}}}, + }, + }, + }, + "title": &structpb.Value{ + Kind: &structpb.Value_StringValue{StringValue: "A"}, + }, + "properties": &structpb.Value{ + Kind: &structpb.Value_StructValue{ + StructValue: &structpb.Struct{ + Fields: map[string]*structpb.Value{ + "a": { + Kind: &structpb.Value_StructValue{ + StructValue: &structpb.Struct{ + Fields: map[string]*structpb.Value{ + "type": { + Kind: &structpb.Value_StringValue{StringValue: "integer"}, + }, + }, + }, + }, + }, + }, + }, + }, + }, + "type": { + Kind: &structpb.Value_StringValue{StringValue: "object"}, + }, + }, + }, + }, + ) + assert.True(t, castable, "same basemodel castable with one level properties") + }) + + t.Run("BigToSmallAndChildToParent(dataclass draft 2020-12)", func(t *testing.T) { + /* + @dataclass + class A: + a: int + + @dataclass + class B(A): + b: Optional[str] = None + */ + castable := AreTypesCastable( + &core.LiteralType{ + Type: &core.LiteralType_Simple{ + Simple: core.SimpleType_STRUCT, + }, + Metadata: &structpb.Struct{ + Fields: map[string]*structpb2.Value{ + "required": &structpb.Value{ + Kind: &structpb.Value_ListValue{ + ListValue: &structpb.ListValue{ + Values: []*structpb.Value{ + {Kind: &structpb.Value_StringValue{StringValue: "a"}}, + {Kind: &structpb.Value_StringValue{StringValue: "b"}}, + }, + }, + }, + }, + "title": &structpb.Value{ + Kind: &structpb.Value_StringValue{StringValue: "B"}, + }, + "properties": &structpb.Value{ + Kind: &structpb.Value_StructValue{ + StructValue: &structpb.Struct{ + Fields: map[string]*structpb.Value{ + "a": { + Kind: &structpb.Value_StructValue{ + StructValue: &structpb.Struct{ + Fields: map[string]*structpb.Value{ + "type": { + Kind: &structpb.Value_StringValue{StringValue: "integer"}, + }, + }, + }, + }, + }, + "b": { + Kind: &structpb.Value_StructValue{ + StructValue: &structpb.Struct{ + Fields: map[string]*structpb.Value{ + "default": { + Kind: &structpb.Value_NullValue{}, + }, + "anyOf": { + Kind: 
&structpb.Value_ListValue{ + ListValue: &structpb.ListValue{ + Values: []*structpb.Value{ + { + Kind: &structpb.Value_StructValue{ + StructValue: &structpb.Struct{ + Fields: map[string]*structpb.Value{ + "type": { + Kind: &structpb.Value_StringValue{StringValue: "string"}, + }, + }, + }, + }, + }, + { + Kind: &structpb.Value_StructValue{ + StructValue: &structpb.Struct{ + Fields: map[string]*structpb.Value{ + "type": { + Kind: &structpb.Value_StringValue{StringValue: "null"}, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + "type": { + Kind: &structpb.Value_StringValue{StringValue: "object"}, + }, + "AdditionalProperties": { + Kind: &structpb.Value_BoolValue{BoolValue: false}, + }, + }, + }, + }, + &core.LiteralType{ + Type: &core.LiteralType_Simple{ + Simple: core.SimpleType_STRUCT, + }, + Metadata: &structpb.Struct{ + Fields: map[string]*structpb2.Value{ + "required": &structpb.Value{ + Kind: &structpb.Value_ListValue{ + ListValue: &structpb.ListValue{ + Values: []*structpb.Value{{Kind: &structpb.Value_StringValue{StringValue: "a"}}}, + }, + }, + }, + "title": &structpb.Value{ + Kind: &structpb.Value_StringValue{StringValue: "A"}, + }, + "properties": &structpb.Value{ + Kind: &structpb.Value_StructValue{ + StructValue: &structpb.Struct{ + Fields: map[string]*structpb.Value{ + "a": { + Kind: &structpb.Value_StructValue{ + StructValue: &structpb.Struct{ + Fields: map[string]*structpb.Value{ + "type": { + Kind: &structpb.Value_StringValue{StringValue: "integer"}, + }, + }, + }, + }, + }, + }, + }, + }, + }, + "type": { + Kind: &structpb.Value_StringValue{StringValue: "object"}, + }, + "AdditionalProperties": { + Kind: &structpb.Value_BoolValue{BoolValue: false}, + }, + }, + }, + }, + ) + assert.True(t, castable, "dataclass castable from child to parent (big to small)") + }) + + t.Run("SmallToBigAndParentToChild(dataclass draft 2020-12)", func(t *testing.T) { + /* + @dataclass + class A: + a: int + + @dataclass + class B(A): + b: 
Optional[str] = None + */ + castable := AreTypesCastable( + &core.LiteralType{ + Type: &core.LiteralType_Simple{ + Simple: core.SimpleType_STRUCT, + }, + Metadata: &structpb.Struct{ + Fields: map[string]*structpb2.Value{ + "required": &structpb.Value{ + Kind: &structpb.Value_ListValue{ + ListValue: &structpb.ListValue{ + Values: []*structpb.Value{{Kind: &structpb.Value_StringValue{StringValue: "a"}}}, + }, + }, + }, + "title": &structpb.Value{ + Kind: &structpb.Value_StringValue{StringValue: "A"}, + }, + "properties": &structpb.Value{ + Kind: &structpb.Value_StructValue{ + StructValue: &structpb.Struct{ + Fields: map[string]*structpb.Value{ + "a": { + Kind: &structpb.Value_StructValue{ + StructValue: &structpb.Struct{ + Fields: map[string]*structpb.Value{ + "type": { + Kind: &structpb.Value_StringValue{StringValue: "integer"}, + }, + }, + }, + }, + }, + }, + }, + }, + }, + "type": { + Kind: &structpb.Value_StringValue{StringValue: "object"}, + }, + "AdditionalProperties": { + Kind: &structpb.Value_BoolValue{BoolValue: false}, + }, + }, + }, + }, + &core.LiteralType{ + Type: &core.LiteralType_Simple{ + Simple: core.SimpleType_STRUCT, + }, + Metadata: &structpb.Struct{ + Fields: map[string]*structpb2.Value{ + "required": &structpb.Value{ + Kind: &structpb.Value_ListValue{ + ListValue: &structpb.ListValue{ + Values: []*structpb.Value{ + {Kind: &structpb.Value_StringValue{StringValue: "a"}}, + {Kind: &structpb.Value_StringValue{StringValue: "b"}}, + }, + }, + }, + }, + "title": &structpb.Value{ + Kind: &structpb.Value_StringValue{StringValue: "B"}, + }, + "properties": &structpb.Value{ + Kind: &structpb.Value_StructValue{ + StructValue: &structpb.Struct{ + Fields: map[string]*structpb.Value{ + "a": { + Kind: &structpb.Value_StructValue{ + StructValue: &structpb.Struct{ + Fields: map[string]*structpb.Value{ + "type": { + Kind: &structpb.Value_StringValue{StringValue: "integer"}, + }, + }, + }, + }, + }, + "b": { + Kind: &structpb.Value_StructValue{ + StructValue: 
&structpb.Struct{ + Fields: map[string]*structpb.Value{ + "default": { + Kind: &structpb.Value_NullValue{}, + }, + "anyOf": { + Kind: &structpb.Value_ListValue{ + ListValue: &structpb.ListValue{ + Values: []*structpb.Value{ + { + Kind: &structpb.Value_StructValue{ + StructValue: &structpb.Struct{ + Fields: map[string]*structpb.Value{ + "type": { + Kind: &structpb.Value_StringValue{StringValue: "string"}, + }, + }, + }, + }, + }, + { + Kind: &structpb.Value_StructValue{ + StructValue: &structpb.Struct{ + Fields: map[string]*structpb.Value{ + "type": { + Kind: &structpb.Value_StringValue{StringValue: "null"}, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + "type": { + Kind: &structpb.Value_StringValue{StringValue: "object"}, + }, + "AdditionalProperties": { + Kind: &structpb.Value_BoolValue{BoolValue: false}, + }, + }, + }, + }, + ) + assert.False(t, castable, "dataclass not castable from parent to child (small to big)") + }) + t.Run("MismatchedMapNestLevels_Scalar", func(t *testing.T) { castable := AreTypesCastable( &core.LiteralType{ diff --git a/flytepropeller/pkg/compiler/validators/utils.go b/flytepropeller/pkg/compiler/validators/utils.go index 038a06ffa3..bf9047935c 100644 --- a/flytepropeller/pkg/compiler/validators/utils.go +++ b/flytepropeller/pkg/compiler/validators/utils.go @@ -13,7 +13,7 @@ import ( func containsBindingByVariableName(bindings []*core.Binding, name string) (found bool) { for _, b := range bindings { - if b.Var == name { + if b.GetVar() == name { return true } } @@ -26,7 +26,7 @@ func findVariableByName(vars *core.VariableMap, name string) (variable *core.Var return nil, false } - variable, found = vars.Variables[name] + variable, found = vars.GetVariables()[name] return } @@ -47,7 +47,7 @@ func literalTypeForScalar(scalar *core.Scalar) *core.LiteralType { // If the binary has a tag, treat it as a structured type (e.g., dict, dataclass, Pydantic BaseModel). // Otherwise, treat it as raw binary data. 
// Reference: https://github.com/flyteorg/flyte/blob/master/rfc/system/5741-binary-idl-with-message-pack.md - if v.Binary.Tag == coreutils.MESSAGEPACK { + if v.Binary.GetTag() == coreutils.MESSAGEPACK { literalType = &core.LiteralType{Type: &core.LiteralType_Simple{Simple: core.SimpleType_STRUCT}} } else { literalType = &core.LiteralType{Type: &core.LiteralType_Simple{Simple: core.SimpleType_BINARY}} @@ -55,11 +55,11 @@ func literalTypeForScalar(scalar *core.Scalar) *core.LiteralType { case *core.Scalar_Schema: literalType = &core.LiteralType{ Type: &core.LiteralType_Schema{ - Schema: scalar.GetSchema().Type, + Schema: scalar.GetSchema().GetType(), }, } case *core.Scalar_StructuredDataset: - if v.StructuredDataset == nil || v.StructuredDataset.Metadata == nil { + if v.StructuredDataset == nil || v.StructuredDataset.GetMetadata() == nil { return &core.LiteralType{ Type: &core.LiteralType_StructuredDatasetType{}, } @@ -67,7 +67,7 @@ func literalTypeForScalar(scalar *core.Scalar) *core.LiteralType { literalType = &core.LiteralType{ Type: &core.LiteralType_StructuredDatasetType{ - StructuredDatasetType: scalar.GetStructuredDataset().GetMetadata().StructuredDatasetType, + StructuredDatasetType: scalar.GetStructuredDataset().GetMetadata().GetStructuredDatasetType(), }, } case *core.Scalar_NoneType: @@ -114,9 +114,9 @@ func literalTypeForPrimitive(primitive *core.Primitive) *core.LiteralType { } func buildVariablesIndex(params *core.VariableMap) (map[string]*core.Variable, sets.String) { - paramMap := make(map[string]*core.Variable, len(params.Variables)) + paramMap := make(map[string]*core.Variable, len(params.GetVariables())) paramSet := sets.NewString() - for paramName, param := range params.Variables { + for paramName, param := range params.GetVariables() { paramMap[paramName] = param paramSet.Insert(paramName) } @@ -129,7 +129,7 @@ func filterVariables(vars *core.VariableMap, varNames sets.String) *core.Variabl Variables: make(map[string]*core.Variable, 
len(varNames)), } - for paramName, param := range vars.Variables { + for paramName, param := range vars.GetVariables() { if varNames.Has(paramName) { res.Variables[paramName] = param } @@ -157,9 +157,9 @@ func UnionDistinctVariableMaps(m1, m2 map[string]*core.Variable) (map[string]*co for k, v := range m2 { if existingV, exists := res[k]; exists { - if v.Type.String() != existingV.Type.String() { + if v.GetType().String() != existingV.GetType().String() { return nil, fmt.Errorf("key already exists with a different type. %v has type [%v] on one side "+ - "and type [%v] on the other", k, existingV.Type.String(), v.Type.String()) + "and type [%v] on the other", k, existingV.GetType().String(), v.GetType().String()) } } @@ -232,12 +232,8 @@ func (t collectionInstanceChecker) isInstance(lit *core.Literal) bool { if _, ok := lit.GetValue().(*core.Literal_Collection); !ok { return false } - for _, x := range lit.GetCollection().Literals { - if _, ok := x.GetValue().(*core.Literal_OffloadedMetadata); ok { - if !AreTypesCastable(x.GetOffloadedMetadata().GetInferredType(), t.literalType) { - return false - } - } else if !IsInstance(x, t.literalType.GetCollectionType()) { + for _, x := range lit.GetCollection().GetLiterals() { + if !IsInstance(x, t.literalType.GetCollectionType()) { return false } } @@ -252,12 +248,8 @@ func (t mapInstanceChecker) isInstance(lit *core.Literal) bool { if _, ok := lit.GetValue().(*core.Literal_Map); !ok { return false } - for _, x := range lit.GetMap().Literals { - if _, ok := x.GetValue().(*core.Literal_OffloadedMetadata); ok { - if !AreTypesCastable(x.GetOffloadedMetadata().GetInferredType(), t.literalType) { - return false - } - } else if !IsInstance(x, t.literalType.GetMapValueType()) { + for _, x := range lit.GetMap().GetLiterals() { + if !IsInstance(x, t.literalType.GetMapValueType()) { return false } } @@ -269,7 +261,6 @@ type blobInstanceChecker struct { } func (t blobInstanceChecker) isInstance(lit *core.Literal) bool { - 
//scalar.GetBlob().GetMetadata().GetType() if _, ok := lit.GetScalar().GetValue().(*core.Scalar_Blob); !ok { return false } @@ -299,12 +290,12 @@ func (t schemaInstanceChecker) isInstance(lit *core.Literal) bool { switch v := scalar.GetValue().(type) { case *core.Scalar_Schema: - return schemaCastFromSchema(scalar.GetSchema().Type, t.literalType.GetSchema()) + return schemaCastFromSchema(scalar.GetSchema().GetType(), t.literalType.GetSchema()) case *core.Scalar_StructuredDataset: - if v.StructuredDataset == nil || v.StructuredDataset.Metadata == nil { + if v.StructuredDataset == nil || v.StructuredDataset.GetMetadata() == nil { return true } - return schemaCastFromStructuredDataset(scalar.GetStructuredDataset().GetMetadata().StructuredDatasetType, t.literalType.GetSchema()) + return schemaCastFromStructuredDataset(scalar.GetStructuredDataset().GetMetadata().GetStructuredDatasetType(), t.literalType.GetSchema()) default: return false } @@ -325,16 +316,16 @@ func (t structuredDatasetInstanceChecker) isInstance(lit *core.Literal) bool { return true case *core.Scalar_Schema: // Flyte Schema can only be serialized to parquet - format := t.literalType.GetStructuredDatasetType().Format + format := t.literalType.GetStructuredDatasetType().GetFormat() if len(format) != 0 && !strings.EqualFold(format, "parquet") { return false } - return structuredDatasetCastFromSchema(scalar.GetSchema().Type, t.literalType.GetStructuredDatasetType()) + return structuredDatasetCastFromSchema(scalar.GetSchema().GetType(), t.literalType.GetStructuredDatasetType()) case *core.Scalar_StructuredDataset: - if v.StructuredDataset == nil || v.StructuredDataset.Metadata == nil { + if v.StructuredDataset == nil || v.StructuredDataset.GetMetadata() == nil { return true } - return structuredDatasetCastFromStructuredDataset(scalar.GetStructuredDataset().GetMetadata().StructuredDatasetType, t.literalType.GetStructuredDatasetType()) + return 
structuredDatasetCastFromStructuredDataset(scalar.GetStructuredDataset().GetMetadata().GetStructuredDatasetType(), t.literalType.GetStructuredDatasetType()) default: return false } diff --git a/flytepropeller/pkg/compiler/validators/utils_test.go b/flytepropeller/pkg/compiler/validators/utils_test.go index af62161653..412c0d3356 100644 --- a/flytepropeller/pkg/compiler/validators/utils_test.go +++ b/flytepropeller/pkg/compiler/validators/utils_test.go @@ -419,86 +419,6 @@ func TestIsInstance(t *testing.T) { assert.True(t, IsInstance(literals, expectedLt)) }) - t.Run("nested Lists of offloaded List of string types", func(t *testing.T) { - inferredType := &core.LiteralType{ - Type: &core.LiteralType_CollectionType{ - CollectionType: &core.LiteralType{ - Type: &core.LiteralType_Simple{ - Simple: core.SimpleType_STRING, - }, - }, - }, - } - literals := &core.Literal{ - Value: &core.Literal_Collection{ - Collection: &core.LiteralCollection{ - Literals: []*core.Literal{ - { - Value: &core.Literal_OffloadedMetadata{ - OffloadedMetadata: &core.LiteralOffloadedMetadata{ - Uri: "dummy/uri-1", - SizeBytes: 1000, - InferredType: inferredType, - }, - }, - }, - { - Value: &core.Literal_OffloadedMetadata{ - OffloadedMetadata: &core.LiteralOffloadedMetadata{ - Uri: "dummy/uri-2", - SizeBytes: 1000, - InferredType: inferredType, - }, - }, - }, - }, - }, - }, - } - expectedLt := inferredType - assert.True(t, IsInstance(literals, expectedLt)) - }) - t.Run("nested map of offloaded map of string types", func(t *testing.T) { - inferredType := &core.LiteralType{ - Type: &core.LiteralType_MapValueType{ - MapValueType: &core.LiteralType{ - Type: &core.LiteralType_Simple{ - Simple: core.SimpleType_STRING, - }, - }, - }, - } - literals := &core.Literal{ - Value: &core.Literal_Map{ - Map: &core.LiteralMap{ - Literals: map[string]*core.Literal{ - - "key1": { - Value: &core.Literal_OffloadedMetadata{ - OffloadedMetadata: &core.LiteralOffloadedMetadata{ - Uri: "dummy/uri-1", - SizeBytes: 1000, - 
InferredType: inferredType, - }, - }, - }, - "key2": { - Value: &core.Literal_OffloadedMetadata{ - OffloadedMetadata: &core.LiteralOffloadedMetadata{ - Uri: "dummy/uri-2", - SizeBytes: 1000, - InferredType: inferredType, - }, - }, - }, - }, - }, - }, - } - - expectedLt := inferredType - assert.True(t, IsInstance(literals, expectedLt)) - }) } func TestJoinVariableMapsUniqueKeys(t *testing.T) { diff --git a/flytepropeller/pkg/compiler/validators/vars.go b/flytepropeller/pkg/compiler/validators/vars.go index e114dc4fc0..445dd258bd 100644 --- a/flytepropeller/pkg/compiler/validators/vars.go +++ b/flytepropeller/pkg/compiler/validators/vars.go @@ -48,12 +48,12 @@ func validateVarType(nodeID c.NodeID, paramName string, param *flyte.Variable, // Validate parameters have their required attributes set func validateVariables(nodeID c.NodeID, params *flyte.VariableMap, errs errors.CompileErrors) { - for paramName, param := range params.Variables { + for paramName, param := range params.GetVariables() { if len(paramName) == 0 { errs.Collect(errors.NewValueRequiredErr(nodeID, "paramName")) } - if param.Type == nil { + if param.GetType() == nil { errs.Collect(errors.NewValueRequiredErr(nodeID, "param.Type")) } } diff --git a/flytepropeller/pkg/compiler/workflow_compiler.go b/flytepropeller/pkg/compiler/workflow_compiler.go index 89e82ebd16..2cd5e9a65d 100644 --- a/flytepropeller/pkg/compiler/workflow_compiler.go +++ b/flytepropeller/pkg/compiler/workflow_compiler.go @@ -46,7 +46,7 @@ import ( // Updates workflows and tasks references to reflect the needed ones for this workflow (ignoring subworkflows) func (w *workflowBuilder) updateRequiredReferences() { - reqs := getRequirements(w.CoreWorkflow.Template, w.allSubWorkflows, false, errors.NewCompileErrors()) + reqs := getRequirements(w.GetCoreWorkflow().GetTemplate(), w.allSubWorkflows, false, errors.NewCompileErrors()) workflows := map[c.WorkflowIDKey]c.InterfaceProvider{} tasks := c.TaskIndex{} for _, workflowID := range 
reqs.launchPlanIds { @@ -167,8 +167,8 @@ func (w workflowBuilder) AddEdges(n c.NodeBuilder, edgeDirection c.EdgeDirection // Contains the main validation logic for the coreWorkflow. If successful, it'll build an executable Workflow. func (w workflowBuilder) ValidateWorkflow(fg *flyteWorkflow, errs errors.CompileErrors) (c.Workflow, bool) { - if len(fg.Template.Nodes) == 0 { - errs.Collect(errors.NewNoNodesFoundErr(fg.Template.Id.String())) + if len(fg.GetTemplate().GetNodes()) == 0 { + errs.Collect(errors.NewNoNodesFoundErr(fg.GetTemplate().GetId().String())) return nil, !errs.HasErrors() } @@ -183,25 +183,25 @@ func (w workflowBuilder) ValidateWorkflow(fg *flyteWorkflow, errs errors.Compile } var ok bool - if wf.CoreWorkflow.Template.Interface, ok = v.ValidateInterface(c.StartNodeID, wf.CoreWorkflow.Template.Interface, errs.NewScope()); !ok { + if wf.CoreWorkflow.Template.Interface, ok = v.ValidateInterface(c.StartNodeID, wf.GetCoreWorkflow().GetTemplate().GetInterface(), errs.NewScope()); !ok { return nil, !errs.HasErrors() } - checkpoint := make([]*core.Node, 0, len(fg.Template.Nodes)) - checkpoint = append(checkpoint, fg.Template.Nodes...) - fg.Template.Nodes = make([]*core.Node, 0, len(fg.Template.Nodes)) + checkpoint := make([]*core.Node, 0, len(fg.GetTemplate().GetNodes())) + checkpoint = append(checkpoint, fg.GetTemplate().GetNodes()...) 
+ fg.Template.Nodes = make([]*core.Node, 0, len(fg.GetTemplate().GetNodes())) wf.GetCoreWorkflow().Connections = &core.ConnectionSet{ Downstream: make(map[string]*core.ConnectionSet_IdList), Upstream: make(map[string]*core.ConnectionSet_IdList), } globalInputNode, _ := wf.AddNode(wf.GetOrCreateNodeBuilder(startNode), errs) - globalInputNode.SetInterface(&core.TypedInterface{Outputs: wf.CoreWorkflow.Template.Interface.Inputs}) + globalInputNode.SetInterface(&core.TypedInterface{Outputs: wf.GetCoreWorkflow().GetTemplate().GetInterface().GetInputs()}) endNode := &core.Node{Id: c.EndNodeID} globalOutputNode, _ := wf.AddNode(wf.GetOrCreateNodeBuilder(endNode), errs) - globalOutputNode.SetInterface(&core.TypedInterface{Inputs: wf.CoreWorkflow.Template.Interface.Outputs}) - globalOutputNode.SetInputs(wf.CoreWorkflow.Template.Outputs) + globalOutputNode.SetInterface(&core.TypedInterface{Inputs: wf.GetCoreWorkflow().GetTemplate().GetInterface().GetOutputs()}) + globalOutputNode.SetInputs(wf.GetCoreWorkflow().GetTemplate().GetOutputs()) // Track top level nodes (a branch in a branch node is NOT a top level node). The final graph should ensure that all // top level nodes are executed before the end node. 
We do that by adding execution edges from leaf nodes that do not @@ -210,7 +210,7 @@ func (w workflowBuilder) ValidateWorkflow(fg *flyteWorkflow, errs errors.Compile // Add and validate all other nodes for _, n := range checkpoint { - topLevelNodes.Insert(n.Id) + topLevelNodes.Insert(n.GetId()) if node, addOk := wf.AddNode(wf.GetOrCreateNodeBuilder(n), errs.NewScope()); addOk { v.ValidateNode(&wf, node, false /* validateConditionTypes */, errs.NewScope()) } @@ -225,8 +225,8 @@ func (w workflowBuilder) ValidateWorkflow(fg *flyteWorkflow, errs errors.Compile wf.AddEdges(n, c.EdgeDirectionBidirectional, errs.NewScope()) } - if fg.Template.FailureNode != nil { - failureNode := fg.Template.FailureNode + if fg.GetTemplate().GetFailureNode() != nil { + failureNode := fg.GetTemplate().GetFailureNode() v.ValidateNode(&wf, wf.GetOrCreateNodeBuilder(failureNode), false, errs.NewScope()) wf.AddEdges(wf.GetOrCreateNodeBuilder(failureNode), c.EdgeDirectionUpstream, errs.NewScope()) } @@ -272,7 +272,7 @@ func (w workflowBuilder) ValidateWorkflow(fg *flyteWorkflow, errs errors.Compile // Validates that all requirements for the coreWorkflow and its subworkflows are present. 
func (w workflowBuilder) validateAllRequirements(errs errors.CompileErrors) bool { - reqs := getRequirements(w.CoreWorkflow.Template, w.allSubWorkflows, true, errs) + reqs := getRequirements(w.GetCoreWorkflow().GetTemplate(), w.allSubWorkflows, true, errs) for _, lp := range reqs.launchPlanIds { if _, ok := w.allLaunchPlans[lp.String()]; !ok { @@ -314,17 +314,17 @@ func CompileWorkflow(primaryWf *core.WorkflowTemplate, subworkflows []*core.Work uniqueTasks := sets.NewString() taskBuilders := make([]c.Task, 0, len(tasks)) for _, task := range tasks { - if task.Template == nil || task.Template.Id == nil { + if task.GetTemplate() == nil || task.GetTemplate().GetId() == nil { errs.Collect(errors.NewValueRequiredErr("task", "Template.Id")) return nil, errs } - if uniqueTasks.Has(task.Template.Id.String()) { + if uniqueTasks.Has(task.GetTemplate().GetId().String()) { continue } - taskBuilders = append(taskBuilders, &taskBuilder{flyteTask: task.Template}) - uniqueTasks.Insert(task.Template.Id.String()) + taskBuilders = append(taskBuilders, &taskBuilder{flyteTask: task.GetTemplate()}) + uniqueTasks.Insert(task.GetTemplate().GetId().String()) } // Validate overall requirements of the coreWorkflow. diff --git a/flytepropeller/pkg/compiler/workflow_compiler_test.go b/flytepropeller/pkg/compiler/workflow_compiler_test.go index 84d55aa342..8c9cefdc25 100644 --- a/flytepropeller/pkg/compiler/workflow_compiler_test.go +++ b/flytepropeller/pkg/compiler/workflow_compiler_test.go @@ -36,7 +36,7 @@ func dumpIdentifierNames(ids []common.Identifier) []string { res := make([]string, 0, len(ids)) for _, id := range ids { - res = append(res, id.Name) + res = append(res, id.GetName()) } return res @@ -98,7 +98,7 @@ func ExampleCompileWorkflow_basic() { for _, task := range tasks { compiledTask, err := CompileTask(task) if err != nil { - fmt.Printf("failed to compile task [%v]. Error: %v", task.Id, err) + fmt.Printf("failed to compile task [%v]. 
Error: %v", task.GetId(), err) return } @@ -106,7 +106,7 @@ func ExampleCompileWorkflow_basic() { } output, errs := CompileWorkflow(inputWorkflow, subWorkflows, compiledTasks, workflows) - fmt.Printf("Compiled Workflow in GraphViz: %v\n", visualize.ToGraphViz(output.Primary)) + fmt.Printf("Compiled Workflow in GraphViz: %v\n", visualize.ToGraphViz(output.GetPrimary())) fmt.Printf("Compile Errors: %v\n", errs) // Output: @@ -195,8 +195,8 @@ func TestCompileWorkflowWithFailureNode(t *testing.T) { } output, errs := CompileWorkflow(inputWorkflow, subWorkflows, compiledTasks, workflows) - assert.Equal(t, output.Primary.Template.FailureNode.Id, "FailureNode") - assert.NotNil(t, output.Primary.Template.FailureNode.GetTaskNode()) + assert.Equal(t, output.GetPrimary().GetTemplate().GetFailureNode().GetId(), "FailureNode") + assert.NotNil(t, output.GetPrimary().GetTemplate().GetFailureNode().GetTaskNode()) assert.Nil(t, errs) } @@ -287,7 +287,7 @@ func ExampleCompileWorkflow_inputsOutputsBinding() { for _, task := range inputTasks { compiledTask, err := CompileTask(task) if err != nil { - fmt.Printf("Failed to compile task [%v]. Error: %v", task.Id, err) + fmt.Printf("Failed to compile task [%v]. 
Error: %v", task.GetId(), err) return } @@ -298,7 +298,7 @@ func ExampleCompileWorkflow_inputsOutputsBinding() { if errs != nil { fmt.Printf("Compile Errors: %v\n", errs) } else { - fmt.Printf("Compiled Workflow in GraphViz: %v\n", visualize.ToGraphViz(output.Primary)) + fmt.Printf("Compiled Workflow in GraphViz: %v\n", visualize.ToGraphViz(output.GetPrimary())) } // Output: @@ -575,7 +575,7 @@ func TestValidateUnderlyingInterface(parentT *testing.T) { parentT.Run("TaskNode", func(t *testing.T) { errs := errors.NewCompileErrors() - iface, ifaceOk := v.ValidateUnderlyingInterface(&g, &nodeBuilder{flyteNode: inputWorkflow.Nodes[0]}, errs) + iface, ifaceOk := v.ValidateUnderlyingInterface(&g, &nodeBuilder{flyteNode: inputWorkflow.GetNodes()[0]}, errs) assert.True(t, ifaceOk) assert.False(t, errs.HasErrors()) assert.Equal(t, taskIface, iface) @@ -587,7 +587,7 @@ func TestValidateUnderlyingInterface(parentT *testing.T) { Target: &core.Node_WorkflowNode{ WorkflowNode: &core.WorkflowNode{ Reference: &core.WorkflowNode_SubWorkflowRef{ - SubWorkflowRef: inputWorkflow.Id, + SubWorkflowRef: inputWorkflow.GetId(), }, }, }, @@ -605,7 +605,7 @@ func TestValidateUnderlyingInterface(parentT *testing.T) { BranchNode: &core.BranchNode{ IfElse: &core.IfElseBlock{ Case: &core.IfBlock{ - ThenNode: inputWorkflow.Nodes[0], + ThenNode: inputWorkflow.GetNodes()[0], }, }, }, @@ -613,7 +613,7 @@ func TestValidateUnderlyingInterface(parentT *testing.T) { }}, errs) assert.True(t, ifaceOk) assert.False(t, errs.HasErrors()) - assert.Equal(t, taskIface.Outputs, iface.Outputs) + assert.Equal(t, taskIface.GetOutputs(), iface.GetOutputs()) }) branchT.Run("TwoCases", func(t *testing.T) { @@ -623,7 +623,7 @@ func TestValidateUnderlyingInterface(parentT *testing.T) { BranchNode: &core.BranchNode{ IfElse: &core.IfElseBlock{ Case: &core.IfBlock{ - ThenNode: inputWorkflow.Nodes[0], + ThenNode: inputWorkflow.GetNodes()[0], }, Other: []*core.IfBlock{ { @@ -631,7 +631,7 @@ func 
TestValidateUnderlyingInterface(parentT *testing.T) { Target: &core.Node_WorkflowNode{ WorkflowNode: &core.WorkflowNode{ Reference: &core.WorkflowNode_SubWorkflowRef{ - SubWorkflowRef: inputWorkflow.Id, + SubWorkflowRef: inputWorkflow.GetId(), }, }, }, @@ -720,9 +720,9 @@ func TestCompileWorkflow(t *testing.T) { assert.NoError(t, errs) assert.NotNil(t, output) if output != nil { - t.Logf("Graph Repr: %v", visualize.ToGraphViz(output.Primary)) + t.Logf("Graph Repr: %v", visualize.ToGraphViz(output.GetPrimary())) - assert.Equal(t, []string{"node_123"}, output.Primary.Connections.Upstream["node_456"].Ids) + assert.Equal(t, []string{"node_123"}, output.GetPrimary().GetConnections().GetUpstream()["node_456"].GetIds()) } } diff --git a/flytepropeller/pkg/controller/config/config.go b/flytepropeller/pkg/controller/config/config.go index 4801b8993a..e896536a13 100644 --- a/flytepropeller/pkg/controller/config/config.go +++ b/flytepropeller/pkg/controller/config/config.go @@ -128,7 +128,7 @@ var ( LiteralOffloadingConfig: LiteralOffloadingConfig{ Enabled: false, // Default keep this disabled and we will followup when flytekit is released with the offloaded changes. SupportedSDKVersions: map[string]string{ // The key is the SDK name (matches the supported SDK in core.RuntimeMetadata_RuntimeType) and the value is the minimum supported version - "FLYTE_SDK": "1.13.5", // Expected release number with flytekit support from this PR https://github.com/flyteorg/flytekit/pull/2685 + "FLYTE_SDK": "1.13.14", // Expected release number with flytekit support from this PR https://github.com/flyteorg/flytekit/pull/2685 }, MinSizeInMBForOffloading: 10, // 10 MB is the default size for offloading MaxSizeInMBForOffloading: 1000, // 1 GB is the default size before failing fast. 
diff --git a/flytepropeller/pkg/controller/handler.go b/flytepropeller/pkg/controller/handler.go index 49c2c21549..3e9f7526fc 100644 --- a/flytepropeller/pkg/controller/handler.go +++ b/flytepropeller/pkg/controller/handler.go @@ -102,7 +102,7 @@ func (p *Propeller) TryMutateWorkflow(ctx context.Context, originalW *v1alpha1.F } ctx = contextutils.WithResourceVersion(ctx, mutableW.GetResourceVersion()) - maxRetries := uint32(p.cfg.MaxWorkflowRetries) + maxRetries := uint32(p.cfg.MaxWorkflowRetries) // #nosec G115 if IsDeleted(mutableW) || (mutableW.Status.FailedAttempts > maxRetries) { var err error func() { @@ -267,7 +267,7 @@ func (p *Propeller) parseWorkflowClosureCrdFields(ctx context.Context, dataRefer return nil, err } - wfClosureCrdFields, err := k8s.BuildWfClosureCrdFields(wfClosure.CompiledWorkflow) + wfClosureCrdFields, err := k8s.BuildWfClosureCrdFields(wfClosure.GetCompiledWorkflow()) if err != nil { logger.Errorf(ctx, "Failed to parse workflow closure data from '%s' with error '%s'", dataReference, err) return nil, err diff --git a/flytepropeller/pkg/controller/interfaces/rate_limiter.go b/flytepropeller/pkg/controller/interfaces/rate_limiter.go new file mode 100644 index 0000000000..576d9736c4 --- /dev/null +++ b/flytepropeller/pkg/controller/interfaces/rate_limiter.go @@ -0,0 +1,36 @@ +package interfaces + +import ( + "context" + "time" + + "golang.org/x/time/rate" +) + +//go:generate mockery-v2 --name Limiter --output ../mocks --case=snake --with-expecter +//go:generate mockery-v2 --name Reservation --output ../mocks --case=snake --with-expecter + +type Limiter interface { + Allow() bool + AllowN(t time.Time, n int) bool + Burst() int + Limit() rate.Limit + Reserve() Reservation + ReserveN(t time.Time, n int) Reservation + SetBurst(newBurst int) + SetBurstAt(t time.Time, newBurst int) + SetLimit(newLimit rate.Limit) + SetLimitAt(t time.Time, newLimit rate.Limit) + Tokens() float64 + TokensAt(t time.Time) float64 + Wait(ctx context.Context) (err 
error) + WaitN(ctx context.Context, n int) (err error) +} + +type Reservation interface { + Cancel() + CancelAt(t time.Time) + Delay() time.Duration + DelayFrom(t time.Time) time.Duration + OK() bool +} diff --git a/flytepropeller/pkg/controller/mocks/limiter.go b/flytepropeller/pkg/controller/mocks/limiter.go new file mode 100644 index 0000000000..709cdd4d65 --- /dev/null +++ b/flytepropeller/pkg/controller/mocks/limiter.go @@ -0,0 +1,637 @@ +// Code generated by mockery v2.40.3. DO NOT EDIT. + +package mocks + +import ( + context "context" + + interfaces "github.com/flyteorg/flyte/flytepropeller/pkg/controller/interfaces" + mock "github.com/stretchr/testify/mock" + + rate "golang.org/x/time/rate" + + time "time" +) + +// Limiter is an autogenerated mock type for the Limiter type +type Limiter struct { + mock.Mock +} + +type Limiter_Expecter struct { + mock *mock.Mock +} + +func (_m *Limiter) EXPECT() *Limiter_Expecter { + return &Limiter_Expecter{mock: &_m.Mock} +} + +// Allow provides a mock function with given fields: +func (_m *Limiter) Allow() bool { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for Allow") + } + + var r0 bool + if rf, ok := ret.Get(0).(func() bool); ok { + r0 = rf() + } else { + r0 = ret.Get(0).(bool) + } + + return r0 +} + +// Limiter_Allow_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Allow' +type Limiter_Allow_Call struct { + *mock.Call +} + +// Allow is a helper method to define mock.On call +func (_e *Limiter_Expecter) Allow() *Limiter_Allow_Call { + return &Limiter_Allow_Call{Call: _e.mock.On("Allow")} +} + +func (_c *Limiter_Allow_Call) Run(run func()) *Limiter_Allow_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *Limiter_Allow_Call) Return(_a0 bool) *Limiter_Allow_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *Limiter_Allow_Call) RunAndReturn(run func() bool) *Limiter_Allow_Call { + _c.Call.Return(run) + 
return _c +} + +// AllowN provides a mock function with given fields: t, n +func (_m *Limiter) AllowN(t time.Time, n int) bool { + ret := _m.Called(t, n) + + if len(ret) == 0 { + panic("no return value specified for AllowN") + } + + var r0 bool + if rf, ok := ret.Get(0).(func(time.Time, int) bool); ok { + r0 = rf(t, n) + } else { + r0 = ret.Get(0).(bool) + } + + return r0 +} + +// Limiter_AllowN_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'AllowN' +type Limiter_AllowN_Call struct { + *mock.Call +} + +// AllowN is a helper method to define mock.On call +// - t time.Time +// - n int +func (_e *Limiter_Expecter) AllowN(t interface{}, n interface{}) *Limiter_AllowN_Call { + return &Limiter_AllowN_Call{Call: _e.mock.On("AllowN", t, n)} +} + +func (_c *Limiter_AllowN_Call) Run(run func(t time.Time, n int)) *Limiter_AllowN_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(time.Time), args[1].(int)) + }) + return _c +} + +func (_c *Limiter_AllowN_Call) Return(_a0 bool) *Limiter_AllowN_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *Limiter_AllowN_Call) RunAndReturn(run func(time.Time, int) bool) *Limiter_AllowN_Call { + _c.Call.Return(run) + return _c +} + +// Burst provides a mock function with given fields: +func (_m *Limiter) Burst() int { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for Burst") + } + + var r0 int + if rf, ok := ret.Get(0).(func() int); ok { + r0 = rf() + } else { + r0 = ret.Get(0).(int) + } + + return r0 +} + +// Limiter_Burst_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Burst' +type Limiter_Burst_Call struct { + *mock.Call +} + +// Burst is a helper method to define mock.On call +func (_e *Limiter_Expecter) Burst() *Limiter_Burst_Call { + return &Limiter_Burst_Call{Call: _e.mock.On("Burst")} +} + +func (_c *Limiter_Burst_Call) Run(run func()) *Limiter_Burst_Call { + _c.Call.Run(func(args mock.Arguments) 
{ + run() + }) + return _c +} + +func (_c *Limiter_Burst_Call) Return(_a0 int) *Limiter_Burst_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *Limiter_Burst_Call) RunAndReturn(run func() int) *Limiter_Burst_Call { + _c.Call.Return(run) + return _c +} + +// Limit provides a mock function with given fields: +func (_m *Limiter) Limit() rate.Limit { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for Limit") + } + + var r0 rate.Limit + if rf, ok := ret.Get(0).(func() rate.Limit); ok { + r0 = rf() + } else { + r0 = ret.Get(0).(rate.Limit) + } + + return r0 +} + +// Limiter_Limit_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Limit' +type Limiter_Limit_Call struct { + *mock.Call +} + +// Limit is a helper method to define mock.On call +func (_e *Limiter_Expecter) Limit() *Limiter_Limit_Call { + return &Limiter_Limit_Call{Call: _e.mock.On("Limit")} +} + +func (_c *Limiter_Limit_Call) Run(run func()) *Limiter_Limit_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *Limiter_Limit_Call) Return(_a0 rate.Limit) *Limiter_Limit_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *Limiter_Limit_Call) RunAndReturn(run func() rate.Limit) *Limiter_Limit_Call { + _c.Call.Return(run) + return _c +} + +// Reserve provides a mock function with given fields: +func (_m *Limiter) Reserve() interfaces.Reservation { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for Reserve") + } + + var r0 interfaces.Reservation + if rf, ok := ret.Get(0).(func() interfaces.Reservation); ok { + r0 = rf() + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(interfaces.Reservation) + } + } + + return r0 +} + +// Limiter_Reserve_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Reserve' +type Limiter_Reserve_Call struct { + *mock.Call +} + +// Reserve is a helper method to define mock.On call +func (_e 
*Limiter_Expecter) Reserve() *Limiter_Reserve_Call { + return &Limiter_Reserve_Call{Call: _e.mock.On("Reserve")} +} + +func (_c *Limiter_Reserve_Call) Run(run func()) *Limiter_Reserve_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *Limiter_Reserve_Call) Return(_a0 interfaces.Reservation) *Limiter_Reserve_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *Limiter_Reserve_Call) RunAndReturn(run func() interfaces.Reservation) *Limiter_Reserve_Call { + _c.Call.Return(run) + return _c +} + +// ReserveN provides a mock function with given fields: t, n +func (_m *Limiter) ReserveN(t time.Time, n int) interfaces.Reservation { + ret := _m.Called(t, n) + + if len(ret) == 0 { + panic("no return value specified for ReserveN") + } + + var r0 interfaces.Reservation + if rf, ok := ret.Get(0).(func(time.Time, int) interfaces.Reservation); ok { + r0 = rf(t, n) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(interfaces.Reservation) + } + } + + return r0 +} + +// Limiter_ReserveN_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ReserveN' +type Limiter_ReserveN_Call struct { + *mock.Call +} + +// ReserveN is a helper method to define mock.On call +// - t time.Time +// - n int +func (_e *Limiter_Expecter) ReserveN(t interface{}, n interface{}) *Limiter_ReserveN_Call { + return &Limiter_ReserveN_Call{Call: _e.mock.On("ReserveN", t, n)} +} + +func (_c *Limiter_ReserveN_Call) Run(run func(t time.Time, n int)) *Limiter_ReserveN_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(time.Time), args[1].(int)) + }) + return _c +} + +func (_c *Limiter_ReserveN_Call) Return(_a0 interfaces.Reservation) *Limiter_ReserveN_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *Limiter_ReserveN_Call) RunAndReturn(run func(time.Time, int) interfaces.Reservation) *Limiter_ReserveN_Call { + _c.Call.Return(run) + return _c +} + +// SetBurst provides a mock function with given fields: newBurst 
+func (_m *Limiter) SetBurst(newBurst int) { + _m.Called(newBurst) +} + +// Limiter_SetBurst_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'SetBurst' +type Limiter_SetBurst_Call struct { + *mock.Call +} + +// SetBurst is a helper method to define mock.On call +// - newBurst int +func (_e *Limiter_Expecter) SetBurst(newBurst interface{}) *Limiter_SetBurst_Call { + return &Limiter_SetBurst_Call{Call: _e.mock.On("SetBurst", newBurst)} +} + +func (_c *Limiter_SetBurst_Call) Run(run func(newBurst int)) *Limiter_SetBurst_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(int)) + }) + return _c +} + +func (_c *Limiter_SetBurst_Call) Return() *Limiter_SetBurst_Call { + _c.Call.Return() + return _c +} + +func (_c *Limiter_SetBurst_Call) RunAndReturn(run func(int)) *Limiter_SetBurst_Call { + _c.Call.Return(run) + return _c +} + +// SetBurstAt provides a mock function with given fields: t, newBurst +func (_m *Limiter) SetBurstAt(t time.Time, newBurst int) { + _m.Called(t, newBurst) +} + +// Limiter_SetBurstAt_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'SetBurstAt' +type Limiter_SetBurstAt_Call struct { + *mock.Call +} + +// SetBurstAt is a helper method to define mock.On call +// - t time.Time +// - newBurst int +func (_e *Limiter_Expecter) SetBurstAt(t interface{}, newBurst interface{}) *Limiter_SetBurstAt_Call { + return &Limiter_SetBurstAt_Call{Call: _e.mock.On("SetBurstAt", t, newBurst)} +} + +func (_c *Limiter_SetBurstAt_Call) Run(run func(t time.Time, newBurst int)) *Limiter_SetBurstAt_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(time.Time), args[1].(int)) + }) + return _c +} + +func (_c *Limiter_SetBurstAt_Call) Return() *Limiter_SetBurstAt_Call { + _c.Call.Return() + return _c +} + +func (_c *Limiter_SetBurstAt_Call) RunAndReturn(run func(time.Time, int)) *Limiter_SetBurstAt_Call { + _c.Call.Return(run) + return _c +} + +// SetLimit provides a 
mock function with given fields: newLimit +func (_m *Limiter) SetLimit(newLimit rate.Limit) { + _m.Called(newLimit) +} + +// Limiter_SetLimit_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'SetLimit' +type Limiter_SetLimit_Call struct { + *mock.Call +} + +// SetLimit is a helper method to define mock.On call +// - newLimit rate.Limit +func (_e *Limiter_Expecter) SetLimit(newLimit interface{}) *Limiter_SetLimit_Call { + return &Limiter_SetLimit_Call{Call: _e.mock.On("SetLimit", newLimit)} +} + +func (_c *Limiter_SetLimit_Call) Run(run func(newLimit rate.Limit)) *Limiter_SetLimit_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(rate.Limit)) + }) + return _c +} + +func (_c *Limiter_SetLimit_Call) Return() *Limiter_SetLimit_Call { + _c.Call.Return() + return _c +} + +func (_c *Limiter_SetLimit_Call) RunAndReturn(run func(rate.Limit)) *Limiter_SetLimit_Call { + _c.Call.Return(run) + return _c +} + +// SetLimitAt provides a mock function with given fields: t, newLimit +func (_m *Limiter) SetLimitAt(t time.Time, newLimit rate.Limit) { + _m.Called(t, newLimit) +} + +// Limiter_SetLimitAt_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'SetLimitAt' +type Limiter_SetLimitAt_Call struct { + *mock.Call +} + +// SetLimitAt is a helper method to define mock.On call +// - t time.Time +// - newLimit rate.Limit +func (_e *Limiter_Expecter) SetLimitAt(t interface{}, newLimit interface{}) *Limiter_SetLimitAt_Call { + return &Limiter_SetLimitAt_Call{Call: _e.mock.On("SetLimitAt", t, newLimit)} +} + +func (_c *Limiter_SetLimitAt_Call) Run(run func(t time.Time, newLimit rate.Limit)) *Limiter_SetLimitAt_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(time.Time), args[1].(rate.Limit)) + }) + return _c +} + +func (_c *Limiter_SetLimitAt_Call) Return() *Limiter_SetLimitAt_Call { + _c.Call.Return() + return _c +} + +func (_c *Limiter_SetLimitAt_Call) RunAndReturn(run 
func(time.Time, rate.Limit)) *Limiter_SetLimitAt_Call { + _c.Call.Return(run) + return _c +} + +// Tokens provides a mock function with given fields: +func (_m *Limiter) Tokens() float64 { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for Tokens") + } + + var r0 float64 + if rf, ok := ret.Get(0).(func() float64); ok { + r0 = rf() + } else { + r0 = ret.Get(0).(float64) + } + + return r0 +} + +// Limiter_Tokens_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Tokens' +type Limiter_Tokens_Call struct { + *mock.Call +} + +// Tokens is a helper method to define mock.On call +func (_e *Limiter_Expecter) Tokens() *Limiter_Tokens_Call { + return &Limiter_Tokens_Call{Call: _e.mock.On("Tokens")} +} + +func (_c *Limiter_Tokens_Call) Run(run func()) *Limiter_Tokens_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *Limiter_Tokens_Call) Return(_a0 float64) *Limiter_Tokens_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *Limiter_Tokens_Call) RunAndReturn(run func() float64) *Limiter_Tokens_Call { + _c.Call.Return(run) + return _c +} + +// TokensAt provides a mock function with given fields: t +func (_m *Limiter) TokensAt(t time.Time) float64 { + ret := _m.Called(t) + + if len(ret) == 0 { + panic("no return value specified for TokensAt") + } + + var r0 float64 + if rf, ok := ret.Get(0).(func(time.Time) float64); ok { + r0 = rf(t) + } else { + r0 = ret.Get(0).(float64) + } + + return r0 +} + +// Limiter_TokensAt_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'TokensAt' +type Limiter_TokensAt_Call struct { + *mock.Call +} + +// TokensAt is a helper method to define mock.On call +// - t time.Time +func (_e *Limiter_Expecter) TokensAt(t interface{}) *Limiter_TokensAt_Call { + return &Limiter_TokensAt_Call{Call: _e.mock.On("TokensAt", t)} +} + +func (_c *Limiter_TokensAt_Call) Run(run func(t time.Time)) 
*Limiter_TokensAt_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(time.Time)) + }) + return _c +} + +func (_c *Limiter_TokensAt_Call) Return(_a0 float64) *Limiter_TokensAt_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *Limiter_TokensAt_Call) RunAndReturn(run func(time.Time) float64) *Limiter_TokensAt_Call { + _c.Call.Return(run) + return _c +} + +// Wait provides a mock function with given fields: ctx +func (_m *Limiter) Wait(ctx context.Context) error { + ret := _m.Called(ctx) + + if len(ret) == 0 { + panic("no return value specified for Wait") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context) error); ok { + r0 = rf(ctx) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// Limiter_Wait_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Wait' +type Limiter_Wait_Call struct { + *mock.Call +} + +// Wait is a helper method to define mock.On call +// - ctx context.Context +func (_e *Limiter_Expecter) Wait(ctx interface{}) *Limiter_Wait_Call { + return &Limiter_Wait_Call{Call: _e.mock.On("Wait", ctx)} +} + +func (_c *Limiter_Wait_Call) Run(run func(ctx context.Context)) *Limiter_Wait_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context)) + }) + return _c +} + +func (_c *Limiter_Wait_Call) Return(err error) *Limiter_Wait_Call { + _c.Call.Return(err) + return _c +} + +func (_c *Limiter_Wait_Call) RunAndReturn(run func(context.Context) error) *Limiter_Wait_Call { + _c.Call.Return(run) + return _c +} + +// WaitN provides a mock function with given fields: ctx, n +func (_m *Limiter) WaitN(ctx context.Context, n int) error { + ret := _m.Called(ctx, n) + + if len(ret) == 0 { + panic("no return value specified for WaitN") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, int) error); ok { + r0 = rf(ctx, n) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// Limiter_WaitN_Call is a *mock.Call that shadows Run/Return methods with type 
explicit version for method 'WaitN' +type Limiter_WaitN_Call struct { + *mock.Call +} + +// WaitN is a helper method to define mock.On call +// - ctx context.Context +// - n int +func (_e *Limiter_Expecter) WaitN(ctx interface{}, n interface{}) *Limiter_WaitN_Call { + return &Limiter_WaitN_Call{Call: _e.mock.On("WaitN", ctx, n)} +} + +func (_c *Limiter_WaitN_Call) Run(run func(ctx context.Context, n int)) *Limiter_WaitN_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(int)) + }) + return _c +} + +func (_c *Limiter_WaitN_Call) Return(err error) *Limiter_WaitN_Call { + _c.Call.Return(err) + return _c +} + +func (_c *Limiter_WaitN_Call) RunAndReturn(run func(context.Context, int) error) *Limiter_WaitN_Call { + _c.Call.Return(run) + return _c +} + +// NewLimiter creates a new instance of Limiter. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. +func NewLimiter(t interface { + mock.TestingT + Cleanup(func()) +}) *Limiter { + mock := &Limiter{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/flytepropeller/pkg/controller/mocks/reservation.go b/flytepropeller/pkg/controller/mocks/reservation.go new file mode 100644 index 0000000000..d609c0b034 --- /dev/null +++ b/flytepropeller/pkg/controller/mocks/reservation.go @@ -0,0 +1,237 @@ +// Code generated by mockery v2.40.3. DO NOT EDIT. 
+ +package mocks + +import ( + time "time" + + mock "github.com/stretchr/testify/mock" +) + +// Reservation is an autogenerated mock type for the Reservation type +type Reservation struct { + mock.Mock +} + +type Reservation_Expecter struct { + mock *mock.Mock +} + +func (_m *Reservation) EXPECT() *Reservation_Expecter { + return &Reservation_Expecter{mock: &_m.Mock} +} + +// Cancel provides a mock function with given fields: +func (_m *Reservation) Cancel() { + _m.Called() +} + +// Reservation_Cancel_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Cancel' +type Reservation_Cancel_Call struct { + *mock.Call +} + +// Cancel is a helper method to define mock.On call +func (_e *Reservation_Expecter) Cancel() *Reservation_Cancel_Call { + return &Reservation_Cancel_Call{Call: _e.mock.On("Cancel")} +} + +func (_c *Reservation_Cancel_Call) Run(run func()) *Reservation_Cancel_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *Reservation_Cancel_Call) Return() *Reservation_Cancel_Call { + _c.Call.Return() + return _c +} + +func (_c *Reservation_Cancel_Call) RunAndReturn(run func()) *Reservation_Cancel_Call { + _c.Call.Return(run) + return _c +} + +// CancelAt provides a mock function with given fields: t +func (_m *Reservation) CancelAt(t time.Time) { + _m.Called(t) +} + +// Reservation_CancelAt_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CancelAt' +type Reservation_CancelAt_Call struct { + *mock.Call +} + +// CancelAt is a helper method to define mock.On call +// - t time.Time +func (_e *Reservation_Expecter) CancelAt(t interface{}) *Reservation_CancelAt_Call { + return &Reservation_CancelAt_Call{Call: _e.mock.On("CancelAt", t)} +} + +func (_c *Reservation_CancelAt_Call) Run(run func(t time.Time)) *Reservation_CancelAt_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(time.Time)) + }) + return _c +} + +func (_c 
*Reservation_CancelAt_Call) Return() *Reservation_CancelAt_Call { + _c.Call.Return() + return _c +} + +func (_c *Reservation_CancelAt_Call) RunAndReturn(run func(time.Time)) *Reservation_CancelAt_Call { + _c.Call.Return(run) + return _c +} + +// Delay provides a mock function with given fields: +func (_m *Reservation) Delay() time.Duration { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for Delay") + } + + var r0 time.Duration + if rf, ok := ret.Get(0).(func() time.Duration); ok { + r0 = rf() + } else { + r0 = ret.Get(0).(time.Duration) + } + + return r0 +} + +// Reservation_Delay_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Delay' +type Reservation_Delay_Call struct { + *mock.Call +} + +// Delay is a helper method to define mock.On call +func (_e *Reservation_Expecter) Delay() *Reservation_Delay_Call { + return &Reservation_Delay_Call{Call: _e.mock.On("Delay")} +} + +func (_c *Reservation_Delay_Call) Run(run func()) *Reservation_Delay_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *Reservation_Delay_Call) Return(_a0 time.Duration) *Reservation_Delay_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *Reservation_Delay_Call) RunAndReturn(run func() time.Duration) *Reservation_Delay_Call { + _c.Call.Return(run) + return _c +} + +// DelayFrom provides a mock function with given fields: t +func (_m *Reservation) DelayFrom(t time.Time) time.Duration { + ret := _m.Called(t) + + if len(ret) == 0 { + panic("no return value specified for DelayFrom") + } + + var r0 time.Duration + if rf, ok := ret.Get(0).(func(time.Time) time.Duration); ok { + r0 = rf(t) + } else { + r0 = ret.Get(0).(time.Duration) + } + + return r0 +} + +// Reservation_DelayFrom_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DelayFrom' +type Reservation_DelayFrom_Call struct { + *mock.Call +} + +// DelayFrom is a helper method to define 
mock.On call +// - t time.Time +func (_e *Reservation_Expecter) DelayFrom(t interface{}) *Reservation_DelayFrom_Call { + return &Reservation_DelayFrom_Call{Call: _e.mock.On("DelayFrom", t)} +} + +func (_c *Reservation_DelayFrom_Call) Run(run func(t time.Time)) *Reservation_DelayFrom_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(time.Time)) + }) + return _c +} + +func (_c *Reservation_DelayFrom_Call) Return(_a0 time.Duration) *Reservation_DelayFrom_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *Reservation_DelayFrom_Call) RunAndReturn(run func(time.Time) time.Duration) *Reservation_DelayFrom_Call { + _c.Call.Return(run) + return _c +} + +// OK provides a mock function with given fields: +func (_m *Reservation) OK() bool { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for OK") + } + + var r0 bool + if rf, ok := ret.Get(0).(func() bool); ok { + r0 = rf() + } else { + r0 = ret.Get(0).(bool) + } + + return r0 +} + +// Reservation_OK_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'OK' +type Reservation_OK_Call struct { + *mock.Call +} + +// OK is a helper method to define mock.On call +func (_e *Reservation_Expecter) OK() *Reservation_OK_Call { + return &Reservation_OK_Call{Call: _e.mock.On("OK")} +} + +func (_c *Reservation_OK_Call) Run(run func()) *Reservation_OK_Call { + _c.Call.Run(func(args mock.Arguments) { + run() + }) + return _c +} + +func (_c *Reservation_OK_Call) Return(_a0 bool) *Reservation_OK_Call { + _c.Call.Return(_a0) + return _c +} + +func (_c *Reservation_OK_Call) RunAndReturn(run func() bool) *Reservation_OK_Call { + _c.Call.Return(run) + return _c +} + +// NewReservation creates a new instance of Reservation. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. 
+func NewReservation(t interface { + mock.TestingT + Cleanup(func()) +}) *Reservation { + mock := &Reservation{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/flytepropeller/pkg/controller/nodes/array/event_recorder.go b/flytepropeller/pkg/controller/nodes/array/event_recorder.go index 999b383f39..d9feafe950 100644 --- a/flytepropeller/pkg/controller/nodes/array/event_recorder.go +++ b/flytepropeller/pkg/controller/nodes/array/event_recorder.go @@ -93,10 +93,10 @@ func (e *externalResourcesEventRecorder) process(ctx context.Context, nCtx inter // process events cacheStatus := idlcore.CatalogCacheStatus_CACHE_DISABLED for _, nodeExecutionEvent := range e.nodeEvents { - switch target := nodeExecutionEvent.TargetMetadata.(type) { + switch target := nodeExecutionEvent.GetTargetMetadata().(type) { case *event.NodeExecutionEvent_TaskNodeMetadata: if target.TaskNodeMetadata != nil { - cacheStatus = target.TaskNodeMetadata.CacheStatus + cacheStatus = target.TaskNodeMetadata.GetCacheStatus() } } } @@ -106,7 +106,7 @@ func (e *externalResourcesEventRecorder) process(ctx context.Context, nCtx inter if cacheStatus == idlcore.CatalogCacheStatus_CACHE_HIT && len(e.taskEvents) == 0 { e.externalResources = append(e.externalResources, &event.ExternalResourceInfo{ ExternalId: externalResourceID, - Index: uint32(index), + Index: uint32(index), // #nosec G115 RetryAttempt: retryAttempt, Phase: idlcore.TaskExecution_SUCCEEDED, CacheStatus: cacheStatus, @@ -122,7 +122,7 @@ func (e *externalResourcesEventRecorder) process(ctx context.Context, nCtx inter } for _, taskExecutionEvent := range e.taskEvents { - if mapLogPlugin != nil && len(taskExecutionEvent.Logs) > 0 { + if mapLogPlugin != nil && len(taskExecutionEvent.GetLogs()) > 0 { // override log links for subNode execution with map plugin logs, err := getPluginLogs(mapLogPlugin, nCtx, index, retryAttempt) if err != nil { @@ -132,18 +132,25 @@ func (e 
*externalResourcesEventRecorder) process(ctx context.Context, nCtx inter } } - for _, log := range taskExecutionEvent.Logs { - log.Name = fmt.Sprintf("%s-%d", log.Name, index) + for _, log := range taskExecutionEvent.GetLogs() { + log.Name = fmt.Sprintf("%s-%d", log.GetName(), index) } - e.externalResources = append(e.externalResources, &event.ExternalResourceInfo{ + externalResourceInfo := event.ExternalResourceInfo{ ExternalId: externalResourceID, - Index: uint32(index), - Logs: taskExecutionEvent.Logs, + Index: uint32(index), // #nosec G115 + Logs: taskExecutionEvent.GetLogs(), RetryAttempt: retryAttempt, - Phase: taskExecutionEvent.Phase, + Phase: taskExecutionEvent.GetPhase(), CacheStatus: cacheStatus, - }) + CustomInfo: taskExecutionEvent.GetCustomInfo(), + } + + if taskExecutionEvent.GetMetadata() != nil && len(taskExecutionEvent.GetMetadata().GetExternalResources()) == 1 { + externalResourceInfo.CustomInfo = taskExecutionEvent.GetMetadata().GetExternalResources()[0].GetCustomInfo() + } + + e.externalResources = append(e.externalResources, &externalResourceInfo) } // clear nodeEvents and taskEvents @@ -175,7 +182,7 @@ func (e *externalResourcesEventRecorder) finalize(ctx context.Context, nCtx inte nodeExecutionID := *nCtx.NodeExecutionMetadata().GetNodeExecutionID() if nCtx.ExecutionContext().GetEventVersion() != v1alpha1.EventVersion0 { - currentNodeUniqueID, err := common.GenerateUniqueID(nCtx.ExecutionContext().GetParentInfo(), nodeExecutionID.NodeId) + currentNodeUniqueID, err := common.GenerateUniqueID(nCtx.ExecutionContext().GetParentInfo(), nodeExecutionID.GetNodeId()) if err != nil { return err } @@ -315,7 +322,7 @@ func getPluginLogs(logPlugin tasklog.Plugin, nCtx interfaces.NodeExecutionContex extraLogTemplateVars := []tasklog.TemplateVar{ { Regex: mapplugin.LogTemplateRegexes.ExecutionIndex, - Value: strconv.FormatUint(uint64(index), 10), + Value: strconv.FormatUint(uint64(index), 10), // #nosec G115 }, { Regex: 
mapplugin.LogTemplateRegexes.RetryAttempt, @@ -374,12 +381,12 @@ func sendEvents(ctx context.Context, nCtx interfaces.NodeExecutionContext, index taskExecutionEvent := &event.TaskExecutionEvent{ TaskId: &idlcore.Identifier{ ResourceType: idlcore.ResourceType_TASK, - Project: workflowExecutionID.Project, - Domain: workflowExecutionID.Domain, + Project: workflowExecutionID.GetProject(), + Domain: workflowExecutionID.GetDomain(), Name: fmt.Sprintf("%s-%d", buildSubNodeID(nCtx, index), retryAttempt), Version: "v1", // this value is irrelevant but necessary for the identifier to be valid }, - ParentNodeExecutionId: nodeExecutionEvent.Id, + ParentNodeExecutionId: nodeExecutionEvent.GetId(), Phase: taskPhase, TaskType: "k8s-array", OccurredAt: timestamp, diff --git a/flytepropeller/pkg/controller/nodes/array/event_recorder_test.go b/flytepropeller/pkg/controller/nodes/array/event_recorder_test.go index 64fbff7666..5e418d3fc8 100644 --- a/flytepropeller/pkg/controller/nodes/array/event_recorder_test.go +++ b/flytepropeller/pkg/controller/nodes/array/event_recorder_test.go @@ -101,6 +101,6 @@ func TestGetPluginLogs(t *testing.T) { assert.Nil(t, err) assert.Equal(t, len(logConfig.Templates), len(logs)) - assert.Equal(t, "bar", logs[0].Name) - assert.Equal(t, "/console/projects/node_project/domains/node_domain/executions/node_name/nodeId/foo/taskId/task_name/attempt/0/mappedIndex/1/mappedAttempt/0/view/logs?duration=all", logs[0].Uri) + assert.Equal(t, "bar", logs[0].GetName()) + assert.Equal(t, "/console/projects/node_project/domains/node_domain/executions/node_name/nodeId/foo/taskId/task_name/attempt/0/mappedIndex/1/mappedAttempt/0/view/logs?duration=all", logs[0].GetUri()) } diff --git a/flytepropeller/pkg/controller/nodes/array/handler.go b/flytepropeller/pkg/controller/nodes/array/handler.go index 39bb6ab0ca..6418c35270 100644 --- a/flytepropeller/pkg/controller/nodes/array/handler.go +++ b/flytepropeller/pkg/controller/nodes/array/handler.go @@ -5,6 +5,9 @@ import ( 
"fmt" "math" "strconv" + "time" + + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" idlcore "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core" "github.com/flyteorg/flyte/flyteplugins/go/tasks/pluginmachinery/core" @@ -27,6 +30,11 @@ import ( "github.com/flyteorg/flyte/flytestdlib/storage" ) +const ( + // value is 3 days of seconds which is covered by 18 bits (262144) + MAX_DELTA_TIMESTAMP = 259200 +) + var ( nilLiteral = &idlcore.Literal{ Value: &idlcore.Literal_Scalar{ @@ -75,7 +83,7 @@ func (a *arrayNodeHandler) Abort(ctx context.Context, nCtx interfaces.NodeExecut switch arrayNodeState.Phase { case v1alpha1.ArrayNodePhaseExecuting, v1alpha1.ArrayNodePhaseFailing: for i, nodePhaseUint64 := range arrayNodeState.SubNodePhases.GetItems() { - nodePhase := v1alpha1.NodePhase(nodePhaseUint64) + nodePhase := v1alpha1.NodePhase(nodePhaseUint64) // #nosec G115 // do not process nodes that have not started or are in a terminal state if nodePhase == v1alpha1.NodePhaseNotYetStarted || isTerminalNodePhase(nodePhase) { @@ -95,7 +103,7 @@ func (a *arrayNodeHandler) Abort(ctx context.Context, nCtx interfaces.NodeExecut messageCollector.Collect(i, err.Error()) } else { // record events transitioning subNodes to aborted - retryAttempt := uint32(arrayNodeState.SubNodeRetryAttempts.GetItem(i)) + retryAttempt := uint32(arrayNodeState.SubNodeRetryAttempts.GetItem(i)) // #nosec G115 if err := sendEvents(ctx, nCtx, i, retryAttempt, idlcore.NodeExecution_ABORTED, idlcore.TaskExecution_ABORTED, eventRecorder, a.eventConfig); err != nil { logger.Warnf(ctx, "failed to record ArrayNode events: %v", err) @@ -109,7 +117,7 @@ func (a *arrayNodeHandler) Abort(ctx context.Context, nCtx interfaces.NodeExecut } if messageCollector.Length() > 0 { - return fmt.Errorf(messageCollector.Summary(events.MaxErrorMessageLength)) + return fmt.Errorf(messageCollector.Summary(events.MaxErrorMessageLength)) //nolint:govet,staticcheck } // update state for subNodes @@ -135,7 +143,7 @@ func (a 
*arrayNodeHandler) Finalize(ctx context.Context, nCtx interfaces.NodeExe switch arrayNodeState.Phase { case v1alpha1.ArrayNodePhaseExecuting, v1alpha1.ArrayNodePhaseFailing, v1alpha1.ArrayNodePhaseSucceeding: for i, nodePhaseUint64 := range arrayNodeState.SubNodePhases.GetItems() { - nodePhase := v1alpha1.NodePhase(nodePhaseUint64) + nodePhase := v1alpha1.NodePhase(nodePhaseUint64) // #nosec G115 // do not process nodes that have not started or are in a terminal state if nodePhase == v1alpha1.NodePhaseNotYetStarted || isTerminalNodePhase(nodePhase) { @@ -158,7 +166,7 @@ func (a *arrayNodeHandler) Finalize(ctx context.Context, nCtx interfaces.NodeExe } if messageCollector.Length() > 0 { - return fmt.Errorf(messageCollector.Summary(events.MaxErrorMessageLength)) + return fmt.Errorf(messageCollector.Summary(events.MaxErrorMessageLength)) //nolint:govet,staticcheck } return nil @@ -191,7 +199,7 @@ func (a *arrayNodeHandler) Handle(ctx context.Context, nCtx interfaces.NodeExecu size := -1 - for _, variable := range literalMap.Literals { + for _, variable := range literalMap.GetLiterals() { if variable.GetOffloadedMetadata() != nil { // variable will be overwritten with the contents of the offloaded data which contains the actual large literal. 
// We need this for the map task to be able to create the subNodeSpec @@ -204,7 +212,7 @@ func (a *arrayNodeHandler) Handle(ctx context.Context, nCtx interfaces.NodeExecu } switch variable.GetValue().(type) { case *idlcore.Literal_Collection: - collectionLength := len(variable.GetCollection().Literals) + collectionLength := len(variable.GetCollection().GetLiterals()) if size == -1 { size = collectionLength } else if size != collectionLength { @@ -245,9 +253,10 @@ func (a *arrayNodeHandler) Handle(ctx context.Context, nCtx interfaces.NodeExecu {arrayReference: &arrayNodeState.SubNodeTaskPhases, maxValue: len(core.Phases) - 1}, {arrayReference: &arrayNodeState.SubNodeRetryAttempts, maxValue: maxAttemptsValue}, {arrayReference: &arrayNodeState.SubNodeSystemFailures, maxValue: maxSystemFailuresValue}, + {arrayReference: &arrayNodeState.SubNodeDeltaTimestamps, maxValue: MAX_DELTA_TIMESTAMP}, } { - *item.arrayReference, err = bitarray.NewCompactArray(uint(size), bitarray.Item(item.maxValue)) + *item.arrayReference, err = bitarray.NewCompactArray(uint(size), bitarray.Item(item.maxValue)) // #nosec G115 if err != nil { return handler.UnknownTransition, err } @@ -279,8 +288,8 @@ func (a *arrayNodeHandler) Handle(ctx context.Context, nCtx interfaces.NodeExecu break } - nodePhase := v1alpha1.NodePhase(nodePhaseUint64) - taskPhase := int(arrayNodeState.SubNodeTaskPhases.GetItem(i)) + nodePhase := v1alpha1.NodePhase(nodePhaseUint64) // #nosec G115 + taskPhase := int(arrayNodeState.SubNodeTaskPhases.GetItem(i)) // #nosec G115 // do not process nodes in terminal state if isTerminalNodePhase(nodePhase) { @@ -361,16 +370,30 @@ func (a *arrayNodeHandler) Handle(ctx context.Context, nCtx interfaces.NodeExecu } // update subNode state - arrayNodeState.SubNodePhases.SetItem(index, uint64(subNodeStatus.GetPhase())) + arrayNodeState.SubNodePhases.SetItem(index, uint64(subNodeStatus.GetPhase())) // #nosec G115 if subNodeStatus.GetTaskNodeStatus() == nil { // resetting task phase because 
during retries we clear the GetTaskNodeStatus arrayNodeState.SubNodeTaskPhases.SetItem(index, uint64(0)) } else { - arrayNodeState.SubNodeTaskPhases.SetItem(index, uint64(subNodeStatus.GetTaskNodeStatus().GetPhase())) + arrayNodeState.SubNodeTaskPhases.SetItem(index, uint64(subNodeStatus.GetTaskNodeStatus().GetPhase())) // #nosec G115 } arrayNodeState.SubNodeRetryAttempts.SetItem(index, uint64(subNodeStatus.GetAttempts())) arrayNodeState.SubNodeSystemFailures.SetItem(index, uint64(subNodeStatus.GetSystemFailures())) + if arrayNodeState.SubNodeDeltaTimestamps.BitSet != nil { + startedAt := nCtx.NodeStatus().GetLastAttemptStartedAt() + subNodeStartedAt := subNodeStatus.GetLastAttemptStartedAt() + if subNodeStartedAt == nil { + // subNodeStartedAt == nil indicates either (1) node has not started or (2) node status has + // been reset (ex. retryable failure). in both cases we set the delta timestamp to 0 + arrayNodeState.SubNodeDeltaTimestamps.SetItem(index, 0) + } else if startedAt != nil && arrayNodeState.SubNodeDeltaTimestamps.GetItem(index) == 0 { + // otherwise if `SubNodeDeltaTimestamps` is unset, we compute the delta and set it + deltaDuration := uint64(subNodeStartedAt.Time.Sub(startedAt.Time).Seconds()) + arrayNodeState.SubNodeDeltaTimestamps.SetItem(index, deltaDuration) + } + } + // increment task phase version if subNode phase or task phase changed if subNodeStatus.GetPhase() != nodeExecutionRequest.nodePhase || subNodeStatus.GetTaskNodeStatus().GetPhase() != nodeExecutionRequest.taskPhase { incrementTaskPhaseVersion = true @@ -388,7 +411,7 @@ func (a *arrayNodeHandler) Handle(ctx context.Context, nCtx interfaces.NodeExecu failingCount := 0 runningCount := 0 for _, nodePhaseUint64 := range arrayNodeState.SubNodePhases.GetItems() { - nodePhase := v1alpha1.NodePhase(nodePhaseUint64) + nodePhase := v1alpha1.NodePhase(nodePhaseUint64) // #nosec G115 switch nodePhase { case v1alpha1.NodePhaseSucceeded, v1alpha1.NodePhaseRecovered, v1alpha1.NodePhaseSkipped: 
successCount++ @@ -449,7 +472,7 @@ func (a *arrayNodeHandler) Handle(ctx context.Context, nCtx interfaces.NodeExecu gatherOutputsRequests := make([]*gatherOutputsRequest, 0, len(arrayNodeState.SubNodePhases.GetItems())) outputLiteralTypes := make(map[string]*idlcore.LiteralType) for i, nodePhaseUint64 := range arrayNodeState.SubNodePhases.GetItems() { - nodePhase := v1alpha1.NodePhase(nodePhaseUint64) + nodePhase := v1alpha1.NodePhase(nodePhaseUint64) // #nosec G115 gatherOutputsRequest := &gatherOutputsRequest{ ctx: ctx, responseChannel: make(chan struct { @@ -471,13 +494,13 @@ func (a *arrayNodeHandler) Handle(ctx context.Context, nCtx interfaces.NodeExecu continue } - if task.CoreTask() != nil && task.CoreTask().Interface != nil && task.CoreTask().Interface.Outputs != nil { - for name := range task.CoreTask().Interface.Outputs.Variables { + if task.CoreTask() != nil && task.CoreTask().GetInterface() != nil && task.CoreTask().GetInterface().GetOutputs() != nil { + for name := range task.CoreTask().GetInterface().GetOutputs().GetVariables() { outputLiterals[name] = nilLiteral // Extract the literal type from the task interface outputLiteralTypes[name] = &idlcore.LiteralType{ Type: &idlcore.LiteralType_CollectionType{ - CollectionType: task.CoreTask().Interface.Outputs.Variables[name].GetType(), + CollectionType: task.CoreTask().GetInterface().GetOutputs().GetVariables()[name].GetType(), }, } } @@ -489,7 +512,7 @@ func (a *arrayNodeHandler) Handle(ctx context.Context, nCtx interfaces.NodeExecu }{outputLiterals, nil} } else { // initialize subNode reader - currentAttempt := int(arrayNodeState.SubNodeRetryAttempts.GetItem(i)) + currentAttempt := int(arrayNodeState.SubNodeRetryAttempts.GetItem(i)) // #nosec G115 subDataDir, subOutputDir, err := constructOutputReferences(ctx, nCtx, strconv.Itoa(i), strconv.Itoa(currentAttempt)) if err != nil { @@ -525,7 +548,7 @@ func (a *arrayNodeHandler) Handle(ctx context.Context, nCtx interfaces.NodeExecu } if outputs := 
taskNode.CoreTask().GetInterface().GetOutputs(); outputs != nil { - for name := range outputs.Variables { + for name := range outputs.GetVariables() { outputLiteral := &idlcore.Literal{ Value: &idlcore.Literal_Collection{ Collection: &idlcore.LiteralCollection{ @@ -722,8 +745,8 @@ func New(nodeExecutor interfaces.Node, eventConfig *config.EventConfig, literalO func (a *arrayNodeHandler) buildArrayNodeContext(ctx context.Context, nCtx interfaces.NodeExecutionContext, arrayNodeState *handler.ArrayNodeState, arrayNode v1alpha1.ExecutableArrayNode, subNodeIndex int, eventRecorder arrayEventRecorder) ( interfaces.Node, executors.ExecutionContext, executors.DAGStructure, executors.NodeLookup, *v1alpha1.NodeSpec, *v1alpha1.NodeStatus, error) { - nodePhase := v1alpha1.NodePhase(arrayNodeState.SubNodePhases.GetItem(subNodeIndex)) - taskPhase := int(arrayNodeState.SubNodeTaskPhases.GetItem(subNodeIndex)) + nodePhase := v1alpha1.NodePhase(arrayNodeState.SubNodePhases.GetItem(subNodeIndex)) // #nosec G115 + taskPhase := int(arrayNodeState.SubNodeTaskPhases.GetItem(subNodeIndex)) // #nosec G115 // need to initialize the inputReader every time to ensure TaskHandler can access for cache lookups / population inputs, err := nCtx.InputReader().Get(ctx) @@ -759,22 +782,31 @@ func (a *arrayNodeHandler) buildArrayNodeContext(ctx context.Context, nCtx inter } // construct output references - currentAttempt := uint32(arrayNodeState.SubNodeRetryAttempts.GetItem(subNodeIndex)) + currentAttempt := uint32(arrayNodeState.SubNodeRetryAttempts.GetItem(subNodeIndex)) // #nosec G115 subDataDir, subOutputDir, err := constructOutputReferences(ctx, nCtx, strconv.Itoa(subNodeIndex), strconv.Itoa(int(currentAttempt))) if err != nil { return nil, nil, nil, nil, nil, nil, err } + // compute start time for subNode using delta timestamp from ArrayNode NodeStatus + var startedAt *metav1.Time + if nCtx.NodeStatus().GetLastAttemptStartedAt() != nil && arrayNodeState.SubNodeDeltaTimestamps.BitSet != nil { + 
if deltaSeconds := arrayNodeState.SubNodeDeltaTimestamps.GetItem(subNodeIndex); deltaSeconds != 0 { + startedAt = &metav1.Time{Time: nCtx.NodeStatus().GetLastAttemptStartedAt().Add(time.Duration(deltaSeconds) * time.Second)} // #nosec G115 + } + } + subNodeStatus := &v1alpha1.NodeStatus{ Phase: nodePhase, DataDir: subDataDir, OutputDir: subOutputDir, Attempts: currentAttempt, - SystemFailures: uint32(arrayNodeState.SubNodeSystemFailures.GetItem(subNodeIndex)), + SystemFailures: uint32(arrayNodeState.SubNodeSystemFailures.GetItem(subNodeIndex)), // #nosec G115 TaskNodeStatus: &v1alpha1.TaskNodeStatus{ Phase: taskPhase, PluginState: pluginStateBytes, }, + LastAttemptStartedAt: startedAt, } // initialize mocks diff --git a/flytepropeller/pkg/controller/nodes/array/handler_test.go b/flytepropeller/pkg/controller/nodes/array/handler_test.go index 1b3fdff8e6..91e6533b5e 100644 --- a/flytepropeller/pkg/controller/nodes/array/handler_test.go +++ b/flytepropeller/pkg/controller/nodes/array/handler_test.go @@ -4,9 +4,11 @@ import ( "context" "fmt" "testing" + "time" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/mock" + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/types" idlcore "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core" @@ -184,9 +186,15 @@ func createNodeExecutionContext(dataStore *storage.DataStore, eventRecorder inte nCtx.OnNodeStateWriter().Return(nodeStateWriter) // NodeStatus + nowMinus := time.Now().Add(time.Duration(-5) * time.Second) + metav1NowMinus := metav1.Time{ + Time: nowMinus, + } nCtx.OnNodeStatus().Return(&v1alpha1.NodeStatus{ - DataDir: storage.DataReference("s3://bucket/data"), - OutputDir: storage.DataReference("s3://bucket/output"), + DataDir: storage.DataReference("s3://bucket/data"), + OutputDir: storage.DataReference("s3://bucket/output"), + LastAttemptStartedAt: &metav1NowMinus, + StartedAt: &metav1NowMinus, }) return nCtx @@ -252,17 +260,18 @@ func TestAbort(t *testing.T) { 
{arrayReference: &arrayNodeState.SubNodeTaskPhases, maxValue: len(core.Phases) - 1}, {arrayReference: &arrayNodeState.SubNodeRetryAttempts, maxValue: 1}, {arrayReference: &arrayNodeState.SubNodeSystemFailures, maxValue: 1}, + {arrayReference: &arrayNodeState.SubNodeDeltaTimestamps, maxValue: 1024}, } { - *item.arrayReference, err = bitarray.NewCompactArray(uint(size), bitarray.Item(item.maxValue)) + *item.arrayReference, err = bitarray.NewCompactArray(uint(size), bitarray.Item(item.maxValue)) // #nosec G115 assert.NoError(t, err) } for i, nodePhase := range test.subNodePhases { - arrayNodeState.SubNodePhases.SetItem(i, bitarray.Item(nodePhase)) + arrayNodeState.SubNodePhases.SetItem(i, bitarray.Item(nodePhase)) // #nosec G115 } for i, taskPhase := range test.subNodeTaskPhases { - arrayNodeState.SubNodeTaskPhases.SetItem(i, bitarray.Item(taskPhase)) + arrayNodeState.SubNodeTaskPhases.SetItem(i, bitarray.Item(taskPhase)) // #nosec G115 } // create NodeExecutionContext @@ -277,10 +286,10 @@ func TestAbort(t *testing.T) { if len(test.expectedExternalResourcePhases) > 0 { assert.Equal(t, 1, len(eventRecorder.taskExecutionEvents)) - externalResources := eventRecorder.taskExecutionEvents[0].Metadata.GetExternalResources() + externalResources := eventRecorder.taskExecutionEvents[0].GetMetadata().GetExternalResources() assert.Equal(t, len(test.expectedExternalResourcePhases), len(externalResources)) for i, expectedPhase := range test.expectedExternalResourcePhases { - assert.Equal(t, expectedPhase, externalResources[i].Phase) + assert.Equal(t, expectedPhase, externalResources[i].GetPhase()) } } else { assert.Equal(t, 0, len(eventRecorder.taskExecutionEvents)) @@ -348,17 +357,17 @@ func TestFinalize(t *testing.T) { {arrayReference: &arrayNodeState.SubNodeTaskPhases, maxValue: len(core.Phases) - 1}, {arrayReference: &arrayNodeState.SubNodeRetryAttempts, maxValue: 1}, {arrayReference: &arrayNodeState.SubNodeSystemFailures, maxValue: 1}, + {arrayReference: 
&arrayNodeState.SubNodeDeltaTimestamps, maxValue: 1024}, } { - - *item.arrayReference, err = bitarray.NewCompactArray(uint(size), bitarray.Item(item.maxValue)) + *item.arrayReference, err = bitarray.NewCompactArray(uint(size), bitarray.Item(item.maxValue)) // #nosec G115 assert.NoError(t, err) } for i, nodePhase := range test.subNodePhases { - arrayNodeState.SubNodePhases.SetItem(i, bitarray.Item(nodePhase)) + arrayNodeState.SubNodePhases.SetItem(i, bitarray.Item(nodePhase)) // #nosec G115 } for i, taskPhase := range test.subNodeTaskPhases { - arrayNodeState.SubNodeTaskPhases.SetItem(i, bitarray.Item(taskPhase)) + arrayNodeState.SubNodeTaskPhases.SetItem(i, bitarray.Item(taskPhase)) // #nosec G115 } // create NodeExecutionContext @@ -447,10 +456,10 @@ func TestHandleArrayNodePhaseNone(t *testing.T) { if len(test.expectedExternalResourcePhases) > 0 { assert.Equal(t, 1, len(eventRecorder.taskExecutionEvents)) - externalResources := eventRecorder.taskExecutionEvents[0].Metadata.GetExternalResources() + externalResources := eventRecorder.taskExecutionEvents[0].GetMetadata().GetExternalResources() assert.Equal(t, len(test.expectedExternalResourcePhases), len(externalResources)) for i, expectedPhase := range test.expectedExternalResourcePhases { - assert.Equal(t, expectedPhase, externalResources[i].Phase) + assert.Equal(t, expectedPhase, externalResources[i].GetPhase()) } } else { assert.Equal(t, 0, len(eventRecorder.taskExecutionEvents)) @@ -475,7 +484,7 @@ func (f *fakeEventRecorder) RecordNodeEvent(ctx context.Context, event *event.No func (f *fakeEventRecorder) RecordTaskEvent(ctx context.Context, event *event.TaskExecutionEvent, eventConfig *config.EventConfig) error { f.recordTaskEventCallCount++ - if f.phaseVersionFailures == 0 || event.PhaseVersion < f.phaseVersionFailures { + if f.phaseVersionFailures == 0 || event.GetPhaseVersion() < f.phaseVersionFailures { return f.taskErr } return nil @@ -507,25 +516,27 @@ func TestHandleArrayNodePhaseExecuting(t *testing.T) 
{ } tests := []struct { - name string - parallelism *uint32 - minSuccessRatio *float32 - subNodePhases []v1alpha1.NodePhase - subNodeTaskPhases []core.Phase - subNodeTransitions []handler.Transition - expectedArrayNodePhase v1alpha1.ArrayNodePhase - expectedArrayNodeSubPhases []v1alpha1.NodePhase - expectedTransitionPhase handler.EPhase - expectedExternalResourcePhases []idlcore.TaskExecution_Phase - currentWfParallelism uint32 - maxWfParallelism uint32 - incrementParallelismCount uint32 - useFakeEventRecorder bool - eventRecorderFailures uint32 - eventRecorderError error - expectedTaskPhaseVersion uint32 - expectHandleError bool - expectedEventingCalls int + name string + parallelism *uint32 + minSuccessRatio *float32 + subNodePhases []v1alpha1.NodePhase + subNodeTaskPhases []core.Phase + subNodeDeltaTimestamps []uint64 + subNodeTransitions []handler.Transition + expectedArrayNodePhase v1alpha1.ArrayNodePhase + expectedArrayNodeSubPhases []v1alpha1.NodePhase + expectedDiffArrayNodeSubDeltaTimestamps []bool + expectedTransitionPhase handler.EPhase + expectedExternalResourcePhases []idlcore.TaskExecution_Phase + currentWfParallelism uint32 + maxWfParallelism uint32 + incrementParallelismCount uint32 + useFakeEventRecorder bool + eventRecorderFailures uint32 + eventRecorderError error + expectedTaskPhaseVersion uint32 + expectHandleError bool + expectedEventingCalls int }{ { name: "StartAllSubNodes", @@ -828,6 +839,31 @@ func TestHandleArrayNodePhaseExecuting(t *testing.T) { expectHandleError: true, expectedEventingCalls: 1, }, + { + name: "DeltaTimestampUpdates", + parallelism: uint32Ptr(0), + subNodePhases: []v1alpha1.NodePhase{ + v1alpha1.NodePhaseQueued, + v1alpha1.NodePhaseRunning, + }, + subNodeTaskPhases: []core.Phase{ + core.PhaseUndefined, + core.PhaseUndefined, + }, + subNodeTransitions: []handler.Transition{ + handler.DoTransition(handler.TransitionTypeEphemeral, handler.PhaseInfoRunning(&handler.ExecutionInfo{})), + 
handler.DoTransition(handler.TransitionTypeEphemeral, handler.PhaseInfoRetryableFailure(idlcore.ExecutionError_SYSTEM, "", "", &handler.ExecutionInfo{})), + }, + expectedArrayNodePhase: v1alpha1.ArrayNodePhaseExecuting, + expectedArrayNodeSubPhases: []v1alpha1.NodePhase{ + v1alpha1.NodePhaseRunning, + v1alpha1.NodePhaseRetryableFailure, + }, + expectedTaskPhaseVersion: 1, + expectedTransitionPhase: handler.EPhaseRunning, + expectedExternalResourcePhases: []idlcore.TaskExecution_Phase{idlcore.TaskExecution_RUNNING, idlcore.TaskExecution_FAILED}, + incrementParallelismCount: 1, + }, } for _, test := range tests { @@ -859,14 +895,18 @@ func TestHandleArrayNodePhaseExecuting(t *testing.T) { {arrayReference: &arrayNodeState.SubNodeTaskPhases, maxValue: len(core.Phases) - 1}, {arrayReference: &arrayNodeState.SubNodeRetryAttempts, maxValue: 1}, {arrayReference: &arrayNodeState.SubNodeSystemFailures, maxValue: 1}, + {arrayReference: &arrayNodeState.SubNodeDeltaTimestamps, maxValue: 1024}, } { - - *item.arrayReference, err = bitarray.NewCompactArray(uint(size), bitarray.Item(item.maxValue)) + *item.arrayReference, err = bitarray.NewCompactArray(uint(size), bitarray.Item(item.maxValue)) // #nosec G115 assert.NoError(t, err) } for i, nodePhase := range test.subNodePhases { - arrayNodeState.SubNodePhases.SetItem(i, bitarray.Item(nodePhase)) + arrayNodeState.SubNodePhases.SetItem(i, bitarray.Item(nodePhase)) // #nosec G115 + } + + for i, deltaTimestmap := range test.subNodeDeltaTimestamps { + arrayNodeState.SubNodeDeltaTimestamps.SetItem(i, deltaTimestmap) // #nosec G115 } nodeSpec := arrayNodeSpec @@ -921,7 +961,15 @@ func TestHandleArrayNodePhaseExecuting(t *testing.T) { assert.Equal(t, test.expectedTaskPhaseVersion, arrayNodeState.TaskPhaseVersion) for i, expectedPhase := range test.expectedArrayNodeSubPhases { - assert.Equal(t, expectedPhase, v1alpha1.NodePhase(arrayNodeState.SubNodePhases.GetItem(i))) + assert.Equal(t, expectedPhase, 
v1alpha1.NodePhase(arrayNodeState.SubNodePhases.GetItem(i))) // #nosec G115 + } + + for i, expectedDiffDeltaTimestamps := range test.expectedDiffArrayNodeSubDeltaTimestamps { + if expectedDiffDeltaTimestamps { + assert.NotEqual(t, arrayNodeState.SubNodeDeltaTimestamps.GetItem(i), test.subNodeDeltaTimestamps[i]) + } else { + assert.Equal(t, arrayNodeState.SubNodeDeltaTimestamps.GetItem(i), test.subNodeDeltaTimestamps[i]) + } } bufferedEventRecorder, ok := eventRecorder.(*bufferedEventRecorder) @@ -929,10 +977,10 @@ func TestHandleArrayNodePhaseExecuting(t *testing.T) { if len(test.expectedExternalResourcePhases) > 0 { assert.Equal(t, 1, len(bufferedEventRecorder.taskExecutionEvents)) - externalResources := bufferedEventRecorder.taskExecutionEvents[0].Metadata.GetExternalResources() + externalResources := bufferedEventRecorder.taskExecutionEvents[0].GetMetadata().GetExternalResources() assert.Equal(t, len(test.expectedExternalResourcePhases), len(externalResources)) for i, expectedPhase := range test.expectedExternalResourcePhases { - assert.Equal(t, expectedPhase, externalResources[i].Phase) + assert.Equal(t, expectedPhase, externalResources[i].GetPhase()) } } else { assert.Equal(t, 0, len(bufferedEventRecorder.taskExecutionEvents)) @@ -1000,8 +1048,8 @@ func TestHandle_InvalidLiteralType(t *testing.T) { // Validate results assert.Equal(t, test.expectedTransitionType, transition.Type()) assert.Equal(t, test.expectedPhase, transition.Info().GetPhase()) - assert.Equal(t, test.expectedErrorCode, transition.Info().GetErr().Code) - assert.Contains(t, transition.Info().GetErr().Message, test.expectedContainedErrorMsg) + assert.Equal(t, test.expectedErrorCode, transition.Info().GetErr().GetCode()) + assert.Contains(t, transition.Info().GetErr().GetMessage(), test.expectedContainedErrorMsg) }) } } @@ -1175,7 +1223,7 @@ func TestHandleArrayNodePhaseSucceeding(t *testing.T) { subNodePhases, err := bitarray.NewCompactArray(uint(len(test.subNodePhases)), 
bitarray.Item(v1alpha1.NodePhaseRecovered)) assert.NoError(t, err) for i, nodePhase := range test.subNodePhases { - subNodePhases.SetItem(i, bitarray.Item(nodePhase)) + subNodePhases.SetItem(i, bitarray.Item(nodePhase)) // #nosec G115 } retryAttempts, err := bitarray.NewCompactArray(uint(len(test.subNodePhases)), bitarray.Item(1)) @@ -1303,14 +1351,14 @@ func TestHandleArrayNodePhaseFailing(t *testing.T) { {arrayReference: &arrayNodeState.SubNodeTaskPhases, maxValue: len(core.Phases) - 1}, {arrayReference: &arrayNodeState.SubNodeRetryAttempts, maxValue: 1}, {arrayReference: &arrayNodeState.SubNodeSystemFailures, maxValue: 1}, + {arrayReference: &arrayNodeState.SubNodeDeltaTimestamps, maxValue: 1024}, } { - - *item.arrayReference, err = bitarray.NewCompactArray(uint(len(test.subNodePhases)), bitarray.Item(item.maxValue)) + *item.arrayReference, err = bitarray.NewCompactArray(uint(len(test.subNodePhases)), bitarray.Item(item.maxValue)) // #nosec G115 assert.NoError(t, err) } for i, nodePhase := range test.subNodePhases { - arrayNodeState.SubNodePhases.SetItem(i, bitarray.Item(nodePhase)) + arrayNodeState.SubNodePhases.SetItem(i, bitarray.Item(nodePhase)) // #nosec G115 } // create NodeExecutionContext diff --git a/flytepropeller/pkg/controller/nodes/array/node_execution_context.go b/flytepropeller/pkg/controller/nodes/array/node_execution_context.go index b2c9619695..d55de708c0 100644 --- a/flytepropeller/pkg/controller/nodes/array/node_execution_context.go +++ b/flytepropeller/pkg/controller/nodes/array/node_execution_context.go @@ -29,12 +29,12 @@ func newStaticInputReader(inputPaths io.InputFilePaths, input *core.LiteralMap) func constructLiteralMap(inputs *core.LiteralMap, index int) (*core.LiteralMap, error) { literals := make(map[string]*core.Literal) - for name, literal := range inputs.Literals { + for name, literal := range inputs.GetLiterals() { if literalCollection := literal.GetCollection(); literalCollection != nil { - if index >= 
len(literalCollection.Literals) { + if index >= len(literalCollection.GetLiterals()) { return nil, fmt.Errorf("index %v out of bounds for literal collection %v", index, name) } - literals[name] = literalCollection.Literals[index] + literals[name] = literalCollection.GetLiterals()[index] } else { literals[name] = literal } @@ -57,12 +57,12 @@ func (a *arrayTaskReader) Read(ctx context.Context) (*core.TaskTemplate, error) // convert output list variable to singular outputVariables := make(map[string]*core.Variable) - for key, value := range originalTaskTemplate.Interface.Outputs.Variables { - switch v := value.Type.Type.(type) { + for key, value := range originalTaskTemplate.GetInterface().GetOutputs().GetVariables() { + switch v := value.GetType().GetType().(type) { case *core.LiteralType_CollectionType: outputVariables[key] = &core.Variable{ Type: v.CollectionType, - Description: value.Description, + Description: value.GetDescription(), } default: outputVariables[key] = value @@ -71,7 +71,7 @@ func (a *arrayTaskReader) Read(ctx context.Context) (*core.TaskTemplate, error) taskTemplate := *originalTaskTemplate taskTemplate.Interface = &core.TypedInterface{ - Inputs: originalTaskTemplate.Interface.Inputs, + Inputs: originalTaskTemplate.GetInterface().GetInputs(), Outputs: &core.VariableMap{ Variables: outputVariables, }, diff --git a/flytepropeller/pkg/controller/nodes/array/utils_test.go b/flytepropeller/pkg/controller/nodes/array/utils_test.go index 2b2c030cd6..eeddd827ea 100644 --- a/flytepropeller/pkg/controller/nodes/array/utils_test.go +++ b/flytepropeller/pkg/controller/nodes/array/utils_test.go @@ -29,10 +29,10 @@ func TestAppendLiteral(t *testing.T) { } for _, v := range outputLiterals { - collection, ok := v.Value.(*idlcore.Literal_Collection) + collection, ok := v.GetValue().(*idlcore.Literal_Collection) assert.True(t, ok) - assert.Equal(t, 2, len(collection.Collection.Literals)) + assert.Equal(t, 2, len(collection.Collection.GetLiterals())) } } diff --git 
a/flytepropeller/pkg/controller/nodes/attr_path_resolver_test.go b/flytepropeller/pkg/controller/nodes/attr_path_resolver_test.go index e8e28ac08f..f617025ed9 100644 --- a/flytepropeller/pkg/controller/nodes/attr_path_resolver_test.go +++ b/flytepropeller/pkg/controller/nodes/attr_path_resolver_test.go @@ -520,7 +520,7 @@ func TestResolveAttrPathInBinary(t *testing.T) { collection := make([]any, len(literals)) for i, l := range literals { var v any - _ = msgpack.Unmarshal(l.GetScalar().GetBinary().Value, &v) + _ = msgpack.Unmarshal(l.GetScalar().GetBinary().GetValue(), &v) collection[i] = v } return collection @@ -1434,10 +1434,10 @@ func TestResolveAttrPathInBinary(t *testing.T) { // Helper function to unmarshal a Binary Literal into an any unmarshalBinaryLiteral := func(literal *core.Literal) (any, error) { - if scalar, ok := literal.Value.(*core.Literal_Scalar); ok { - if binary, ok := scalar.Scalar.Value.(*core.Scalar_Binary); ok { + if scalar, ok := literal.GetValue().(*core.Literal_Scalar); ok { + if binary, ok := scalar.Scalar.GetValue().(*core.Scalar_Binary); ok { var value any - err := msgpack.Unmarshal(binary.Binary.Value, &value) + err := msgpack.Unmarshal(binary.Binary.GetValue(), &value) return value, err } } diff --git a/flytepropeller/pkg/controller/nodes/branch/comparator.go b/flytepropeller/pkg/controller/nodes/branch/comparator.go index cfac3be0af..b52456ea51 100644 --- a/flytepropeller/pkg/controller/nodes/branch/comparator.go +++ b/flytepropeller/pkg/controller/nodes/branch/comparator.go @@ -72,14 +72,14 @@ var perTypeComparators = map[string]comparators{ } func Evaluate(lValue *core.Primitive, rValue *core.Primitive, op core.ComparisonExpression_Operator) (bool, error) { - lValueType := reflect.TypeOf(lValue.Value) - rValueType := reflect.TypeOf(rValue.Value) + lValueType := reflect.TypeOf(lValue.GetValue()) + rValueType := reflect.TypeOf(rValue.GetValue()) if lValueType != rValueType { return false, errors.Errorf(ErrorCodeMalformedBranch, 
"Comparison between different primitives types. lVal[%v]:rVal[%v]", lValueType, rValueType) } comps, ok := perTypeComparators[lValueType.String()] if !ok { - return false, errors.Errorf("Comparator not defined for type: [%v]", lValueType.String()) + return false, errors.Errorf("Comparator not defined for type: [%v]", lValueType.String()) //nolint:govet,staticcheck } isBoolean := false if lValueType.String() == primitiveBooleanType { diff --git a/flytepropeller/pkg/controller/nodes/branch/evaluator.go b/flytepropeller/pkg/controller/nodes/branch/evaluator.go index 4bc1676745..c6d717cfe8 100644 --- a/flytepropeller/pkg/controller/nodes/branch/evaluator.go +++ b/flytepropeller/pkg/controller/nodes/branch/evaluator.go @@ -27,7 +27,7 @@ func EvaluateComparison(expr *core.ComparisonExpression, nodeInputs *core.Litera if nodeInputs == nil { return false, errors.Errorf(ErrorCodeMalformedBranch, "Failed to find Value for Variable [%v]", expr.GetLeftValue().GetVar()) } - lValue = nodeInputs.Literals[expr.GetLeftValue().GetVar()] + lValue = nodeInputs.GetLiterals()[expr.GetLeftValue().GetVar()] if lValue == nil { return false, errors.Errorf(ErrorCodeMalformedBranch, "Failed to find Value for Variable [%v]", expr.GetLeftValue().GetVar()) } @@ -39,7 +39,7 @@ func EvaluateComparison(expr *core.ComparisonExpression, nodeInputs *core.Litera if nodeInputs == nil { return false, errors.Errorf(ErrorCodeMalformedBranch, "Failed to find Value for Variable [%v]", expr.GetLeftValue().GetVar()) } - rValue = nodeInputs.Literals[expr.GetRightValue().GetVar()] + rValue = nodeInputs.GetLiterals()[expr.GetRightValue().GetVar()] if rValue == nil { return false, errors.Errorf(ErrorCodeMalformedBranch, "Failed to find Value for Variable [%v]", expr.GetRightValue().GetVar()) } @@ -136,7 +136,7 @@ func DecideBranch(ctx context.Context, nl executors.NodeLookup, nodeID v1alpha1. 
if selectedNodeID == nil { if node.GetElseFail() != nil { - return nil, errors.Errorf(ErrorCodeUserProvidedError, node.GetElseFail().Message) + return nil, errors.Errorf(ErrorCodeUserProvidedError, node.GetElseFail().GetMessage()) //nolint:govet,staticcheck } return nil, errors.Errorf(ErrorCodeMalformedBranch, "No branch satisfied") } diff --git a/flytepropeller/pkg/controller/nodes/branch/evaluator_test.go b/flytepropeller/pkg/controller/nodes/branch/evaluator_test.go index dae8a1337b..ae29572a22 100644 --- a/flytepropeller/pkg/controller/nodes/branch/evaluator_test.go +++ b/flytepropeller/pkg/controller/nodes/branch/evaluator_test.go @@ -279,7 +279,7 @@ func TestEvaluateBooleanExpression(t *testing.T) { }, } - for k, v := range inputs.Literals { + for k, v := range inputs.GetLiterals() { outerInputs.Literals[k] = v } diff --git a/flytepropeller/pkg/controller/nodes/branch/handler.go b/flytepropeller/pkg/controller/nodes/branch/handler.go index 431f5fa3eb..9789b65c22 100644 --- a/flytepropeller/pkg/controller/nodes/branch/handler.go +++ b/flytepropeller/pkg/controller/nodes/branch/handler.go @@ -183,7 +183,7 @@ func (b *branchHandler) Abort(ctx context.Context, nCtx interfaces.NodeExecution // We should never reach here, but for safety and completeness errMsg := "branch evaluation failed" if branch.GetElseFail() != nil { - errMsg = branch.GetElseFail().Message + errMsg = branch.GetElseFail().GetMessage() } logger.Errorf(ctx, errMsg) return nil @@ -227,7 +227,7 @@ func (b *branchHandler) Finalize(ctx context.Context, nCtx interfaces.NodeExecut // We should never reach here, but for safety and completeness errMsg := "branch evaluation failed" if branch.GetElseFail() != nil { - errMsg = branch.GetElseFail().Message + errMsg = branch.GetElseFail().GetMessage() } logger.Errorf(ctx, "failed to evaluate branch - user error: %s", errMsg) return nil diff --git a/flytepropeller/pkg/controller/nodes/cache.go b/flytepropeller/pkg/controller/nodes/cache.go index 
e8e7fc3720..5b1188aa56 100644 --- a/flytepropeller/pkg/controller/nodes/cache.go +++ b/flytepropeller/pkg/controller/nodes/cache.go @@ -105,12 +105,12 @@ func (n *nodeExecutor) CheckCatalogCache(ctx context.Context, nCtx interfaces.No return entry, nil } - logger.Infof(ctx, "Catalog CacheHit: for task [%s/%s/%s/%s]", catalogKey.Identifier.Project, - catalogKey.Identifier.Domain, catalogKey.Identifier.Name, catalogKey.Identifier.Version) + logger.Infof(ctx, "Catalog CacheHit: for task [%s/%s/%s/%s]", catalogKey.Identifier.GetProject(), + catalogKey.Identifier.GetDomain(), catalogKey.Identifier.GetName(), catalogKey.Identifier.GetVersion()) n.metrics.catalogHitCount.Inc(ctx) iface := catalogKey.TypedInterface - if iface.Outputs != nil && len(iface.Outputs.Variables) > 0 { + if iface.GetOutputs() != nil && len(iface.GetOutputs().GetVariables()) > 0 { // copy cached outputs to node outputs o, ee, err := entry.GetOutputs().Read(ctx) if err != nil { @@ -157,15 +157,15 @@ func (n *nodeExecutor) GetOrExtendCatalogReservation(ctx context.Context, nCtx i } var status core.CatalogReservation_Status - if reservation.OwnerId == ownerID { + if reservation.GetOwnerId() == ownerID { status = core.CatalogReservation_RESERVATION_ACQUIRED } else { status = core.CatalogReservation_RESERVATION_EXISTS } n.metrics.reservationGetSuccessCount.Inc(ctx) - return catalog.NewReservationEntry(reservation.ExpiresAt.AsTime(), - reservation.HeartbeatInterval.AsDuration(), reservation.OwnerId, status), nil + return catalog.NewReservationEntry(reservation.GetExpiresAt().AsTime(), + reservation.GetHeartbeatInterval().AsDuration(), reservation.GetOwnerId(), status), nil } // ReleaseCatalogReservation attempts to release an artifact reservation if the task is cacheable @@ -208,12 +208,12 @@ func (n *nodeExecutor) WriteCatalogCache(ctx context.Context, nCtx interfaces.No } iface := catalogKey.TypedInterface - if iface.Outputs != nil && len(iface.Outputs.Variables) == 0 { + if iface.GetOutputs() != nil 
&& len(iface.GetOutputs().GetVariables()) == 0 { return catalog.NewStatus(core.CatalogCacheStatus_CACHE_DISABLED, nil), nil } - logger.Infof(ctx, "Catalog CacheEnabled. recording execution [%s/%s/%s/%s]", catalogKey.Identifier.Project, - catalogKey.Identifier.Domain, catalogKey.Identifier.Name, catalogKey.Identifier.Version) + logger.Infof(ctx, "Catalog CacheEnabled. recording execution [%s/%s/%s/%s]", catalogKey.Identifier.GetProject(), + catalogKey.Identifier.GetDomain(), catalogKey.Identifier.GetName(), catalogKey.Identifier.GetVersion()) outputPaths := ioutils.NewReadOnlyOutputFilePaths(ctx, nCtx.DataStore(), nCtx.NodeStatus().GetOutputDir()) outputReader := ioutils.NewRemoteFileOutputReader(ctx, nCtx.DataStore(), outputPaths, 0) diff --git a/flytepropeller/pkg/controller/nodes/cache_test.go b/flytepropeller/pkg/controller/nodes/cache_test.go index fa9eecadb2..f6c57b31de 100644 --- a/flytepropeller/pkg/controller/nodes/cache_test.go +++ b/flytepropeller/pkg/controller/nodes/cache_test.go @@ -128,11 +128,11 @@ func TestUpdatePhaseCacheInfo(t *testing.T) { // ensure cache and reservation status' are being set correctly if test.cacheStatus != nil { - assert.Equal(t, cacheStatus.GetCacheStatus(), phaseInfo.GetInfo().TaskNodeInfo.TaskNodeMetadata.CacheStatus) + assert.Equal(t, cacheStatus.GetCacheStatus(), phaseInfo.GetInfo().TaskNodeInfo.TaskNodeMetadata.GetCacheStatus()) } if test.reservationStatus != nil { - assert.Equal(t, reservationStatus, phaseInfo.GetInfo().TaskNodeInfo.TaskNodeMetadata.ReservationStatus) + assert.Equal(t, reservationStatus, phaseInfo.GetInfo().TaskNodeInfo.TaskNodeMetadata.GetReservationStatus()) } }) } diff --git a/flytepropeller/pkg/controller/nodes/catalog/config.go b/flytepropeller/pkg/controller/nodes/catalog/config.go index 4dd7bc70ae..cededafcae 100644 --- a/flytepropeller/pkg/controller/nodes/catalog/config.go +++ b/flytepropeller/pkg/controller/nodes/catalog/config.go @@ -82,7 +82,7 @@ func NewCatalogClient(ctx context.Context, 
authOpt ...grpc.DialOption) (catalog. case DataCatalogType: return datacatalog.NewDataCatalog(ctx, catalogConfig.Endpoint, catalogConfig.Insecure, catalogConfig.MaxCacheAge.Duration, catalogConfig.UseAdminAuth, catalogConfig.DefaultServiceConfig, - uint(catalogConfig.MaxRetries), catalogConfig.BackoffScalar, catalogConfig.GetBackoffJitter(ctx), authOpt...) + uint(catalogConfig.MaxRetries), catalogConfig.BackoffScalar, catalogConfig.GetBackoffJitter(ctx), authOpt...) // #nosec G115 case NoOpDiscoveryType, "": return NOOPCatalog{}, nil } diff --git a/flytepropeller/pkg/controller/nodes/catalog/datacatalog/datacatalog.go b/flytepropeller/pkg/controller/nodes/catalog/datacatalog/datacatalog.go index 00a99d6c54..b10f5d0291 100644 --- a/flytepropeller/pkg/controller/nodes/catalog/datacatalog/datacatalog.go +++ b/flytepropeller/pkg/controller/nodes/catalog/datacatalog/datacatalog.go @@ -54,14 +54,14 @@ func (m *CatalogClient) GetDataset(ctx context.Context, key catalog.Key) (*datac return nil, err } - return datasetResponse.Dataset, nil + return datasetResponse.GetDataset(), nil } // GetArtifactByTag retrieves an artifact using the provided tag and dataset. 
func (m *CatalogClient) GetArtifactByTag(ctx context.Context, tagName string, dataset *datacatalog.Dataset) (*datacatalog.Artifact, error) { logger.Debugf(ctx, "Get Artifact by tag %v", tagName) artifactQuery := &datacatalog.GetArtifactRequest{ - Dataset: dataset.Id, + Dataset: dataset.GetId(), QueryHandle: &datacatalog.GetArtifactRequest_TagName{ TagName: tagName, }, @@ -73,21 +73,21 @@ func (m *CatalogClient) GetArtifactByTag(ctx context.Context, tagName string, da // check artifact's age if the configuration specifies a max age if m.maxCacheAge > time.Duration(0) { - artifact := response.Artifact - createdAt, err := ptypes.Timestamp(artifact.CreatedAt) + artifact := response.GetArtifact() + createdAt, err := ptypes.Timestamp(artifact.GetCreatedAt()) if err != nil { - logger.Errorf(ctx, "DataCatalog Artifact has invalid createdAt %+v, err: %+v", artifact.CreatedAt, err) + logger.Errorf(ctx, "DataCatalog Artifact has invalid createdAt %+v, err: %+v", artifact.GetCreatedAt(), err) return nil, err } if time.Since(createdAt) > m.maxCacheAge { logger.Warningf(ctx, "Expired Cached Artifact %v created on %v, older than max age %v", - artifact.Id, createdAt.String(), m.maxCacheAge) + artifact.GetId(), createdAt.String(), m.maxCacheAge) return nil, status.Error(codes.NotFound, "Artifact over age limit") } } - return response.Artifact, nil + return response.GetArtifact(), nil } // Get the cached task execution from Catalog. 
@@ -103,7 +103,7 @@ func (m *CatalogClient) Get(ctx context.Context, key catalog.Key) (catalog.Entry } inputs := &core.LiteralMap{} - if key.TypedInterface.Inputs != nil { + if key.TypedInterface.GetInputs() != nil { retInputs, err := key.InputReader.Get(ctx) if err != nil { return catalog.Entry{}, errors.Wrap(err, "failed to read inputs when trying to query catalog") @@ -139,11 +139,11 @@ func (m *CatalogClient) Get(ctx context.Context, key catalog.Key) (catalog.Entry outputs, err := GenerateTaskOutputsFromArtifact(key.Identifier, key.TypedInterface, artifact) if err != nil { - logger.Errorf(ctx, "DataCatalog failed to get outputs from artifact %+v, err: %+v", artifact.Id, err) + logger.Errorf(ctx, "DataCatalog failed to get outputs from artifact %+v, err: %+v", artifact.GetId(), err) return catalog.NewCatalogEntry(ioutils.NewInMemoryOutputReader(outputs, nil, nil), catalog.NewStatus(core.CatalogCacheStatus_CACHE_MISS, md)), err } - logger.Infof(ctx, "Retrieved %v outputs from artifact %v, tag: %v", len(outputs.Literals), artifact.Id, tag) + logger.Infof(ctx, "Retrieved %v outputs from artifact %v, tag: %v", len(outputs.GetLiterals()), artifact.GetId(), tag) return catalog.NewCatalogEntry(ioutils.NewInMemoryOutputReader(outputs, nil, nil), catalog.NewStatus(core.CatalogCacheStatus_CACHE_HIT, md)), nil } @@ -178,7 +178,7 @@ func (m *CatalogClient) createDataset(ctx context.Context, key catalog.Key, meta func (m *CatalogClient) prepareInputsAndOutputs(ctx context.Context, key catalog.Key, reader io.OutputReader) (inputs *core.LiteralMap, outputs *core.LiteralMap, err error) { inputs = &core.LiteralMap{} outputs = &core.LiteralMap{} - if key.TypedInterface.Inputs != nil && len(key.TypedInterface.Inputs.Variables) != 0 { + if key.TypedInterface.GetInputs() != nil && len(key.TypedInterface.GetInputs().GetVariables()) != 0 { retInputs, err := key.InputReader.Get(ctx) if err != nil { logger.Errorf(ctx, "DataCatalog failed to read inputs err: %s", err) @@ -188,7 +188,7 @@ 
func (m *CatalogClient) prepareInputsAndOutputs(ctx context.Context, key catalog inputs = retInputs } - if key.TypedInterface.Outputs != nil && len(key.TypedInterface.Outputs.Variables) != 0 { + if key.TypedInterface.GetOutputs() != nil && len(key.TypedInterface.GetOutputs().GetVariables()) != 0 { retOutputs, retErr, err := reader.Read(ctx) if err != nil { logger.Errorf(ctx, "DataCatalog failed to read outputs err: %s", err) @@ -211,8 +211,8 @@ func (m *CatalogClient) createArtifact(ctx context.Context, key catalog.Key, dat logger.Debugf(ctx, "Creating artifact for key %+v, dataset %+v and execution %+v", key, datasetID, metadata) // Create the artifact for the execution that belongs in the task - artifactDataList := make([]*datacatalog.ArtifactData, 0, len(outputs.Literals)) - for name, value := range outputs.Literals { + artifactDataList := make([]*datacatalog.ArtifactData, 0, len(outputs.GetLiterals())) + for name, value := range outputs.GetLiterals() { artifactData := &datacatalog.ArtifactData{ Name: name, Value: value, @@ -230,15 +230,15 @@ func (m *CatalogClient) createArtifact(ctx context.Context, key catalog.Key, dat createArtifactRequest := &datacatalog.CreateArtifactRequest{Artifact: cachedArtifact} _, err := m.client.CreateArtifact(ctx, createArtifactRequest) if err != nil { - logger.Errorf(ctx, "Failed to create Artifact %+v, err: %v", cachedArtifact.Id, err) + logger.Errorf(ctx, "Failed to create Artifact %+v, err: %v", cachedArtifact.GetId(), err) return catalog.Status{}, err } - logger.Debugf(ctx, "Created artifact: %v, with %v outputs from execution %+v", cachedArtifact.Id, len(artifactDataList), metadata) + logger.Debugf(ctx, "Created artifact: %v, with %v outputs from execution %+v", cachedArtifact.GetId(), len(artifactDataList), metadata) // Tag the artifact since it is the cached artifact tagName, err := GenerateArtifactTagName(ctx, inputs, key.CacheIgnoreInputVars) if err != nil { - logger.Errorf(ctx, "Failed to generate tag for artifact %+v, 
err: %+v", cachedArtifact.Id, err) + logger.Errorf(ctx, "Failed to generate tag for artifact %+v, err: %+v", cachedArtifact.GetId(), err) return catalog.Status{}, err } logger.Infof(ctx, "Cached exec tag: %v, task: %v", tagName, key.Identifier) @@ -247,19 +247,19 @@ func (m *CatalogClient) createArtifact(ctx context.Context, key catalog.Key, dat tag := &datacatalog.Tag{ Name: tagName, Dataset: datasetID, - ArtifactId: cachedArtifact.Id, + ArtifactId: cachedArtifact.GetId(), } _, err = m.client.AddTag(ctx, &datacatalog.AddTagRequest{Tag: tag}) if err != nil { if status.Code(err) == codes.AlreadyExists { - logger.Warnf(ctx, "Tag %v already exists for Artifact %v (idempotent)", tagName, cachedArtifact.Id) + logger.Warnf(ctx, "Tag %v already exists for Artifact %v (idempotent)", tagName, cachedArtifact.GetId()) } else { - logger.Errorf(ctx, "Failed to add tag %+v for artifact %+v, err: %+v", tagName, cachedArtifact.Id, err) + logger.Errorf(ctx, "Failed to add tag %+v for artifact %+v, err: %+v", tagName, cachedArtifact.GetId(), err) return catalog.Status{}, err } } - logger.Debugf(ctx, "Successfully created artifact %+v for key %+v, dataset %+v and execution %+v", cachedArtifact.Id, key, datasetID, metadata) + logger.Debugf(ctx, "Successfully created artifact %+v for key %+v, dataset %+v and execution %+v", cachedArtifact.GetId(), key, datasetID, metadata) return catalog.NewStatus(core.CatalogCacheStatus_CACHE_POPULATED, EventCatalogMetadata(datasetID, tag, nil)), nil } @@ -267,8 +267,8 @@ func (m *CatalogClient) createArtifact(ctx context.Context, key catalog.Key, dat func (m *CatalogClient) updateArtifact(ctx context.Context, key catalog.Key, datasetID *datacatalog.DatasetID, inputs *core.LiteralMap, outputs *core.LiteralMap, metadata catalog.Metadata) (catalog.Status, error) { logger.Debugf(ctx, "Updating artifact for key %+v, dataset %+v and execution %+v", key, datasetID, metadata) - artifactDataList := make([]*datacatalog.ArtifactData, 0, len(outputs.Literals)) - 
for name, value := range outputs.Literals { + artifactDataList := make([]*datacatalog.ArtifactData, 0, len(outputs.GetLiterals())) + for name, value := range outputs.GetLiterals() { artifactData := &datacatalog.ArtifactData{ Name: name, Value: value, @@ -305,7 +305,7 @@ func (m *CatalogClient) updateArtifact(ctx context.Context, key catalog.Key, dat return catalog.Status{}, fmt.Errorf("failed to get source from metadata. Error: %w", err) } - logger.Debugf(ctx, "Successfully updated artifact with ID %v and %d outputs for key %+v, dataset %+v and execution %+v", tag.ArtifactId, len(artifactDataList), key, datasetID, metadata) + logger.Debugf(ctx, "Successfully updated artifact with ID %v and %d outputs for key %+v, dataset %+v and execution %+v", tag.GetArtifactId(), len(artifactDataList), key, datasetID, metadata) return catalog.NewStatus(core.CatalogCacheStatus_CACHE_POPULATED, EventCatalogMetadata(datasetID, tag, source)), nil } @@ -382,7 +382,7 @@ func (m *CatalogClient) GetOrExtendReservation(ctx context.Context, key catalog. } inputs := &core.LiteralMap{} - if key.TypedInterface.Inputs != nil { + if key.TypedInterface.GetInputs() != nil { retInputs, err := key.InputReader.Get(ctx) if err != nil { return nil, errors.Wrap(err, "failed to read inputs when trying to query catalog") @@ -409,7 +409,7 @@ func (m *CatalogClient) GetOrExtendReservation(ctx context.Context, key catalog. return nil, err } - return response.Reservation, nil + return response.GetReservation(), nil } // ReleaseReservation attempts to release a reservation for a cacheable task. 
If the reservation @@ -422,7 +422,7 @@ func (m *CatalogClient) ReleaseReservation(ctx context.Context, key catalog.Key, } inputs := &core.LiteralMap{} - if key.TypedInterface.Inputs != nil { + if key.TypedInterface.GetInputs() != nil { retInputs, err := key.InputReader.Get(ctx) if err != nil { return errors.Wrap(err, "failed to read inputs when trying to query catalog") diff --git a/flytepropeller/pkg/controller/nodes/catalog/datacatalog/datacatalog_test.go b/flytepropeller/pkg/controller/nodes/catalog/datacatalog/datacatalog_test.go index ce8f6f4069..2a4c1a07eb 100644 --- a/flytepropeller/pkg/controller/nodes/catalog/datacatalog/datacatalog_test.go +++ b/flytepropeller/pkg/controller/nodes/catalog/datacatalog/datacatalog_test.go @@ -108,7 +108,7 @@ func TestCatalog_Get(t *testing.T) { mockClient.On("GetDataset", ctx, mock.MatchedBy(func(o *datacatalog.GetDatasetRequest) bool { - assert.EqualValues(t, datasetID.String(), o.Dataset.String()) + assert.EqualValues(t, datasetID.String(), o.GetDataset().String()) return true }), ).Return(nil, status.Error(codes.NotFound, "test not found")) @@ -136,7 +136,7 @@ func TestCatalog_Get(t *testing.T) { mockClient.On("GetDataset", ctx, mock.MatchedBy(func(o *datacatalog.GetDatasetRequest) bool { - assert.EqualValues(t, datasetID.String(), o.Dataset.String()) + assert.EqualValues(t, datasetID.String(), o.GetDataset().String()) return true }), ).Return(&datacatalog.GetDatasetResponse{Dataset: sampleDataSet}, nil, "") @@ -167,9 +167,9 @@ func TestCatalog_Get(t *testing.T) { taskID := &core.TaskExecutionIdentifier{ TaskId: &core.Identifier{ ResourceType: core.ResourceType_TASK, - Name: sampleKey.Identifier.Name, - Project: sampleKey.Identifier.Project, - Domain: sampleKey.Identifier.Domain, + Name: sampleKey.Identifier.GetName(), + Project: sampleKey.Identifier.GetProject(), + Domain: sampleKey.Identifier.GetDomain(), Version: "ver", }, NodeExecutionId: &core.NodeExecutionIdentifier{ @@ -190,14 +190,14 @@ func TestCatalog_Get(t 
*testing.T) { mockClient.On("GetDataset", ctx, mock.MatchedBy(func(o *datacatalog.GetDatasetRequest) bool { - assert.EqualValues(t, datasetID, o.Dataset) + assert.EqualValues(t, datasetID, o.GetDataset()) return true }), ).Return(&datacatalog.GetDatasetResponse{Dataset: sampleDataSet}, nil) sampleArtifact := &datacatalog.Artifact{ Id: "test-artifact", - Dataset: sampleDataSet.Id, + Dataset: sampleDataSet.GetId(), Data: []*datacatalog.ArtifactData{sampleArtifactData}, Metadata: GetArtifactMetadataForSource(taskID), Tags: []*datacatalog.Tag{ @@ -208,16 +208,16 @@ func TestCatalog_Get(t *testing.T) { }, } - assert.Equal(t, taskID.NodeExecutionId.ExecutionId.Name, sampleArtifact.GetMetadata().KeyMap[execNameKey]) - assert.Equal(t, taskID.NodeExecutionId.NodeId, sampleArtifact.GetMetadata().KeyMap[execNodeIDKey]) - assert.Equal(t, taskID.NodeExecutionId.ExecutionId.Project, sampleArtifact.GetMetadata().KeyMap[execProjectKey]) - assert.Equal(t, taskID.NodeExecutionId.ExecutionId.Domain, sampleArtifact.GetMetadata().KeyMap[execDomainKey]) - assert.Equal(t, strconv.Itoa(int(taskID.RetryAttempt)), sampleArtifact.GetMetadata().KeyMap[execTaskAttemptKey]) + assert.Equal(t, taskID.GetNodeExecutionId().GetExecutionId().GetName(), sampleArtifact.GetMetadata().GetKeyMap()[execNameKey]) + assert.Equal(t, taskID.GetNodeExecutionId().GetNodeId(), sampleArtifact.GetMetadata().GetKeyMap()[execNodeIDKey]) + assert.Equal(t, taskID.GetNodeExecutionId().GetExecutionId().GetProject(), sampleArtifact.GetMetadata().GetKeyMap()[execProjectKey]) + assert.Equal(t, taskID.GetNodeExecutionId().GetExecutionId().GetDomain(), sampleArtifact.GetMetadata().GetKeyMap()[execDomainKey]) + assert.Equal(t, strconv.Itoa(int(taskID.GetRetryAttempt())), sampleArtifact.GetMetadata().GetKeyMap()[execTaskAttemptKey]) mockClient.On("GetArtifact", ctx, mock.MatchedBy(func(o *datacatalog.GetArtifactRequest) bool { - assert.EqualValues(t, datasetID, o.Dataset) + assert.EqualValues(t, datasetID, o.GetDataset()) 
assert.Equal(t, "flyte_cached-BE6CZsMk6N3ExR_4X9EuwBgj2Jh2UwasXK3a_pM9xlY", o.GetTagName()) return true }), @@ -228,18 +228,18 @@ func TestCatalog_Get(t *testing.T) { resp, err := catalogClient.Get(ctx, newKey) assert.NoError(t, err) assert.Equal(t, core.CatalogCacheStatus_CACHE_HIT.String(), resp.GetStatus().GetCacheStatus().String()) - assert.NotNil(t, resp.GetStatus().GetMetadata().DatasetId) - assert.Equal(t, core.ResourceType_DATASET, resp.GetStatus().GetMetadata().DatasetId.ResourceType) - assert.Equal(t, datasetID.Name, resp.GetStatus().GetMetadata().DatasetId.Name) - assert.Equal(t, datasetID.Project, resp.GetStatus().GetMetadata().DatasetId.Project) - assert.Equal(t, datasetID.Domain, resp.GetStatus().GetMetadata().DatasetId.Domain) - assert.Equal(t, datasetID.Version, resp.GetStatus().GetMetadata().DatasetId.Version) - assert.NotNil(t, resp.GetStatus().GetMetadata().ArtifactTag) - assert.NotNil(t, resp.GetStatus().GetMetadata().SourceExecution) + assert.NotNil(t, resp.GetStatus().GetMetadata().GetDatasetId()) + assert.Equal(t, core.ResourceType_DATASET, resp.GetStatus().GetMetadata().GetDatasetId().GetResourceType()) + assert.Equal(t, datasetID.GetName(), resp.GetStatus().GetMetadata().GetDatasetId().GetName()) + assert.Equal(t, datasetID.GetProject(), resp.GetStatus().GetMetadata().GetDatasetId().GetProject()) + assert.Equal(t, datasetID.GetDomain(), resp.GetStatus().GetMetadata().GetDatasetId().GetDomain()) + assert.Equal(t, datasetID.GetVersion(), resp.GetStatus().GetMetadata().GetDatasetId().GetVersion()) + assert.NotNil(t, resp.GetStatus().GetMetadata().GetArtifactTag()) + assert.NotNil(t, resp.GetStatus().GetMetadata().GetSourceExecution()) sourceTID := resp.GetStatus().GetMetadata().GetSourceTaskExecution() - assert.Equal(t, taskID.TaskId.String(), sourceTID.TaskId.String()) - assert.Equal(t, taskID.RetryAttempt, sourceTID.RetryAttempt) - assert.Equal(t, taskID.NodeExecutionId.String(), sourceTID.NodeExecutionId.String()) + assert.Equal(t, 
taskID.GetTaskId().String(), sourceTID.GetTaskId().String()) + assert.Equal(t, taskID.GetRetryAttempt(), sourceTID.GetRetryAttempt()) + assert.Equal(t, taskID.GetNodeExecutionId().String(), sourceTID.GetNodeExecutionId().String()) }) t.Run("Found expired artifact", func(t *testing.T) { @@ -259,7 +259,7 @@ func TestCatalog_Get(t *testing.T) { mockClient.On("GetDataset", ctx, mock.MatchedBy(func(o *datacatalog.GetDatasetRequest) bool { - assert.EqualValues(t, datasetID, o.Dataset) + assert.EqualValues(t, datasetID, o.GetDataset()) return true }), ).Return(&datacatalog.GetDatasetResponse{Dataset: sampleDataSet}, nil) @@ -268,14 +268,14 @@ func TestCatalog_Get(t *testing.T) { sampleArtifact := &datacatalog.Artifact{ Id: "test-artifact", - Dataset: sampleDataSet.Id, + Dataset: sampleDataSet.GetId(), Data: []*datacatalog.ArtifactData{sampleArtifactData}, CreatedAt: createdAt, } mockClient.On("GetArtifact", ctx, mock.MatchedBy(func(o *datacatalog.GetArtifactRequest) bool { - assert.EqualValues(t, datasetID, o.Dataset) + assert.EqualValues(t, datasetID, o.GetDataset()) assert.Equal(t, "flyte_cached-BE6CZsMk6N3ExR_4X9EuwBgj2Jh2UwasXK3a_pM9xlY", o.GetTagName()) return true }), @@ -309,7 +309,7 @@ func TestCatalog_Get(t *testing.T) { mockClient.On("GetDataset", ctx, mock.MatchedBy(func(o *datacatalog.GetDatasetRequest) bool { - assert.EqualValues(t, datasetID, o.Dataset) + assert.EqualValues(t, datasetID, o.GetDataset()) return true }), ).Return(&datacatalog.GetDatasetResponse{Dataset: sampleDataSet}, nil) @@ -318,14 +318,14 @@ func TestCatalog_Get(t *testing.T) { sampleArtifact := &datacatalog.Artifact{ Id: "test-artifact", - Dataset: sampleDataSet.Id, + Dataset: sampleDataSet.GetId(), Data: []*datacatalog.ArtifactData{sampleArtifactData}, CreatedAt: createdAt, } mockClient.On("GetArtifact", ctx, mock.MatchedBy(func(o *datacatalog.GetArtifactRequest) bool { - assert.EqualValues(t, datasetID, o.Dataset) + assert.EqualValues(t, datasetID, o.GetDataset()) assert.Equal(t, 
"flyte_cached-BE6CZsMk6N3ExR_4X9EuwBgj2Jh2UwasXK3a_pM9xlY", o.GetTagName()) return true }), @@ -356,20 +356,20 @@ func TestCatalog_Get(t *testing.T) { mockClient.On("GetDataset", ctx, mock.MatchedBy(func(o *datacatalog.GetDatasetRequest) bool { - assert.EqualValues(t, "1.0.0-GKw-c0Pw-GKw-c0Pw", o.Dataset.Version) + assert.EqualValues(t, "1.0.0-GKw-c0Pw-GKw-c0Pw", o.GetDataset().GetVersion()) return true }), ).Return(&datacatalog.GetDatasetResponse{Dataset: sampleDataSet}, nil) sampleArtifact := &datacatalog.Artifact{ Id: "test-artifact", - Dataset: sampleDataSet.Id, + Dataset: sampleDataSet.GetId(), Data: []*datacatalog.ArtifactData{}, } mockClient.On("GetArtifact", ctx, mock.MatchedBy(func(o *datacatalog.GetArtifactRequest) bool { - assert.EqualValues(t, "1.0.0-GKw-c0Pw-GKw-c0Pw", o.Dataset.Version) + assert.EqualValues(t, "1.0.0-GKw-c0Pw-GKw-c0Pw", o.GetDataset().GetVersion()) assert.Equal(t, "flyte_cached-GKw-c0PwFokMUQ6T-TUmEWnZ4_VlQ2Qpgw-vCTT0-OQ", o.GetTagName()) return true }), @@ -385,7 +385,7 @@ func TestCatalog_Get(t *testing.T) { v, e, err := resp.GetOutputs().Read(ctx) assert.NoError(t, err) assert.Nil(t, e) - assert.Len(t, v.Literals, 0) + assert.Len(t, v.GetLiterals(), 0) }) } @@ -404,7 +404,7 @@ func TestCatalog_Put(t *testing.T) { mockClient.On("CreateDataset", ctx, mock.MatchedBy(func(o *datacatalog.CreateDatasetRequest) bool { - assert.True(t, proto.Equal(o.Dataset.Id, datasetID)) + assert.True(t, proto.Equal(o.GetDataset().GetId(), datasetID)) return true }), ).Return(&datacatalog.CreateDatasetResponse{}, nil) @@ -412,11 +412,11 @@ func TestCatalog_Put(t *testing.T) { mockClient.On("CreateArtifact", ctx, mock.MatchedBy(func(o *datacatalog.CreateArtifactRequest) bool { - _, parseErr := uuid.Parse(o.Artifact.Id) + _, parseErr := uuid.Parse(o.GetArtifact().GetId()) assert.NoError(t, parseErr) - assert.EqualValues(t, 1, len(o.Artifact.Data)) - assert.EqualValues(t, "out1", o.Artifact.Data[0].Name) - assert.True(t, 
proto.Equal(newStringLiteral("output1-stringval"), o.Artifact.Data[0].Value)) + assert.EqualValues(t, 1, len(o.GetArtifact().GetData())) + assert.EqualValues(t, "out1", o.GetArtifact().GetData()[0].GetName()) + assert.True(t, proto.Equal(newStringLiteral("output1-stringval"), o.GetArtifact().GetData()[0].GetValue())) return true }), ).Return(&datacatalog.CreateArtifactResponse{}, nil) @@ -424,7 +424,7 @@ func TestCatalog_Put(t *testing.T) { mockClient.On("AddTag", ctx, mock.MatchedBy(func(o *datacatalog.AddTagRequest) bool { - assert.EqualValues(t, "flyte_cached-BE6CZsMk6N3ExR_4X9EuwBgj2Jh2UwasXK3a_pM9xlY", o.Tag.Name) + assert.EqualValues(t, "flyte_cached-BE6CZsMk6N3ExR_4X9EuwBgj2Jh2UwasXK3a_pM9xlY", o.GetTag().GetName()) return true }), ).Return(&datacatalog.AddTagResponse{}, nil) @@ -440,7 +440,7 @@ func TestCatalog_Put(t *testing.T) { assert.NoError(t, err) assert.Equal(t, core.CatalogCacheStatus_CACHE_POPULATED, s.GetCacheStatus()) assert.NotNil(t, s.GetMetadata()) - assert.Equal(t, "flyte_cached-BE6CZsMk6N3ExR_4X9EuwBgj2Jh2UwasXK3a_pM9xlY", s.GetMetadata().ArtifactTag.Name) + assert.Equal(t, "flyte_cached-BE6CZsMk6N3ExR_4X9EuwBgj2Jh2UwasXK3a_pM9xlY", s.GetMetadata().GetArtifactTag().GetName()) }) t.Run("Create dataset fails", func(t *testing.T) { @@ -519,7 +519,7 @@ func TestCatalog_Put(t *testing.T) { mockClient.On("CreateDataset", ctx, mock.MatchedBy(func(o *datacatalog.CreateDatasetRequest) bool { - assert.Equal(t, "1.0.0-GKw-c0Pw-GKw-c0Pw", o.Dataset.Id.Version) + assert.Equal(t, "1.0.0-GKw-c0Pw-GKw-c0Pw", o.GetDataset().GetId().GetVersion()) return true }), ).Return(&datacatalog.CreateDatasetResponse{}, nil) @@ -527,7 +527,7 @@ func TestCatalog_Put(t *testing.T) { mockClient.On("CreateArtifact", ctx, mock.MatchedBy(func(o *datacatalog.CreateArtifactRequest) bool { - assert.EqualValues(t, 0, len(o.Artifact.Data)) + assert.EqualValues(t, 0, len(o.GetArtifact().GetData())) return true }), ).Return(&datacatalog.CreateArtifactResponse{}, nil) @@ -535,7 +535,7 
@@ func TestCatalog_Put(t *testing.T) { mockClient.On("AddTag", ctx, mock.MatchedBy(func(o *datacatalog.AddTagRequest) bool { - assert.EqualValues(t, "flyte_cached-GKw-c0PwFokMUQ6T-TUmEWnZ4_VlQ2Qpgw-vCTT0-OQ", o.Tag.Name) + assert.EqualValues(t, "flyte_cached-GKw-c0PwFokMUQ6T-TUmEWnZ4_VlQ2Qpgw-vCTT0-OQ", o.GetTag().GetName()) return true }), ).Return(&datacatalog.AddTagResponse{}, nil) @@ -567,11 +567,11 @@ func TestCatalog_Put(t *testing.T) { mockClient.On("CreateArtifact", ctx, mock.MatchedBy(func(o *datacatalog.CreateArtifactRequest) bool { - _, parseErr := uuid.Parse(o.Artifact.Id) + _, parseErr := uuid.Parse(o.GetArtifact().GetId()) assert.NoError(t, parseErr) - assert.EqualValues(t, 1, len(o.Artifact.Data)) - assert.EqualValues(t, "out1", o.Artifact.Data[0].Name) - assert.True(t, proto.Equal(newStringLiteral("output1-stringval"), o.Artifact.Data[0].Value)) + assert.EqualValues(t, 1, len(o.GetArtifact().GetData())) + assert.EqualValues(t, "out1", o.GetArtifact().GetData()[0].GetName()) + assert.True(t, proto.Equal(newStringLiteral("output1-stringval"), o.GetArtifact().GetData()[0].GetValue())) createArtifactCalled = true return true }), @@ -581,7 +581,7 @@ func TestCatalog_Put(t *testing.T) { mockClient.On("AddTag", ctx, mock.MatchedBy(func(o *datacatalog.AddTagRequest) bool { - assert.EqualValues(t, "flyte_cached-BE6CZsMk6N3ExR_4X9EuwBgj2Jh2UwasXK3a_pM9xlY", o.Tag.Name) + assert.EqualValues(t, "flyte_cached-BE6CZsMk6N3ExR_4X9EuwBgj2Jh2UwasXK3a_pM9xlY", o.GetTag().GetName()) addTagCalled = true return true }), @@ -619,7 +619,7 @@ func TestCatalog_Update(t *testing.T) { mockClient.On("CreateDataset", ctx, mock.MatchedBy(func(o *datacatalog.CreateDatasetRequest) bool { - assert.True(t, proto.Equal(o.Dataset.Id, datasetID)) + assert.True(t, proto.Equal(o.GetDataset().GetId(), datasetID)) return true }), ).Return(&datacatalog.CreateDatasetResponse{}, nil) @@ -627,8 +627,8 @@ func TestCatalog_Update(t *testing.T) { mockClient.On("UpdateArtifact", ctx, 
mock.MatchedBy(func(o *datacatalog.UpdateArtifactRequest) bool { - assert.True(t, proto.Equal(o.Dataset, datasetID)) - assert.IsType(t, &datacatalog.UpdateArtifactRequest_TagName{}, o.QueryHandle) + assert.True(t, proto.Equal(o.GetDataset(), datasetID)) + assert.IsType(t, &datacatalog.UpdateArtifactRequest_TagName{}, o.GetQueryHandle()) assert.Equal(t, tagName, o.GetTagName()) return true }), @@ -637,9 +637,9 @@ func TestCatalog_Update(t *testing.T) { taskID := &core.TaskExecutionIdentifier{ TaskId: &core.Identifier{ ResourceType: core.ResourceType_TASK, - Name: sampleKey.Identifier.Name, - Project: sampleKey.Identifier.Project, - Domain: sampleKey.Identifier.Domain, + Name: sampleKey.Identifier.GetName(), + Project: sampleKey.Identifier.GetProject(), + Domain: sampleKey.Identifier.GetDomain(), Version: "version", }, NodeExecutionId: &core.NodeExecutionIdentifier{ @@ -658,24 +658,24 @@ func TestCatalog_Update(t *testing.T) { or := ioutils.NewInMemoryOutputReader(sampleParameters, nil, nil) s, err := discovery.Update(ctx, newKey, or, catalog.Metadata{ WorkflowExecutionIdentifier: &core.WorkflowExecutionIdentifier{ - Name: taskID.NodeExecutionId.ExecutionId.Name, - Domain: taskID.NodeExecutionId.ExecutionId.Domain, - Project: taskID.NodeExecutionId.ExecutionId.Project, + Name: taskID.GetNodeExecutionId().GetExecutionId().GetName(), + Domain: taskID.GetNodeExecutionId().GetExecutionId().GetDomain(), + Project: taskID.GetNodeExecutionId().GetExecutionId().GetProject(), }, TaskExecutionIdentifier: &core.TaskExecutionIdentifier{ TaskId: &sampleKey.Identifier, - NodeExecutionId: taskID.NodeExecutionId, + NodeExecutionId: taskID.GetNodeExecutionId(), RetryAttempt: 0, }, }) assert.NoError(t, err) assert.Equal(t, core.CatalogCacheStatus_CACHE_POPULATED, s.GetCacheStatus()) assert.NotNil(t, s.GetMetadata()) - assert.Equal(t, tagName, s.GetMetadata().ArtifactTag.Name) + assert.Equal(t, tagName, s.GetMetadata().GetArtifactTag().GetName()) sourceTID := 
s.GetMetadata().GetSourceTaskExecution() - assert.Equal(t, taskID.TaskId.String(), sourceTID.TaskId.String()) - assert.Equal(t, taskID.RetryAttempt, sourceTID.RetryAttempt) - assert.Equal(t, taskID.NodeExecutionId.String(), sourceTID.NodeExecutionId.String()) + assert.Equal(t, taskID.GetTaskId().String(), sourceTID.GetTaskId().String()) + assert.Equal(t, taskID.GetRetryAttempt(), sourceTID.GetRetryAttempt()) + assert.Equal(t, taskID.GetNodeExecutionId().String(), sourceTID.GetNodeExecutionId().String()) }) t.Run("Overwrite non-existing execution", func(t *testing.T) { @@ -706,9 +706,9 @@ func TestCatalog_Update(t *testing.T) { taskID := &core.TaskExecutionIdentifier{ TaskId: &core.Identifier{ ResourceType: core.ResourceType_TASK, - Name: sampleKey.Identifier.Name, - Project: sampleKey.Identifier.Project, - Domain: sampleKey.Identifier.Domain, + Name: sampleKey.Identifier.GetName(), + Project: sampleKey.Identifier.GetProject(), + Domain: sampleKey.Identifier.GetDomain(), Version: "version", }, NodeExecutionId: &core.NodeExecutionIdentifier{ @@ -727,13 +727,13 @@ func TestCatalog_Update(t *testing.T) { or := ioutils.NewInMemoryOutputReader(sampleParameters, nil, nil) s, err := discovery.Update(ctx, newKey, or, catalog.Metadata{ WorkflowExecutionIdentifier: &core.WorkflowExecutionIdentifier{ - Name: taskID.NodeExecutionId.ExecutionId.Name, - Domain: taskID.NodeExecutionId.ExecutionId.Domain, - Project: taskID.NodeExecutionId.ExecutionId.Project, + Name: taskID.GetNodeExecutionId().GetExecutionId().GetName(), + Domain: taskID.GetNodeExecutionId().GetExecutionId().GetDomain(), + Project: taskID.GetNodeExecutionId().GetExecutionId().GetProject(), }, TaskExecutionIdentifier: &core.TaskExecutionIdentifier{ TaskId: &sampleKey.Identifier, - NodeExecutionId: taskID.NodeExecutionId, + NodeExecutionId: taskID.GetNodeExecutionId(), RetryAttempt: 0, }, }) @@ -755,7 +755,7 @@ func TestCatalog_Update(t *testing.T) { mockClient.On("CreateDataset", ctx, mock.MatchedBy(func(o 
*datacatalog.CreateDatasetRequest) bool { - assert.True(t, proto.Equal(o.Dataset.Id, datasetID)) + assert.True(t, proto.Equal(o.GetDataset().GetId(), datasetID)) createDatasetCalled = true return true }), @@ -770,9 +770,9 @@ func TestCatalog_Update(t *testing.T) { mockClient.On("CreateArtifact", ctx, mock.MatchedBy(func(o *datacatalog.CreateArtifactRequest) bool { - _, parseErr := uuid.Parse(o.Artifact.Id) + _, parseErr := uuid.Parse(o.GetArtifact().GetId()) assert.NoError(t, parseErr) - assert.True(t, proto.Equal(o.Artifact.Dataset, datasetID)) + assert.True(t, proto.Equal(o.GetArtifact().GetDataset(), datasetID)) createArtifactCalled = true return true }), @@ -782,7 +782,7 @@ func TestCatalog_Update(t *testing.T) { mockClient.On("AddTag", ctx, mock.MatchedBy(func(o *datacatalog.AddTagRequest) bool { - assert.EqualValues(t, "flyte_cached-BE6CZsMk6N3ExR_4X9EuwBgj2Jh2UwasXK3a_pM9xlY", o.Tag.Name) + assert.EqualValues(t, "flyte_cached-BE6CZsMk6N3ExR_4X9EuwBgj2Jh2UwasXK3a_pM9xlY", o.GetTag().GetName()) addTagCalled = true return true }), @@ -791,9 +791,9 @@ func TestCatalog_Update(t *testing.T) { taskID := &core.TaskExecutionIdentifier{ TaskId: &core.Identifier{ ResourceType: core.ResourceType_TASK, - Name: sampleKey.Identifier.Name, - Project: sampleKey.Identifier.Project, - Domain: sampleKey.Identifier.Domain, + Name: sampleKey.Identifier.GetName(), + Project: sampleKey.Identifier.GetProject(), + Domain: sampleKey.Identifier.GetDomain(), Version: "version", }, NodeExecutionId: &core.NodeExecutionIdentifier{ @@ -812,20 +812,20 @@ func TestCatalog_Update(t *testing.T) { or := ioutils.NewInMemoryOutputReader(sampleParameters, nil, nil) s, err := discovery.Update(ctx, newKey, or, catalog.Metadata{ WorkflowExecutionIdentifier: &core.WorkflowExecutionIdentifier{ - Name: taskID.NodeExecutionId.ExecutionId.Name, - Domain: taskID.NodeExecutionId.ExecutionId.Domain, - Project: taskID.NodeExecutionId.ExecutionId.Project, + Name: 
taskID.GetNodeExecutionId().GetExecutionId().GetName(), + Domain: taskID.GetNodeExecutionId().GetExecutionId().GetDomain(), + Project: taskID.GetNodeExecutionId().GetExecutionId().GetProject(), }, TaskExecutionIdentifier: &core.TaskExecutionIdentifier{ TaskId: &sampleKey.Identifier, - NodeExecutionId: taskID.NodeExecutionId, + NodeExecutionId: taskID.GetNodeExecutionId(), RetryAttempt: 0, }, }) assert.NoError(t, err) assert.Equal(t, core.CatalogCacheStatus_CACHE_POPULATED, s.GetCacheStatus()) assert.NotNil(t, s.GetMetadata()) - assert.Equal(t, tagName, s.GetMetadata().ArtifactTag.Name) + assert.Equal(t, tagName, s.GetMetadata().GetArtifactTag().GetName()) assert.Nil(t, s.GetMetadata().GetSourceTaskExecution()) assert.True(t, createDatasetCalled) assert.True(t, updateArtifactCalled) @@ -932,8 +932,8 @@ func TestCatalog_GetOrExtendReservation(t *testing.T) { mockClient.On("GetOrExtendReservation", ctx, mock.MatchedBy(func(o *datacatalog.GetOrExtendReservationRequest) bool { - assert.EqualValues(t, datasetID.String(), o.ReservationId.DatasetId.String()) - assert.EqualValues(t, tagName, o.ReservationId.TagName) + assert.EqualValues(t, datasetID.String(), o.GetReservationId().GetDatasetId().String()) + assert.EqualValues(t, tagName, o.GetReservationId().GetTagName()) return true }), ).Return(&datacatalog.GetOrExtendReservationResponse{Reservation: ¤tReservation}, nil, "") @@ -943,7 +943,7 @@ func TestCatalog_GetOrExtendReservation(t *testing.T) { reservation, err := catalogClient.GetOrExtendReservation(ctx, newKey, currentOwner, heartbeatInterval) assert.NoError(t, err) - assert.Equal(t, reservation.OwnerId, currentOwner) + assert.Equal(t, reservation.GetOwnerId(), currentOwner) }) t.Run("ExistingReservation", func(t *testing.T) { @@ -958,8 +958,8 @@ func TestCatalog_GetOrExtendReservation(t *testing.T) { mockClient.On("GetOrExtendReservation", ctx, mock.MatchedBy(func(o *datacatalog.GetOrExtendReservationRequest) bool { - assert.EqualValues(t, datasetID.String(), 
o.ReservationId.DatasetId.String()) - assert.EqualValues(t, tagName, o.ReservationId.TagName) + assert.EqualValues(t, datasetID.String(), o.GetReservationId().GetDatasetId().String()) + assert.EqualValues(t, tagName, o.GetReservationId().GetTagName()) return true }), ).Return(&datacatalog.GetOrExtendReservationResponse{Reservation: &prevReservation}, nil, "") @@ -969,7 +969,7 @@ func TestCatalog_GetOrExtendReservation(t *testing.T) { reservation, err := catalogClient.GetOrExtendReservation(ctx, newKey, currentOwner, heartbeatInterval) assert.NoError(t, err) - assert.Equal(t, reservation.OwnerId, prevOwner) + assert.Equal(t, reservation.GetOwnerId(), prevOwner) }) } @@ -988,8 +988,8 @@ func TestCatalog_ReleaseReservation(t *testing.T) { mockClient.On("ReleaseReservation", ctx, mock.MatchedBy(func(o *datacatalog.ReleaseReservationRequest) bool { - assert.EqualValues(t, datasetID.String(), o.ReservationId.DatasetId.String()) - assert.EqualValues(t, tagName, o.ReservationId.TagName) + assert.EqualValues(t, datasetID.String(), o.GetReservationId().GetDatasetId().String()) + assert.EqualValues(t, tagName, o.GetReservationId().GetTagName()) return true }), ).Return(&datacatalog.ReleaseReservationResponse{}, nil, "") @@ -1013,8 +1013,8 @@ func TestCatalog_ReleaseReservation(t *testing.T) { mockClient.On("ReleaseReservation", ctx, mock.MatchedBy(func(o *datacatalog.ReleaseReservationRequest) bool { - assert.EqualValues(t, datasetID.String(), o.ReservationId.DatasetId.String()) - assert.EqualValues(t, tagName, o.ReservationId.TagName) + assert.EqualValues(t, datasetID.String(), o.GetReservationId().GetDatasetId().String()) + assert.EqualValues(t, tagName, o.GetReservationId().GetTagName()) return true }), ).Return(nil, status.Error(codes.NotFound, "reservation not found")) diff --git a/flytepropeller/pkg/controller/nodes/catalog/datacatalog/transformer.go b/flytepropeller/pkg/controller/nodes/catalog/datacatalog/transformer.go index c145ca51c6..9ff494e54f 100644 --- 
a/flytepropeller/pkg/controller/nodes/catalog/datacatalog/transformer.go +++ b/flytepropeller/pkg/controller/nodes/catalog/datacatalog/transformer.go @@ -26,38 +26,38 @@ var emptyLiteralMap = core.LiteralMap{Literals: map[string]*core.Literal{}} var emptyVariableMap = core.VariableMap{Variables: map[string]*core.Variable{}} func getDatasetNameFromTask(taskID core.Identifier) string { - return fmt.Sprintf("%s-%s", taskNamespace, taskID.Name) + return fmt.Sprintf("%s-%s", taskNamespace, taskID.GetName()) } // Transform the artifact Data into task execution outputs as a literal map func GenerateTaskOutputsFromArtifact(id core.Identifier, taskInterface core.TypedInterface, artifact *datacatalog.Artifact) (*core.LiteralMap, error) { // if there are no outputs in the task, return empty map - if taskInterface.Outputs == nil || len(taskInterface.Outputs.Variables) == 0 { + if taskInterface.GetOutputs() == nil || len(taskInterface.GetOutputs().GetVariables()) == 0 { return &emptyLiteralMap, nil } - outputVariables := taskInterface.Outputs.Variables - artifactDataList := artifact.Data + outputVariables := taskInterface.GetOutputs().GetVariables() + artifactDataList := artifact.GetData() // verify the task outputs matches what is stored in ArtifactData if len(outputVariables) != len(artifactDataList) { - return nil, fmt.Errorf("the task %s with %d outputs, should have %d artifactData for artifact %s", id.String(), len(outputVariables), len(artifactDataList), artifact.Id) + return nil, fmt.Errorf("the task %s with %d outputs, should have %d artifactData for artifact %s", id.String(), len(outputVariables), len(artifactDataList), artifact.GetId()) } outputs := make(map[string]*core.Literal, len(artifactDataList)) for _, artifactData := range artifactDataList { // verify that the name and type of artifactData matches what is expected from the interface - if _, ok := outputVariables[artifactData.Name]; !ok { - return nil, fmt.Errorf("unexpected artifactData with name [%v] does not 
match any task output variables %v", artifactData.Name, reflect.ValueOf(outputVariables).MapKeys()) + if _, ok := outputVariables[artifactData.GetName()]; !ok { + return nil, fmt.Errorf("unexpected artifactData with name [%v] does not match any task output variables %v", artifactData.GetName(), reflect.ValueOf(outputVariables).MapKeys()) } - expectedVarType := outputVariables[artifactData.Name].GetType() - if !validators.IsInstance(artifactData.Value, expectedVarType) { - return nil, fmt.Errorf("unexpected artifactData: [%v] val: [%v] does not match any task output type: [%v]", artifactData.Name, artifactData.Value, expectedVarType) + expectedVarType := outputVariables[artifactData.GetName()].GetType() + if !validators.IsInstance(artifactData.GetValue(), expectedVarType) { + return nil, fmt.Errorf("unexpected artifactData: [%v] val: [%v] does not match any task output type: [%v]", artifactData.GetName(), artifactData.GetValue(), expectedVarType) } - outputs[artifactData.Name] = artifactData.Value + outputs[artifactData.GetName()] = artifactData.GetValue() } return &core.LiteralMap{Literals: outputs}, nil @@ -81,12 +81,12 @@ func generateTaskSignatureHash(ctx context.Context, taskInterface core.TypedInte taskInputs := &emptyVariableMap taskOutputs := &emptyVariableMap - if taskInterface.Inputs != nil && len(taskInterface.Inputs.Variables) != 0 { - taskInputs = taskInterface.Inputs + if taskInterface.GetInputs() != nil && len(taskInterface.GetInputs().GetVariables()) != 0 { + taskInputs = taskInterface.GetInputs() } - if taskInterface.Outputs != nil && len(taskInterface.Outputs.Variables) != 0 { - taskOutputs = taskInterface.Outputs + if taskInterface.GetOutputs() != nil && len(taskInterface.GetOutputs().GetVariables()) != 0 { + taskOutputs = taskInterface.GetOutputs() } inputHash, err := pbhash.ComputeHash(ctx, taskInputs) @@ -133,8 +133,8 @@ func GenerateDatasetIDForTask(ctx context.Context, k catalog.Key) (*datacatalog. 
} datasetID := &datacatalog.DatasetID{ - Project: k.Identifier.Project, - Domain: k.Identifier.Domain, + Project: k.Identifier.GetProject(), + Domain: k.Identifier.GetDomain(), Name: getDatasetNameFromTask(k.Identifier), Version: datasetVersion, } @@ -145,7 +145,7 @@ func DatasetIDToIdentifier(id *datacatalog.DatasetID) *core.Identifier { if id == nil { return nil } - return &core.Identifier{ResourceType: core.ResourceType_DATASET, Name: id.Name, Project: id.Project, Domain: id.Domain, Version: id.Version} + return &core.Identifier{ResourceType: core.ResourceType_DATASET, Name: id.GetName(), Project: id.GetProject(), Domain: id.GetDomain(), Version: id.GetVersion()} } // With Node-Node relationship this is bound to change. So lets keep it extensible @@ -170,7 +170,7 @@ func GetDatasetMetadataForSource(taskExecutionID *core.TaskExecutionIdentifier) } return &datacatalog.Metadata{ KeyMap: map[string]string{ - taskVersionKey: taskExecutionID.TaskId.Version, + taskVersionKey: taskExecutionID.GetTaskId().GetVersion(), }, } } @@ -181,10 +181,10 @@ func GetArtifactMetadataForSource(taskExecutionID *core.TaskExecutionIdentifier) } return &datacatalog.Metadata{ KeyMap: map[string]string{ - execProjectKey: taskExecutionID.NodeExecutionId.GetExecutionId().GetProject(), - execDomainKey: taskExecutionID.NodeExecutionId.GetExecutionId().GetDomain(), - execNameKey: taskExecutionID.NodeExecutionId.GetExecutionId().GetName(), - execNodeIDKey: taskExecutionID.NodeExecutionId.GetNodeId(), + execProjectKey: taskExecutionID.GetNodeExecutionId().GetExecutionId().GetProject(), + execDomainKey: taskExecutionID.GetNodeExecutionId().GetExecutionId().GetDomain(), + execNameKey: taskExecutionID.GetNodeExecutionId().GetExecutionId().GetName(), + execNodeIDKey: taskExecutionID.GetNodeExecutionId().GetNodeId(), execTaskAttemptKey: strconv.Itoa(int(taskExecutionID.GetRetryAttempt())), }, } @@ -202,7 +202,7 @@ func GetSourceFromMetadata(datasetMd, artifactMd *datacatalog.Metadata, currentI } // 
Jul-06-2020 DataCatalog stores only wfExecutionKey & taskVersionKey So we will default the project / domain to the current dataset's project domain - val := GetOrDefault(artifactMd.KeyMap, execTaskAttemptKey, "0") + val := GetOrDefault(artifactMd.GetKeyMap(), execTaskAttemptKey, "0") attempt, err := strconv.ParseUint(val, 10, 32) if err != nil { return nil, fmt.Errorf("failed to parse [%v] to integer. Error: %w", val, err) @@ -210,19 +210,19 @@ func GetSourceFromMetadata(datasetMd, artifactMd *datacatalog.Metadata, currentI return &core.TaskExecutionIdentifier{ TaskId: &core.Identifier{ - ResourceType: currentID.ResourceType, - Project: currentID.Project, - Domain: currentID.Domain, - Name: currentID.Name, - Version: GetOrDefault(datasetMd.KeyMap, taskVersionKey, "unknown"), + ResourceType: currentID.GetResourceType(), + Project: currentID.GetProject(), + Domain: currentID.GetDomain(), + Name: currentID.GetName(), + Version: GetOrDefault(datasetMd.GetKeyMap(), taskVersionKey, "unknown"), }, RetryAttempt: uint32(attempt), NodeExecutionId: &core.NodeExecutionIdentifier{ - NodeId: GetOrDefault(artifactMd.KeyMap, execNodeIDKey, "unknown"), + NodeId: GetOrDefault(artifactMd.GetKeyMap(), execNodeIDKey, "unknown"), ExecutionId: &core.WorkflowExecutionIdentifier{ - Project: GetOrDefault(artifactMd.KeyMap, execProjectKey, currentID.GetProject()), - Domain: GetOrDefault(artifactMd.KeyMap, execDomainKey, currentID.GetDomain()), - Name: GetOrDefault(artifactMd.KeyMap, execNameKey, "unknown"), + Project: GetOrDefault(artifactMd.GetKeyMap(), execProjectKey, currentID.GetProject()), + Domain: GetOrDefault(artifactMd.GetKeyMap(), execDomainKey, currentID.GetDomain()), + Name: GetOrDefault(artifactMd.GetKeyMap(), execNameKey, "unknown"), }, }, }, nil @@ -236,8 +236,8 @@ func EventCatalogMetadata(datasetID *datacatalog.DatasetID, tag *datacatalog.Tag if tag != nil { md.ArtifactTag = &core.CatalogArtifactTag{ - ArtifactId: tag.ArtifactId, - Name: tag.Name, + ArtifactId: 
tag.GetArtifactId(), + Name: tag.GetName(), } } diff --git a/flytepropeller/pkg/controller/nodes/catalog/datacatalog/transformer_test.go b/flytepropeller/pkg/controller/nodes/catalog/datacatalog/transformer_test.go index 15bceb2e08..c1d27f5891 100644 --- a/flytepropeller/pkg/controller/nodes/catalog/datacatalog/transformer_test.go +++ b/flytepropeller/pkg/controller/nodes/catalog/datacatalog/transformer_test.go @@ -32,8 +32,8 @@ func TestNilParamTask(t *testing.T) { } datasetID, err := GenerateDatasetIDForTask(context.TODO(), key) assert.NoError(t, err) - assert.NotEmpty(t, datasetID.Version) - assert.Equal(t, "1.0.0-GKw-c0Pw-GKw-c0Pw", datasetID.Version) + assert.NotEmpty(t, datasetID.GetVersion()) + assert.Equal(t, "1.0.0-GKw-c0Pw-GKw-c0Pw", datasetID.GetVersion()) } // Ensure that empty parameters generate the same dataset as nil parameters @@ -53,8 +53,8 @@ func TestEmptyParamTask(t *testing.T) { } datasetID, err := GenerateDatasetIDForTask(context.TODO(), key) assert.NoError(t, err) - assert.NotEmpty(t, datasetID.Version) - assert.Equal(t, "1.0.0-GKw-c0Pw-GKw-c0Pw", datasetID.Version) + assert.NotEmpty(t, datasetID.GetVersion()) + assert.Equal(t, "1.0.0-GKw-c0Pw-GKw-c0Pw", datasetID.GetVersion()) key.TypedInterface.Inputs = nil key.TypedInterface.Outputs = nil @@ -84,8 +84,8 @@ func TestVariableMapOrder(t *testing.T) { } datasetID, err := GenerateDatasetIDForTask(context.TODO(), key) assert.NoError(t, err) - assert.NotEmpty(t, datasetID.Version) - assert.Equal(t, "1.0.0-UxVtPm0k-GKw-c0Pw", datasetID.Version) + assert.NotEmpty(t, datasetID.GetVersion()) + assert.Equal(t, "1.0.0-UxVtPm0k-GKw-c0Pw", datasetID.GetVersion()) key.TypedInterface.Inputs = &core.VariableMap{ Variables: map[string]*core.Variable{ @@ -96,7 +96,7 @@ func TestVariableMapOrder(t *testing.T) { datasetIDDupe, err := GenerateDatasetIDForTask(context.TODO(), key) assert.NoError(t, err) - assert.Equal(t, "1.0.0-UxVtPm0k-GKw-c0Pw", datasetIDDupe.Version) + assert.Equal(t, 
"1.0.0-UxVtPm0k-GKw-c0Pw", datasetIDDupe.GetVersion()) assert.Equal(t, datasetID.String(), datasetIDDupe.String()) } @@ -173,17 +173,17 @@ func TestGetArtifactMetadataForSource(t *testing.T) { }{ {"nil TaskExec", args{}, nil}, {"TaskExec", args{tID}, map[string]string{ - execTaskAttemptKey: strconv.Itoa(int(tID.RetryAttempt)), - execProjectKey: tID.NodeExecutionId.ExecutionId.Project, - execDomainKey: tID.NodeExecutionId.ExecutionId.Domain, - execNodeIDKey: tID.NodeExecutionId.NodeId, - execNameKey: tID.NodeExecutionId.ExecutionId.Name, + execTaskAttemptKey: strconv.Itoa(int(tID.GetRetryAttempt())), + execProjectKey: tID.GetNodeExecutionId().GetExecutionId().GetProject(), + execDomainKey: tID.GetNodeExecutionId().GetExecutionId().GetDomain(), + execNodeIDKey: tID.GetNodeExecutionId().GetNodeId(), + execNameKey: tID.GetNodeExecutionId().GetExecutionId().GetName(), }}, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - if got := GetArtifactMetadataForSource(tt.args.taskExecutionID); !reflect.DeepEqual(got.KeyMap, tt.want) { - t.Errorf("GetMetadataForSource() = %v, want %v", got.KeyMap, tt.want) + if got := GetArtifactMetadataForSource(tt.args.taskExecutionID); !reflect.DeepEqual(got.GetKeyMap(), tt.want) { + t.Errorf("GetMetadataForSource() = %v, want %v", got.GetKeyMap(), tt.want) } }) } @@ -247,13 +247,13 @@ func TestGetSourceFromMetadata(t *testing.T) { RetryAttempt: 0, }}, // In legacy only taskVersionKey is available - {"legacy", args{datasetMd: GetDatasetMetadataForSource(&tID).KeyMap, currentID: currentTaskID}, &core.TaskExecutionIdentifier{ + {"legacy", args{datasetMd: GetDatasetMetadataForSource(&tID).GetKeyMap(), currentID: currentTaskID}, &core.TaskExecutionIdentifier{ TaskId: &core.Identifier{ ResourceType: core.ResourceType_TASK, Name: "x", Project: "project", Domain: "development", - Version: tID.TaskId.Version, + Version: tID.GetTaskId().GetVersion(), }, NodeExecutionId: &core.NodeExecutionIdentifier{ ExecutionId: 
&core.WorkflowExecutionIdentifier{ @@ -266,7 +266,7 @@ func TestGetSourceFromMetadata(t *testing.T) { RetryAttempt: 0, }}, // Completely available - {"latest", args{datasetMd: GetDatasetMetadataForSource(&tID).KeyMap, artifactMd: GetArtifactMetadataForSource(&tID).KeyMap, currentID: currentTaskID}, &tID}, + {"latest", args{datasetMd: GetDatasetMetadataForSource(&tID).GetKeyMap(), artifactMd: GetArtifactMetadataForSource(&tID).GetKeyMap(), currentID: currentTaskID}, &tID}, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { @@ -325,11 +325,11 @@ func TestEventCatalogMetadata(t *testing.T) { func TestDatasetIDToIdentifier(t *testing.T) { id := DatasetIDToIdentifier(&datacatalog.DatasetID{Project: "p", Domain: "d", Name: "n", Version: "v"}) - assert.Equal(t, core.ResourceType_DATASET, id.ResourceType) - assert.Equal(t, "n", id.Name) - assert.Equal(t, "p", id.Project) - assert.Equal(t, "d", id.Domain) - assert.Equal(t, "v", id.Version) + assert.Equal(t, core.ResourceType_DATASET, id.GetResourceType()) + assert.Equal(t, "n", id.GetName()) + assert.Equal(t, "p", id.GetProject()) + assert.Equal(t, "d", id.GetDomain()) + assert.Equal(t, "v", id.GetVersion()) } func TestGenerateTaskOutputsFromArtifact_IDLNotFound(t *testing.T) { diff --git a/flytepropeller/pkg/controller/nodes/common/utils.go b/flytepropeller/pkg/controller/nodes/common/utils.go index 7a7e0c0941..6bb7ee554b 100644 --- a/flytepropeller/pkg/controller/nodes/common/utils.go +++ b/flytepropeller/pkg/controller/nodes/common/utils.go @@ -75,7 +75,7 @@ func GetTargetEntity(ctx context.Context, nCtx interfaces.NodeExecutionContext) // This doesn't feed a very important part of the node execution event, swallow it for now. 
logger.Errorf(ctx, "Failed to get task [%v] with error [%v]", taskID, err) } - targetEntity = taskID.CoreTask().Id + targetEntity = taskID.CoreTask().GetId() } return targetEntity } @@ -110,7 +110,7 @@ func OffloadLargeLiteral(ctx context.Context, datastore *storage.DataStore, data if literalSizeMB >= literalOffloadingConfig.MaxSizeInMBForOffloading { errString := fmt.Sprintf("Literal size [%d] MB is larger than the max size [%d] MB for offloading", literalSizeMB, literalOffloadingConfig.MaxSizeInMBForOffloading) logger.Errorf(ctx, errString) - return fmt.Errorf(errString) + return fmt.Errorf(errString) //nolint:govet,staticcheck } if literalSizeMB < literalOffloadingConfig.MinSizeInMBForOffloading { logger.Debugf(ctx, "Literal size [%d] MB is smaller than the min size [%d] MB for offloading", literalSizeMB, literalOffloadingConfig.MinSizeInMBForOffloading) @@ -120,7 +120,7 @@ func OffloadLargeLiteral(ctx context.Context, datastore *storage.DataStore, data if inferredType == nil { errString := "Failed to determine literal type for offloaded literal" logger.Errorf(ctx, errString) - return fmt.Errorf(errString) + return fmt.Errorf(errString) //nolint:govet,staticcheck } // offload the literal @@ -143,7 +143,7 @@ func OffloadLargeLiteral(ctx context.Context, datastore *storage.DataStore, data toBeOffloaded.Value = &idlcore.Literal_OffloadedMetadata{ OffloadedMetadata: &idlcore.LiteralOffloadedMetadata{ Uri: dataReference.String(), - SizeBytes: uint64(literalSizeBytes), + SizeBytes: uint64(literalSizeBytes), // #nosec G115 InferredType: inferredType, }, } diff --git a/flytepropeller/pkg/controller/nodes/common/utils_test.go b/flytepropeller/pkg/controller/nodes/common/utils_test.go index 6043f566ea..ac1ca45bbd 100644 --- a/flytepropeller/pkg/controller/nodes/common/utils_test.go +++ b/flytepropeller/pkg/controller/nodes/common/utils_test.go @@ -154,8 +154,8 @@ func TestOffloadLargeLiteral(t *testing.T) { assert.NoError(t, err) assert.Equal(t, "foo/bar", 
toBeOffloaded.GetOffloadedMetadata().GetUri()) assert.Equal(t, uint64(6), toBeOffloaded.GetOffloadedMetadata().GetSizeBytes()) - assert.Equal(t, inferredType.GetSimple(), toBeOffloaded.GetOffloadedMetadata().InferredType.GetSimple()) - assert.Equal(t, base64.RawURLEncoding.EncodeToString(expectedLiteralDigest), toBeOffloaded.Hash) + assert.Equal(t, inferredType.GetSimple(), toBeOffloaded.GetOffloadedMetadata().GetInferredType().GetSimple()) + assert.Equal(t, base64.RawURLEncoding.EncodeToString(expectedLiteralDigest), toBeOffloaded.GetHash()) }) t.Run("offload successful with valid size and hash passed in", func(t *testing.T) { @@ -191,7 +191,7 @@ func TestOffloadLargeLiteral(t *testing.T) { } err := OffloadLargeLiteral(ctx, datastore, dataReference, toBeOffloaded, inferredType, literalOffloadingConfig) assert.NoError(t, err) - assert.Equal(t, "hash", toBeOffloaded.Hash) + assert.Equal(t, "hash", toBeOffloaded.GetHash()) }) t.Run("offload fails with size larger than max", func(t *testing.T) { diff --git a/flytepropeller/pkg/controller/nodes/dynamic/dynamic_workflow.go b/flytepropeller/pkg/controller/nodes/dynamic/dynamic_workflow.go index 95e8f4c0bb..de0108d4dc 100644 --- a/flytepropeller/pkg/controller/nodes/dynamic/dynamic_workflow.go +++ b/flytepropeller/pkg/controller/nodes/dynamic/dynamic_workflow.go @@ -46,8 +46,8 @@ func setEphemeralNodeExecutionStatusAttributes(ctx context.Context, djSpec *core // We keep track of the original node ids because that's where flytekit inputs are written to in the case of legacy // map tasks. The modern map tasks do not write input files any longer and this entire piece of code can be removed. 
parentNodeID := nCtx.NodeID() - for _, node := range djSpec.Nodes { - nodeID := node.Id + for _, node := range djSpec.GetNodes() { + nodeID := node.GetId() var subNodeStatus v1alpha1.ExecutableNodeStatus newID, err := hierarchicalNodeID(parentNodeID, currentAttemptStr, nodeID) if err != nil { @@ -98,16 +98,16 @@ func (d dynamicNodeTaskNodeHandler) buildDynamicWorkflowTemplate(ctx context.Con return nil, errors.Wrapf("TaskReadFailed", err, "Failed to find task [%v].", nCtx.TaskReader().GetTaskID()) } - for _, t := range djSpec.Tasks { + for _, t := range djSpec.GetTasks() { if t.GetContainer() != nil && parentTask.GetContainer() != nil { - t.GetContainer().Config = append(t.GetContainer().Config, parentTask.GetContainer().Config...) + t.GetContainer().Config = append(t.GetContainer().Config, parentTask.GetContainer().GetConfig()...) } } } if nCtx.ExecutionContext().GetEventVersion() == v1alpha1.EventVersion0 { - for _, o := range djSpec.Outputs { - err = updateBindingNodeIDsWithLineage(parentNodeID, currentAttemptStr, o.Binding) + for _, o := range djSpec.GetOutputs() { + err = updateBindingNodeIDsWithLineage(parentNodeID, currentAttemptStr, o.GetBinding()) if err != nil { return nil, err } @@ -115,14 +115,14 @@ func (d dynamicNodeTaskNodeHandler) buildDynamicWorkflowTemplate(ctx context.Con } return &core.WorkflowTemplate{ Id: &core.Identifier{ - Project: nCtx.NodeExecutionMetadata().GetNodeExecutionID().GetExecutionId().Project, - Domain: nCtx.NodeExecutionMetadata().GetNodeExecutionID().GetExecutionId().Domain, - Name: fmt.Sprintf(dynamicWfNameTemplate, nCtx.NodeExecutionMetadata().GetNodeExecutionID().NodeId), + Project: nCtx.NodeExecutionMetadata().GetNodeExecutionID().GetExecutionId().GetProject(), + Domain: nCtx.NodeExecutionMetadata().GetNodeExecutionID().GetExecutionId().GetDomain(), + Name: fmt.Sprintf(dynamicWfNameTemplate, nCtx.NodeExecutionMetadata().GetNodeExecutionID().GetNodeId()), Version: rand.String(10), ResourceType: core.ResourceType_WORKFLOW, 
}, - Nodes: djSpec.Nodes, - Outputs: djSpec.Outputs, + Nodes: djSpec.GetNodes(), + Outputs: djSpec.GetOutputs(), Interface: iface, }, nil } @@ -228,14 +228,14 @@ func (d dynamicNodeTaskNodeHandler) buildDynamicWorkflow(ctx context.Context, nC return nil, nil, errors.Wrapf(utils.ErrorCodeSystem, err, "failed to build dynamic workflow template") } - compiledTasks, err := compileTasks(ctx, djSpec.Tasks) + compiledTasks, err := compileTasks(ctx, djSpec.GetTasks()) if err != nil { return nil, nil, errors.Wrapf(utils.ErrorCodeUser, err, "failed to compile dynamic tasks") } // Get the requirements, that is, a list of all the task IDs and the launch plan IDs that will be called as part of this dynamic task. // The definition of these will need to be fetched from Admin (in order to get the interface). - requirements, err := compiler.GetRequirements(wf, djSpec.Subworkflows) + requirements, err := compiler.GetRequirements(wf, djSpec.GetSubworkflows()) if err != nil { return nil, nil, errors.Wrapf(utils.ErrorCodeUser, err, "failed to Get requirements for subworkflows") } @@ -251,7 +251,7 @@ func (d dynamicNodeTaskNodeHandler) buildDynamicWorkflow(ctx context.Context, nC // See https://github.com/flyteorg/flyte/issues/219 for more information. 
var closure *core.CompiledWorkflowClosure - closure, err = compiler.CompileWorkflow(wf, djSpec.Subworkflows, compiledTasks, launchPlanInterfaces) + closure, err = compiler.CompileWorkflow(wf, djSpec.GetSubworkflows(), compiledTasks, launchPlanInterfaces) if err != nil { return nil, nil, errors.Wrapf(utils.ErrorCodeUser, err, "malformed dynamic workflow") } @@ -348,10 +348,10 @@ func (d dynamicNodeTaskNodeHandler) getLaunchPlanInterfaces(ctx context.Context, logger.Debugf(ctx, "Error fetching launch plan definition from admin") if launchplan.IsNotFound(err) || launchplan.IsUserError(err) { return nil, errors.Wrapf(utils.ErrorCodeUser, err, "incorrectly specified launchplan %s:%s:%s:%s", - id.Project, id.Domain, id.Name, id.Version) + id.GetProject(), id.GetDomain(), id.GetName(), id.GetVersion()) } return nil, errors.Wrapf(utils.ErrorCodeSystem, err, "unable to retrieve launchplan information %s:%s:%s:%s", - id.Project, id.Domain, id.Name, id.Version) + id.GetProject(), id.GetDomain(), id.GetName(), id.GetVersion()) } launchPlanInterfaces[idx] = compiler.NewLaunchPlanInterfaceProvider(lp) } diff --git a/flytepropeller/pkg/controller/nodes/dynamic/dynamic_workflow_test.go b/flytepropeller/pkg/controller/nodes/dynamic/dynamic_workflow_test.go index 3cb27dd65f..ec20c14cd0 100644 --- a/flytepropeller/pkg/controller/nodes/dynamic/dynamic_workflow_test.go +++ b/flytepropeller/pkg/controller/nodes/dynamic/dynamic_workflow_test.go @@ -532,7 +532,7 @@ func Test_dynamicNodeHandler_buildContextualDynamicWorkflow_withLaunchPlans(t *t } mockLPLauncher := &mocks5.Reader{} mockLPLauncher.OnGetLaunchPlanMatch(mock.Anything, mock.MatchedBy(func(id *core.Identifier) bool { - return lpID.Name == id.Name && lpID.Domain == id.Domain && lpID.Project == id.Project && lpID.ResourceType == id.ResourceType + return lpID.GetName() == id.GetName() && lpID.GetDomain() == id.GetDomain() && lpID.GetProject() == id.GetProject() && lpID.GetResourceType() == id.GetResourceType() 
})).Return(&admin.LaunchPlan{ Id: lpID, Closure: &admin.LaunchPlanClosure{ diff --git a/flytepropeller/pkg/controller/nodes/dynamic/handler.go b/flytepropeller/pkg/controller/nodes/dynamic/handler.go index e23f145bb3..d8977eacdb 100644 --- a/flytepropeller/pkg/controller/nodes/dynamic/handler.go +++ b/flytepropeller/pkg/controller/nodes/dynamic/handler.go @@ -103,9 +103,9 @@ func (d dynamicNodeTaskNodeHandler) produceDynamicWorkflow(ctx context.Context, return handler.Transition{}, handler.DynamicNodeState{}, err } taskNodeInfoMetadata := &event.TaskNodeMetadata{} - if dCtx.subWorkflowClosure != nil && dCtx.subWorkflowClosure.Primary != nil && dCtx.subWorkflowClosure.Primary.Template != nil { + if dCtx.subWorkflowClosure != nil && dCtx.subWorkflowClosure.GetPrimary() != nil && dCtx.subWorkflowClosure.GetPrimary().GetTemplate() != nil { taskNodeInfoMetadata.DynamicWorkflow = &event.DynamicWorkflowNodeMetadata{ - Id: dCtx.subWorkflowClosure.Primary.Template.Id, + Id: dCtx.subWorkflowClosure.GetPrimary().GetTemplate().GetId(), CompiledWorkflow: dCtx.subWorkflowClosure, DynamicJobSpecUri: dCtx.dynamicJobSpecURI, } diff --git a/flytepropeller/pkg/controller/nodes/dynamic/utils.go b/flytepropeller/pkg/controller/nodes/dynamic/utils.go index 690cbe06a1..6a75e551c7 100644 --- a/flytepropeller/pkg/controller/nodes/dynamic/utils.go +++ b/flytepropeller/pkg/controller/nodes/dynamic/utils.go @@ -21,7 +21,7 @@ func underlyingInterface(ctx context.Context, taskReader interfaces.TaskReader) } if t.GetInterface() != nil { - iface.Outputs = t.GetInterface().Outputs + iface.Outputs = t.GetInterface().GetOutputs() } return iface, nil } @@ -31,21 +31,21 @@ func hierarchicalNodeID(parentNodeID, retryAttempt, nodeID string) (string, erro } func updateBindingNodeIDsWithLineage(parentNodeID, retryAttempt string, binding *core.BindingData) (err error) { - switch b := binding.Value.(type) { + switch b := binding.GetValue().(type) { case *core.BindingData_Promise: - b.Promise.NodeId, err = 
hierarchicalNodeID(parentNodeID, retryAttempt, b.Promise.NodeId) + b.Promise.NodeId, err = hierarchicalNodeID(parentNodeID, retryAttempt, b.Promise.GetNodeId()) if err != nil { return err } case *core.BindingData_Collection: - for _, item := range b.Collection.Bindings { + for _, item := range b.Collection.GetBindings() { err = updateBindingNodeIDsWithLineage(parentNodeID, retryAttempt, item) if err != nil { return err } } case *core.BindingData_Map: - for _, item := range b.Map.Bindings { + for _, item := range b.Map.GetBindings() { err = updateBindingNodeIDsWithLineage(parentNodeID, retryAttempt, item) if err != nil { return err @@ -60,7 +60,7 @@ func compileTasks(_ context.Context, tasks []*core.TaskTemplate) ([]*core.Compil compiledTasks := make([]*core.CompiledTask, 0, len(tasks)) visitedTasks := sets.NewString() for _, t := range tasks { - if visitedTasks.Has(t.Id.String()) { + if visitedTasks.Has(t.GetId().String()) { continue } @@ -70,7 +70,7 @@ func compileTasks(_ context.Context, tasks []*core.TaskTemplate) ([]*core.Compil } compiledTasks = append(compiledTasks, ct) - visitedTasks.Insert(t.Id.String()) + visitedTasks.Insert(t.GetId().String()) } return compiledTasks, nil diff --git a/flytepropeller/pkg/controller/nodes/dynamic/utils_test.go b/flytepropeller/pkg/controller/nodes/dynamic/utils_test.go index 6afdd487b9..ecc1904c49 100644 --- a/flytepropeller/pkg/controller/nodes/dynamic/utils_test.go +++ b/flytepropeller/pkg/controller/nodes/dynamic/utils_test.go @@ -62,5 +62,5 @@ func TestUnderlyingInterface(t *testing.T) { iface, err = underlyingInterface(context.TODO(), tr) assert.NoError(t, err) assert.NotNil(t, iface) - assert.Nil(t, iface.Outputs) + assert.Nil(t, iface.GetOutputs()) } diff --git a/flytepropeller/pkg/controller/nodes/executor.go b/flytepropeller/pkg/controller/nodes/executor.go index b25ad64fb6..04adfc5d54 100644 --- a/flytepropeller/pkg/controller/nodes/executor.go +++ b/flytepropeller/pkg/controller/nodes/executor.go @@ -522,19 
+522,19 @@ func (c *nodeExecutor) RecordTransitionLatency(ctx context.Context, dag executor func (c *nodeExecutor) recoverInputs(ctx context.Context, nCtx interfaces.NodeExecutionContext, recovered *admin.NodeExecution, recoveredData *admin.NodeExecutionGetDataResponse) (*core.LiteralMap, error) { - nodeInputs := recoveredData.FullInputs + nodeInputs := recoveredData.GetFullInputs() if nodeInputs != nil { if err := c.store.WriteProtobuf(ctx, nCtx.InputReader().GetInputPath(), storage.Options{}, nodeInputs); err != nil { c.metrics.InputsWriteFailure.Inc(ctx) logger.Errorf(ctx, "Failed to move recovered inputs for Node. Error [%v]. InputsFile [%s]", err, nCtx.InputReader().GetInputPath()) return nil, errors.Wrapf(errors.StorageError, nCtx.NodeID(), err, "Failed to store inputs for Node. InputsFile [%s]", nCtx.InputReader().GetInputPath()) } - } else if len(recovered.InputUri) > 0 { + } else if len(recovered.GetInputUri()) > 0 { // If the inputs are too large they won't be returned inline in the RecoverData call. We must fetch them before copying them. 
nodeInputs = &core.LiteralMap{} - if recoveredData.FullInputs == nil { - if err := c.store.ReadProtobuf(ctx, storage.DataReference(recovered.InputUri), nodeInputs); err != nil { - return nil, errors.Wrapf(errors.InputsNotFoundError, nCtx.NodeID(), err, "failed to read data from dataDir [%v].", recovered.InputUri) + if recoveredData.GetFullInputs() == nil { + if err := c.store.ReadProtobuf(ctx, storage.DataReference(recovered.GetInputUri()), nodeInputs); err != nil { + return nil, errors.Wrapf(errors.InputsNotFoundError, nCtx.NodeID(), err, "failed to read data from dataDir [%v].", recovered.GetInputUri()) } } @@ -549,11 +549,11 @@ func (c *nodeExecutor) recoverInputs(ctx context.Context, nCtx interfaces.NodeEx } func (c *nodeExecutor) attemptRecovery(ctx context.Context, nCtx interfaces.NodeExecutionContext) (handler.PhaseInfo, error) { - fullyQualifiedNodeID := nCtx.NodeExecutionMetadata().GetNodeExecutionID().NodeId + fullyQualifiedNodeID := nCtx.NodeExecutionMetadata().GetNodeExecutionID().GetNodeId() if nCtx.ExecutionContext().GetEventVersion() != v1alpha1.EventVersion0 { // compute fully qualified node id (prefixed with parent id and retry attempt) to ensure uniqueness var err error - fullyQualifiedNodeID, err = common.GenerateUniqueID(nCtx.ExecutionContext().GetParentInfo(), nCtx.NodeExecutionMetadata().GetNodeExecutionID().NodeId) + fullyQualifiedNodeID, err = common.GenerateUniqueID(nCtx.ExecutionContext().GetParentInfo(), nCtx.NodeExecutionMetadata().GetNodeExecutionID().GetNodeId()) if err != nil { return handler.PhaseInfoUndefined, err } @@ -572,13 +572,13 @@ func (c *nodeExecutor) attemptRecovery(ctx context.Context, nCtx interfaces.Node logger.Warnf(ctx, "call to recover node [%+v] returned no error but also no node", nCtx.NodeExecutionMetadata().GetNodeExecutionID()) return handler.PhaseInfoUndefined, nil } - if recovered.Closure == nil { + if recovered.GetClosure() == nil { logger.Warnf(ctx, "Fetched node execution [%+v] data but was missing closure. 
Will not attempt to recover", nCtx.NodeExecutionMetadata().GetNodeExecutionID()) return handler.PhaseInfoUndefined, nil } // A recoverable node execution should always be in a terminal phase - switch recovered.Closure.Phase { + switch recovered.GetClosure().GetPhase() { case core.NodeExecution_SKIPPED: return handler.PhaseInfoUndefined, nil case core.NodeExecution_SUCCEEDED: @@ -588,9 +588,9 @@ func (c *nodeExecutor) attemptRecovery(ctx context.Context, nCtx interfaces.Node default: // The node execution may be partially recoverable through intra task checkpointing. Save the checkpoint // uri in the task node state to pass to the task handler later on. - if metadata, ok := recovered.Closure.TargetMetadata.(*admin.NodeExecutionClosure_TaskNodeMetadata); ok { + if metadata, ok := recovered.GetClosure().GetTargetMetadata().(*admin.NodeExecutionClosure_TaskNodeMetadata); ok { state := nCtx.NodeStateReader().GetTaskNodeState() - state.PreviousNodeExecutionCheckpointURI = storage.DataReference(metadata.TaskNodeMetadata.CheckpointUri) + state.PreviousNodeExecutionCheckpointURI = storage.DataReference(metadata.TaskNodeMetadata.GetCheckpointUri()) err = nCtx.NodeStateWriter().PutTaskNodeState(state) if err != nil { logger.Warnf(ctx, "failed to save recovered checkpoint uri for [%+v]: [%+v]", @@ -601,7 +601,7 @@ func (c *nodeExecutor) attemptRecovery(ctx context.Context, nCtx interfaces.Node // if this node is a dynamic task we attempt to recover the compiled workflow from instances where the parent // task succeeded but the dynamic task did not complete. this is important to ensure correctness since node ids // within the compiled closure may not be generated deterministically. 
- if recovered.Metadata != nil && recovered.Metadata.IsDynamic && len(recovered.Closure.DynamicJobSpecUri) > 0 { + if recovered.GetMetadata() != nil && recovered.GetMetadata().GetIsDynamic() && len(recovered.GetClosure().GetDynamicJobSpecUri()) > 0 { // recover node inputs recoveredData, err := c.recoveryClient.RecoverNodeExecutionData(ctx, nCtx.ExecutionContext().GetExecutionConfig().RecoveryExecution.WorkflowExecutionIdentifier, fullyQualifiedNodeID) @@ -619,7 +619,7 @@ func (c *nodeExecutor) attemptRecovery(ctx context.Context, nCtx interfaces.Node return handler.PhaseInfoUndefined, err } - dynamicJobSpecReference := storage.DataReference(recovered.Closure.DynamicJobSpecUri) + dynamicJobSpecReference := storage.DataReference(recovered.GetClosure().GetDynamicJobSpecUri()) if err := nCtx.DataStore().CopyRaw(ctx, dynamicJobSpecReference, f.GetLoc(), storage.Options{}); err != nil { return handler.PhaseInfoUndefined, errors.Wrapf(errors.StorageError, nCtx.NodeID(), err, "failed to store dynamic job spec for node. 
source file [%s] destination file [%s]", dynamicJobSpecReference, f.GetLoc()) @@ -635,7 +635,7 @@ func (c *nodeExecutor) attemptRecovery(ctx context.Context, nCtx interfaces.Node return handler.PhaseInfoRunning(&handler.ExecutionInfo{}), nil } - logger.Debugf(ctx, "Node [%+v] phase [%v] is not recoverable", nCtx.NodeExecutionMetadata().GetNodeExecutionID(), recovered.Closure.Phase) + logger.Debugf(ctx, "Node [%+v] phase [%v] is not recoverable", nCtx.NodeExecutionMetadata().GetNodeExecutionID(), recovered.GetClosure().GetPhase()) return handler.PhaseInfoUndefined, nil } @@ -662,13 +662,13 @@ func (c *nodeExecutor) attemptRecovery(ctx context.Context, nCtx interfaces.Node // Similarly, copy outputs' reference so := storage.Options{} var outputs = &core.LiteralMap{} - if recoveredData.FullOutputs != nil { - outputs = recoveredData.FullOutputs - } else if recovered.Closure.GetOutputData() != nil { - outputs = recovered.Closure.GetOutputData() - } else if len(recovered.Closure.GetOutputUri()) > 0 { - if err := c.store.ReadProtobuf(ctx, storage.DataReference(recovered.Closure.GetOutputUri()), outputs); err != nil { - return handler.PhaseInfoUndefined, errors.Wrapf(errors.InputsNotFoundError, nCtx.NodeID(), err, "failed to read output data [%v].", recovered.Closure.GetOutputUri()) + if recoveredData.GetFullOutputs() != nil { + outputs = recoveredData.GetFullOutputs() + } else if recovered.GetClosure().GetOutputData() != nil { + outputs = recovered.GetClosure().GetOutputData() + } else if len(recovered.GetClosure().GetOutputUri()) > 0 { + if err := c.store.ReadProtobuf(ctx, storage.DataReference(recovered.GetClosure().GetOutputUri()), outputs); err != nil { + return handler.PhaseInfoUndefined, errors.Wrapf(errors.InputsNotFoundError, nCtx.NodeID(), err, "failed to read output data [%v].", recovered.GetClosure().GetOutputUri()) } } else { logger.Debugf(ctx, "No outputs found for recovered node [%+v]", nCtx.NodeExecutionMetadata().GetNodeExecutionID()) @@ -679,7 +679,7 @@ 
func (c *nodeExecutor) attemptRecovery(ctx context.Context, nCtx interfaces.Node OutputURI: outputFile, } - deckFile := storage.DataReference(recovered.Closure.GetDeckUri()) + deckFile := storage.DataReference(recovered.GetClosure().GetDeckUri()) if len(deckFile) > 0 { metadata, err := nCtx.DataStore().Head(ctx, deckFile) if err != nil { @@ -702,24 +702,24 @@ func (c *nodeExecutor) attemptRecovery(ctx context.Context, nCtx interfaces.Node OutputInfo: oi, } - if recovered.Closure.GetTaskNodeMetadata() != nil { + if recovered.GetClosure().GetTaskNodeMetadata() != nil { taskNodeInfo := &handler.TaskNodeInfo{ TaskNodeMetadata: &event.TaskNodeMetadata{ - CatalogKey: recovered.Closure.GetTaskNodeMetadata().CatalogKey, - CacheStatus: recovered.Closure.GetTaskNodeMetadata().CacheStatus, + CatalogKey: recovered.GetClosure().GetTaskNodeMetadata().GetCatalogKey(), + CacheStatus: recovered.GetClosure().GetTaskNodeMetadata().GetCacheStatus(), }, } - if recoveredData.DynamicWorkflow != nil { + if recoveredData.GetDynamicWorkflow() != nil { taskNodeInfo.TaskNodeMetadata.DynamicWorkflow = &event.DynamicWorkflowNodeMetadata{ - Id: recoveredData.DynamicWorkflow.Id, - CompiledWorkflow: recoveredData.DynamicWorkflow.CompiledWorkflow, + Id: recoveredData.GetDynamicWorkflow().GetId(), + CompiledWorkflow: recoveredData.GetDynamicWorkflow().GetCompiledWorkflow(), } } info.TaskNodeInfo = taskNodeInfo - } else if recovered.Closure.GetWorkflowNodeMetadata() != nil { + } else if recovered.GetClosure().GetWorkflowNodeMetadata() != nil { logger.Warnf(ctx, "Attempted to recover node") info.WorkflowNodeInfo = &handler.WorkflowNodeInfo{ - LaunchedWorkflowID: recovered.Closure.GetWorkflowNodeMetadata().ExecutionId, + LaunchedWorkflowID: recovered.GetClosure().GetWorkflowNodeMetadata().GetExecutionId(), } } return handler.PhaseInfoRecovered(info), nil @@ -765,7 +765,7 @@ func (c *nodeExecutor) preExecute(ctx context.Context, dag executors.DAGStructur } if nodeInputs != nil { - p := 
common.CheckOffloadingCompat(ctx, nCtx, nodeInputs.Literals, node, c.literalOffloadingConfig) + p := common.CheckOffloadingCompat(ctx, nCtx, nodeInputs.GetLiterals(), node, c.literalOffloadingConfig) if p != nil { return *p, nil } @@ -809,7 +809,7 @@ func (c *nodeExecutor) isEligibleForRetry(nCtx interfaces.NodeExecutionContext, if config.GetConfig().NodeConfig.IgnoreRetryCause { currentAttempt = nodeStatus.GetAttempts() + 1 } else { - if err.Kind == core.ExecutionError_SYSTEM { + if err.GetKind() == core.ExecutionError_SYSTEM { currentAttempt = nodeStatus.GetSystemFailures() maxAttempts = c.maxNodeRetriesForSystemFailures isEligible = currentAttempt < c.maxNodeRetriesForSystemFailures @@ -818,9 +818,9 @@ func (c *nodeExecutor) isEligibleForRetry(nCtx interfaces.NodeExecutionContext, currentAttempt = (nodeStatus.GetAttempts() + 1) - nodeStatus.GetSystemFailures() } - maxAttempts = uint32(config.GetConfig().NodeConfig.DefaultMaxAttempts) + maxAttempts = uint32(config.GetConfig().NodeConfig.DefaultMaxAttempts) // #nosec G115 if nCtx.Node().GetRetryStrategy() != nil && nCtx.Node().GetRetryStrategy().MinAttempts != nil && *nCtx.Node().GetRetryStrategy().MinAttempts != 1 { - maxAttempts = uint32(*nCtx.Node().GetRetryStrategy().MinAttempts) + maxAttempts = uint32(*nCtx.Node().GetRetryStrategy().MinAttempts) // #nosec G115 } isEligible = currentAttempt < maxAttempts return @@ -864,8 +864,8 @@ func (c *nodeExecutor) execute(ctx context.Context, h interfaces.NodeHandler, nC if !isEligible { return handler.PhaseInfoFailure( core.ExecutionError_USER, - fmt.Sprintf("RetriesExhausted|%s", phase.GetErr().Code), - fmt.Sprintf("[%d/%d] currentAttempt done. Last Error: %s::%s", currentAttempt, maxAttempts, phase.GetErr().Kind.String(), phase.GetErr().Message), + fmt.Sprintf("RetriesExhausted|%s", phase.GetErr().GetCode()), + fmt.Sprintf("[%d/%d] currentAttempt done. 
Last Error: %s::%s", currentAttempt, maxAttempts, phase.GetErr().GetKind().String(), phase.GetErr().GetMessage()), phase.GetInfo(), ), nil } @@ -894,11 +894,11 @@ func (c *nodeExecutor) Abort(ctx context.Context, h interfaces.NodeHandler, nCtx // only send event if this is the final transition for this node if finalTransition { nodeExecutionID := &core.NodeExecutionIdentifier{ - ExecutionId: nCtx.NodeExecutionMetadata().GetNodeExecutionID().ExecutionId, - NodeId: nCtx.NodeExecutionMetadata().GetNodeExecutionID().NodeId, + ExecutionId: nCtx.NodeExecutionMetadata().GetNodeExecutionID().GetExecutionId(), + NodeId: nCtx.NodeExecutionMetadata().GetNodeExecutionID().GetNodeId(), } if nCtx.ExecutionContext().GetEventVersion() != v1alpha1.EventVersion0 { - currentNodeUniqueID, err := common.GenerateUniqueID(nCtx.ExecutionContext().GetParentInfo(), nodeExecutionID.NodeId) + currentNodeUniqueID, err := common.GenerateUniqueID(nCtx.ExecutionContext().GetParentInfo(), nodeExecutionID.GetNodeId()) if err != nil { return err } @@ -1483,7 +1483,7 @@ func NewExecutor(ctx context.Context, nodeConfig config.NodeConfig, store *stora eventConfig: eventConfig, literalOffloadingConfig: literalOffloadingConfig, interruptibleFailureThreshold: nodeConfig.InterruptibleFailureThreshold, - maxNodeRetriesForSystemFailures: uint32(nodeConfig.MaxNodeRetriesOnSystemFailures), + maxNodeRetriesForSystemFailures: uint32(nodeConfig.MaxNodeRetriesOnSystemFailures), // #nosec G115 metrics: metrics, nodeRecorder: events.NewNodeEventRecorder(eventSink, nodeScope, store), outputResolver: NewRemoteFileOutputResolver(store), diff --git a/flytepropeller/pkg/controller/nodes/executor_test.go b/flytepropeller/pkg/controller/nodes/executor_test.go index 35ab105623..329d52540d 100644 --- a/flytepropeller/pkg/controller/nodes/executor_test.go +++ b/flytepropeller/pkg/controller/nodes/executor_test.go @@ -779,7 +779,7 @@ func TestNodeExecutor_RecursiveNodeHandler_Recurse(t *testing.T) { evRecorder := 
&eventMocks.NodeEventRecorder{} evRecorder.OnRecordNodeEventMatch(mock.Anything, mock.MatchedBy(func(ev *event.NodeExecutionEvent) bool { assert.NotNil(t, ev) - assert.Equal(t, test.eventPhase, ev.Phase) + assert.Equal(t, test.eventPhase, ev.GetPhase()) called = true return true }), mock.Anything).Return(nil) @@ -893,7 +893,7 @@ func TestNodeExecutor_RecursiveNodeHandler_Recurse(t *testing.T) { evRecorder := &eventMocks.NodeEventRecorder{} evRecorder.OnRecordNodeEventMatch(mock.Anything, mock.MatchedBy(func(ev *event.NodeExecutionEvent) bool { assert.NotNil(t, ev) - assert.Equal(t, test.eventPhase, ev.Phase) + assert.Equal(t, test.eventPhase, ev.GetPhase()) called = true return true }), mock.Anything).Return(nil) @@ -939,7 +939,7 @@ func TestNodeExecutor_RecursiveNodeHandler_Recurse(t *testing.T) { } else { assert.Nil(t, s.Err) } - assert.Equal(t, uint32(test.attempts), mockNodeStatus.GetAttempts()) + assert.Equal(t, uint32(test.attempts), mockNodeStatus.GetAttempts()) // #nosec G115 assert.Equal(t, test.eventRecorded, called, "event recording expected: %v, but got %v", test.eventRecorded, called) }) } @@ -1770,18 +1770,18 @@ func TestNodeExecutionEventStartNode(t *testing.T) { }, subWfID) assert.NoError(t, err) - assert.Equal(t, "start-node", ev.Id.NodeId) - assert.Equal(t, execID, ev.Id.ExecutionId) - assert.Empty(t, ev.SpecNodeId) - assert.Nil(t, ev.ParentNodeMetadata) - assert.Equal(t, tID, ev.ParentTaskMetadata.Id) - assert.Empty(t, ev.NodeName) - assert.Empty(t, ev.RetryGroup) + assert.Equal(t, "start-node", ev.GetId().GetNodeId()) + assert.Equal(t, execID, ev.GetId().GetExecutionId()) + assert.Empty(t, ev.GetSpecNodeId()) + assert.Nil(t, ev.GetParentNodeMetadata()) + assert.Equal(t, tID, ev.GetParentTaskMetadata().GetId()) + assert.Empty(t, ev.GetNodeName()) + assert.Empty(t, ev.GetRetryGroup()) assert.Equal(t, "dummy://dummyOutUrl/outputs.pb", - ev.OutputResult.(*event.NodeExecutionEvent_OutputUri).OutputUri) - assert.Equal(t, ev.ProducerId, testClusterID) 
+ ev.GetOutputResult().(*event.NodeExecutionEvent_OutputUri).OutputUri) + assert.Equal(t, ev.GetProducerId(), testClusterID) assert.Equal(t, subWfID, ev.GetTargetEntity()) - assert.Nil(t, ev.InputValue) + assert.Nil(t, ev.GetInputValue()) } func TestNodeExecutionEventV0(t *testing.T) { @@ -1817,14 +1817,14 @@ func TestNodeExecutionEventV0(t *testing.T) { RawOutputPolicy: config.RawOutputPolicyReference, }, nil) assert.NoError(t, err) - assert.Equal(t, "n1", ev.Id.NodeId) - assert.Equal(t, execID, ev.Id.ExecutionId) - assert.Empty(t, ev.SpecNodeId) - assert.Nil(t, ev.ParentNodeMetadata) - assert.Equal(t, tID, ev.ParentTaskMetadata.Id) - assert.Empty(t, ev.NodeName) - assert.Empty(t, ev.RetryGroup) - assert.Empty(t, ev.TargetEntity) + assert.Equal(t, "n1", ev.GetId().GetNodeId()) + assert.Equal(t, execID, ev.GetId().GetExecutionId()) + assert.Empty(t, ev.GetSpecNodeId()) + assert.Nil(t, ev.GetParentNodeMetadata()) + assert.Equal(t, tID, ev.GetParentTaskMetadata().GetId()) + assert.Empty(t, ev.GetNodeName()) + assert.Empty(t, ev.GetRetryGroup()) + assert.Empty(t, ev.GetTargetEntity()) assert.Equal(t, "reference", ev.GetInputUri()) } @@ -1870,18 +1870,18 @@ func TestNodeExecutionEventV1(t *testing.T) { }, nil) assert.NoError(t, err) - assert.Equal(t, "np1-2-n1", eventOpt.Id.NodeId) - assert.Equal(t, execID, eventOpt.Id.ExecutionId) - assert.Equal(t, "id", eventOpt.SpecNodeId) + assert.Equal(t, "np1-2-n1", eventOpt.GetId().GetNodeId()) + assert.Equal(t, execID, eventOpt.GetId().GetExecutionId()) + assert.Equal(t, "id", eventOpt.GetSpecNodeId()) expectParentMetadata := event.ParentNodeExecutionMetadata{ NodeId: "np1", } - assert.Equal(t, expectParentMetadata, *eventOpt.ParentNodeMetadata) - assert.Nil(t, eventOpt.ParentTaskMetadata) - assert.Equal(t, "name", eventOpt.NodeName) - assert.Equal(t, "2", eventOpt.RetryGroup) + assert.True(t, proto.Equal(&expectParentMetadata, eventOpt.GetParentNodeMetadata())) + assert.Nil(t, eventOpt.GetParentTaskMetadata()) + 
assert.Equal(t, "name", eventOpt.GetNodeName()) + assert.Equal(t, "2", eventOpt.GetRetryGroup()) assert.True(t, proto.Equal(eventOpt.GetInputData(), inputs)) - assert.Empty(t, eventOpt.TargetEntity) + assert.Empty(t, eventOpt.GetTargetEntity()) assert.Equal(t, inputs, eventOpt.GetInputData()) } @@ -2326,8 +2326,8 @@ func TestRecover(t *testing.T) { }, CacheStatus: core.CatalogCacheStatus_CACHE_HIT, DynamicWorkflow: &event.DynamicWorkflowNodeMetadata{ - Id: dynamicWorkflow.Id, - CompiledWorkflow: dynamicWorkflow.CompiledWorkflow, + Id: dynamicWorkflow.GetId(), + CompiledWorkflow: dynamicWorkflow.GetCompiledWorkflow(), }, }, phaseInfo.GetInfo().TaskNodeInfo.TaskNodeMetadata)) }) diff --git a/flytepropeller/pkg/controller/nodes/gate/handler.go b/flytepropeller/pkg/controller/nodes/gate/handler.go index 00d2cb989f..c1308c8ed3 100644 --- a/flytepropeller/pkg/controller/nodes/gate/handler.go +++ b/flytepropeller/pkg/controller/nodes/gate/handler.go @@ -84,7 +84,7 @@ func (g *gateNodeHandler) Handle(ctx context.Context, nCtx interfaces.NodeExecut request := &admin.SignalGetOrCreateRequest{ Id: &core.SignalIdentifier{ ExecutionId: nCtx.ExecutionContext().GetExecutionID().WorkflowExecutionIdentifier, - SignalId: approveCondition.SignalId, + SignalId: approveCondition.GetSignalId(), }, Type: &core.LiteralType{ Type: &core.LiteralType_Simple{ @@ -99,10 +99,10 @@ func (g *gateNodeHandler) Handle(ctx context.Context, nCtx interfaces.NodeExecut } // if signal has value then check for approval - if signal.Value != nil && signal.Value.Value != nil { - approved, ok := getBoolean(signal.Value) + if signal.GetValue() != nil && signal.Value.Value != nil { + approved, ok := getBoolean(signal.GetValue()) if !ok { - errMsg := fmt.Sprintf("received a non-boolean approve signal value [%v]", signal.Value) + errMsg := fmt.Sprintf("received a non-boolean approve signal value [%v]", signal.GetValue()) return handler.DoTransition(handler.TransitionTypeEphemeral, 
handler.PhaseInfoFailure(core.ExecutionError_UNKNOWN, errors.RuntimeExecutionError, errMsg, nil)), nil } @@ -143,9 +143,9 @@ func (g *gateNodeHandler) Handle(ctx context.Context, nCtx interfaces.NodeExecut request := &admin.SignalGetOrCreateRequest{ Id: &core.SignalIdentifier{ ExecutionId: nCtx.ExecutionContext().GetExecutionID().WorkflowExecutionIdentifier, - SignalId: signalCondition.SignalId, + SignalId: signalCondition.GetSignalId(), }, - Type: signalCondition.Type, + Type: signalCondition.GetType(), } signal, err := g.signalClient.GetOrCreateSignal(ctx, request) @@ -154,10 +154,10 @@ func (g *gateNodeHandler) Handle(ctx context.Context, nCtx interfaces.NodeExecut } // if signal has value then write to output and transition to success - if signal.Value != nil && signal.Value.Value != nil { + if signal.GetValue() != nil && signal.Value.Value != nil { outputs := &core.LiteralMap{ Literals: map[string]*core.Literal{ - signalCondition.OutputVariableName: signal.Value, + signalCondition.GetOutputVariableName(): signal.GetValue(), }, } @@ -218,9 +218,9 @@ func New(eventConfig *config.EventConfig, signalClient service.SignalServiceClie } func getBoolean(literal *core.Literal) (bool, bool) { - if scalarValue, ok := literal.Value.(*core.Literal_Scalar); ok { - if primitiveValue, ok := scalarValue.Scalar.Value.(*core.Scalar_Primitive); ok { - if booleanValue, ok := primitiveValue.Primitive.Value.(*core.Primitive_Boolean); ok { + if scalarValue, ok := literal.GetValue().(*core.Literal_Scalar); ok { + if primitiveValue, ok := scalarValue.Scalar.GetValue().(*core.Scalar_Primitive); ok { + if booleanValue, ok := primitiveValue.Primitive.GetValue().(*core.Primitive_Boolean); ok { return booleanValue.Boolean, true } } diff --git a/flytepropeller/pkg/controller/nodes/handler/state.go b/flytepropeller/pkg/controller/nodes/handler/state.go index a7fa7bdf87..c3e35e67d7 100644 --- a/flytepropeller/pkg/controller/nodes/handler/state.go +++ 
b/flytepropeller/pkg/controller/nodes/handler/state.go @@ -48,11 +48,12 @@ type GateNodeState struct { } type ArrayNodeState struct { - Phase v1alpha1.ArrayNodePhase - TaskPhaseVersion uint32 - Error *core.ExecutionError - SubNodePhases bitarray.CompactArray - SubNodeTaskPhases bitarray.CompactArray - SubNodeRetryAttempts bitarray.CompactArray - SubNodeSystemFailures bitarray.CompactArray + Phase v1alpha1.ArrayNodePhase + TaskPhaseVersion uint32 + Error *core.ExecutionError + SubNodePhases bitarray.CompactArray + SubNodeTaskPhases bitarray.CompactArray + SubNodeRetryAttempts bitarray.CompactArray + SubNodeSystemFailures bitarray.CompactArray + SubNodeDeltaTimestamps bitarray.CompactArray } diff --git a/flytepropeller/pkg/controller/nodes/handler/transition_info.go b/flytepropeller/pkg/controller/nodes/handler/transition_info.go index c9af525cca..7e787a9424 100644 --- a/flytepropeller/pkg/controller/nodes/handler/transition_info.go +++ b/flytepropeller/pkg/controller/nodes/handler/transition_info.go @@ -173,7 +173,7 @@ func phaseInfoFailed(p EPhase, err *core.ExecutionError, info *ExecutionInfo) Ph } } - return phaseInfo(p, err, info, err.Message) + return phaseInfo(p, err, info, err.GetMessage()) } func PhaseInfoFailure(kind core.ExecutionError_ErrorKind, code, reason string, info *ExecutionInfo) PhaseInfo { diff --git a/flytepropeller/pkg/controller/nodes/handler/transition_info_test.go b/flytepropeller/pkg/controller/nodes/handler/transition_info_test.go index 883dbd5f45..91042fc588 100644 --- a/flytepropeller/pkg/controller/nodes/handler/transition_info_test.go +++ b/flytepropeller/pkg/controller/nodes/handler/transition_info_test.go @@ -110,8 +110,8 @@ func TestPhaseInfo(t *testing.T) { assert.Equal(t, EPhaseFailed, p.GetPhase()) assert.Equal(t, i, p.GetInfo()) if assert.NotNil(t, p.GetErr()) { - assert.Equal(t, "code", p.GetErr().Code) - assert.Equal(t, "reason", p.GetErr().Message) + assert.Equal(t, "code", p.GetErr().GetCode()) + assert.Equal(t, "reason", 
p.GetErr().GetMessage()) } assert.NotNil(t, p.GetOccurredAt()) }) @@ -141,8 +141,8 @@ func TestPhaseInfo(t *testing.T) { assert.Equal(t, EPhaseRetryableFailure, p.GetPhase()) assert.Equal(t, i, p.GetInfo()) if assert.NotNil(t, p.GetErr()) { - assert.Equal(t, "code", p.GetErr().Code) - assert.Equal(t, "reason", p.GetErr().Message) + assert.Equal(t, "code", p.GetErr().GetCode()) + assert.Equal(t, "reason", p.GetErr().GetMessage()) } assert.NotNil(t, p.GetOccurredAt()) }) diff --git a/flytepropeller/pkg/controller/nodes/node_exec_context.go b/flytepropeller/pkg/controller/nodes/node_exec_context.go index 7de31100c6..9721d2af6c 100644 --- a/flytepropeller/pkg/controller/nodes/node_exec_context.go +++ b/flytepropeller/pkg/controller/nodes/node_exec_context.go @@ -39,16 +39,16 @@ func (e eventRecorder) RecordTaskEvent(ctx context.Context, ev *event.TaskExecut if eventConfig.ErrorOnAlreadyExists { return err } - logger.Warningf(ctx, "Failed to record taskEvent, error [%s]. Trying to record state: %s. Ignoring this error!", err.Error(), ev.Phase) + logger.Warningf(ctx, "Failed to record taskEvent, error [%s]. Trying to record state: %s. Ignoring this error!", err.Error(), ev.GetPhase()) return nil } else if eventsErr.IsEventAlreadyInTerminalStateError(err) { - if IsTerminalTaskPhase(ev.Phase) { + if IsTerminalTaskPhase(ev.GetPhase()) { // Event is terminal and the stored value in flyteadmin is already terminal. This implies aborted case. So ignoring - logger.Warningf(ctx, "Failed to record taskEvent, error [%s]. Trying to record state: %s. Ignoring this error!", err.Error(), ev.Phase) + logger.Warningf(ctx, "Failed to record taskEvent, error [%s]. Trying to record state: %s. Ignoring this error!", err.Error(), ev.GetPhase()) return nil } - logger.Warningf(ctx, "Failed to record taskEvent in state: %s, error: %s", ev.Phase, err) - return errors.Wrapf(err, "failed to record task event, as it already exists in terminal state. 
Event state: %s", ev.Phase) + logger.Warningf(ctx, "Failed to record taskEvent in state: %s, error: %s", ev.GetPhase(), err) + return errors.Wrapf(err, "failed to record task event, as it already exists in terminal state. Event state: %s", ev.GetPhase()) } return err } @@ -60,30 +60,30 @@ func (e eventRecorder) RecordNodeEvent(ctx context.Context, nodeEvent *event.Nod return fmt.Errorf("event recording attempt of Nil Node execution event") } - if nodeEvent.Id == nil { + if nodeEvent.GetId() == nil { return fmt.Errorf("event recording attempt of with nil node Event ID") } - logger.Infof(ctx, "Recording NodeEvent [%s] phase[%s]", nodeEvent.GetId().String(), nodeEvent.Phase.String()) + logger.Infof(ctx, "Recording NodeEvent [%s] phase[%s]", nodeEvent.GetId().String(), nodeEvent.GetPhase().String()) err := e.nodeEventRecorder.RecordNodeEvent(ctx, nodeEvent, eventConfig) if err != nil { - if nodeEvent.GetId().NodeId == v1alpha1.EndNodeID { + if nodeEvent.GetId().GetNodeId() == v1alpha1.EndNodeID { return nil } if eventsErr.IsAlreadyExists(err) { logger.Infof(ctx, "Node event phase: %s, nodeId %s already exist", - nodeEvent.Phase.String(), nodeEvent.GetId().NodeId) + nodeEvent.GetPhase().String(), nodeEvent.GetId().GetNodeId()) return nil } else if eventsErr.IsEventAlreadyInTerminalStateError(err) { - if IsTerminalNodePhase(nodeEvent.Phase) { + if IsTerminalNodePhase(nodeEvent.GetPhase()) { // Event was trying to record a different terminal phase for an already terminal event. ignoring. logger.Infof(ctx, "Node event phase: %s, nodeId %s already in terminal phase. 
err: %s", - nodeEvent.Phase.String(), nodeEvent.GetId().NodeId, err.Error()) + nodeEvent.GetPhase().String(), nodeEvent.GetId().GetNodeId(), err.Error()) return nil } logger.Warningf(ctx, "Failed to record nodeEvent, error [%s]", err.Error()) - return nodeerrors.Wrapf(nodeerrors.IllegalStateError, nodeEvent.Id.NodeId, err, "phase mismatch mismatch between propeller and control plane; Trying to record Node p: %s", nodeEvent.Phase) + return nodeerrors.Wrapf(nodeerrors.IllegalStateError, nodeEvent.GetId().GetNodeId(), err, "phase mismatch mismatch between propeller and control plane; Trying to record Node p: %s", nodeEvent.GetPhase()) } } return err @@ -223,7 +223,7 @@ func newNodeExecContext(_ context.Context, store *storage.DataStore, execContext } nodeLabels[NodeIDLabel] = utils.SanitizeLabelValue(node.GetID()) if tr != nil && tr.GetTaskID() != nil { - nodeLabels[TaskNameLabel] = utils.SanitizeLabelValue(tr.GetTaskID().Name) + nodeLabels[TaskNameLabel] = utils.SanitizeLabelValue(tr.GetTaskID().GetName()) } nodeLabels[NodeInterruptibleLabel] = strconv.FormatBool(interruptible) md.nodeLabels = nodeLabels @@ -290,9 +290,9 @@ func (c *nodeExecutor) BuildNodeExecutionContext(ctx context.Context, executionC if config.GetConfig().NodeConfig.IgnoreRetryCause { // For the unified retry behavior we execute the last interruptibleFailureThreshold attempts on a non // interruptible machine - maxAttempts := uint32(config.GetConfig().NodeConfig.DefaultMaxAttempts) + maxAttempts := uint32(config.GetConfig().NodeConfig.DefaultMaxAttempts) // #nosec G115 if n.GetRetryStrategy() != nil && n.GetRetryStrategy().MinAttempts != nil && *n.GetRetryStrategy().MinAttempts != 1 { - maxAttempts = uint32(*n.GetRetryStrategy().MinAttempts) + maxAttempts = uint32(*n.GetRetryStrategy().MinAttempts) // #nosec G115 } // For interruptible nodes run at least one attempt on an interruptible machine (thus s.GetAttempts() > 0) even if there won't be any retries diff --git 
a/flytepropeller/pkg/controller/nodes/node_exec_context_test.go b/flytepropeller/pkg/controller/nodes/node_exec_context_test.go index 4614d0f035..2f421a7a7b 100644 --- a/flytepropeller/pkg/controller/nodes/node_exec_context_test.go +++ b/flytepropeller/pkg/controller/nodes/node_exec_context_test.go @@ -147,10 +147,10 @@ func Test_NodeContextDefault(t *testing.T) { // Test that retrieving task nodes taskIdentifier := common.GetTargetEntity(ctx, nodeExecContext) - assert.Equal(t, w1.Tasks["taskID"].TaskTemplate.Id.Project, taskIdentifier.Project) - assert.Equal(t, w1.Tasks["taskID"].TaskTemplate.Id.Domain, taskIdentifier.Domain) - assert.Equal(t, w1.Tasks["taskID"].TaskTemplate.Id.Name, taskIdentifier.Name) - assert.Equal(t, w1.Tasks["taskID"].TaskTemplate.Id.Version, taskIdentifier.Version) + assert.Equal(t, w1.Tasks["taskID"].TaskTemplate.GetId().GetProject(), taskIdentifier.GetProject()) + assert.Equal(t, w1.Tasks["taskID"].TaskTemplate.GetId().GetDomain(), taskIdentifier.GetDomain()) + assert.Equal(t, w1.Tasks["taskID"].TaskTemplate.GetId().GetName(), taskIdentifier.GetName()) + assert.Equal(t, w1.Tasks["taskID"].TaskTemplate.GetId().GetVersion(), taskIdentifier.GetVersion()) } func TestGetTargetEntity_LaunchPlanNode(t *testing.T) { @@ -173,10 +173,10 @@ func TestGetTargetEntity_LaunchPlanNode(t *testing.T) { nCtx.OnNode().Return(n) fetchedID := common.GetTargetEntity(context.Background(), nCtx) - assert.Equal(t, id.Project, fetchedID.Project) - assert.Equal(t, id.Domain, fetchedID.Domain) - assert.Equal(t, id.Name, fetchedID.Name) - assert.Equal(t, id.Version, fetchedID.Version) + assert.Equal(t, id.GetProject(), fetchedID.GetProject()) + assert.Equal(t, id.GetDomain(), fetchedID.GetDomain()) + assert.Equal(t, id.GetName(), fetchedID.GetName()) + assert.Equal(t, id.GetVersion(), fetchedID.GetVersion()) } func TestGetTargetEntity_EmptyTask(t *testing.T) { diff --git a/flytepropeller/pkg/controller/nodes/node_state_manager.go 
b/flytepropeller/pkg/controller/nodes/node_state_manager.go index a9ead9afc3..25b0bc55df 100644 --- a/flytepropeller/pkg/controller/nodes/node_state_manager.go +++ b/flytepropeller/pkg/controller/nodes/node_state_manager.go @@ -80,7 +80,7 @@ func (n nodeStateManager) GetTaskNodeState() handler.TaskNodeState { tn := n.nodeStatus.GetTaskNodeStatus() if tn != nil { return handler.TaskNodeState{ - PluginPhase: pluginCore.Phase(tn.GetPhase()), + PluginPhase: pluginCore.Phase(tn.GetPhase()), // #nosec G115 PluginPhaseVersion: tn.GetPhaseVersion(), PluginStateVersion: tn.GetPluginStateVersion(), PluginState: tn.GetPluginState(), @@ -181,6 +181,11 @@ func (n nodeStateManager) GetArrayNodeState() handler.ArrayNodeState { if subNodeSystemFailuresCopy := subNodeSystemFailures.DeepCopy(); subNodeSystemFailuresCopy != nil { as.SubNodeSystemFailures = *subNodeSystemFailuresCopy } + + subNodeDeltaTimestamps := an.GetSubNodeDeltaTimestamps() + if subNodeDeltaTimestampsCopy := subNodeDeltaTimestamps.DeepCopy(); subNodeDeltaTimestampsCopy != nil { + as.SubNodeDeltaTimestamps = *subNodeDeltaTimestampsCopy + } } return as } diff --git a/flytepropeller/pkg/controller/nodes/output_resolver.go b/flytepropeller/pkg/controller/nodes/output_resolver.go index df8a6dfe19..620064d2ac 100644 --- a/flytepropeller/pkg/controller/nodes/output_resolver.go +++ b/flytepropeller/pkg/controller/nodes/output_resolver.go @@ -86,7 +86,7 @@ func resolveSubtaskOutput(ctx context.Context, store storage.ProtobufStore, node "Outputs not found at [%v]", outputsFileRef) } - l, ok := d.Literals[varName] + l, ok := d.GetLiterals()[varName] if !ok { return nil, errors.Errorf(errors.BadSpecificationError, nodeID, "Output of array tasks is expected to be "+ "a single literal map entry named 'array' of type LiteralCollection.") @@ -97,7 +97,7 @@ func resolveSubtaskOutput(ctx context.Context, store storage.ProtobufStore, node "is of type [%v]. 
LiteralCollection is expected.", reflect.TypeOf(l.GetValue())) } - literals := l.GetCollection().Literals + literals := l.GetCollection().GetLiterals() if idx >= len(literals) { return nil, errors.Errorf(errors.OutputsNotFoundError, nodeID, "Failed to find [%v[%v].%v]", nodeID, idx, varName) @@ -120,7 +120,7 @@ func resolveSingleOutput(ctx context.Context, store storage.ProtobufStore, nodeI "Outputs not found at [%v]", outputsFileRef) } - l, ok := d.Literals[varName] + l, ok := d.GetLiterals()[varName] if !ok { return nil, errors.Errorf(errors.OutputsNotFoundError, nodeID, "Failed to find [%v].[%v]", nodeID, varName) diff --git a/flytepropeller/pkg/controller/nodes/subworkflow/handler_test.go b/flytepropeller/pkg/controller/nodes/subworkflow/handler_test.go index ea21ce1171..f819a47f0b 100644 --- a/flytepropeller/pkg/controller/nodes/subworkflow/handler_test.go +++ b/flytepropeller/pkg/controller/nodes/subworkflow/handler_test.go @@ -174,11 +174,11 @@ func TestWorkflowNodeHandler_StartNode_Launchplan(t *testing.T) { mockLPExec.OnLaunchMatch( ctx, mock.MatchedBy(func(o launchplan.LaunchContext) bool { - return o.ParentNodeExecution.NodeId == mockNode.GetID() && - o.ParentNodeExecution.ExecutionId == wfExecID + return o.ParentNodeExecution.GetNodeId() == mockNode.GetID() && + o.ParentNodeExecution.GetExecutionId() == wfExecID }), mock.MatchedBy(func(o *core.WorkflowExecutionIdentifier) bool { - return assert.Equal(t, wfExecID.Project, o.Project) && assert.Equal(t, wfExecID.Domain, o.Domain) + return assert.Equal(t, wfExecID.GetProject(), o.GetProject()) && assert.Equal(t, wfExecID.GetDomain(), o.GetDomain()) }), mock.MatchedBy(func(o *core.Identifier) bool { return lpID == o }), mock.MatchedBy(func(o *core.LiteralMap) bool { return o.Literals == nil }), @@ -200,11 +200,11 @@ func TestWorkflowNodeHandler_StartNode_Launchplan(t *testing.T) { mockLPExec.OnLaunchMatch( ctx, mock.MatchedBy(func(o launchplan.LaunchContext) bool { - return o.ParentNodeExecution.NodeId == 
mockNode.GetID() && - o.ParentNodeExecution.ExecutionId == wfExecID + return o.ParentNodeExecution.GetNodeId() == mockNode.GetID() && + o.ParentNodeExecution.GetExecutionId() == wfExecID }), mock.MatchedBy(func(o *core.WorkflowExecutionIdentifier) bool { - return assert.Equal(t, wfExecID.Project, o.Project) && assert.Equal(t, wfExecID.Domain, o.Domain) + return assert.Equal(t, wfExecID.GetProject(), o.GetProject()) && assert.Equal(t, wfExecID.GetDomain(), o.GetDomain()) }), mock.MatchedBy(func(o *core.Identifier) bool { return lpID == o }), mock.MatchedBy(func(o *core.LiteralMap) bool { return o.Literals == nil }), @@ -256,7 +256,7 @@ func TestWorkflowNodeHandler_CheckNodeStatus(t *testing.T) { mockLPExec.OnGetStatusMatch( ctx, mock.MatchedBy(func(o *core.WorkflowExecutionIdentifier) bool { - return assert.Equal(t, wfExecID.Project, o.Project) && assert.Equal(t, wfExecID.Domain, o.Domain) + return assert.Equal(t, wfExecID.GetProject(), o.GetProject()) && assert.Equal(t, wfExecID.GetDomain(), o.GetDomain()) }), ).Return(&admin.ExecutionClosure{ Phase: core.WorkflowExecution_RUNNING, @@ -277,7 +277,7 @@ func TestWorkflowNodeHandler_CheckNodeStatus(t *testing.T) { mockLPExec.OnGetStatusMatch( ctx, mock.MatchedBy(func(o *core.WorkflowExecutionIdentifier) bool { - return assert.Equal(t, wfExecID.Project, o.Project) && assert.Equal(t, wfExecID.Domain, o.Domain) + return assert.Equal(t, wfExecID.GetProject(), o.GetProject()) && assert.Equal(t, wfExecID.GetDomain(), o.GetDomain()) }), ).Return(&admin.ExecutionClosure{ Phase: core.WorkflowExecution_RUNNING, @@ -329,7 +329,7 @@ func TestWorkflowNodeHandler_AbortNode(t *testing.T) { mockLPExec.OnKillMatch( ctx, mock.MatchedBy(func(o *core.WorkflowExecutionIdentifier) bool { - return assert.Equal(t, wfExecID.Project, o.Project) && assert.Equal(t, wfExecID.Domain, o.Domain) + return assert.Equal(t, wfExecID.GetProject(), o.GetProject()) && assert.Equal(t, wfExecID.GetDomain(), o.GetDomain()) }), 
mock.AnythingOfType(reflect.String.String()), ).Return(nil) @@ -351,7 +351,7 @@ func TestWorkflowNodeHandler_AbortNode(t *testing.T) { mockLPExec.OnKillMatch( ctx, mock.MatchedBy(func(o *core.WorkflowExecutionIdentifier) bool { - return assert.Equal(t, wfExecID.Project, o.Project) && assert.Equal(t, wfExecID.Domain, o.Domain) + return assert.Equal(t, wfExecID.GetProject(), o.GetProject()) && assert.Equal(t, wfExecID.GetDomain(), o.GetDomain()) }), mock.AnythingOfType(reflect.String.String()), ).Return(nil) @@ -371,7 +371,7 @@ func TestWorkflowNodeHandler_AbortNode(t *testing.T) { mockLPExec.OnKillMatch( ctx, mock.MatchedBy(func(o *core.WorkflowExecutionIdentifier) bool { - return assert.Equal(t, wfExecID.Project, o.Project) && assert.Equal(t, wfExecID.Domain, o.Domain) + return assert.Equal(t, wfExecID.GetProject(), o.GetProject()) && assert.Equal(t, wfExecID.GetDomain(), o.GetDomain()) }), mock.AnythingOfType(reflect.String.String()), ).Return(expectedErr) diff --git a/flytepropeller/pkg/controller/nodes/subworkflow/launchplan.go b/flytepropeller/pkg/controller/nodes/subworkflow/launchplan.go index 60802a6486..16d0134740 100644 --- a/flytepropeller/pkg/controller/nodes/subworkflow/launchplan.go +++ b/flytepropeller/pkg/controller/nodes/subworkflow/launchplan.go @@ -28,17 +28,17 @@ type launchPlanHandler struct { func getParentNodeExecutionID(nCtx interfaces.NodeExecutionContext) (*core.NodeExecutionIdentifier, error) { nodeExecID := &core.NodeExecutionIdentifier{ - ExecutionId: nCtx.NodeExecutionMetadata().GetNodeExecutionID().ExecutionId, + ExecutionId: nCtx.NodeExecutionMetadata().GetNodeExecutionID().GetExecutionId(), } if nCtx.ExecutionContext().GetEventVersion() != v1alpha1.EventVersion0 { var err error - currentNodeUniqueID, err := common.GenerateUniqueID(nCtx.ExecutionContext().GetParentInfo(), nCtx.NodeExecutionMetadata().GetNodeExecutionID().NodeId) + currentNodeUniqueID, err := common.GenerateUniqueID(nCtx.ExecutionContext().GetParentInfo(), 
nCtx.NodeExecutionMetadata().GetNodeExecutionID().GetNodeId()) if err != nil { return nil, err } nodeExecID.NodeId = currentNodeUniqueID } else { - nodeExecID.NodeId = nCtx.NodeExecutionMetadata().GetNodeExecutionID().NodeId + nodeExecID.NodeId = nCtx.NodeExecutionMetadata().GetNodeExecutionID().GetNodeId() } return nodeExecID, nil } @@ -77,11 +77,11 @@ func (l *launchPlanHandler) StartLaunchPlan(ctx context.Context, nCtx interfaces } if nCtx.ExecutionContext().GetExecutionConfig().RecoveryExecution.WorkflowExecutionIdentifier != nil { - fullyQualifiedNodeID := nCtx.NodeExecutionMetadata().GetNodeExecutionID().NodeId + fullyQualifiedNodeID := nCtx.NodeExecutionMetadata().GetNodeExecutionID().GetNodeId() if nCtx.ExecutionContext().GetEventVersion() != v1alpha1.EventVersion0 { // compute fully qualified node id (prefixed with parent id and retry attempt) to ensure uniqueness var err error - fullyQualifiedNodeID, err = common.GenerateUniqueID(nCtx.ExecutionContext().GetParentInfo(), nCtx.NodeExecutionMetadata().GetNodeExecutionID().NodeId) + fullyQualifiedNodeID, err = common.GenerateUniqueID(nCtx.ExecutionContext().GetParentInfo(), nCtx.NodeExecutionMetadata().GetNodeExecutionID().GetNodeId()) if err != nil { return handler.UnknownTransition, err } @@ -94,11 +94,11 @@ func (l *launchPlanHandler) StartLaunchPlan(ctx context.Context, nCtx interfaces logger.Warnf(ctx, "Failed to recover workflow node [%+v] with err [%+v]", nCtx.NodeExecutionMetadata().GetNodeExecutionID(), err) } } - if recovered != nil && recovered.Closure != nil && recovered.Closure.Phase == core.NodeExecution_SUCCEEDED { - if recovered.Closure.GetWorkflowNodeMetadata() != nil { - launchCtx.RecoveryExecution = recovered.Closure.GetWorkflowNodeMetadata().ExecutionId + if recovered != nil && recovered.GetClosure() != nil && recovered.GetClosure().GetPhase() == core.NodeExecution_SUCCEEDED { + if recovered.GetClosure().GetWorkflowNodeMetadata() != nil { + launchCtx.RecoveryExecution = 
recovered.GetClosure().GetWorkflowNodeMetadata().GetExecutionId() } else { - logger.Debugf(ctx, "Attempted to recovered workflow node execution [%+v] but was missing workflow node metadata", recovered.Id) + logger.Debugf(ctx, "Attempted to recovered workflow node execution [%+v] but was missing workflow node metadata", recovered.GetId()) } } } @@ -106,7 +106,7 @@ func (l *launchPlanHandler) StartLaunchPlan(ctx context.Context, nCtx interfaces nodeInputs, nCtx.NodeExecutionMetadata().GetOwnerID().String()) if err != nil { if launchplan.IsAlreadyExists(err) { - logger.Infof(ctx, "Execution already exists [%s].", childID.Name) + logger.Infof(ctx, "Execution already exists [%s].", childID.GetName()) } else if launchplan.IsUserError(err) { return handler.DoTransition(handler.TransitionTypeEphemeral, handler.PhaseInfoFailure(core.ExecutionError_USER, errors.RuntimeExecutionError, err.Error(), nil)), nil } else { @@ -114,7 +114,7 @@ func (l *launchPlanHandler) StartLaunchPlan(ctx context.Context, nCtx interfaces } } else { eCtx := nCtx.ExecutionContext() - logger.Infof(ctx, "Launched launchplan with ID [%s], Parallelism is now set to [%d]", childID.Name, eCtx.IncrementParallelism()) + logger.Infof(ctx, "Launched launchplan with ID [%s], Parallelism is now set to [%d]", childID.GetName(), eCtx.IncrementParallelism()) } return handler.DoTransition(handler.TransitionTypeEphemeral, handler.PhaseInfoRunning(&handler.ExecutionInfo{ @@ -180,7 +180,7 @@ func (l *launchPlanHandler) CheckLaunchPlanStatus(ctx context.Context, nCtx inte switch wfStatusClosure.GetPhase() { case core.WorkflowExecution_ABORTED: wErr = fmt.Errorf("launchplan execution aborted") - err = errors.Wrapf(errors.RemoteChildWorkflowExecutionFailed, nCtx.NodeID(), wErr, "launchplan [%s] aborted", childID.Name) + err = errors.Wrapf(errors.RemoteChildWorkflowExecutionFailed, nCtx.NodeID(), wErr, "launchplan [%s] aborted", childID.GetName()) return handler.DoTransition(handler.TransitionTypeEphemeral, 
handler.PhaseInfoFailure(core.ExecutionError_USER, errors.RemoteChildWorkflowExecutionFailed, err.Error(), &handler.ExecutionInfo{ WorkflowNodeInfo: &handler.WorkflowNodeInfo{LaunchedWorkflowID: childID}, })), nil diff --git a/flytepropeller/pkg/controller/nodes/subworkflow/launchplan/admin.go b/flytepropeller/pkg/controller/nodes/subworkflow/launchplan/admin.go index 1ce0568bf6..91709b411d 100644 --- a/flytepropeller/pkg/controller/nodes/subworkflow/launchplan/admin.go +++ b/flytepropeller/pkg/controller/nodes/subworkflow/launchplan/admin.go @@ -51,7 +51,7 @@ func (e executionCacheItem) IsTerminal() bool { if e.ExecutionClosure == nil { return false } - return e.ExecutionClosure.Phase == core.WorkflowExecution_ABORTED || e.ExecutionClosure.Phase == core.WorkflowExecution_FAILED || e.ExecutionClosure.Phase == core.WorkflowExecution_SUCCEEDED + return e.ExecutionClosure.GetPhase() == core.WorkflowExecution_ABORTED || e.ExecutionClosure.GetPhase() == core.WorkflowExecution_FAILED || e.ExecutionClosure.GetPhase() == core.WorkflowExecution_SUCCEEDED } func (e executionCacheItem) ID() string { @@ -63,7 +63,7 @@ func (a *adminLaunchPlanExecutor) handleLaunchError(ctx context.Context, isRecov statusCode := status.Code(err) if isRecovery && statusCode == codes.NotFound { - logger.Warnf(ctx, "failed to recover workflow [%s] with err %+v. will attempt to launch instead", launchPlanRef.Name, err) + logger.Warnf(ctx, "failed to recover workflow [%s] with err %+v. 
will attempt to launch instead", launchPlanRef.GetName(), err) return nil } switch statusCode { @@ -73,9 +73,9 @@ func (a *adminLaunchPlanExecutor) handleLaunchError(ctx context.Context, isRecov logger.Errorf(ctx, "Failed to add ExecID [%v] to auto refresh cache", executionID) } - return stdErr.Wrapf(RemoteErrorAlreadyExists, err, "ExecID %s already exists", executionID.Name) + return stdErr.Wrapf(RemoteErrorAlreadyExists, err, "ExecID %s already exists", executionID.GetName()) case codes.DataLoss, codes.DeadlineExceeded, codes.Internal, codes.Unknown, codes.Canceled: - return stdErr.Wrapf(RemoteErrorSystem, err, "failed to launch workflow [%s], system error", launchPlanRef.Name) + return stdErr.Wrapf(RemoteErrorSystem, err, "failed to launch workflow [%s], system error", launchPlanRef.GetName()) default: return stdErr.Wrapf(RemoteErrorUser, err, "failed to launch workflow") } @@ -88,7 +88,7 @@ func (a *adminLaunchPlanExecutor) Launch(ctx context.Context, launchCtx LaunchCo if launchCtx.RecoveryExecution != nil { _, err = a.adminClient.RecoverExecution(ctx, &admin.ExecutionRecoverRequest{ Id: launchCtx.RecoveryExecution, - Name: executionID.Name, + Name: executionID.GetName(), Metadata: &admin.ExecutionMetadata{ ParentNodeExecution: launchCtx.ParentNodeExecution, }, @@ -128,9 +128,9 @@ func (a *adminLaunchPlanExecutor) Launch(ctx context.Context, launchCtx LaunchCo } req := &admin.ExecutionCreateRequest{ - Project: executionID.Project, - Domain: executionID.Domain, - Name: executionID.Name, + Project: executionID.GetProject(), + Domain: executionID.GetDomain(), + Name: executionID.GetName(), Inputs: inputs, Spec: &admin.ExecutionSpec{ LaunchPlan: launchPlanRef, @@ -143,7 +143,7 @@ func (a *adminLaunchPlanExecutor) Launch(ctx context.Context, launchCtx LaunchCo Labels: &admin.Labels{Values: labels}, Annotations: &admin.Annotations{Values: launchCtx.Annotations}, SecurityContext: &launchCtx.SecurityContext, - MaxParallelism: int32(launchCtx.MaxParallelism), + 
MaxParallelism: int32(launchCtx.MaxParallelism), // #nosec G115 RawOutputDataConfig: launchCtx.RawOutputDataConfig, Interruptible: interruptible, OverwriteCache: launchCtx.OverwriteCache, @@ -235,8 +235,8 @@ func (a *adminLaunchPlanExecutor) syncItem(ctx context.Context, batch cache.Batc // Is workflow already terminated, then no need to fetch information, also the item can be dropped from the cache if exec.ExecutionClosure != nil { - if IsWorkflowTerminated(exec.ExecutionClosure.Phase) { - logger.Debugf(ctx, "Workflow [%s] is already completed, will not fetch execution information", exec.ExecutionClosure.WorkflowId) + if IsWorkflowTerminated(exec.ExecutionClosure.GetPhase()) { + logger.Debugf(ctx, "Workflow [%s] is already completed, will not fetch execution information", exec.ExecutionClosure.GetWorkflowId()) resp = append(resp, cache.ItemSyncResponse{ ID: obj.GetID(), Item: exec, @@ -256,7 +256,7 @@ func (a *adminLaunchPlanExecutor) syncItem(ctx context.Context, batch cache.Batc // TODO: Define which error codes are system errors (and return the error) vs user stdErr. if status.Code(err) == codes.NotFound { - err = stdErr.Wrapf(RemoteErrorNotFound, err, "execID [%s] not found on remote", exec.WorkflowExecutionIdentifier.Name) + err = stdErr.Wrapf(RemoteErrorNotFound, err, "execID [%s] not found on remote", exec.WorkflowExecutionIdentifier.GetName()) } else { err = stdErr.Wrapf(RemoteErrorSystem, err, "system error") } @@ -315,7 +315,7 @@ func (a *adminLaunchPlanExecutor) syncItem(ctx context.Context, batch cache.Batc ID: obj.GetID(), Item: executionCacheItem{ WorkflowExecutionIdentifier: exec.WorkflowExecutionIdentifier, - ExecutionClosure: res.Closure, + ExecutionClosure: res.GetClosure(), ExecutionOutputs: outputs, ParentWorkflowID: exec.ParentWorkflowID, }, @@ -327,7 +327,7 @@ func (a *adminLaunchPlanExecutor) syncItem(ctx context.Context, batch cache.Batc // prematurely, there is a chance the parent workflow evaluates before the cache is updated. 
for _, itemSyncResponse := range resp { exec := itemSyncResponse.Item.(executionCacheItem) - if exec.ExecutionClosure != nil && IsWorkflowTerminated(exec.ExecutionClosure.Phase) { + if exec.ExecutionClosure != nil && IsWorkflowTerminated(exec.ExecutionClosure.GetPhase()) { a.enqueueWorkflow(exec.ParentWorkflowID) } } @@ -344,7 +344,8 @@ func NewAdminLaunchPlanExecutor(_ context.Context, client service.AdminServiceCl } rateLimiter := &workqueue.BucketRateLimiter{Limiter: rate.NewLimiter(rate.Limit(cfg.TPS), cfg.Burst)} - c, err := cache.NewAutoRefreshCache("admin-launcher", exec.syncItem, rateLimiter, cfg.CacheResyncDuration.Duration, cfg.Workers, cfg.MaxCacheSize, scope) + // #nosec G115 + c, err := cache.NewAutoRefreshCache("admin-launcher", exec.syncItem, rateLimiter, cfg.CacheResyncDuration.Duration, uint(cfg.Workers), uint(cfg.MaxCacheSize), scope) if err != nil { return nil, err } diff --git a/flytepropeller/pkg/controller/nodes/subworkflow/launchplan/admin_test.go b/flytepropeller/pkg/controller/nodes/subworkflow/launchplan/admin_test.go index ead1312e17..cf60cc85d8 100644 --- a/flytepropeller/pkg/controller/nodes/subworkflow/launchplan/admin_test.go +++ b/flytepropeller/pkg/controller/nodes/subworkflow/launchplan/admin_test.go @@ -61,7 +61,7 @@ func TestAdminLaunchPlanExecutor_GetStatus(t *testing.T) { mockClient.On("CreateExecution", ctx, mock.MatchedBy(func(o *admin.ExecutionCreateRequest) bool { - return o.Project == "p" && o.Domain == "d" && o.Name == "n" && o.Spec.Inputs == nil + return o.GetProject() == "p" && o.GetDomain() == "d" && o.GetName() == "n" && o.GetSpec().GetInputs() == nil }), ).Return(nil, nil) @@ -108,7 +108,7 @@ func TestAdminLaunchPlanExecutor_GetStatus(t *testing.T) { mockClient.On("CreateExecution", ctx, mock.MatchedBy(func(o *admin.ExecutionCreateRequest) bool { - return o.Project == "p" && o.Domain == "d" && o.Name == "n" && o.Spec.Inputs == nil + return o.GetProject() == "p" && o.GetDomain() == "d" && o.GetName() == "n" && 
o.GetSpec().GetInputs() == nil }), ).Return(nil, nil) @@ -170,9 +170,9 @@ func TestAdminLaunchPlanExecutor_Launch(t *testing.T) { mockClient.On("CreateExecution", ctx, mock.MatchedBy(func(o *admin.ExecutionCreateRequest) bool { - return o.Project == "p" && o.Domain == "d" && o.Name == "n" && o.Spec.Inputs == nil && - o.Spec.Metadata.Mode == admin.ExecutionMetadata_CHILD_WORKFLOW && - reflect.DeepEqual(o.Spec.Labels.Values, map[string]string{"foo": "bar"}) // Ensure shard-key was removed. + return o.GetProject() == "p" && o.GetDomain() == "d" && o.GetName() == "n" && o.GetSpec().GetInputs() == nil && + o.GetSpec().GetMetadata().GetMode() == admin.ExecutionMetadata_CHILD_WORKFLOW && + reflect.DeepEqual(o.GetSpec().GetLabels().GetValues(), map[string]string{"foo": "bar"}) // Ensure shard-key was removed. }), ).Return(nil, nil) assert.NoError(t, err) @@ -216,8 +216,8 @@ func TestAdminLaunchPlanExecutor_Launch(t *testing.T) { mockClient.On("RecoverExecution", ctx, mock.MatchedBy(func(o *admin.ExecutionRecoverRequest) bool { - return o.Id.Project == "p" && o.Id.Domain == "d" && o.Id.Name == "w" && o.Name == "n" && - proto.Equal(o.Metadata.ParentNodeExecution, parentNodeExecution) + return o.GetId().GetProject() == "p" && o.GetId().GetDomain() == "d" && o.GetId().GetName() == "w" && o.GetName() == "n" && + proto.Equal(o.GetMetadata().GetParentNodeExecution(), parentNodeExecution) }), ).Return(nil, nil) assert.NoError(t, err) @@ -256,8 +256,8 @@ func TestAdminLaunchPlanExecutor_Launch(t *testing.T) { mockClient.On("RecoverExecution", ctx, mock.MatchedBy(func(o *admin.ExecutionRecoverRequest) bool { - return o.Id.Project == "p" && o.Id.Domain == "d" && o.Id.Name == "w" && o.Name == "n" && - proto.Equal(o.Metadata.ParentNodeExecution, parentNodeExecution) + return o.GetId().GetProject() == "p" && o.GetId().GetDomain() == "d" && o.GetId().GetName() == "w" && o.GetName() == "n" && + proto.Equal(o.GetMetadata().GetParentNodeExecution(), parentNodeExecution) }), ).Return(nil, 
recoveryErr) @@ -266,8 +266,8 @@ func TestAdminLaunchPlanExecutor_Launch(t *testing.T) { ctx, mock.MatchedBy(func(o *admin.ExecutionCreateRequest) bool { createCalled = true - return o.Project == "p" && o.Domain == "d" && o.Name == "n" && o.Spec.Inputs == nil && - o.Spec.Metadata.Mode == admin.ExecutionMetadata_CHILD_WORKFLOW + return o.GetProject() == "p" && o.GetDomain() == "d" && o.GetName() == "n" && o.GetSpec().GetInputs() == nil && + o.GetSpec().GetMetadata().GetMode() == admin.ExecutionMetadata_CHILD_WORKFLOW }), ).Return(nil, nil) @@ -367,7 +367,7 @@ func TestAdminLaunchPlanExecutor_Kill(t *testing.T) { exec, err := NewAdminLaunchPlanExecutor(ctx, mockClient, adminConfig, promutils.NewTestScope(), memStore, func(string) {}) mockClient.On("TerminateExecution", ctx, - mock.MatchedBy(func(o *admin.ExecutionTerminateRequest) bool { return o.Id == id && o.Cause == reason }), + mock.MatchedBy(func(o *admin.ExecutionTerminateRequest) bool { return o.GetId() == id && o.GetCause() == reason }), ).Return(&admin.ExecutionTerminateResponse{}, nil) assert.NoError(t, err) err = exec.Kill(ctx, id, reason) @@ -380,7 +380,7 @@ func TestAdminLaunchPlanExecutor_Kill(t *testing.T) { exec, err := NewAdminLaunchPlanExecutor(ctx, mockClient, adminConfig, promutils.NewTestScope(), memStore, func(string) {}) mockClient.On("TerminateExecution", ctx, - mock.MatchedBy(func(o *admin.ExecutionTerminateRequest) bool { return o.Id == id && o.Cause == reason }), + mock.MatchedBy(func(o *admin.ExecutionTerminateRequest) bool { return o.GetId() == id && o.GetCause() == reason }), ).Return(nil, status.Error(codes.NotFound, "")) assert.NoError(t, err) err = exec.Kill(ctx, id, reason) @@ -393,7 +393,7 @@ func TestAdminLaunchPlanExecutor_Kill(t *testing.T) { exec, err := NewAdminLaunchPlanExecutor(ctx, mockClient, adminConfig, promutils.NewTestScope(), memStore, func(string) {}) mockClient.On("TerminateExecution", ctx, - mock.MatchedBy(func(o *admin.ExecutionTerminateRequest) bool { return o.Id 
== id && o.Cause == reason }), + mock.MatchedBy(func(o *admin.ExecutionTerminateRequest) bool { return o.GetId() == id && o.GetCause() == reason }), ).Return(nil, status.Error(codes.Canceled, "")) assert.NoError(t, err) err = exec.Kill(ctx, id, reason) @@ -426,7 +426,7 @@ func TestNewAdminLaunchPlanExecutor_GetLaunchPlan(t *testing.T) { ).Return(&admin.LaunchPlan{Id: id}, nil) lp, err := exec.GetLaunchPlan(ctx, id) assert.NoError(t, err) - assert.Equal(t, lp.Id, id) + assert.Equal(t, lp.GetId(), id) }) t.Run("launch plan not found", func(t *testing.T) { diff --git a/flytepropeller/pkg/controller/nodes/subworkflow/launchplan/noop.go b/flytepropeller/pkg/controller/nodes/subworkflow/launchplan/noop.go index 3f7444788d..4c5873cc4d 100644 --- a/flytepropeller/pkg/controller/nodes/subworkflow/launchplan/noop.go +++ b/flytepropeller/pkg/controller/nodes/subworkflow/launchplan/noop.go @@ -19,12 +19,12 @@ type failFastWorkflowLauncher struct { func (failFastWorkflowLauncher) Launch(ctx context.Context, launchCtx LaunchContext, executionID *core.WorkflowExecutionIdentifier, launchPlanRef *core.Identifier, inputs *core.LiteralMap, parentWorkflowID v1alpha1.WorkflowID) error { - logger.Infof(ctx, "Fail: Launch Workflow requested with ExecID [%s], LaunchPlan [%s]", executionID.Name, fmt.Sprintf("%s:%s:%s", launchPlanRef.Project, launchPlanRef.Domain, launchPlanRef.Name)) + logger.Infof(ctx, "Fail: Launch Workflow requested with ExecID [%s], LaunchPlan [%s]", executionID.GetName(), fmt.Sprintf("%s:%s:%s", launchPlanRef.GetProject(), launchPlanRef.GetDomain(), launchPlanRef.GetName())) return errors.Wrapf(RemoteErrorUser, fmt.Errorf("badly configured system"), "please enable admin workflow launch to use launchplans") } func (failFastWorkflowLauncher) GetStatus(ctx context.Context, executionID *core.WorkflowExecutionIdentifier) (*admin.ExecutionClosure, *core.LiteralMap, error) { - logger.Infof(ctx, "NOOP: Workflow Status ExecID [%s]", executionID.Name) + logger.Infof(ctx, "NOOP: 
Workflow Status ExecID [%s]", executionID.GetName()) return nil, nil, errors.Wrapf(RemoteErrorUser, fmt.Errorf("badly configured system"), "please enable admin workflow launch to use launchplans") } diff --git a/flytepropeller/pkg/controller/nodes/subworkflow/launchplan_test.go b/flytepropeller/pkg/controller/nodes/subworkflow/launchplan_test.go index 68b5383b78..62445d5efa 100644 --- a/flytepropeller/pkg/controller/nodes/subworkflow/launchplan_test.go +++ b/flytepropeller/pkg/controller/nodes/subworkflow/launchplan_test.go @@ -77,11 +77,11 @@ func TestSubWorkflowHandler_StartLaunchPlan(t *testing.T) { mockLPExec.On("Launch", ctx, mock.MatchedBy(func(o launchplan.LaunchContext) bool { - return o.ParentNodeExecution.NodeId == mockNode.GetID() && - o.ParentNodeExecution.ExecutionId == wfExecID + return o.ParentNodeExecution.GetNodeId() == mockNode.GetID() && + o.ParentNodeExecution.GetExecutionId() == wfExecID }), mock.MatchedBy(func(o *core.WorkflowExecutionIdentifier) bool { - return assert.Equal(t, wfExecID.Project, o.Project) && assert.Equal(t, wfExecID.Domain, o.Domain) + return assert.Equal(t, wfExecID.GetProject(), o.GetProject()) && assert.Equal(t, wfExecID.GetDomain(), o.GetDomain()) }), mock.MatchedBy(func(o *core.Identifier) bool { return lpID == o }), mock.MatchedBy(func(o *core.LiteralMap) bool { return o.Literals == nil }), @@ -107,11 +107,11 @@ func TestSubWorkflowHandler_StartLaunchPlan(t *testing.T) { mockLPExec.On("Launch", ctx, mock.MatchedBy(func(o launchplan.LaunchContext) bool { - return o.ParentNodeExecution.NodeId == mockNode.GetID() && - o.ParentNodeExecution.ExecutionId == wfExecID + return o.ParentNodeExecution.GetNodeId() == mockNode.GetID() && + o.ParentNodeExecution.GetExecutionId() == wfExecID }), mock.MatchedBy(func(o *core.WorkflowExecutionIdentifier) bool { - return assert.Equal(t, wfExecID.Project, o.Project) && assert.Equal(t, wfExecID.Domain, o.Domain) + return assert.Equal(t, wfExecID.GetProject(), o.GetProject()) && 
assert.Equal(t, wfExecID.GetDomain(), o.GetDomain()) }), mock.MatchedBy(func(o *core.Identifier) bool { return lpID == o }), mock.MatchedBy(func(o *core.LiteralMap) bool { return o.Literals == nil }), @@ -134,11 +134,11 @@ func TestSubWorkflowHandler_StartLaunchPlan(t *testing.T) { mockLPExec.On("Launch", ctx, mock.MatchedBy(func(o launchplan.LaunchContext) bool { - return o.ParentNodeExecution.NodeId == mockNode.GetID() && - o.ParentNodeExecution.ExecutionId == wfExecID + return o.ParentNodeExecution.GetNodeId() == mockNode.GetID() && + o.ParentNodeExecution.GetExecutionId() == wfExecID }), mock.MatchedBy(func(o *core.WorkflowExecutionIdentifier) bool { - return assert.Equal(t, wfExecID.Project, o.Project) && assert.Equal(t, wfExecID.Domain, o.Domain) + return assert.Equal(t, wfExecID.GetProject(), o.GetProject()) && assert.Equal(t, wfExecID.GetDomain(), o.GetDomain()) }), mock.MatchedBy(func(o *core.Identifier) bool { return lpID == o }), mock.MatchedBy(func(o *core.LiteralMap) bool { return o.Literals == nil }), @@ -161,11 +161,11 @@ func TestSubWorkflowHandler_StartLaunchPlan(t *testing.T) { mockLPExec.On("Launch", ctx, mock.MatchedBy(func(o launchplan.LaunchContext) bool { - return o.ParentNodeExecution.NodeId == mockNode.GetID() && - o.ParentNodeExecution.ExecutionId == wfExecID + return o.ParentNodeExecution.GetNodeId() == mockNode.GetID() && + o.ParentNodeExecution.GetExecutionId() == wfExecID }), mock.MatchedBy(func(o *core.WorkflowExecutionIdentifier) bool { - return assert.Equal(t, wfExecID.Project, o.Project) && assert.Equal(t, wfExecID.Domain, o.Domain) + return assert.Equal(t, wfExecID.GetProject(), o.GetProject()) && assert.Equal(t, wfExecID.GetDomain(), o.GetDomain()) }), mock.MatchedBy(func(o *core.Identifier) bool { return lpID == o }), mock.MatchedBy(func(o *core.LiteralMap) bool { return o.Literals == nil }), @@ -216,11 +216,11 @@ func TestSubWorkflowHandler_StartLaunchPlan(t *testing.T) { mockLPExec.On("Launch", ctx, mock.MatchedBy(func(o 
launchplan.LaunchContext) bool { - return o.ParentNodeExecution.NodeId == mockNode.GetID() && - o.ParentNodeExecution.ExecutionId == wfExecID + return o.ParentNodeExecution.GetNodeId() == mockNode.GetID() && + o.ParentNodeExecution.GetExecutionId() == wfExecID }), mock.MatchedBy(func(o *core.WorkflowExecutionIdentifier) bool { - return assert.Equal(t, wfExecID.Project, o.Project) && assert.Equal(t, wfExecID.Domain, o.Domain) + return assert.Equal(t, wfExecID.GetProject(), o.GetProject()) && assert.Equal(t, wfExecID.GetDomain(), o.GetDomain()) }), mock.MatchedBy(func(o *core.Identifier) bool { return lpID == o }), mock.MatchedBy(func(o *core.LiteralMap) bool { return o.Literals == nil }), @@ -317,7 +317,7 @@ func TestSubWorkflowHandler_CheckLaunchPlanStatus(t *testing.T) { mockLPExec.On("GetStatus", ctx, mock.MatchedBy(func(o *core.WorkflowExecutionIdentifier) bool { - return assert.Equal(t, wfExecID.Project, o.Project) && assert.Equal(t, wfExecID.Domain, o.Domain) + return assert.Equal(t, wfExecID.GetProject(), o.GetProject()) && assert.Equal(t, wfExecID.GetDomain(), o.GetDomain()) }), ).Return(&admin.ExecutionClosure{ Phase: core.WorkflowExecution_RUNNING, @@ -340,7 +340,7 @@ func TestSubWorkflowHandler_CheckLaunchPlanStatus(t *testing.T) { mockLPExec.On("GetStatus", ctx, mock.MatchedBy(func(o *core.WorkflowExecutionIdentifier) bool { - return assert.Equal(t, wfExecID.Project, o.Project) && assert.Equal(t, wfExecID.Domain, o.Domain) + return assert.Equal(t, wfExecID.GetProject(), o.GetProject()) && assert.Equal(t, wfExecID.GetDomain(), o.GetDomain()) }), ).Return(&admin.ExecutionClosure{ Phase: core.WorkflowExecution_SUCCEEDED, @@ -375,7 +375,7 @@ func TestSubWorkflowHandler_CheckLaunchPlanStatus(t *testing.T) { mockLPExec.On("GetStatus", ctx, mock.MatchedBy(func(o *core.WorkflowExecutionIdentifier) bool { - return assert.Equal(t, wfExecID.Project, o.Project) && assert.Equal(t, wfExecID.Domain, o.Domain) + return assert.Equal(t, wfExecID.GetProject(), 
o.GetProject()) && assert.Equal(t, wfExecID.GetDomain(), o.GetDomain()) }), ).Return(&admin.ExecutionClosure{ Phase: core.WorkflowExecution_SUCCEEDED, @@ -417,7 +417,7 @@ func TestSubWorkflowHandler_CheckLaunchPlanStatus(t *testing.T) { mockLPExec.On("GetStatus", ctx, mock.MatchedBy(func(o *core.WorkflowExecutionIdentifier) bool { - return assert.Equal(t, wfExecID.Project, o.Project) && assert.Equal(t, wfExecID.Domain, o.Domain) + return assert.Equal(t, wfExecID.GetProject(), o.GetProject()) && assert.Equal(t, wfExecID.GetDomain(), o.GetDomain()) }), ).Return(&admin.ExecutionClosure{ Phase: core.WorkflowExecution_SUCCEEDED, @@ -454,7 +454,7 @@ func TestSubWorkflowHandler_CheckLaunchPlanStatus(t *testing.T) { mockLPExec.On("GetStatus", ctx, mock.MatchedBy(func(o *core.WorkflowExecutionIdentifier) bool { - return assert.Equal(t, wfExecID.Project, o.Project) && assert.Equal(t, wfExecID.Domain, o.Domain) + return assert.Equal(t, wfExecID.GetProject(), o.GetProject()) && assert.Equal(t, wfExecID.GetDomain(), o.GetDomain()) }), ).Return(&admin.ExecutionClosure{ Phase: core.WorkflowExecution_FAILED, @@ -484,7 +484,7 @@ func TestSubWorkflowHandler_CheckLaunchPlanStatus(t *testing.T) { mockLPExec.On("GetStatus", ctx, mock.MatchedBy(func(o *core.WorkflowExecutionIdentifier) bool { - return assert.Equal(t, wfExecID.Project, o.Project) && assert.Equal(t, wfExecID.Domain, o.Domain) + return assert.Equal(t, wfExecID.GetProject(), o.GetProject()) && assert.Equal(t, wfExecID.GetDomain(), o.GetDomain()) }), ).Return(&admin.ExecutionClosure{ Phase: core.WorkflowExecution_FAILED, @@ -508,7 +508,7 @@ func TestSubWorkflowHandler_CheckLaunchPlanStatus(t *testing.T) { mockLPExec.On("GetStatus", ctx, mock.MatchedBy(func(o *core.WorkflowExecutionIdentifier) bool { - return assert.Equal(t, wfExecID.Project, o.Project) && assert.Equal(t, wfExecID.Domain, o.Domain) + return assert.Equal(t, wfExecID.GetProject(), o.GetProject()) && assert.Equal(t, wfExecID.GetDomain(), o.GetDomain()) }), 
).Return(&admin.ExecutionClosure{ Phase: core.WorkflowExecution_ABORTED, @@ -532,7 +532,7 @@ func TestSubWorkflowHandler_CheckLaunchPlanStatus(t *testing.T) { mockLPExec.On("GetStatus", ctx, mock.MatchedBy(func(o *core.WorkflowExecutionIdentifier) bool { - return assert.Equal(t, wfExecID.Project, o.Project) && assert.Equal(t, wfExecID.Domain, o.Domain) + return assert.Equal(t, wfExecID.GetProject(), o.GetProject()) && assert.Equal(t, wfExecID.GetDomain(), o.GetDomain()) }), ).Return(nil, &core.LiteralMap{}, errors.Wrapf(launchplan.RemoteErrorNotFound, fmt.Errorf("some error"), "not found")) @@ -554,7 +554,7 @@ func TestSubWorkflowHandler_CheckLaunchPlanStatus(t *testing.T) { mockLPExec.On("GetStatus", ctx, mock.MatchedBy(func(o *core.WorkflowExecutionIdentifier) bool { - return assert.Equal(t, wfExecID.Project, o.Project) && assert.Equal(t, wfExecID.Domain, o.Domain) + return assert.Equal(t, wfExecID.GetProject(), o.GetProject()) && assert.Equal(t, wfExecID.GetDomain(), o.GetDomain()) }), ).Return(nil, &core.LiteralMap{}, errors.Wrapf(launchplan.RemoteErrorSystem, fmt.Errorf("some error"), "not found")) @@ -582,7 +582,7 @@ func TestSubWorkflowHandler_CheckLaunchPlanStatus(t *testing.T) { mockLPExec.On("GetStatus", ctx, mock.MatchedBy(func(o *core.WorkflowExecutionIdentifier) bool { - return assert.Equal(t, wfExecID.Project, o.Project) && assert.Equal(t, wfExecID.Domain, o.Domain) + return assert.Equal(t, wfExecID.GetProject(), o.GetProject()) && assert.Equal(t, wfExecID.GetDomain(), o.GetDomain()) }), ).Return(&admin.ExecutionClosure{ Phase: core.WorkflowExecution_SUCCEEDED, @@ -616,7 +616,7 @@ func TestSubWorkflowHandler_CheckLaunchPlanStatus(t *testing.T) { mockLPExec.On("GetStatus", ctx, mock.MatchedBy(func(o *core.WorkflowExecutionIdentifier) bool { - return assert.Equal(t, wfExecID.Project, o.Project) && assert.Equal(t, wfExecID.Domain, o.Domain) + return assert.Equal(t, wfExecID.GetProject(), o.GetProject()) && assert.Equal(t, wfExecID.GetDomain(), 
o.GetDomain()) }), ).Return(&admin.ExecutionClosure{ Phase: core.WorkflowExecution_SUCCEEDED, @@ -670,7 +670,7 @@ func TestLaunchPlanHandler_HandleAbort(t *testing.T) { mockLPExec.On("Kill", ctx, mock.MatchedBy(func(o *core.WorkflowExecutionIdentifier) bool { - return assert.Equal(t, wfExecID.Project, o.Project) && assert.Equal(t, wfExecID.Domain, o.Domain) + return assert.Equal(t, wfExecID.GetProject(), o.GetProject()) && assert.Equal(t, wfExecID.GetDomain(), o.GetDomain()) }), mock.AnythingOfType(reflect.String.String()), ).Return(nil) @@ -692,7 +692,7 @@ func TestLaunchPlanHandler_HandleAbort(t *testing.T) { mockLPExec.On("Kill", ctx, mock.MatchedBy(func(o *core.WorkflowExecutionIdentifier) bool { - return assert.Equal(t, wfExecID.Project, o.Project) && assert.Equal(t, wfExecID.Domain, o.Domain) + return assert.Equal(t, wfExecID.GetProject(), o.GetProject()) && assert.Equal(t, wfExecID.GetDomain(), o.GetDomain()) }), mock.AnythingOfType(reflect.String.String()), ).Return(expectedErr) diff --git a/flytepropeller/pkg/controller/nodes/subworkflow/util.go b/flytepropeller/pkg/controller/nodes/subworkflow/util.go index ae23439c97..8c119a2175 100644 --- a/flytepropeller/pkg/controller/nodes/subworkflow/util.go +++ b/flytepropeller/pkg/controller/nodes/subworkflow/util.go @@ -10,21 +10,21 @@ import ( const maxLengthForSubWorkflow = 20 func GetChildWorkflowExecutionID(nodeExecID *core.NodeExecutionIdentifier, attempt uint32) (*core.WorkflowExecutionIdentifier, error) { - name, err := encoding.FixedLengthUniqueIDForParts(maxLengthForSubWorkflow, []string{nodeExecID.ExecutionId.Name, nodeExecID.NodeId, strconv.Itoa(int(attempt))}) + name, err := encoding.FixedLengthUniqueIDForParts(maxLengthForSubWorkflow, []string{nodeExecID.GetExecutionId().GetName(), nodeExecID.GetNodeId(), strconv.Itoa(int(attempt))}) if err != nil { return nil, err } // Restriction on name is 20 chars return &core.WorkflowExecutionIdentifier{ - Project: nodeExecID.ExecutionId.Project, - Domain: 
nodeExecID.ExecutionId.Domain, + Project: nodeExecID.GetExecutionId().GetProject(), + Domain: nodeExecID.GetExecutionId().GetDomain(), Name: name, }, nil } func GetChildWorkflowExecutionIDV2(nodeExecID *core.NodeExecutionIdentifier, attempt uint32) (*core.WorkflowExecutionIdentifier, error) { - name, err := encoding.FixedLengthUniqueIDForParts(maxLengthForSubWorkflow, []string{nodeExecID.ExecutionId.Name, nodeExecID.NodeId, strconv.Itoa(int(attempt))}, + name, err := encoding.FixedLengthUniqueIDForParts(maxLengthForSubWorkflow, []string{nodeExecID.GetExecutionId().GetName(), nodeExecID.GetNodeId(), strconv.Itoa(int(attempt))}, encoding.NewAlgorithmOption(encoding.Algorithm64)) if err != nil { return nil, err @@ -32,8 +32,8 @@ func GetChildWorkflowExecutionIDV2(nodeExecID *core.NodeExecutionIdentifier, att // Restriction on name is 20 chars return &core.WorkflowExecutionIdentifier{ - Project: nodeExecID.ExecutionId.Project, - Domain: nodeExecID.ExecutionId.Domain, + Project: nodeExecID.GetExecutionId().GetProject(), + Domain: nodeExecID.GetExecutionId().GetDomain(), Name: name, }, nil } diff --git a/flytepropeller/pkg/controller/nodes/subworkflow/util_test.go b/flytepropeller/pkg/controller/nodes/subworkflow/util_test.go index f1df02deb6..96b93b8f8a 100644 --- a/flytepropeller/pkg/controller/nodes/subworkflow/util_test.go +++ b/flytepropeller/pkg/controller/nodes/subworkflow/util_test.go @@ -19,6 +19,6 @@ func TestGetChildWorkflowExecutionID(t *testing.T) { }, }, 1) - assert.Equal(t, id.Name, "fav2uxxi") + assert.Equal(t, id.GetName(), "fav2uxxi") assert.NoError(t, err) } diff --git a/flytepropeller/pkg/controller/nodes/task/cache.go b/flytepropeller/pkg/controller/nodes/task/cache.go index fab3cd4d61..d408a5af85 100644 --- a/flytepropeller/pkg/controller/nodes/task/cache.go +++ b/flytepropeller/pkg/controller/nodes/task/cache.go @@ -26,10 +26,10 @@ func (t *Handler) GetCatalogKey(ctx context.Context, nCtx interfaces.NodeExecuti } return catalog.Key{ - Identifier: 
*taskTemplate.Id, - CacheVersion: taskTemplate.Metadata.DiscoveryVersion, - CacheIgnoreInputVars: taskTemplate.Metadata.CacheIgnoreInputVars, - TypedInterface: *taskTemplate.Interface, + Identifier: *taskTemplate.Id, //nolint:protogetter + CacheVersion: taskTemplate.GetMetadata().GetDiscoveryVersion(), + CacheIgnoreInputVars: taskTemplate.GetMetadata().GetCacheIgnoreInputVars(), + TypedInterface: *taskTemplate.GetInterface(), InputReader: nCtx.InputReader(), }, nil } @@ -62,5 +62,5 @@ func (t *Handler) IsCacheable(ctx context.Context, nCtx interfaces.NodeExecution return false, false, err } - return taskTemplate.Metadata.Discoverable, taskTemplate.Metadata.Discoverable && taskTemplate.Metadata.CacheSerializable, nil + return taskTemplate.GetMetadata().GetDiscoverable(), taskTemplate.GetMetadata().GetDiscoverable() && taskTemplate.GetMetadata().GetCacheSerializable(), nil } diff --git a/flytepropeller/pkg/controller/nodes/task/handler.go b/flytepropeller/pkg/controller/nodes/task/handler.go index 2adea27312..000d6bd7e7 100644 --- a/flytepropeller/pkg/controller/nodes/task/handler.go +++ b/flytepropeller/pkg/controller/nodes/task/handler.go @@ -434,6 +434,7 @@ func (t Handler) invokePlugin(ctx context.Context, p pluginCore.Plugin, tCtx *ta pluginTrns.TransitionPreviouslyRecorded() return pluginTrns, nil } + // #nosec G115 if pluginTrns.pInfo.Version() > uint32(t.cfg.MaxPluginPhaseVersions) { logger.Errorf(ctx, "Too many Plugin p versions for plugin [%s]. 
p versions [%d/%d]", p.GetID(), pluginTrns.pInfo.Version(), t.cfg.MaxPluginPhaseVersions) pluginTrns.ObservedExecutionError(&io.ExecutionError{ @@ -565,7 +566,7 @@ func (t Handler) Handle(ctx context.Context, nCtx interfaces.NodeExecutionContex logger.Errorf(ctx, "failed to read TaskTemplate, error :%s", err.Error()) return handler.UnknownTransition, err } - if tk.Interface != nil && tk.Interface.Inputs != nil && len(tk.Interface.Inputs.Variables) > 0 { + if tk.GetInterface() != nil && tk.GetInterface().GetInputs() != nil && len(tk.GetInterface().GetInputs().GetVariables()) > 0 { inputs, err = nCtx.InputReader().Get(ctx) if err != nil { logger.Errorf(ctx, "failed to read inputs when checking catalog cache %w", err) @@ -577,7 +578,7 @@ func (t Handler) Handle(ctx context.Context, nCtx interfaces.NodeExecutionContex occurredAt := time.Now() // STEP 2: If no cache-hit and not transitioning to PhaseWaitingForCache, then lets invoke the plugin and wait for a transition out of undefined if pluginTrns.execInfo.TaskNodeInfo == nil || (pluginTrns.pInfo.Phase() != pluginCore.PhaseWaitingForCache && - pluginTrns.execInfo.TaskNodeInfo.TaskNodeMetadata.CacheStatus != core.CatalogCacheStatus_CACHE_HIT) { + pluginTrns.execInfo.TaskNodeInfo.TaskNodeMetadata.GetCacheStatus() != core.CatalogCacheStatus_CACHE_HIT) { var err error pluginTrns, err = t.invokePlugin(ctx, p, tCtx, ts) @@ -624,7 +625,7 @@ func (t Handler) Handle(ctx context.Context, nCtx interfaces.NodeExecutionContex return handler.UnknownTransition, err } if err := nCtx.EventsRecorder().RecordTaskEvent(ctx, evInfo, t.eventConfig); err != nil { - logger.Errorf(ctx, "Event recording failed for Plugin [%s], eventPhase [%s], error :%s", p.GetID(), evInfo.Phase.String(), err.Error()) + logger.Errorf(ctx, "Event recording failed for Plugin [%s], eventPhase [%s], error :%s", p.GetID(), evInfo.GetPhase().String(), err.Error()) // Check for idempotency // Check for terminate state error return handler.UnknownTransition, err @@ 
-694,8 +695,8 @@ func (t *Handler) ValidateOutput(ctx context.Context, nodeID v1alpha1.NodeID, i return nil, err } - iface := tk.Interface - outputsDeclared := iface != nil && iface.Outputs != nil && len(iface.Outputs.Variables) > 0 + iface := tk.GetInterface() + outputsDeclared := iface != nil && iface.GetOutputs() != nil && len(iface.GetOutputs().GetVariables()) > 0 if r == nil { if outputsDeclared { @@ -838,7 +839,7 @@ func (t Handler) Abort(ctx context.Context, nCtx interfaces.NodeExecutionContext evInfo.Phase = core.TaskExecution_ABORTED } if err := evRecorder.RecordTaskEvent(ctx, evInfo, t.eventConfig); err != nil { - logger.Errorf(ctx, "Event recording failed for Plugin [%s], eventPhase [%s], error :%s", p.GetID(), evInfo.Phase.String(), err.Error()) + logger.Errorf(ctx, "Event recording failed for Plugin [%s], eventPhase [%s], error :%s", p.GetID(), evInfo.GetPhase().String(), err.Error()) // Check for idempotency // Check for terminate state error return err diff --git a/flytepropeller/pkg/controller/nodes/task/handler_test.go b/flytepropeller/pkg/controller/nodes/task/handler_test.go index 52b937cb90..62e64c02f3 100644 --- a/flytepropeller/pkg/controller/nodes/task/handler_test.go +++ b/flytepropeller/pkg/controller/nodes/task/handler_test.go @@ -711,6 +711,7 @@ func Test_task_Handle_NoCatalog(t *testing.T) { t.Run(tt.name, func(t *testing.T) { state := &taskNodeStateHolder{} ev := &fakeBufferedEventRecorder{} + // #nosec G115 nCtx := createNodeContext(tt.args.startingPluginPhase, uint32(tt.args.startingPluginPhaseVersion), tt.args.expectedState, ev, "test", state, tt.want.incrParallel) c := &pluginCatalogMocks.Client{} tk := Handler{ @@ -735,11 +736,11 @@ func Test_task_Handle_NoCatalog(t *testing.T) { if tt.want.event { if assert.Equal(t, 1, len(ev.evs)) { e := ev.evs[0] - assert.Equal(t, tt.want.eventPhase.String(), e.Phase.String()) + assert.Equal(t, tt.want.eventPhase.String(), e.GetPhase().String()) if tt.args.expectedState.TaskInfo != nil { - 
assert.Equal(t, tt.args.expectedState.TaskInfo.Logs, e.Logs) + assert.Equal(t, tt.args.expectedState.TaskInfo.Logs, e.GetLogs()) } - if e.Phase == core.TaskExecution_RUNNING || e.Phase == core.TaskExecution_SUCCEEDED { + if e.GetPhase() == core.TaskExecution_RUNNING || e.GetPhase() == core.TaskExecution_SUCCEEDED { assert.True(t, proto.Equal(inputs, e.GetInputData())) } } @@ -761,11 +762,11 @@ func Test_task_Handle_NoCatalog(t *testing.T) { assert.Equal(t, tt.args.expectedState.PhaseVersion, state.s.PluginPhaseVersion) if tt.want.checkpoint { assert.Equal(t, "s3://sandbox/x/name-n1-1/_flytecheckpoints", - got.Info().GetInfo().TaskNodeInfo.TaskNodeMetadata.CheckpointUri) + got.Info().GetInfo().TaskNodeInfo.TaskNodeMetadata.GetCheckpointUri()) } else { assert.True(t, got.Info().GetInfo() == nil || got.Info().GetInfo().TaskNodeInfo == nil || got.Info().GetInfo().TaskNodeInfo.TaskNodeMetadata == nil || - len(got.Info().GetInfo().TaskNodeInfo.TaskNodeMetadata.CheckpointUri) == 0) + len(got.Info().GetInfo().TaskNodeInfo.TaskNodeMetadata.GetCheckpointUri()) == 0) } } }) diff --git a/flytepropeller/pkg/controller/nodes/task/k8s/task_exec_context.go b/flytepropeller/pkg/controller/nodes/task/k8s/task_exec_context.go index bb987acbc2..3b5cd3a147 100644 --- a/flytepropeller/pkg/controller/nodes/task/k8s/task_exec_context.go +++ b/flytepropeller/pkg/controller/nodes/task/k8s/task_exec_context.go @@ -50,8 +50,8 @@ func newTaskExecutionMetadata(tCtx pluginsCore.TaskExecutionMetadata, taskTmpl * var err error secretsMap := make(map[string]string) injectLabels := make(map[string]string) - if taskTmpl.SecurityContext != nil && len(taskTmpl.SecurityContext.Secrets) > 0 { - secretsMap, err = secrets.MarshalSecretsToMapStrings(taskTmpl.SecurityContext.Secrets) + if taskTmpl.GetSecurityContext() != nil && len(taskTmpl.GetSecurityContext().GetSecrets()) > 0 { + secretsMap, err = secrets.MarshalSecretsToMapStrings(taskTmpl.GetSecurityContext().GetSecrets()) if err != nil { return 
TaskExecutionMetadata{}, err } @@ -59,7 +59,7 @@ func newTaskExecutionMetadata(tCtx pluginsCore.TaskExecutionMetadata, taskTmpl * injectLabels[secrets.PodLabel] = secrets.PodLabelValue } - id := tCtx.GetSecurityContext().RunAs.ExecutionIdentity + id := tCtx.GetSecurityContext().RunAs.GetExecutionIdentity() //nolint:protogetter if len(id) > 0 { sanitizedID := k8sUtils.SanitizeLabelValue(id) injectLabels[executionIdentityVariable] = sanitizedID diff --git a/flytepropeller/pkg/controller/nodes/task/plugin_state_manager.go b/flytepropeller/pkg/controller/nodes/task/plugin_state_manager.go index 3613fec916..52edbb3d70 100644 --- a/flytepropeller/pkg/controller/nodes/task/plugin_state_manager.go +++ b/flytepropeller/pkg/controller/nodes/task/plugin_state_manager.go @@ -75,7 +75,7 @@ func newPluginStateManager(_ context.Context, prevCodecVersion CodecVersion, pre return &pluginStateManager{ codec: codex.GobStateCodec{}, codecVersion: GobCodecVersion, - prevStateVersion: uint8(prevStateVersion), + prevStateVersion: uint8(prevStateVersion), // #nosec G115 prevState: prevState, }, nil } diff --git a/flytepropeller/pkg/controller/nodes/task/secretmanager/secrets.go b/flytepropeller/pkg/controller/nodes/task/secretmanager/secrets.go index 230017d7d3..7ffbfff9f2 100644 --- a/flytepropeller/pkg/controller/nodes/task/secretmanager/secrets.go +++ b/flytepropeller/pkg/controller/nodes/task/secretmanager/secrets.go @@ -50,18 +50,18 @@ func (f FileEnvSecretManager) Get(ctx context.Context, key string) (string, erro // Prefix+SecretGroup+_+SecretKey. If the secret is not found in environment, it'll lookup the secret from files using // the configured SecretPath / SecretGroup / SecretKey. func (f FileEnvSecretManager) GetForSecret(ctx context.Context, secret *coreIdl.Secret) (string, error) { - if len(secret.Group) == 0 || len(secret.Key) == 0 { + if len(secret.GetGroup()) == 0 || len(secret.GetKey()) == 0 { return "", fmt.Errorf("both key and group are required parameters. 
Secret: [%v]", secret.String()) } - envVar := fmt.Sprintf(envVarLookupFormatter, f.envPrefix, strings.ToUpper(secret.Group), strings.ToUpper(secret.Key)) + envVar := fmt.Sprintf(envVarLookupFormatter, f.envPrefix, strings.ToUpper(secret.GetGroup()), strings.ToUpper(secret.GetKey())) v, ok := os.LookupEnv(envVar) if ok { logger.Debugf(ctx, "Secret found %s", v) return v, nil } - secretFile := filepath.Join(f.secretPath, filepath.Join(secret.Group, secret.Key)) + secretFile := filepath.Join(f.secretPath, filepath.Join(secret.GetGroup(), secret.GetKey())) if _, err := os.Stat(secretFile); err != nil { if os.IsNotExist(err) { return "", fmt.Errorf("secrets not found - Env [%s], file [%s]", envVar, secretFile) diff --git a/flytepropeller/pkg/controller/nodes/task/taskexec_context.go b/flytepropeller/pkg/controller/nodes/task/taskexec_context.go index 25b936a8e4..1f29060ca9 100644 --- a/flytepropeller/pkg/controller/nodes/task/taskexec_context.go +++ b/flytepropeller/pkg/controller/nodes/task/taskexec_context.go @@ -258,12 +258,12 @@ func (t *Handler) newTaskExecutionContext(ctx context.Context, nCtx interfaces.N length = *l } - rawOutputPrefix, uniqueID, err := ComputeRawOutputPrefix(ctx, length, nCtx, currentNodeUniqueID, id.RetryAttempt) + rawOutputPrefix, uniqueID, err := ComputeRawOutputPrefix(ctx, length, nCtx, currentNodeUniqueID, id.GetRetryAttempt()) if err != nil { return nil, err } - prevCheckpointPath, err := ComputePreviousCheckpointPath(ctx, length, nCtx, currentNodeUniqueID, id.RetryAttempt) + prevCheckpointPath, err := ComputePreviousCheckpointPath(ctx, length, nCtx, currentNodeUniqueID, id.GetRetryAttempt()) if err != nil { return nil, err } @@ -280,9 +280,9 @@ func (t *Handler) newTaskExecutionContext(ctx context.Context, nCtx interfaces.N } resourceNamespacePrefix := pluginCore.ResourceNamespace(t.resourceManager.GetID()).CreateSubNamespace(pluginCore.ResourceNamespace(plugin.GetID())) - maxAttempts := 
uint32(controllerconfig.GetConfig().NodeConfig.DefaultMaxAttempts) + maxAttempts := uint32(controllerconfig.GetConfig().NodeConfig.DefaultMaxAttempts) // #nosec G115 if nCtx.Node().GetRetryStrategy() != nil && nCtx.Node().GetRetryStrategy().MinAttempts != nil { - maxAttempts = uint32(*nCtx.Node().GetRetryStrategy().MinAttempts) + maxAttempts = uint32(*nCtx.Node().GetRetryStrategy().MinAttempts) // #nosec G115 } taskTemplatePath, err := ioutils.GetTaskTemplatePath(ctx, nCtx.DataStore(), nCtx.NodeStatus().GetDataDir()) diff --git a/flytepropeller/pkg/controller/nodes/task/taskexec_context_test.go b/flytepropeller/pkg/controller/nodes/task/taskexec_context_test.go index e798f82a04..9a469fd25c 100644 --- a/flytepropeller/pkg/controller/nodes/task/taskexec_context_test.go +++ b/flytepropeller/pkg/controller/nodes/task/taskexec_context_test.go @@ -167,10 +167,10 @@ func TestHandler_newTaskExecutionContext(t *testing.T) { assert.Equal(t, got.TaskExecutionMetadata().GetOverrides().GetResources(), resources) assert.Equal(t, got.TaskExecutionMetadata().GetTaskExecutionID().GetGeneratedName(), "name-n1-1") - assert.Equal(t, got.TaskExecutionMetadata().GetTaskExecutionID().GetID().TaskId, taskID) - assert.Equal(t, got.TaskExecutionMetadata().GetTaskExecutionID().GetID().RetryAttempt, uint32(1)) - assert.Equal(t, got.TaskExecutionMetadata().GetTaskExecutionID().GetID().NodeExecutionId.GetNodeId(), nodeID) - assert.Equal(t, got.TaskExecutionMetadata().GetTaskExecutionID().GetID().NodeExecutionId.GetExecutionId(), wfExecID) + assert.Equal(t, got.TaskExecutionMetadata().GetTaskExecutionID().GetID().TaskId, taskID) //nolint:protogetter + assert.Equal(t, got.TaskExecutionMetadata().GetTaskExecutionID().GetID().RetryAttempt, uint32(1)) //nolint:protogetter + assert.Equal(t, got.TaskExecutionMetadata().GetTaskExecutionID().GetID().NodeExecutionId.GetNodeId(), nodeID) //nolint:protogetter + assert.Equal(t, 
got.TaskExecutionMetadata().GetTaskExecutionID().GetID().NodeExecutionId.GetExecutionId(), wfExecID) //nolint:protogetter assert.Equal(t, got.TaskExecutionMetadata().GetTaskExecutionID().GetUniqueNodeID(), nodeID) assert.EqualValues(t, got.ResourceManager().(resourcemanager.TaskResourceManager).GetResourcePoolInfo(), make([]*event.ResourcePoolInfo, 0)) diff --git a/flytepropeller/pkg/controller/nodes/task/transformer.go b/flytepropeller/pkg/controller/nodes/task/transformer.go index 242c1334ce..db3db668ea 100644 --- a/flytepropeller/pkg/controller/nodes/task/transformer.go +++ b/flytepropeller/pkg/controller/nodes/task/transformer.go @@ -57,16 +57,16 @@ func ToTaskEventPhase(p pluginCore.Phase) core.TaskExecution_Phase { func getParentNodeExecIDForTask(taskExecID *core.TaskExecutionIdentifier, execContext executors.ExecutionContext) (*core.NodeExecutionIdentifier, error) { nodeExecutionID := &core.NodeExecutionIdentifier{ - ExecutionId: taskExecID.NodeExecutionId.ExecutionId, + ExecutionId: taskExecID.GetNodeExecutionId().GetExecutionId(), } if execContext.GetEventVersion() != v1alpha1.EventVersion0 { - currentNodeUniqueID, err := common.GenerateUniqueID(execContext.GetParentInfo(), taskExecID.NodeExecutionId.NodeId) + currentNodeUniqueID, err := common.GenerateUniqueID(execContext.GetParentInfo(), taskExecID.GetNodeExecutionId().GetNodeId()) if err != nil { return nil, err } nodeExecutionID.NodeId = currentNodeUniqueID } else { - nodeExecutionID.NodeId = taskExecID.NodeExecutionId.NodeId + nodeExecutionID.NodeId = taskExecID.GetNodeExecutionId().GetNodeId() } return nodeExecutionID, nil } @@ -127,6 +127,7 @@ func ToTaskExecutionEvent(input ToTaskExecutionEventInputs) (*event.TaskExecutio Logs: e.Logs, RetryAttempt: e.RetryAttempt, Phase: phase, + CustomInfo: e.CustomInfo, } } } @@ -145,9 +146,9 @@ func ToTaskExecutionEvent(input ToTaskExecutionEventInputs) (*event.TaskExecutio }) } tev := &event.TaskExecutionEvent{ - TaskId: taskExecID.TaskId, + TaskId: 
taskExecID.GetTaskId(), ParentNodeExecutionId: nodeExecutionID, - RetryAttempt: taskExecID.RetryAttempt, + RetryAttempt: taskExecID.GetRetryAttempt(), Phase: ToTaskEventPhase(input.Info.Phase()), PhaseVersion: input.Info.Version(), ProducerId: input.ClusterID, diff --git a/flytepropeller/pkg/controller/nodes/task/transformer_test.go b/flytepropeller/pkg/controller/nodes/task/transformer_test.go index db89dda3e6..825b58a2ab 100644 --- a/flytepropeller/pkg/controller/nodes/task/transformer_test.go +++ b/flytepropeller/pkg/controller/nodes/task/transformer_test.go @@ -99,21 +99,21 @@ func TestToTaskExecutionEvent(t *testing.T) { }, }) assert.NoError(t, err) - assert.Nil(t, tev.Logs) - assert.Equal(t, core.TaskExecution_WAITING_FOR_RESOURCES, tev.Phase) - assert.Equal(t, uint32(0), tev.PhaseVersion) - assert.Equal(t, np, tev.OccurredAt) - assert.Equal(t, tkID, tev.TaskId) - assert.Equal(t, nodeID, tev.ParentNodeExecutionId) + assert.Nil(t, tev.GetLogs()) + assert.Equal(t, core.TaskExecution_WAITING_FOR_RESOURCES, tev.GetPhase()) + assert.Equal(t, uint32(0), tev.GetPhaseVersion()) + assert.Equal(t, np, tev.GetOccurredAt()) + assert.Equal(t, tkID, tev.GetTaskId()) + assert.Equal(t, nodeID, tev.GetParentNodeExecutionId()) assert.Equal(t, inputPath, tev.GetInputUri()) - assert.Nil(t, tev.OutputResult) - assert.Equal(t, event.TaskExecutionMetadata_INTERRUPTIBLE, tev.Metadata.InstanceClass) - assert.Equal(t, containerTaskType, tev.TaskType) - assert.Equal(t, "reason", tev.Reason) - assert.Equal(t, containerPluginIdentifier, tev.Metadata.PluginIdentifier) - assert.Equal(t, generatedName, tev.Metadata.GeneratedName) - assert.EqualValues(t, resourcePoolInfo, tev.Metadata.ResourcePoolInfo) - assert.Equal(t, testClusterID, tev.ProducerId) + assert.Nil(t, tev.GetOutputResult()) + assert.Equal(t, event.TaskExecutionMetadata_INTERRUPTIBLE, tev.GetMetadata().GetInstanceClass()) + assert.Equal(t, containerTaskType, tev.GetTaskType()) + assert.Equal(t, "reason", tev.GetReason()) + 
assert.Equal(t, containerPluginIdentifier, tev.GetMetadata().GetPluginIdentifier()) + assert.Equal(t, generatedName, tev.GetMetadata().GetGeneratedName()) + assert.EqualValues(t, resourcePoolInfo, tev.GetMetadata().GetResourcePoolInfo()) + assert.Equal(t, testClusterID, tev.GetProducerId()) l := []*core.TaskLog{ {Uri: "x", Name: "y", MessageFormat: core.TaskLog_JSON}, @@ -139,21 +139,21 @@ func TestToTaskExecutionEvent(t *testing.T) { }, }) assert.NoError(t, err) - assert.Equal(t, core.TaskExecution_RUNNING, tev.Phase) - assert.Equal(t, uint32(1), tev.PhaseVersion) - assert.Equal(t, l, tev.Logs) - assert.Equal(t, c, tev.CustomInfo) - assert.Equal(t, np, tev.OccurredAt) - assert.Equal(t, tkID, tev.TaskId) - assert.Equal(t, nodeID, tev.ParentNodeExecutionId) + assert.Equal(t, core.TaskExecution_RUNNING, tev.GetPhase()) + assert.Equal(t, uint32(1), tev.GetPhaseVersion()) + assert.Equal(t, l, tev.GetLogs()) + assert.Equal(t, c, tev.GetCustomInfo()) + assert.Equal(t, np, tev.GetOccurredAt()) + assert.Equal(t, tkID, tev.GetTaskId()) + assert.Equal(t, nodeID, tev.GetParentNodeExecutionId()) assert.Equal(t, inputPath, tev.GetInputUri()) - assert.Nil(t, tev.OutputResult) - assert.Equal(t, event.TaskExecutionMetadata_INTERRUPTIBLE, tev.Metadata.InstanceClass) - assert.Equal(t, containerTaskType, tev.TaskType) - assert.Equal(t, containerPluginIdentifier, tev.Metadata.PluginIdentifier) - assert.Equal(t, generatedName, tev.Metadata.GeneratedName) - assert.EqualValues(t, resourcePoolInfo, tev.Metadata.ResourcePoolInfo) - assert.Equal(t, testClusterID, tev.ProducerId) + assert.Nil(t, tev.GetOutputResult()) + assert.Equal(t, event.TaskExecutionMetadata_INTERRUPTIBLE, tev.GetMetadata().GetInstanceClass()) + assert.Equal(t, containerTaskType, tev.GetTaskType()) + assert.Equal(t, containerPluginIdentifier, tev.GetMetadata().GetPluginIdentifier()) + assert.Equal(t, generatedName, tev.GetMetadata().GetGeneratedName()) + assert.EqualValues(t, resourcePoolInfo, 
tev.GetMetadata().GetResourcePoolInfo()) + assert.Equal(t, testClusterID, tev.GetProducerId()) defaultNodeExecutionMetadata := nodemocks.NodeExecutionMetadata{} defaultNodeExecutionMetadata.OnIsInterruptible().Return(false) @@ -177,23 +177,23 @@ func TestToTaskExecutionEvent(t *testing.T) { }, }) assert.NoError(t, err) - assert.Equal(t, core.TaskExecution_SUCCEEDED, tev.Phase) - assert.Equal(t, uint32(0), tev.PhaseVersion) - assert.Equal(t, l, tev.Logs) - assert.Equal(t, c, tev.CustomInfo) - assert.Equal(t, np, tev.OccurredAt) - assert.Equal(t, np, tev.OccurredAt) - assert.Equal(t, tkID, tev.TaskId) - assert.Equal(t, nodeID, tev.ParentNodeExecutionId) - assert.NotNil(t, tev.OutputResult) + assert.Equal(t, core.TaskExecution_SUCCEEDED, tev.GetPhase()) + assert.Equal(t, uint32(0), tev.GetPhaseVersion()) + assert.Equal(t, l, tev.GetLogs()) + assert.Equal(t, c, tev.GetCustomInfo()) + assert.Equal(t, np, tev.GetOccurredAt()) + assert.Equal(t, np, tev.GetOccurredAt()) + assert.Equal(t, tkID, tev.GetTaskId()) + assert.Equal(t, nodeID, tev.GetParentNodeExecutionId()) + assert.NotNil(t, tev.GetOutputResult()) assert.Equal(t, inputPath, tev.GetInputUri()) assert.Equal(t, outputPath, tev.GetOutputUri()) - assert.Empty(t, event.TaskExecutionMetadata_DEFAULT, tev.Metadata.InstanceClass) - assert.Equal(t, containerTaskType, tev.TaskType) - assert.Equal(t, containerPluginIdentifier, tev.Metadata.PluginIdentifier) - assert.Equal(t, generatedName, tev.Metadata.GeneratedName) - assert.EqualValues(t, resourcePoolInfo, tev.Metadata.ResourcePoolInfo) - assert.Equal(t, testClusterID, tev.ProducerId) + assert.Empty(t, event.TaskExecutionMetadata_DEFAULT, tev.GetMetadata().GetInstanceClass()) + assert.Equal(t, containerTaskType, tev.GetTaskType()) + assert.Equal(t, containerPluginIdentifier, tev.GetMetadata().GetPluginIdentifier()) + assert.Equal(t, generatedName, tev.GetMetadata().GetGeneratedName()) + assert.EqualValues(t, resourcePoolInfo, tev.GetMetadata().GetResourcePoolInfo()) + 
assert.Equal(t, testClusterID, tev.GetProducerId()) t.Run("inline event policy", func(t *testing.T) { inputs := &core.LiteralMap{ @@ -297,21 +297,21 @@ func TestToTaskExecutionEventWithParent(t *testing.T) { expectedNodeID := &core.NodeExecutionIdentifier{ NodeId: "fmxzd5ta", } - assert.Nil(t, tev.Logs) - assert.Equal(t, core.TaskExecution_WAITING_FOR_RESOURCES, tev.Phase) - assert.Equal(t, uint32(0), tev.PhaseVersion) - assert.Equal(t, np, tev.OccurredAt) - assert.Equal(t, tkID, tev.TaskId) - assert.Equal(t, expectedNodeID, tev.ParentNodeExecutionId) + assert.Nil(t, tev.GetLogs()) + assert.Equal(t, core.TaskExecution_WAITING_FOR_RESOURCES, tev.GetPhase()) + assert.Equal(t, uint32(0), tev.GetPhaseVersion()) + assert.Equal(t, np, tev.GetOccurredAt()) + assert.Equal(t, tkID, tev.GetTaskId()) + assert.Equal(t, expectedNodeID, tev.GetParentNodeExecutionId()) assert.Equal(t, inputPath, tev.GetInputUri()) - assert.Nil(t, tev.OutputResult) - assert.Equal(t, event.TaskExecutionMetadata_INTERRUPTIBLE, tev.Metadata.InstanceClass) - assert.Equal(t, containerTaskType, tev.TaskType) - assert.Equal(t, "reason", tev.Reason) - assert.Equal(t, containerPluginIdentifier, tev.Metadata.PluginIdentifier) - assert.Equal(t, generatedName, tev.Metadata.GeneratedName) - assert.EqualValues(t, resourcePoolInfo, tev.Metadata.ResourcePoolInfo) - assert.Equal(t, testClusterID, tev.ProducerId) + assert.Nil(t, tev.GetOutputResult()) + assert.Equal(t, event.TaskExecutionMetadata_INTERRUPTIBLE, tev.GetMetadata().GetInstanceClass()) + assert.Equal(t, containerTaskType, tev.GetTaskType()) + assert.Equal(t, "reason", tev.GetReason()) + assert.Equal(t, containerPluginIdentifier, tev.GetMetadata().GetPluginIdentifier()) + assert.Equal(t, generatedName, tev.GetMetadata().GetGeneratedName()) + assert.EqualValues(t, resourcePoolInfo, tev.GetMetadata().GetResourcePoolInfo()) + assert.Equal(t, testClusterID, tev.GetProducerId()) l := []*core.TaskLog{ {Uri: "x", Name: "y", MessageFormat: core.TaskLog_JSON}, 
@@ -337,19 +337,19 @@ func TestToTaskExecutionEventWithParent(t *testing.T) { }, }) assert.NoError(t, err) - assert.Equal(t, core.TaskExecution_RUNNING, tev.Phase) - assert.Equal(t, uint32(1), tev.PhaseVersion) - assert.Equal(t, l, tev.Logs) - assert.Equal(t, c, tev.CustomInfo) - assert.Equal(t, np, tev.OccurredAt) - assert.Equal(t, tkID, tev.TaskId) - assert.Equal(t, expectedNodeID, tev.ParentNodeExecutionId) + assert.Equal(t, core.TaskExecution_RUNNING, tev.GetPhase()) + assert.Equal(t, uint32(1), tev.GetPhaseVersion()) + assert.Equal(t, l, tev.GetLogs()) + assert.Equal(t, c, tev.GetCustomInfo()) + assert.Equal(t, np, tev.GetOccurredAt()) + assert.Equal(t, tkID, tev.GetTaskId()) + assert.Equal(t, expectedNodeID, tev.GetParentNodeExecutionId()) assert.Equal(t, inputPath, tev.GetInputUri()) - assert.Nil(t, tev.OutputResult) - assert.Equal(t, event.TaskExecutionMetadata_INTERRUPTIBLE, tev.Metadata.InstanceClass) - assert.Equal(t, containerTaskType, tev.TaskType) - assert.Equal(t, containerPluginIdentifier, tev.Metadata.PluginIdentifier) - assert.Equal(t, generatedName, tev.Metadata.GeneratedName) - assert.EqualValues(t, resourcePoolInfo, tev.Metadata.ResourcePoolInfo) - assert.Equal(t, testClusterID, tev.ProducerId) + assert.Nil(t, tev.GetOutputResult()) + assert.Equal(t, event.TaskExecutionMetadata_INTERRUPTIBLE, tev.GetMetadata().GetInstanceClass()) + assert.Equal(t, containerTaskType, tev.GetTaskType()) + assert.Equal(t, containerPluginIdentifier, tev.GetMetadata().GetPluginIdentifier()) + assert.Equal(t, generatedName, tev.GetMetadata().GetGeneratedName()) + assert.EqualValues(t, resourcePoolInfo, tev.GetMetadata().GetResourcePoolInfo()) + assert.Equal(t, testClusterID, tev.GetProducerId()) } diff --git a/flytepropeller/pkg/controller/nodes/task_reader.go b/flytepropeller/pkg/controller/nodes/task_reader.go index 5cc5654f63..baf8123944 100644 --- a/flytepropeller/pkg/controller/nodes/task_reader.go +++ b/flytepropeller/pkg/controller/nodes/task_reader.go @@ 
-12,7 +12,7 @@ type taskReader struct { } func (t taskReader) GetTaskType() v1alpha1.TaskType { - return t.TaskTemplate.Type + return t.TaskTemplate.GetType() } func (t taskReader) GetTaskID() *core.Identifier { diff --git a/flytepropeller/pkg/controller/nodes/transformers.go b/flytepropeller/pkg/controller/nodes/transformers.go index a252d17344..ceeaf5aaec 100644 --- a/flytepropeller/pkg/controller/nodes/transformers.go +++ b/flytepropeller/pkg/controller/nodes/transformers.go @@ -91,7 +91,7 @@ func ToNodeExecutionEvent( return nil, nil } if info.GetPhase() == handler.EPhaseUndefined { - return nil, fmt.Errorf("illegal state, undefined phase received for node [%s]", nodeExecID.NodeId) + return nil, fmt.Errorf("illegal state, undefined phase received for node [%s]", nodeExecID.GetNodeId()) } occurredTime, err := ptypes.TimestampProto(info.GetOccurredAt()) if err != nil { @@ -115,7 +115,7 @@ func ToNodeExecutionEvent( // Start node is special case where the Outputs are the same and hence here we copy the Output file // into the OutputResult and in admin we copy it over into input as well. // Start node doesn't have inputs. 
- if nodeExecID.NodeId == v1alpha1.StartNodeID { + if nodeExecID.GetNodeId() == v1alpha1.StartNodeID { outputsFile := v1alpha1.GetOutputsFile(status.GetOutputDir()) nev = &event.NodeExecutionEvent{ Id: nodeExecID, @@ -162,7 +162,7 @@ func ToNodeExecutionEvent( } if eventVersion != v1alpha1.EventVersion0 { - currentNodeUniqueID, err := common.GenerateUniqueID(parentInfo, nev.Id.NodeId) + currentNodeUniqueID, err := common.GenerateUniqueID(parentInfo, nev.GetId().GetNodeId()) if err != nil { return nil, err } @@ -210,7 +210,7 @@ func ToNodeExecutionEvent( } } else if dynamicNodePhase != v1alpha1.DynamicNodePhaseNone { nev.IsDynamic = true - if nev.GetTaskNodeMetadata() != nil && nev.GetTaskNodeMetadata().DynamicWorkflow != nil { + if nev.GetTaskNodeMetadata() != nil && nev.GetTaskNodeMetadata().GetDynamicWorkflow() != nil { nev.IsParent = true } } @@ -314,6 +314,7 @@ func UpdateNodeStatus(np v1alpha1.NodePhase, p handler.PhaseInfo, n interfaces.N t.SetSubNodeTaskPhases(na.SubNodeTaskPhases) t.SetSubNodeRetryAttempts(na.SubNodeRetryAttempts) t.SetSubNodeSystemFailures(na.SubNodeSystemFailures) + t.SetSubNodeDeltaTimestamps(na.SubNodeDeltaTimestamps) t.SetTaskPhaseVersion(na.TaskPhaseVersion) } } diff --git a/flytepropeller/pkg/controller/nodes/transformers_test.go b/flytepropeller/pkg/controller/nodes/transformers_test.go index 93a532a8d6..0bbc02f123 100644 --- a/flytepropeller/pkg/controller/nodes/transformers_test.go +++ b/flytepropeller/pkg/controller/nodes/transformers_test.go @@ -56,10 +56,10 @@ func TestToNodeExecutionEvent(t *testing.T) { RawOutputPolicy: config.RawOutputPolicyReference, }, nil) assert.NoError(t, err) - assert.True(t, nev.IsDynamic) - assert.True(t, nev.IsParent) - assert.Equal(t, nodeExecutionEventVersion, nev.EventVersion) - assert.True(t, nev.IsInDynamicChain) + assert.True(t, nev.GetIsDynamic()) + assert.True(t, nev.GetIsParent()) + assert.Equal(t, nodeExecutionEventVersion, nev.GetEventVersion()) + assert.True(t, nev.GetIsInDynamicChain()) 
}) t.Run("is parent", func(t *testing.T) { info := handler.PhaseInfoDynamicRunning(&handler.ExecutionInfo{TaskNodeInfo: &handler.TaskNodeInfo{ @@ -92,9 +92,9 @@ func TestToNodeExecutionEvent(t *testing.T) { RawOutputPolicy: config.RawOutputPolicyReference, }, nil) assert.NoError(t, err) - assert.False(t, nev.IsDynamic) - assert.True(t, nev.IsParent) - assert.Equal(t, nodeExecutionEventVersion, nev.EventVersion) + assert.False(t, nev.GetIsDynamic()) + assert.True(t, nev.GetIsParent()) + assert.Equal(t, nodeExecutionEventVersion, nev.GetEventVersion()) }) t.Run("inline events", func(t *testing.T) { inputs := &core.LiteralMap{ diff --git a/flytepropeller/pkg/controller/rate_limiter.go b/flytepropeller/pkg/controller/rate_limiter.go new file mode 100644 index 0000000000..100d6aa82c --- /dev/null +++ b/flytepropeller/pkg/controller/rate_limiter.go @@ -0,0 +1,116 @@ +package controller + +import ( + "context" + "sync" + "time" + + "golang.org/x/time/rate" + "k8s.io/client-go/util/workqueue" + + "github.com/flyteorg/flyte/flytepropeller/pkg/controller/interfaces" +) + +// limiterAdapter adapts rate.NewLimiter to use the Reservation interface so that it can be used in unittests. 
+type limiterAdapter struct { + limiter *rate.Limiter +} + +func NewLimiter(r rate.Limit, b int) interfaces.Limiter { + return &limiterAdapter{rate.NewLimiter(r, b)} +} + +func (l *limiterAdapter) Allow() bool { + return l.limiter.Allow() +} + +func (l *limiterAdapter) AllowN(t time.Time, n int) bool { + return l.limiter.AllowN(t, n) +} + +func (l *limiterAdapter) Burst() int { + return l.limiter.Burst() +} + +func (l *limiterAdapter) Limit() rate.Limit { + return l.limiter.Limit() +} + +func (l *limiterAdapter) Reserve() interfaces.Reservation { + return l.limiter.Reserve() +} + +func (l *limiterAdapter) ReserveN(t time.Time, n int) interfaces.Reservation { + return l.limiter.ReserveN(t, n) +} +func (l *limiterAdapter) SetBurst(newBurst int) { + l.limiter.SetBurst(newBurst) +} + +func (l *limiterAdapter) SetBurstAt(t time.Time, newBurst int) { + l.limiter.SetBurstAt(t, newBurst) +} + +func (l *limiterAdapter) SetLimit(newLimit rate.Limit) { + l.limiter.SetLimit(newLimit) +} + +func (l *limiterAdapter) SetLimitAt(t time.Time, newLimit rate.Limit) { + l.limiter.SetLimitAt(t, newLimit) +} + +func (l *limiterAdapter) Tokens() float64 { + return l.limiter.Tokens() +} + +func (l *limiterAdapter) TokensAt(t time.Time) float64 { + return l.limiter.TokensAt(t) +} + +func (l *limiterAdapter) Wait(ctx context.Context) (err error) { + return l.limiter.Wait(ctx) +} + +func (l *limiterAdapter) WaitN(ctx context.Context, n int) (err error) { + return l.limiter.WaitN(ctx, n) +} + +// Similar to the standard BucketRateLimiter but dedupes items in order to avoid reserving token slots for the +// same item multiple times. Intened to be used with a DelayingQueue, which dedupes items on insertion. 
+type dedupingBucketRateLimiter struct { + Limiter interfaces.Limiter + mu sync.Mutex + reservations map[interface{}]interfaces.Reservation +} + +func NewDedupingBucketRateLimiter(limiter interfaces.Limiter) workqueue.RateLimiter { + return &dedupingBucketRateLimiter{ + Limiter: limiter, + reservations: make(map[interface{}]interfaces.Reservation), + } +} + +var _ workqueue.RateLimiter = &dedupingBucketRateLimiter{} + +func (r *dedupingBucketRateLimiter) When(item interface{}) time.Duration { + r.mu.Lock() + defer r.mu.Unlock() + // Check if this item has an outstanding reservation. If so, use it to avoid a duplicate reservation. + if res, ok := r.reservations[item]; ok && res.Delay() > 0 { + return res.Delay() + } + r.reservations[item] = r.Limiter.Reserve() + return r.reservations[item].Delay() +} + +func (r *dedupingBucketRateLimiter) NumRequeues(item interface{}) int { + return 0 +} + +func (r *dedupingBucketRateLimiter) Forget(item interface{}) { + r.mu.Lock() + defer r.mu.Unlock() + if res, ok := r.reservations[item]; ok && res.Delay() <= 0 { + delete(r.reservations, item) + } +} diff --git a/flytepropeller/pkg/controller/rate_limiter_test.go b/flytepropeller/pkg/controller/rate_limiter_test.go new file mode 100644 index 0000000000..16e5bae417 --- /dev/null +++ b/flytepropeller/pkg/controller/rate_limiter_test.go @@ -0,0 +1,98 @@ +package controller + +import ( + "testing" + "time" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/suite" + + "github.com/flyteorg/flyte/flytepropeller/pkg/controller/mocks" +) + +type rateLimiterTests struct { + suite.Suite + limiter *mocks.Limiter + deduping *dedupingBucketRateLimiter +} + +func TestDedupingBucketRateLimiter(t *testing.T) { + suite.Run(t, &rateLimiterTests{}) +} + +func (s *rateLimiterTests) SetupTest() { + s.limiter = mocks.NewLimiter(s.T()) + s.deduping = NewDedupingBucketRateLimiter(s.limiter).(*dedupingBucketRateLimiter) +} + +func (s *rateLimiterTests) TearDownTest() { + 
s.limiter.AssertExpectations(s.T()) +} + +func (s *rateLimiterTests) Test_When_NotFound() { + newReservation := mocks.NewReservation(s.T()) + defer newReservation.AssertExpectations(s.T()) + newReservation.EXPECT().Delay().Return(time.Minute).Once() + s.limiter.EXPECT().Reserve().Return(newReservation).Once() + + d := s.deduping.When("item1") + + assert.Equal(s.T(), newReservation, s.deduping.reservations["item1"]) + assert.Equal(s.T(), time.Minute, d) +} + +func (s *rateLimiterTests) Test_When_FoundPast() { + pastReservation := mocks.NewReservation(s.T()) + defer pastReservation.AssertExpectations(s.T()) + pastReservation.EXPECT().Delay().Return(-time.Minute).Once() + s.deduping.reservations["item1"] = pastReservation + newReservation := mocks.NewReservation(s.T()) + defer newReservation.AssertExpectations(s.T()) + newReservation.EXPECT().Delay().Return(time.Minute).Once() + s.limiter.EXPECT().Reserve().Return(newReservation).Once() + + d := s.deduping.When("item1") + + assert.Equal(s.T(), newReservation, s.deduping.reservations["item1"]) + assert.Equal(s.T(), time.Minute, d) +} + +func (s *rateLimiterTests) Test_When_FoundFuture() { + futureReservation := mocks.NewReservation(s.T()) + defer futureReservation.AssertExpectations(s.T()) + futureReservation.EXPECT().Delay().Return(time.Minute).Twice() + s.deduping.reservations["item1"] = futureReservation + + d := s.deduping.When("item1") + + assert.Equal(s.T(), futureReservation, s.deduping.reservations["item1"]) + assert.Equal(s.T(), time.Minute, d) +} + +func (s *rateLimiterTests) Test_Forget_NotFound() { + s.deduping.Forget("item1") + + assert.NotContains(s.T(), s.deduping.reservations, "item1") +} + +func (s *rateLimiterTests) Test_Forget_PastReservation() { + pastReservation := mocks.NewReservation(s.T()) + defer pastReservation.AssertExpectations(s.T()) + pastReservation.EXPECT().Delay().Return(-time.Minute).Once() + s.deduping.reservations["item1"] = pastReservation + + s.deduping.Forget("item1") + + 
assert.NotContains(s.T(), s.deduping.reservations, "item1") +} + +func (s *rateLimiterTests) Test_Forget_FutureReservation() { + futureReservation := mocks.NewReservation(s.T()) + defer futureReservation.AssertExpectations(s.T()) + futureReservation.EXPECT().Delay().Return(time.Minute).Once() + s.deduping.reservations["item1"] = futureReservation + + s.deduping.Forget("item1") + + assert.Equal(s.T(), futureReservation, s.deduping.reservations["item1"]) +} diff --git a/flytepropeller/pkg/controller/workflow/executor.go b/flytepropeller/pkg/controller/workflow/executor.go index 1982b405cb..14a3770cff 100644 --- a/flytepropeller/pkg/controller/workflow/executor.go +++ b/flytepropeller/pkg/controller/workflow/executor.go @@ -295,7 +295,7 @@ func (c *workflowExecutor) IdempotentReportEvent(ctx context.Context, e *event.W err := c.wfRecorder.RecordWorkflowEvent(ctx, e, c.eventConfig) if err != nil && eventsErr.IsAlreadyExists(err) { logger.Infof(ctx, "Workflow event phase: %s, executionId %s already exist", - e.Phase.String(), e.ExecutionId) + e.GetPhase().String(), e.GetExecutionId()) return nil } return err @@ -370,21 +370,21 @@ func (c *workflowExecutor) TransitionToPhase(ctx context.Context, execID *core.W if recordingErr := c.IdempotentReportEvent(ctx, wfEvent); recordingErr != nil { if eventsErr.IsAlreadyExists(recordingErr) { - logger.Warningf(ctx, "Failed to record workflowEvent, error [%s]. Trying to record state: %s. Ignoring this error!", recordingErr.Error(), wfEvent.Phase) + logger.Warningf(ctx, "Failed to record workflowEvent, error [%s]. Trying to record state: %s. 
Ignoring this error!", recordingErr.Error(), wfEvent.GetPhase()) return nil } if eventsErr.IsEventAlreadyInTerminalStateError(recordingErr) { // Move to WorkflowPhaseFailed for state mismatch - msg := fmt.Sprintf("workflow state mismatch between propeller and control plane; Propeller State: %s, ExecutionId %s", wfEvent.Phase.String(), wfEvent.ExecutionId) + msg := fmt.Sprintf("workflow state mismatch between propeller and control plane; Propeller State: %s, ExecutionId %s", wfEvent.GetPhase().String(), wfEvent.GetExecutionId()) logger.Warningf(ctx, msg) wStatus.UpdatePhase(v1alpha1.WorkflowPhaseFailed, msg, nil) return nil } - if (wfEvent.Phase == core.WorkflowExecution_FAILING || wfEvent.Phase == core.WorkflowExecution_FAILED) && + if (wfEvent.GetPhase() == core.WorkflowExecution_FAILING || wfEvent.GetPhase() == core.WorkflowExecution_FAILED) && (eventsErr.IsNotFound(recordingErr) || eventsErr.IsEventIncompatibleClusterError(recordingErr)) { // Don't stall the workflow transition to terminated (so that resources can be cleaned up) since these events // are being discarded by the back-end anyways. - logger.Infof(ctx, "Failed to record %s workflowEvent, error [%s]. Ignoring this error!", wfEvent.Phase.String(), recordingErr.Error()) + logger.Infof(ctx, "Failed to record %s workflowEvent, error [%s]. Ignoring this error!", wfEvent.GetPhase().String(), recordingErr.Error()) return nil } logger.Warningf(ctx, "Event recording failed. Error [%s]", recordingErr.Error()) @@ -461,7 +461,7 @@ func (c *workflowExecutor) HandleFlyteWorkflow(ctx context.Context, w *v1alpha1. 
case v1alpha1.WorkflowPhaseHandlingFailureNode: newStatus, err := c.handleFailureNode(ctx, w) if err != nil { - return errors.Errorf("failed to handle failure node for workflow [%s], err: [%s]", w.ID, err.Error()) + return errors.Errorf("failed to handle failure node for workflow [%s], err: [%s]", w.ID, err.Error()) //nolint:govet,staticcheck } failureErr := c.TransitionToPhase(ctx, w.ExecutionID.WorkflowExecutionIdentifier, wStatus, newStatus) // Ignore ExecutionNotFound and IncompatibleCluster errors to allow graceful failure diff --git a/flytepropeller/pkg/controller/workflow/executor_test.go b/flytepropeller/pkg/controller/workflow/executor_test.go index 2be7238dbb..187aac7ead 100644 --- a/flytepropeller/pkg/controller/workflow/executor_test.go +++ b/flytepropeller/pkg/controller/workflow/executor_test.go @@ -88,14 +88,14 @@ func (f fakeRemoteWritePlugin) Handle(ctx context.Context, tCtx pluginCore.TaskE if trns.Info().Phase() == pluginCore.PhaseSuccess { tk, err := tCtx.TaskReader().Read(ctx) assert.NoError(f.t, err) - outputVars := tk.GetInterface().Outputs.Variables + outputVars := tk.GetInterface().GetOutputs().GetVariables() o := &core.LiteralMap{ Literals: make(map[string]*core.Literal, len(outputVars)), } for k, v := range outputVars { - l, err := coreutils.MakeDefaultLiteralForType(v.Type) + l, err := coreutils.MakeDefaultLiteralForType(v.GetType()) if f.enableAsserts && !assert.NoError(f.t, err) { - assert.FailNow(f.t, "Failed to create default output for node [%v] Type [%v]", tCtx.TaskExecutionMetadata().GetTaskExecutionID(), v.Type) + assert.FailNow(f.t, "Failed to create default output for node [%v] Type [%v]", tCtx.TaskExecutionMetadata().GetTaskExecutionID(), v.GetType()) } o.Literals[k] = l } @@ -488,21 +488,21 @@ func TestWorkflowExecutor_HandleFlyteWorkflow_Failing(t *testing.T) { if ok { assert.True(t, ok) - switch e.Phase { + switch e.GetPhase() { case core.WorkflowExecution_RUNNING: - assert.WithinDuration(t, e.OccurredAt.AsTime(), 
time.Now(), time.Millisecond*5) - assert.Equal(t, testClusterID, e.ProducerId) + assert.WithinDuration(t, e.GetOccurredAt().AsTime(), time.Now(), time.Millisecond*5) + assert.Equal(t, testClusterID, e.GetProducerId()) recordedRunning = true case core.WorkflowExecution_FAILING: - assert.WithinDuration(t, e.OccurredAt.AsTime(), time.Now(), time.Millisecond*5) - assert.Equal(t, testClusterID, e.ProducerId) + assert.WithinDuration(t, e.GetOccurredAt().AsTime(), time.Now(), time.Millisecond*5) + assert.Equal(t, testClusterID, e.GetProducerId()) recordedFailing = true case core.WorkflowExecution_FAILED: - assert.WithinDuration(t, e.OccurredAt.AsTime(), time.Now(), time.Millisecond*5) - assert.Equal(t, testClusterID, e.ProducerId) + assert.WithinDuration(t, e.GetOccurredAt().AsTime(), time.Now(), time.Millisecond*5) + assert.Equal(t, testClusterID, e.GetProducerId()) recordedFailed = true default: - return fmt.Errorf("MockWorkflowRecorder should not have entered into any other states [%v]", e.Phase) + return fmt.Errorf("MockWorkflowRecorder should not have entered into any other states [%v]", e.GetPhase()) } } return nil @@ -591,30 +591,30 @@ func TestWorkflowExecutor_HandleFlyteWorkflow_Events(t *testing.T) { eventSink.SinkCb = func(ctx context.Context, message proto.Message) error { e, ok := message.(*event.WorkflowExecutionEvent) if ok { - switch e.Phase { + switch e.GetPhase() { case core.WorkflowExecution_RUNNING: - occuredAt, err := ptypes.Timestamp(e.OccurredAt) + occuredAt, err := ptypes.Timestamp(e.GetOccurredAt()) assert.NoError(t, err) assert.WithinDuration(t, occuredAt, time.Now(), time.Millisecond*5) - assert.Equal(t, testClusterID, e.ProducerId) + assert.Equal(t, testClusterID, e.GetProducerId()) recordedRunning = true case core.WorkflowExecution_SUCCEEDING: - occuredAt, err := ptypes.Timestamp(e.OccurredAt) + occuredAt, err := ptypes.Timestamp(e.GetOccurredAt()) assert.NoError(t, err) assert.WithinDuration(t, occuredAt, time.Now(), time.Millisecond*5) - 
assert.Equal(t, testClusterID, e.ProducerId) + assert.Equal(t, testClusterID, e.GetProducerId()) recordedFailing = true case core.WorkflowExecution_SUCCEEDED: - occuredAt, err := ptypes.Timestamp(e.OccurredAt) + occuredAt, err := ptypes.Timestamp(e.GetOccurredAt()) assert.NoError(t, err) assert.WithinDuration(t, occuredAt, time.Now(), time.Millisecond*5) - assert.Equal(t, testClusterID, e.ProducerId) + assert.Equal(t, testClusterID, e.GetProducerId()) recordedSuccess = true default: - return fmt.Errorf("MockWorkflowRecorder should not have entered into any other states, received [%v]", e.Phase.String()) + return fmt.Errorf("MockWorkflowRecorder should not have entered into any other states, received [%v]", e.GetPhase().String()) } } return nil @@ -819,7 +819,7 @@ func TestWorkflowExecutor_HandleAbortedWorkflow(t *testing.T) { nodeExec := &nodemocks.Node{} wfRecorder := &eventMocks.WorkflowEventRecorder{} wfRecorder.On("RecordWorkflowEvent", mock.Anything, mock.MatchedBy(func(ev *event.WorkflowExecutionEvent) bool { - assert.Equal(t, testClusterID, ev.ProducerId) + assert.Equal(t, testClusterID, ev.GetProducerId()) evs = append(evs, ev) return true }), mock.Anything).Return(nil) @@ -861,7 +861,7 @@ func TestWorkflowExecutor_HandleAbortedWorkflow(t *testing.T) { nodeExec := &nodemocks.Node{} wfRecorder := &eventMocks.WorkflowEventRecorder{} wfRecorder.OnRecordWorkflowEventMatch(mock.Anything, mock.MatchedBy(func(ev *event.WorkflowExecutionEvent) bool { - assert.Equal(t, testClusterID, ev.ProducerId) + assert.Equal(t, testClusterID, ev.GetProducerId()) evs = append(evs, ev) return true }), mock.Anything).Return(nil) @@ -902,7 +902,7 @@ func TestWorkflowExecutor_HandleAbortedWorkflow(t *testing.T) { nodeExec := &nodemocks.Node{} wfRecorder := &eventMocks.WorkflowEventRecorder{} wfRecorder.OnRecordWorkflowEventMatch(mock.Anything, mock.MatchedBy(func(ev *event.WorkflowExecutionEvent) bool { - assert.Equal(t, testClusterID, ev.ProducerId) + assert.Equal(t, testClusterID, 
ev.GetProducerId()) evs = append(evs, ev) return true }), mock.Anything).Return(nil) diff --git a/flytepropeller/pkg/controller/workflowstore/execution_stats_test.go b/flytepropeller/pkg/controller/workflowstore/execution_stats_test.go index 783e2ba688..b3c7bb44fe 100644 --- a/flytepropeller/pkg/controller/workflowstore/execution_stats_test.go +++ b/flytepropeller/pkg/controller/workflowstore/execution_stats_test.go @@ -89,7 +89,7 @@ func TestConcurrentAccess(t *testing.T) { go func(id int) { defer wg.Done() execID := fmt.Sprintf("exec%d", id) - err := esh.AddOrUpdateEntry(execID, SingleExecutionStats{ActiveNodeCount: uint32(id), ActiveTaskCount: uint32(id * 2)}) + err := esh.AddOrUpdateEntry(execID, SingleExecutionStats{ActiveNodeCount: uint32(id), ActiveTaskCount: uint32(id * 2)}) // #nosec G115 assert.NoError(t, err) }(i) } diff --git a/flytepropeller/pkg/controller/workqueue.go b/flytepropeller/pkg/controller/workqueue.go index 1d10cb5f2a..fcf97c3e1c 100644 --- a/flytepropeller/pkg/controller/workqueue.go +++ b/flytepropeller/pkg/controller/workqueue.go @@ -18,10 +18,8 @@ func NewWorkQueue(ctx context.Context, cfg config.WorkqueueConfig, name string) case config.WorkqueueTypeBucketRateLimiter: logger.Infof(ctx, "Using Bucket Ratelimited Workqueue, Rate [%v] Capacity [%v]", cfg.Rate, cfg.Capacity) return workqueue.NewNamedRateLimitingQueue( - // 10 qps, 100 bucket size. 
This is only for retry speed and its only the overall factor (not per item) - &workqueue.BucketRateLimiter{ - Limiter: rate.NewLimiter(rate.Limit(cfg.Rate), cfg.Capacity), - }, name), nil + NewDedupingBucketRateLimiter(NewLimiter(rate.Limit(cfg.Rate), cfg.Capacity)), + name), nil case config.WorkqueueTypeExponentialFailureRateLimiter: logger.Infof(ctx, "Using Exponential failure backoff Ratelimited Workqueue, Base Delay [%v], max Delay [%v]", cfg.BaseDelay, cfg.MaxDelay) return workqueue.NewNamedRateLimitingQueue( @@ -31,9 +29,7 @@ func NewWorkQueue(ctx context.Context, cfg config.WorkqueueConfig, name string) logger.Infof(ctx, "Using Max-of Ratelimited Workqueue, Bucket {Rate [%v] Capacity [%v]} | FailureBackoff {Base Delay [%v], max Delay [%v]}", cfg.Rate, cfg.Capacity, cfg.BaseDelay, cfg.MaxDelay) return workqueue.NewNamedRateLimitingQueue( workqueue.NewMaxOfRateLimiter( - &workqueue.BucketRateLimiter{ - Limiter: rate.NewLimiter(rate.Limit(cfg.Rate), cfg.Capacity), - }, + NewDedupingBucketRateLimiter(NewLimiter(rate.Limit(cfg.Rate), cfg.Capacity)), workqueue.NewItemExponentialFailureRateLimiter(cfg.BaseDelay.Duration, cfg.MaxDelay.Duration), ), name), nil diff --git a/flytepropeller/pkg/utils/assert/literals.go b/flytepropeller/pkg/utils/assert/literals.go index 66f57c328e..c0fac675ed 100644 --- a/flytepropeller/pkg/utils/assert/literals.go +++ b/flytepropeller/pkg/utils/assert/literals.go @@ -16,14 +16,14 @@ func EqualPrimitive(t *testing.T, p1 *core.Primitive, p2 *core.Primitive) { if p1 == nil { return } - assert.Equal(t, reflect.TypeOf(p1.Value), reflect.TypeOf(p2.Value)) - switch p1.Value.(type) { + assert.Equal(t, reflect.TypeOf(p1.GetValue()), reflect.TypeOf(p2.GetValue())) + switch p1.GetValue().(type) { case *core.Primitive_Integer: assert.Equal(t, p1.GetInteger(), p2.GetInteger()) case *core.Primitive_StringValue: assert.Equal(t, p1.GetStringValue(), p2.GetStringValue()) default: - assert.FailNow(t, "Not yet implemented for types %v", 
reflect.TypeOf(p1.Value)) + assert.FailNow(t, "Not yet implemented for types %v", reflect.TypeOf(p1.GetValue())) } } @@ -34,12 +34,12 @@ func EqualScalar(t *testing.T, p1 *core.Scalar, p2 *core.Scalar) { if p1 == nil { return } - assert.Equal(t, reflect.TypeOf(p1.Value), reflect.TypeOf(p2.Value)) - switch p1.Value.(type) { + assert.Equal(t, reflect.TypeOf(p1.GetValue()), reflect.TypeOf(p2.GetValue())) + switch p1.GetValue().(type) { case *core.Scalar_Primitive: EqualPrimitive(t, p1.GetPrimitive(), p2.GetPrimitive()) default: - assert.FailNow(t, "Not yet implemented for types %v", reflect.TypeOf(p1.Value)) + assert.FailNow(t, "Not yet implemented for types %v", reflect.TypeOf(p1.GetValue())) } } @@ -50,8 +50,8 @@ func EqualLiterals(t *testing.T, l1 *core.Literal, l2 *core.Literal) { if l1 == nil { return } - assert.Equal(t, reflect.TypeOf(l1.Value), reflect.TypeOf(l2.Value)) - switch l1.Value.(type) { + assert.Equal(t, reflect.TypeOf(l1.GetValue()), reflect.TypeOf(l2.GetValue())) + switch l1.GetValue().(type) { case *core.Literal_Scalar: EqualScalar(t, l1.GetScalar(), l2.GetScalar()) case *core.Literal_Map: @@ -63,9 +63,9 @@ func EqualLiterals(t *testing.T, l1 *core.Literal, l2 *core.Literal) { func EqualLiteralMap(t *testing.T, l1 *core.LiteralMap, l2 *core.LiteralMap) { if assert.NotNil(t, l1, "l1 is nil") && assert.NotNil(t, l2, "l2 is nil") { - assert.Equal(t, len(l1.Literals), len(l2.Literals)) - for k, v := range l1.Literals { - actual, ok := l2.Literals[k] + assert.Equal(t, len(l1.GetLiterals()), len(l2.GetLiterals())) + for k, v := range l1.GetLiterals() { + actual, ok := l2.GetLiterals()[k] assert.True(t, ok) EqualLiterals(t, v, actual) } @@ -74,9 +74,9 @@ func EqualLiteralMap(t *testing.T, l1 *core.LiteralMap, l2 *core.LiteralMap) { func EqualLiteralCollection(t *testing.T, l1 *core.LiteralCollection, l2 *core.LiteralCollection) { if assert.NotNil(t, l2) { - assert.Equal(t, len(l1.Literals), len(l2.Literals)) - for i, v := range l1.Literals { - 
EqualLiterals(t, v, l2.Literals[i]) + assert.Equal(t, len(l1.GetLiterals()), len(l2.GetLiterals())) + for i, v := range l1.GetLiterals() { + EqualLiterals(t, v, l2.GetLiterals()[i]) } } } diff --git a/flytepropeller/pkg/utils/bindings_test.go b/flytepropeller/pkg/utils/bindings_test.go index c6cb5fcc12..8067e256eb 100644 --- a/flytepropeller/pkg/utils/bindings_test.go +++ b/flytepropeller/pkg/utils/bindings_test.go @@ -18,7 +18,7 @@ func TestMakePrimitiveBinding(t *testing.T) { assert.Equal(t, "x", x.GetVar()) p := x.GetBinding() assert.NotNil(t, p.GetScalar()) - assert.Equal(t, "*core.Primitive_FloatValue", reflect.TypeOf(p.GetScalar().GetPrimitive().Value).String()) + assert.Equal(t, "*core.Primitive_FloatValue", reflect.TypeOf(p.GetScalar().GetPrimitive().GetValue()).String()) assert.Equal(t, v, p.GetScalar().GetPrimitive().GetFloatValue()) } { @@ -36,7 +36,7 @@ func TestMustMakePrimitiveBinding(t *testing.T) { assert.Equal(t, "x", x.GetVar()) p := x.GetBinding() assert.NotNil(t, p.GetScalar()) - assert.Equal(t, "*core.Primitive_FloatValue", reflect.TypeOf(p.GetScalar().GetPrimitive().Value).String()) + assert.Equal(t, "*core.Primitive_FloatValue", reflect.TypeOf(p.GetScalar().GetPrimitive().GetValue()).String()) assert.Equal(t, v, p.GetScalar().GetPrimitive().GetFloatValue()) } { @@ -62,26 +62,26 @@ func TestMakeBindingDataCollection(t *testing.T) { ) assert.NotNil(t, c.GetCollection()) - assert.Equal(t, 2, len(c.GetCollection().Bindings)) + assert.Equal(t, 2, len(c.GetCollection().GetBindings())) { p := c.GetCollection().GetBindings()[0] assert.NotNil(t, p.GetScalar()) - assert.Equal(t, "*core.Primitive_Integer", reflect.TypeOf(p.GetScalar().GetPrimitive().Value).String()) + assert.Equal(t, "*core.Primitive_Integer", reflect.TypeOf(p.GetScalar().GetPrimitive().GetValue()).String()) assert.Equal(t, v1, p.GetScalar().GetPrimitive().GetInteger()) } { p := c.GetCollection().GetBindings()[1] assert.NotNil(t, p.GetScalar()) - assert.Equal(t, 
"*core.Primitive_StringValue", reflect.TypeOf(p.GetScalar().GetPrimitive().Value).String()) + assert.Equal(t, "*core.Primitive_StringValue", reflect.TypeOf(p.GetScalar().GetPrimitive().GetValue()).String()) assert.Equal(t, v2, p.GetScalar().GetPrimitive().GetStringValue()) } assert.NotNil(t, c2.GetCollection()) - assert.Equal(t, 2, len(c2.GetCollection().Bindings)) + assert.Equal(t, 2, len(c2.GetCollection().GetBindings())) { p := c2.GetCollection().GetBindings()[0] assert.NotNil(t, p.GetScalar()) - assert.Equal(t, "*core.Primitive_Integer", reflect.TypeOf(p.GetScalar().GetPrimitive().Value).String()) + assert.Equal(t, "*core.Primitive_Integer", reflect.TypeOf(p.GetScalar().GetPrimitive().GetValue()).String()) assert.Equal(t, v1, p.GetScalar().GetPrimitive().GetInteger()) } { @@ -113,7 +113,7 @@ func TestMakeBindingDataMap(t *testing.T) { { p := m.GetMap().GetBindings()["x"] assert.NotNil(t, p.GetScalar()) - assert.Equal(t, "*core.Primitive_Integer", reflect.TypeOf(p.GetScalar().GetPrimitive().Value).String()) + assert.Equal(t, "*core.Primitive_Integer", reflect.TypeOf(p.GetScalar().GetPrimitive().GetValue()).String()) assert.Equal(t, v1, p.GetScalar().GetPrimitive().GetInteger()) } { @@ -127,7 +127,7 @@ func TestMakeBindingDataMap(t *testing.T) { { p := m2.GetMap().GetBindings()["x"] assert.NotNil(t, p.GetScalar()) - assert.Equal(t, "*core.Primitive_Integer", reflect.TypeOf(p.GetScalar().GetPrimitive().Value).String()) + assert.Equal(t, "*core.Primitive_Integer", reflect.TypeOf(p.GetScalar().GetPrimitive().GetValue()).String()) assert.Equal(t, v1, p.GetScalar().GetPrimitive().GetInteger()) } { diff --git a/flytepropeller/pkg/utils/k8s.go b/flytepropeller/pkg/utils/k8s.go index f666fd9013..7ef53ead38 100644 --- a/flytepropeller/pkg/utils/k8s.go +++ b/flytepropeller/pkg/utils/k8s.go @@ -37,7 +37,7 @@ var invalidDNS1123Characters = regexp.MustCompile("[^-a-z0-9]+") func ToK8sEnvVar(env []*core.KeyValuePair) []v1.EnvVar { envVars := make([]v1.EnvVar, 0, len(env)) for 
_, kv := range env { - envVars = append(envVars, v1.EnvVar{Name: kv.Key, Value: kv.Value}) + envVars = append(envVars, v1.EnvVar{Name: kv.GetKey(), Value: kv.GetValue()}) } return envVars } diff --git a/flytepropeller/pkg/visualize/visualize.go b/flytepropeller/pkg/visualize/visualize.go index 6a5ee7ba11..14128fde84 100644 --- a/flytepropeller/pkg/visualize/visualize.go +++ b/flytepropeller/pkg/visualize/visualize.go @@ -34,11 +34,11 @@ func flatten(binding *core.BindingData, flatMap map[common.NodeID]sets.String) { flatten(v, flatMap) } case *core.BindingData_Promise: - if _, ok := flatMap[binding.GetPromise().NodeId]; !ok { - flatMap[binding.GetPromise().NodeId] = sets.String{} + if _, ok := flatMap[binding.GetPromise().GetNodeId()]; !ok { + flatMap[binding.GetPromise().GetNodeId()] = sets.String{} } - flatMap[binding.GetPromise().NodeId].Insert(binding.GetPromise().GetVar()) + flatMap[binding.GetPromise().GetNodeId()].Insert(binding.GetPromise().GetVar()) case *core.BindingData_Scalar: if _, ok := flatMap[staticNodeID]; !ok { flatMap[staticNodeID] = sets.NewString() @@ -142,11 +142,11 @@ func WorkflowToGraphViz(g *v1alpha1.FlyteWorkflow) string { func ToGraphViz(g *core.CompiledWorkflow) string { res := fmt.Sprintf("digraph G {rankdir=TB;workflow[label=\"Workflow Id: %v\"];node[style=filled];", - g.Template.GetId()) + g.GetTemplate().GetId()) nodeFinder := func(nodeId common.NodeID) *core.Node { - for _, n := range g.Template.Nodes { - if n.Id == nodeId { + for _, n := range g.GetTemplate().GetNodes() { + if n.GetId() == nodeId { return n } } @@ -204,9 +204,9 @@ func ToGraphViz(g *core.CompiledWorkflow) string { node := nodesToVisit.Deque() nodes, found := g.GetConnections().GetDownstream()[node] if found { - nodesToVisit.Enqueue(nodes.Ids...) + nodesToVisit.Enqueue(nodes.GetIds()...) 
- for _, child := range nodes.Ids { + for _, child := range nodes.GetIds() { label := edgeLabel(node, child) edge := fmt.Sprintf("\"%v\" -> \"%v\" [label=\"%v\",style=\"%v\"];", nodeLabel(node), diff --git a/flytepropeller/pkg/webhook/aws_secret_manager.go b/flytepropeller/pkg/webhook/aws_secret_manager.go index d1595ffc1e..ad5e8c48f4 100644 --- a/flytepropeller/pkg/webhook/aws_secret_manager.go +++ b/flytepropeller/pkg/webhook/aws_secret_manager.go @@ -47,7 +47,7 @@ type AWSSecretManagerInjector struct { } func formatAWSSecretArn(secret *core.Secret) string { - return strings.TrimRight(secret.Group, ":") + ":" + strings.TrimLeft(secret.Key, ":") + return strings.TrimRight(secret.GetGroup(), ":") + ":" + strings.TrimLeft(secret.GetKey(), ":") } func formatAWSInitContainerName(index int) string { @@ -59,12 +59,12 @@ func (i AWSSecretManagerInjector) Type() config.SecretManagerType { } func (i AWSSecretManagerInjector) Inject(ctx context.Context, secret *core.Secret, p *corev1.Pod) (newP *corev1.Pod, injected bool, err error) { - if len(secret.Group) == 0 || len(secret.Key) == 0 { + if len(secret.GetGroup()) == 0 || len(secret.GetKey()) == 0 { return nil, false, fmt.Errorf("AWS Secrets Webhook require both key and group to be set. 
"+ "Secret: [%v]", secret) } - switch secret.MountRequirement { + switch secret.GetMountRequirement() { case core.Secret_ANY: fallthrough case core.Secret_FILE: @@ -112,7 +112,7 @@ func (i AWSSecretManagerInjector) Inject(ctx context.Context, secret *core.Secre case core.Secret_ENV_VAR: fallthrough default: - err := fmt.Errorf("unrecognized mount requirement [%v] for secret [%v]", secret.MountRequirement.String(), secret.Key) + err := fmt.Errorf("unrecognized mount requirement [%v] for secret [%v]", secret.GetMountRequirement().String(), secret.GetKey()) logger.Error(ctx, err) return p, false, err } @@ -138,7 +138,7 @@ func createAWSSidecarContainer(cfg config.AWSSecretManagerConfig, p *corev1.Pod, }, { Name: AWSSecretFilenameEnvVar, - Value: filepath.Join(string(filepath.Separator), strings.ToLower(secret.Group), strings.ToLower(secret.Key)), + Value: filepath.Join(string(filepath.Separator), strings.ToLower(secret.GetGroup()), strings.ToLower(secret.GetKey())), }, }, Resources: cfg.Resources, diff --git a/flytepropeller/pkg/webhook/aws_secret_manager_test.go b/flytepropeller/pkg/webhook/aws_secret_manager_test.go index d2a74de80b..cb0c9ddba4 100644 --- a/flytepropeller/pkg/webhook/aws_secret_manager_test.go +++ b/flytepropeller/pkg/webhook/aws_secret_manager_test.go @@ -44,11 +44,11 @@ func TestAWSSecretManagerInjector_Inject(t *testing.T) { Env: []corev1.EnvVar{ { Name: "SECRET_ARN", - Value: inputSecret.Group + ":" + inputSecret.Key, + Value: inputSecret.GetGroup() + ":" + inputSecret.GetKey(), }, { Name: "SECRET_FILENAME", - Value: "/" + inputSecret.Group + "/" + inputSecret.Key, + Value: "/" + inputSecret.GetGroup() + "/" + inputSecret.GetKey(), }, { Name: "FLYTE_SECRETS_DEFAULT_DIR", diff --git a/flytepropeller/pkg/webhook/gcp_secret_manager.go b/flytepropeller/pkg/webhook/gcp_secret_manager.go index c69705594e..4db4a0d3ab 100644 --- a/flytepropeller/pkg/webhook/gcp_secret_manager.go +++ b/flytepropeller/pkg/webhook/gcp_secret_manager.go @@ -41,12 +41,12 @@ 
func formatGCPSecretAccessCommand(secret *core.Secret) []string { // `gcloud` writes this file with permission 0600. // This will cause permission issues in the main container when using non-root // users, so we fix the file permissions with `chmod`. - secretDir := strings.ToLower(filepath.Join(GCPSecretMountPath, secret.Group)) - secretPath := strings.ToLower(filepath.Join(secretDir, secret.GroupVersion)) + secretDir := strings.ToLower(filepath.Join(GCPSecretMountPath, secret.GetGroup())) + secretPath := strings.ToLower(filepath.Join(secretDir, secret.GetGroupVersion())) args := fmt.Sprintf( "gcloud secrets versions access %[1]s/versions/%[2]s --out-file=%[4]s || gcloud secrets versions access %[2]s --secret=%[1]s --out-file=%[4]s; chmod +rX %[3]s %[4]s", - secret.Group, - secret.GroupVersion, + secret.GetGroup(), + secret.GetGroupVersion(), secretDir, secretPath, ) @@ -62,12 +62,12 @@ func (i GCPSecretManagerInjector) Type() config.SecretManagerType { } func (i GCPSecretManagerInjector) Inject(ctx context.Context, secret *core.Secret, p *corev1.Pod) (newP *corev1.Pod, injected bool, err error) { - if len(secret.Group) == 0 || len(secret.GroupVersion) == 0 { + if len(secret.GetGroup()) == 0 || len(secret.GetGroupVersion()) == 0 { return nil, false, fmt.Errorf("GCP Secrets Webhook require both group and group version to be set. 
"+ "Secret: [%v]", secret) } - switch secret.MountRequirement { + switch secret.GetMountRequirement() { case core.Secret_ANY: fallthrough case core.Secret_FILE: @@ -115,7 +115,7 @@ func (i GCPSecretManagerInjector) Inject(ctx context.Context, secret *core.Secre case core.Secret_ENV_VAR: fallthrough default: - err := fmt.Errorf("unrecognized mount requirement [%v] for secret [%v]", secret.MountRequirement.String(), secret.Key) + err := fmt.Errorf("unrecognized mount requirement [%v] for secret [%v]", secret.GetMountRequirement().String(), secret.GetKey()) logger.Error(ctx, err) return p, false, err } diff --git a/flytepropeller/pkg/webhook/global_secrets.go b/flytepropeller/pkg/webhook/global_secrets.go index a4b3543fb1..21432f6ccc 100644 --- a/flytepropeller/pkg/webhook/global_secrets.go +++ b/flytepropeller/pkg/webhook/global_secrets.go @@ -35,20 +35,20 @@ func (g GlobalSecrets) Inject(ctx context.Context, secret *coreIdl.Secret, p *co return p, false, err } - switch secret.MountRequirement { + switch secret.GetMountRequirement() { case coreIdl.Secret_FILE: return nil, false, fmt.Errorf("global secrets can only be injected as environment "+ - "variables [%v/%v]", secret.Group, secret.Key) + "variables [%v/%v]", secret.GetGroup(), secret.GetKey()) case coreIdl.Secret_ANY: fallthrough case coreIdl.Secret_ENV_VAR: - if len(secret.Group) == 0 { + if len(secret.GetGroup()) == 0 { return nil, false, fmt.Errorf("mounting a secret to env var requires selecting the "+ - "secret and a single key within. Key [%v]", secret.Key) + "secret and a single key within. 
Key [%v]", secret.GetKey()) } envVar := corev1.EnvVar{ - Name: strings.ToUpper(K8sDefaultEnvVarPrefix + secret.Group + EnvVarGroupKeySeparator + secret.Key), + Name: strings.ToUpper(K8sDefaultEnvVarPrefix + secret.GetGroup() + EnvVarGroupKeySeparator + secret.GetKey()), Value: v, } @@ -63,7 +63,7 @@ func (g GlobalSecrets) Inject(ctx context.Context, secret *coreIdl.Secret, p *co p.Spec.InitContainers = AppendEnvVars(p.Spec.InitContainers, envVar) p.Spec.Containers = AppendEnvVars(p.Spec.Containers, envVar) default: - err := fmt.Errorf("unrecognized mount requirement [%v] for secret [%v]", secret.MountRequirement.String(), secret.Key) + err := fmt.Errorf("unrecognized mount requirement [%v] for secret [%v]", secret.GetMountRequirement().String(), secret.GetKey()) logger.Error(ctx, err) return p, false, err } diff --git a/flytepropeller/pkg/webhook/k8s_secrets.go b/flytepropeller/pkg/webhook/k8s_secrets.go index 102d1ae6c1..68bb8669d2 100644 --- a/flytepropeller/pkg/webhook/k8s_secrets.go +++ b/flytepropeller/pkg/webhook/k8s_secrets.go @@ -38,12 +38,12 @@ func (i K8sSecretInjector) Type() config.SecretManagerType { } func (i K8sSecretInjector) Inject(ctx context.Context, secret *core.Secret, p *corev1.Pod) (newP *corev1.Pod, injected bool, err error) { - if len(secret.Group) == 0 || len(secret.Key) == 0 { + if len(secret.GetGroup()) == 0 || len(secret.GetKey()) == 0 { return nil, false, fmt.Errorf("k8s Secrets Webhook require both key and group to be set. 
"+ "Secret: [%v]", secret) } - switch secret.MountRequirement { + switch secret.GetMountRequirement() { case core.Secret_ANY: fallthrough case core.Secret_FILE: @@ -88,7 +88,7 @@ func (i K8sSecretInjector) Inject(ctx context.Context, secret *core.Secret, p *c p.Spec.InitContainers = AppendEnvVars(p.Spec.InitContainers, prefixEnvVar) p.Spec.Containers = AppendEnvVars(p.Spec.Containers, prefixEnvVar) default: - err := fmt.Errorf("unrecognized mount requirement [%v] for secret [%v]", secret.MountRequirement.String(), secret.Key) + err := fmt.Errorf("unrecognized mount requirement [%v] for secret [%v]", secret.GetMountRequirement().String(), secret.GetKey()) logger.Error(ctx, err) return p, false, err } diff --git a/flytepropeller/pkg/webhook/utils.go b/flytepropeller/pkg/webhook/utils.go index 92a4995c24..9d40cbbe6f 100644 --- a/flytepropeller/pkg/webhook/utils.go +++ b/flytepropeller/pkg/webhook/utils.go @@ -26,13 +26,13 @@ func hasEnvVar(envVars []corev1.EnvVar, envVarKey string) bool { func CreateEnvVarForSecret(secret *core.Secret) corev1.EnvVar { optional := true return corev1.EnvVar{ - Name: strings.ToUpper(K8sDefaultEnvVarPrefix + secret.Group + EnvVarGroupKeySeparator + secret.Key), + Name: strings.ToUpper(K8sDefaultEnvVarPrefix + secret.GetGroup() + EnvVarGroupKeySeparator + secret.GetKey()), ValueFrom: &corev1.EnvVarSource{ SecretKeyRef: &corev1.SecretKeySelector{ LocalObjectReference: corev1.LocalObjectReference{ - Name: secret.Group, + Name: secret.GetGroup(), }, - Key: secret.Key, + Key: secret.GetKey(), Optional: &optional, }, }, @@ -43,14 +43,14 @@ func CreateVolumeForSecret(secret *core.Secret) corev1.Volume { optional := true return corev1.Volume{ // we don't want to create different volume for the same secret group - Name: encoding.Base32Encoder.EncodeToString([]byte(secret.Group + EnvVarGroupKeySeparator + secret.GroupVersion)), + Name: encoding.Base32Encoder.EncodeToString([]byte(secret.GetGroup() + EnvVarGroupKeySeparator + 
secret.GetGroupVersion())), VolumeSource: corev1.VolumeSource{ Secret: &corev1.SecretVolumeSource{ - SecretName: secret.Group, + SecretName: secret.GetGroup(), Items: []corev1.KeyToPath{ { - Key: secret.Key, - Path: strings.ToLower(secret.Key), + Key: secret.GetKey(), + Path: strings.ToLower(secret.GetKey()), }, }, Optional: &optional, @@ -63,7 +63,7 @@ func CreateVolumeMountForSecret(volumeName string, secret *core.Secret) corev1.V return corev1.VolumeMount{ Name: volumeName, ReadOnly: true, - MountPath: filepath.Join(filepath.Join(K8sSecretPathPrefix...), strings.ToLower(secret.Group)), + MountPath: filepath.Join(filepath.Join(K8sSecretPathPrefix...), strings.ToLower(secret.GetGroup())), } } @@ -130,15 +130,15 @@ func CreateVaultAnnotationsForSecret(secret *core.Secret, kvversion config.KVVer id := string(uuid.NewUUID()) secretVaultAnnotations := map[string]string{ - fmt.Sprintf("vault.hashicorp.com/agent-inject-secret-%s", id): secret.Group, - fmt.Sprintf("vault.hashicorp.com/agent-inject-file-%s", id): fmt.Sprintf("%s/%s", secret.Group, secret.Key), + fmt.Sprintf("vault.hashicorp.com/agent-inject-secret-%s", id): secret.GetGroup(), + fmt.Sprintf("vault.hashicorp.com/agent-inject-file-%s", id): fmt.Sprintf("%s/%s", secret.GetGroup(), secret.GetKey()), } // Set the consul template language query depending on the KV Secrets Engine version. // Version 1 stores plain k:v pairs under .Data, version 2 supports versioned secrets // and wraps the k:v pairs into an additional subfield. 
var query string - switch secret.GroupVersion { + switch secret.GetGroupVersion() { case "kv1": query = ".Data" case "kv2": @@ -157,7 +157,7 @@ func CreateVaultAnnotationsForSecret(secret *core.Secret, kvversion config.KVVer } } if query != "" { - template := fmt.Sprintf(`{{- with secret "%s" -}}{{ %s.%s }}{{- end -}}`, secret.Group, query, secret.Key) + template := fmt.Sprintf(`{{- with secret "%s" -}}{{ %s.%s }}{{- end -}}`, secret.GetGroup(), query, secret.GetKey()) secretVaultAnnotations[fmt.Sprintf("vault.hashicorp.com/agent-inject-template-%s", id)] = template } diff --git a/flytepropeller/pkg/webhook/vault_secret_manager.go b/flytepropeller/pkg/webhook/vault_secret_manager.go index 658e3970d1..e5430153d6 100644 --- a/flytepropeller/pkg/webhook/vault_secret_manager.go +++ b/flytepropeller/pkg/webhook/vault_secret_manager.go @@ -35,12 +35,12 @@ func (i VaultSecretManagerInjector) Type() config.SecretManagerType { } func (i VaultSecretManagerInjector) Inject(ctx context.Context, secret *coreIdl.Secret, p *corev1.Pod) (newP *corev1.Pod, injected bool, err error) { - if len(secret.Group) == 0 || len(secret.Key) == 0 { + if len(secret.GetGroup()) == 0 || len(secret.GetKey()) == 0 { return nil, false, fmt.Errorf("Vault Secrets Webhook requires both key and group to be set. "+ "Secret: [%v]", secret) } - switch secret.MountRequirement { + switch secret.GetMountRequirement() { case coreIdl.Secret_ANY: fallthrough case coreIdl.Secret_FILE: @@ -76,7 +76,7 @@ func (i VaultSecretManagerInjector) Inject(ctx context.Context, secret *coreIdl. 
case coreIdl.Secret_ENV_VAR: return p, false, fmt.Errorf("Env_Var is not a supported mount requirement for Vault Secret Manager") default: - err := fmt.Errorf("unrecognized mount requirement [%v] for secret [%v]", secret.MountRequirement.String(), secret.Key) + err := fmt.Errorf("unrecognized mount requirement [%v] for secret [%v]", secret.GetMountRequirement().String(), secret.GetKey()) logger.Error(ctx, err) return p, false, err } diff --git a/flytestdlib/.golangci.yml b/flytestdlib/.golangci.yml index e3bff2320b..4ae605454b 100644 --- a/flytestdlib/.golangci.yml +++ b/flytestdlib/.golangci.yml @@ -1,34 +1,24 @@ -# WARNING: THIS FILE IS MANAGED IN THE 'BOILERPLATE' REPO AND COPIED TO OTHER REPOSITORIES. -# ONLY EDIT THIS FILE FROM WITHIN THE 'FLYTEORG/BOILERPLATE' REPOSITORY: -# -# TO OPT OUT OF UPDATES, SEE https://github.com/flyteorg/boilerplate/blob/master/Readme.rst - run: skip-dirs: - pkg/client - linters: disable-all: true enable: - - deadcode - errcheck - - gas + - gosec - gci - goconst - goimports - - golint - gosimple - govet - ineffassign - misspell - nakedret - staticcheck - - structcheck - typecheck - unconvert - unused - - varcheck - + - protogetter linters-settings: gci: custom-order: true @@ -37,3 +27,5 @@ linters-settings: - default - prefix(github.com/flyteorg) skip-generated: true + goconst: + ignore-tests: true diff --git a/flytestdlib/bitarray/bitset.go b/flytestdlib/bitarray/bitset.go index 883b9ded65..be957fecb3 100644 --- a/flytestdlib/bitarray/bitset.go +++ b/flytestdlib/bitarray/bitset.go @@ -14,6 +14,7 @@ type BitSet []Block // Ensures that the given bit is set in the BitSet. func (s *BitSet) Set(i uint) { + // #nosec G115 if len(*s) < int(i/blockSize+1) { *s = append(*s, make([]Block, i/blockSize+1)...) } @@ -23,6 +24,7 @@ func (s *BitSet) Set(i uint) { // Ensures that the given bit is cleared (unset) in the BitSet. 
func (s *BitSet) Clear(i uint) { + // #nosec G115 if len(*s) >= int(i/blockSize+1) { (*s)[i/blockSize] &^= 1 << (i % blockSize) } @@ -30,6 +32,7 @@ func (s *BitSet) Clear(i uint) { // Returns true if the given bit is set, false if it is cleared. func (s *BitSet) IsSet(i uint) bool { + // #nosec G115 if len(*s) < int(i/blockSize+1) { return false } @@ -44,7 +47,8 @@ func (s *BitSet) BlockCount() int { // Returns the length of the BitSet. func (s *BitSet) Cap() uint { - return uint(s.BlockCount()) * blockSize + return uint(s.BlockCount()) * blockSize // #nosec G115 + } // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. diff --git a/flytestdlib/bitarray/bitset_test.go b/flytestdlib/bitarray/bitset_test.go index 72e91f70d0..60572fbe3c 100644 --- a/flytestdlib/bitarray/bitset_test.go +++ b/flytestdlib/bitarray/bitset_test.go @@ -58,11 +58,11 @@ func TestNewBitSet(t *testing.T) { func TestBitSet_Cap(t *testing.T) { t.Run("Cap == size", func(t *testing.T) { b := NewBitSet(blockSize * 5) - assert.Equal(t, int(blockSize*5), int(b.Cap())) + assert.Equal(t, int(blockSize*5), int(b.Cap())) // #nosec G115 }) t.Run("Cap > size", func(t *testing.T) { b := NewBitSet(blockSize*2 + 20) - assert.Equal(t, int(blockSize*3), int(b.Cap())) + assert.Equal(t, int(blockSize*3), int(b.Cap())) // #nosec G115 }) } diff --git a/flytestdlib/bitarray/compact_array.go b/flytestdlib/bitarray/compact_array.go index 827c8c8532..f2d562a786 100644 --- a/flytestdlib/bitarray/compact_array.go +++ b/flytestdlib/bitarray/compact_array.go @@ -39,13 +39,16 @@ func (a *CompactArray) validateValue(value Item) { func (a *CompactArray) SetItem(index int, value Item) { a.validateIndex(index) a.validateValue(value) - bitIndex := uint(index) * a.ItemSize + bitIndex := uint(index) * a.ItemSize // #nosec G115 x := Item(1) + // #nosec G115 for i := int(a.ItemSize - 1); i >= 0; i-- { if x&value != 0 { - a.BitSet.Set(bitIndex + uint(i)) + 
a.BitSet.Set(bitIndex + uint(i)) // #nosec G115 + } else { - a.BitSet.Clear(bitIndex + uint(i)) + a.BitSet.Clear(bitIndex + uint(i)) // #nosec G115 + } x <<= 1 @@ -55,10 +58,12 @@ func (a *CompactArray) SetItem(index int, value Item) { // Gets Item at provided index. func (a *CompactArray) GetItem(index int) Item { a.validateIndex(index) - bitIndex := uint(index) * a.ItemSize + bitIndex := uint(index) * a.ItemSize // #nosec G115 res := Item(0) x := Item(1) + // #nosec G115 for i := int(a.ItemSize - 1); i >= 0; i-- { + // #nosec G115 if a.BitSet.IsSet(bitIndex + uint(i)) { res |= x } @@ -72,8 +77,9 @@ func (a *CompactArray) GetItem(index int) Item { // Gets all items stored in the array. The size of the returned array matches the ItemsCount it was initialized with. func (a CompactArray) GetItems() []Item { res := make([]Item, 0, a.ItemsCount) + // #nosec G115 for i := 0; i < int(a.ItemsCount); i++ { - res = append(res, a.GetItem(i)) + res = append(res, a.GetItem(i)) // #nosec G115 } return res diff --git a/flytestdlib/bitarray/compact_array_test.go b/flytestdlib/bitarray/compact_array_test.go index 7d41ee7b41..8d69f839d9 100644 --- a/flytestdlib/bitarray/compact_array_test.go +++ b/flytestdlib/bitarray/compact_array_test.go @@ -32,6 +32,7 @@ func TestNewItemArray(t *testing.T) { arr, err := NewCompactArray(itemsCount, Item(1)<<(itemSize-1)) assert.NoError(t, err) + // #nosec G115 for i := 0; i < int(itemsCount); i++ { // Ensure inserted items is in the accepted range (0 -> 1<=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/absl-py-2.1.0-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/absl-py-2.1.0-pyhd8ed1ab_1.conda hash: - md5: 035d1d58677c13ec93122d9eb6b8803b - sha256: 6c84575fe0c3a860c7b6a52cb36dc548c838503c8da0f950a63a64c29b443937 + md5: 45b7bffac387dc2bfc6e7144344cae37 + sha256: 4625b42d9e91067f90a66574ffd8f8e029ecb86a7fd1bda617372aff2a526c08 category: main optional: false - name: absl-py @@ -65,11 +65,11 @@ 
package: manager: conda platform: osx-arm64 dependencies: - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/absl-py-2.1.0-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/absl-py-2.1.0-pyhd8ed1ab_1.conda hash: - md5: 035d1d58677c13ec93122d9eb6b8803b - sha256: 6c84575fe0c3a860c7b6a52cb36dc548c838503c8da0f950a63a64c29b443937 + md5: 45b7bffac387dc2bfc6e7144344cae37 + sha256: 4625b42d9e91067f90a66574ffd8f8e029ecb86a7fd1bda617372aff2a526c08 category: main optional: false - name: aiohttp @@ -117,11 +117,11 @@ package: platform: linux-64 dependencies: frozenlist: '>=1.1.0' - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/aiosignal-1.3.1-pyhd8ed1ab_0.tar.bz2 + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/aiosignal-1.3.1-pyhd8ed1ab_1.conda hash: - md5: d1e1eb7e21a9e2c74279d87dafb68156 - sha256: 575c742e14c86575986dc867463582a970463da50b77264cdf54df74f5563783 + md5: d736bd1b8904d7593dce4893e58a7881 + sha256: 9c7b639ea0cc796ef46c57fa104ec1f2ed53cd11c063518869a5a9d7d3b0b2db category: main optional: false - name: aiosignal @@ -129,12 +129,12 @@ package: manager: conda platform: osx-arm64 dependencies: + python: '>=3.9' frozenlist: '>=1.1.0' - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/aiosignal-1.3.1-pyhd8ed1ab_0.tar.bz2 + url: https://conda.anaconda.org/conda-forge/noarch/aiosignal-1.3.1-pyhd8ed1ab_1.conda hash: - md5: d1e1eb7e21a9e2c74279d87dafb68156 - sha256: 575c742e14c86575986dc867463582a970463da50b77264cdf54df74f5563783 + md5: d736bd1b8904d7593dce4893e58a7881 + sha256: 9c7b639ea0cc796ef46c57fa104ec1f2ed53cd11c063518869a5a9d7d3b0b2db category: main optional: false - name: alabaster @@ -162,37 +162,37 @@ package: category: main optional: false - name: alembic - version: 1.13.3 + version: 1.14.0 manager: conda platform: linux-64 dependencies: importlib-metadata: '' importlib_resources: '' mako: '' - python: '>=3.8' + python: '>=3.9' 
sqlalchemy: '>=1.3.0' - typing-extensions: '>=4' - url: https://conda.anaconda.org/conda-forge/noarch/alembic-1.13.3-pyhd8ed1ab_0.conda + typing_extensions: '>=4' + url: https://conda.anaconda.org/conda-forge/noarch/alembic-1.14.0-pyhd8ed1ab_1.conda hash: - md5: c81dc0d6ce99cf5c46e8b27dc37b5a75 - sha256: 7e61183ef0476f6e568e7021ba2abe0a566aaf1b9263575838fec6cb50d5eb42 + md5: b54392a3894585367c9c87ea804e2fd1 + sha256: 732dbbcbb01b9049d7625d3adb989437700544bb883223fb0853cdf3a52f5bac category: main optional: false - name: alembic - version: 1.13.3 + version: 1.14.0 manager: conda platform: osx-arm64 dependencies: importlib-metadata: '' importlib_resources: '' mako: '' - python: '>=3.8' + python: '>=3.9' + typing_extensions: '>=4' sqlalchemy: '>=1.3.0' - typing-extensions: '>=4' - url: https://conda.anaconda.org/conda-forge/noarch/alembic-1.13.3-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/alembic-1.14.0-pyhd8ed1ab_1.conda hash: - md5: c81dc0d6ce99cf5c46e8b27dc37b5a75 - sha256: 7e61183ef0476f6e568e7021ba2abe0a566aaf1b9263575838fec6cb50d5eb42 + md5: b54392a3894585367c9c87ea804e2fd1 + sha256: 732dbbcbb01b9049d7625d3adb989437700544bb883223fb0853cdf3a52f5bac category: main optional: false - name: altair @@ -218,56 +218,30 @@ package: manager: conda platform: osx-arm64 dependencies: - entrypoints: '' jinja2: '' + toolz: '' + entrypoints: '' + python: '>=3.7' + pandas: '>=0.18' jsonschema: '>=3.0' numpy: '>=0.18' - pandas: '>=0.18' - python: '>=3.7' - toolz: '' url: https://conda.anaconda.org/conda-forge/noarch/altair-4.2.2-pyhd8ed1ab_0.conda hash: md5: afca9c6a93335c55bbc84072011e86dc sha256: 5b36be4717e05b7c1f016d3534b7fe316381260ac2367a7815ff96fb88273deb category: main optional: false -- name: aniso8601 - version: 9.0.1 - manager: conda - platform: linux-64 - dependencies: - python: '>=2.7' - python-dateutil: '' - url: https://conda.anaconda.org/conda-forge/noarch/aniso8601-9.0.1-pyhd8ed1ab_0.tar.bz2 - hash: - md5: 
36fba1a639f2d24723c5480345b78553 - sha256: 201c040b6ee0045805a777f75f37a8648eb8dfd4725d62a4fcddc24d7d6c2a9f - category: main - optional: false -- name: aniso8601 - version: 9.0.1 - manager: conda - platform: osx-arm64 - dependencies: - python: '>=2.7' - python-dateutil: '' - url: https://conda.anaconda.org/conda-forge/noarch/aniso8601-9.0.1-pyhd8ed1ab_0.tar.bz2 - hash: - md5: 36fba1a639f2d24723c5480345b78553 - sha256: 201c040b6ee0045805a777f75f37a8648eb8dfd4725d62a4fcddc24d7d6c2a9f - category: main - optional: false - name: annotated-types version: 0.7.0 manager: conda platform: linux-64 dependencies: - python: '>=3.7' + python: '>=3.9' typing-extensions: '>=4.0.0' - url: https://conda.anaconda.org/conda-forge/noarch/annotated-types-0.7.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/annotated-types-0.7.0-pyhd8ed1ab_1.conda hash: - md5: 7e9f4612544c8edbfd6afad17f1bd045 - sha256: 668f0825b6c18e4012ca24a0070562b6ec801ebc7008228a428eb52b4038873f + md5: 2934f256a8acfe48f6ebb4fce6cde29c + sha256: e0ea1ba78fbb64f17062601edda82097fcf815012cf52bb704150a2668110d48 category: main optional: false - name: annotated-types @@ -275,12 +249,12 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=3.7' + python: '>=3.9' typing-extensions: '>=4.0.0' - url: https://conda.anaconda.org/conda-forge/noarch/annotated-types-0.7.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/annotated-types-0.7.0-pyhd8ed1ab_1.conda hash: - md5: 7e9f4612544c8edbfd6afad17f1bd045 - sha256: 668f0825b6c18e4012ca24a0070562b6ec801ebc7008228a428eb52b4038873f + md5: 2934f256a8acfe48f6ebb4fce6cde29c + sha256: e0ea1ba78fbb64f17062601edda82097fcf815012cf52bb704150a2668110d48 category: main optional: false - name: ansicolors @@ -308,7 +282,7 @@ package: category: main optional: false - name: anyio - version: 4.6.2.post1 + version: 4.7.0 manager: conda platform: linux-64 dependencies: @@ -316,27 +290,40 @@ package: idna: '>=2.8' python: '>=3.9' 
sniffio: '>=1.1' - typing_extensions: '>=4.1' - url: https://conda.anaconda.org/conda-forge/noarch/anyio-4.6.2.post1-pyhd8ed1ab_0.conda + typing_extensions: '>=4.5' + url: https://conda.anaconda.org/conda-forge/noarch/anyio-4.7.0-pyhd8ed1ab_0.conda hash: - md5: 688697ec5e9588bdded167d19577625b - sha256: 4b54b7ce79d818e3cce54ae4d552dba51b7afac160ceecdefd04b3917a37c502 + md5: c88107912954a983c2caf25f7fd55158 + sha256: 687537ee3af30f8784986bf40cac30e88138770b16e51ca9850c9c23c09aeba1 category: main optional: false - name: anyio - version: 4.6.2.post1 + version: 4.7.0 manager: conda platform: osx-arm64 dependencies: - exceptiongroup: '>=1.0.2' - idna: '>=2.8' python: '>=3.9' sniffio: '>=1.1' - typing_extensions: '>=4.1' - url: https://conda.anaconda.org/conda-forge/noarch/anyio-4.6.2.post1-pyhd8ed1ab_0.conda + idna: '>=2.8' + typing_extensions: '>=4.5' + exceptiongroup: '>=1.0.2' + url: https://conda.anaconda.org/conda-forge/noarch/anyio-4.7.0-pyhd8ed1ab_0.conda + hash: + md5: c88107912954a983c2caf25f7fd55158 + sha256: 687537ee3af30f8784986bf40cac30e88138770b16e51ca9850c9c23c09aeba1 + category: main + optional: false +- name: aom + version: 3.9.1 + manager: conda + platform: osx-arm64 + dependencies: + __osx: '>=11.0' + libcxx: '>=16' + url: https://conda.anaconda.org/conda-forge/osx-arm64/aom-3.9.1-h7bae524_0.conda hash: - md5: 688697ec5e9588bdded167d19577625b - sha256: 4b54b7ce79d818e3cce54ae4d552dba51b7afac160ceecdefd04b3917a37c502 + md5: 7adba36492a1bb22d98ffffe4f6fc6de + sha256: ec238f18ce8140485645252351a0eca9ef4f7a1c568a420f240a585229bc12ef category: main optional: false - name: aplus @@ -368,11 +355,11 @@ package: manager: conda platform: linux-64 dependencies: - python: '' - url: https://conda.anaconda.org/conda-forge/noarch/appdirs-1.4.4-pyh9f0ad1d_0.tar.bz2 + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/appdirs-1.4.4-pyhd8ed1ab_1.conda hash: - md5: 5f095bc6454094e96f146491fd03633b - sha256: 
ae9fb8f68281f84482f2c234379aa12405a9e365151d43af20b3ae1f17312111 + md5: f4e90937bbfc3a4a92539545a37bb448 + sha256: 5b9ef6d338525b332e17c3ed089ca2f53a5d74b7a7b432747d29c6466e39346d category: main optional: false - name: appdirs @@ -380,11 +367,11 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '' - url: https://conda.anaconda.org/conda-forge/noarch/appdirs-1.4.4-pyh9f0ad1d_0.tar.bz2 + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/appdirs-1.4.4-pyhd8ed1ab_1.conda hash: - md5: 5f095bc6454094e96f146491fd03633b - sha256: ae9fb8f68281f84482f2c234379aa12405a9e365151d43af20b3ae1f17312111 + md5: f4e90937bbfc3a4a92539545a37bb448 + sha256: 5b9ef6d338525b332e17c3ed089ca2f53a5d74b7a7b432747d29c6466e39346d category: main optional: false - name: appnope @@ -392,11 +379,11 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/appnope-0.1.4-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/appnope-0.1.4-pyhd8ed1ab_1.conda hash: - md5: cc4834a9ee7cc49ce8d25177c47b10d8 - sha256: 45ae2d41f4a4dcf8707633d3d7ae376fc62f0c09b1d063c3049c3f6f8c911670 + md5: 54898d0f524c9dee622d44bbb081a8ab + sha256: 8f032b140ea4159806e4969a68b4a3c0a7cab1ad936eb958a2b5ffe5335e19bf category: main optional: false - name: argon2-cffi @@ -405,12 +392,12 @@ package: platform: linux-64 dependencies: argon2-cffi-bindings: '' - python: '>=3.7' + python: '>=3.9' typing-extensions: '' - url: https://conda.anaconda.org/conda-forge/noarch/argon2-cffi-23.1.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/argon2-cffi-23.1.0-pyhd8ed1ab_1.conda hash: - md5: 3afef1f55a1366b4d3b6a0d92e2235e4 - sha256: 130766446f5507bd44df957b6b5c898a8bd98f024bb426ed6cb9ff1ad67fc677 + md5: a7ee488b71c30ada51c48468337b85ba + sha256: 7af62339394986bc470a7a231c7f37ad0173ffb41f6bc0e8e31b0be9e3b9d20f category: main optional: false - name: argon2-cffi @@ -418,13 
+405,13 @@ package: manager: conda platform: osx-arm64 dependencies: - argon2-cffi-bindings: '' - python: '>=3.7' typing-extensions: '' - url: https://conda.anaconda.org/conda-forge/noarch/argon2-cffi-23.1.0-pyhd8ed1ab_0.conda + argon2-cffi-bindings: '' + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/argon2-cffi-23.1.0-pyhd8ed1ab_1.conda hash: - md5: 3afef1f55a1366b4d3b6a0d92e2235e4 - sha256: 130766446f5507bd44df957b6b5c898a8bd98f024bb426ed6cb9ff1ad67fc677 + md5: a7ee488b71c30ada51c48468337b85ba + sha256: 7af62339394986bc470a7a231c7f37ad0173ffb41f6bc0e8e31b0be9e3b9d20f category: main optional: false - name: argon2-cffi-bindings @@ -463,13 +450,13 @@ package: manager: conda platform: linux-64 dependencies: - python: '>=3.8' + python: '>=3.9' python-dateutil: '>=2.7.0' types-python-dateutil: '>=2.8.10' - url: https://conda.anaconda.org/conda-forge/noarch/arrow-1.3.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/arrow-1.3.0-pyhd8ed1ab_1.conda hash: - md5: b77d8c2313158e6e461ca0efb1c2c508 - sha256: ff49825c7f9e29e09afa6284300810e7a8640d621740efb47c4541f4dc4969db + md5: 46b53236fdd990271b03c3978d4218a9 + sha256: c4b0bdb3d5dee50b60db92f99da3e4c524d5240aafc0a5fcc15e45ae2d1a3cd1 category: main optional: false - name: arrow @@ -477,13 +464,13 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=3.8' + python: '>=3.9' python-dateutil: '>=2.7.0' types-python-dateutil: '>=2.8.10' - url: https://conda.anaconda.org/conda-forge/noarch/arrow-1.3.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/arrow-1.3.0-pyhd8ed1ab_1.conda hash: - md5: b77d8c2313158e6e461ca0efb1c2c508 - sha256: ff49825c7f9e29e09afa6284300810e7a8640d621740efb47c4541f4dc4969db + md5: 46b53236fdd990271b03c3978d4218a9 + sha256: c4b0bdb3d5dee50b60db92f99da3e4c524d5240aafc0a5fcc15e45ae2d1a3cd1 category: main optional: false - name: asn1crypto @@ -511,57 +498,55 @@ package: category: main optional: false - name: astroid - 
version: 3.3.5 + version: 3.3.6 manager: conda platform: linux-64 dependencies: python: '>=3.9,<3.10.0a0' python_abi: 3.9.* - typing-extensions: '>=4.0.0' - url: https://conda.anaconda.org/conda-forge/linux-64/astroid-3.3.5-py39hf3d152e_0.conda + typing_extensions: '>=4.0.0' + url: https://conda.anaconda.org/conda-forge/linux-64/astroid-3.3.6-py39hf3d152e_0.conda hash: - md5: 17f6ae5c67288177d6ffe0b05f9022fd - sha256: eeff16253b2ad2da2874712ac91b08cc9a4deba2531b47699291f9f0a20c48df + md5: 2fad0cc8cbd5039d54f720dacf59640e + sha256: a78955212c8d2e0ceb0d324b5f56bd1f117e9347e384b296a71666802b80886f category: main optional: false - name: astroid - version: 3.3.5 + version: 3.3.6 manager: conda platform: osx-arm64 dependencies: python: '>=3.9,<3.10.0a0' python_abi: 3.9.* - typing-extensions: '>=4.0.0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/astroid-3.3.5-py39h2804cbe_0.conda + typing_extensions: '>=4.0.0' + url: https://conda.anaconda.org/conda-forge/osx-arm64/astroid-3.3.6-py39h2804cbe_0.conda hash: - md5: 759024e5ba478d393559e1c395d6c651 - sha256: b5fd282fe633e41bba6fa0bdbc07a3e56614f0a0107bd5f089efe8a4a796a273 + md5: bc239ba49c6bb9aae2a3fb157fbedd5b + sha256: 0056ef83fc4b85dc19d31259eaadcba4c1998eb25ce39bcee1dea8d6da740487 category: main optional: false - name: asttokens - version: 2.4.1 + version: 3.0.0 manager: conda platform: linux-64 dependencies: - python: '>=3.5' - six: '>=1.12.0' - url: https://conda.anaconda.org/conda-forge/noarch/asttokens-2.4.1-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/asttokens-3.0.0-pyhd8ed1ab_1.conda hash: - md5: 5f25798dcefd8252ce5f9dc494d5f571 - sha256: 708168f026df19a0344983754d27d1f7b28bb21afc7b97a82f02c4798a3d2111 + md5: 8f587de4bcf981e26228f268df374a9b + sha256: 93b14414b3b3ed91e286e1cbe4e7a60c4e1b1c730b0814d1e452a8ac4b9af593 category: main optional: false - name: asttokens - version: 2.4.1 + version: 3.0.0 manager: conda platform: osx-arm64 dependencies: - python: 
'>=3.5' - six: '>=1.12.0' - url: https://conda.anaconda.org/conda-forge/noarch/asttokens-2.4.1-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/asttokens-3.0.0-pyhd8ed1ab_1.conda hash: - md5: 5f25798dcefd8252ce5f9dc494d5f571 - sha256: 708168f026df19a0344983754d27d1f7b28bb21afc7b97a82f02c4798a3d2111 + md5: 8f587de4bcf981e26228f268df374a9b + sha256: 93b14414b3b3ed91e286e1cbe4e7a60c4e1b1c730b0814d1e452a8ac4b9af593 category: main optional: false - name: astunparse @@ -595,12 +580,12 @@ package: manager: conda platform: linux-64 dependencies: - python: '>=3.8' + python: '>=3.9' typing_extensions: '>=4.0.0' - url: https://conda.anaconda.org/conda-forge/noarch/async-lru-2.0.4-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/async-lru-2.0.4-pyhd8ed1ab_1.conda hash: - md5: 3d081de3a6ea9f894bbb585e8e3a4dcb - sha256: 7ed83731979fe5b046c157730e50af0e24454468bbba1ed8fc1a3107db5d7518 + md5: 40c673c7d585623b8f1ee650c8734eb6 + sha256: 344157f396dfdc929d1dff8fe010abe173cd168d22a56648583e616495f2929e category: main optional: false - name: async-lru @@ -608,12 +593,12 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=3.8' + python: '>=3.9' typing_extensions: '>=4.0.0' - url: https://conda.anaconda.org/conda-forge/noarch/async-lru-2.0.4-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/async-lru-2.0.4-pyhd8ed1ab_1.conda hash: - md5: 3d081de3a6ea9f894bbb585e8e3a4dcb - sha256: 7ed83731979fe5b046c157730e50af0e24454468bbba1ed8fc1a3107db5d7518 + md5: 40c673c7d585623b8f1ee650c8734eb6 + sha256: 344157f396dfdc929d1dff8fe010abe173cd168d22a56648583e616495f2929e category: main optional: false - name: async-timeout @@ -676,11 +661,11 @@ package: manager: conda platform: linux-64 dependencies: - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/attrs-24.2.0-pyh71513ae_0.conda + python: '>=3.9' + url: 
https://conda.anaconda.org/conda-forge/noarch/attrs-24.2.0-pyh71513ae_1.conda hash: - md5: 6732fa52eb8e66e5afeb32db8701a791 - sha256: 28dba85a7e0f7fb57d7315e13f603d1e41b83c5b88aa2a602596b52c833a2ff8 + md5: 2018839db45c79654b57a924fcdd27d0 + sha256: 8488a116dffe204015a90b41982c0270534bd1070f44a00b316d59e4a79ae8c7 category: main optional: false - name: attrs @@ -688,11 +673,11 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/attrs-24.2.0-pyh71513ae_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/attrs-24.2.0-pyh71513ae_1.conda hash: - md5: 6732fa52eb8e66e5afeb32db8701a791 - sha256: 28dba85a7e0f7fb57d7315e13f603d1e41b83c5b88aa2a602596b52c833a2ff8 + md5: 2018839db45c79654b57a924fcdd27d0 + sha256: 8488a116dffe204015a90b41982c0270534bd1070f44a00b316d59e4a79ae8c7 category: main optional: false - name: aws-c-auth @@ -713,20 +698,20 @@ package: category: main optional: false - name: aws-c-auth - version: 0.7.31 + version: 0.8.0 manager: conda platform: osx-arm64 dependencies: __osx: '>=11.0' - aws-c-cal: '>=0.7.4,<0.7.5.0a0' - aws-c-common: '>=0.9.28,<0.9.29.0a0' - aws-c-http: '>=0.8.10,<0.8.11.0a0' - aws-c-io: '>=0.14.18,<0.14.19.0a0' - aws-c-sdkutils: '>=0.1.19,<0.1.20.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-auth-0.7.31-hc27b277_0.conda + aws-c-cal: '>=0.8.0,<0.8.1.0a0' + aws-c-common: '>=0.9.31,<0.9.32.0a0' + aws-c-http: '>=0.9.0,<0.9.1.0a0' + aws-c-io: '>=0.15.0,<0.15.1.0a0' + aws-c-sdkutils: '>=0.2.0,<0.2.1.0a0' + url: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-auth-0.8.0-ha41d1bc_4.conda hash: - md5: f22f3582756570df9b0025b2b373b118 - sha256: 5a512985e65a0b9b60e54c5aa01bb8b3c4573663b32753d3e63da43eccf638f3 + md5: 13dbcfd30892c68443bab4b60c093233 + sha256: 798d85cc1d610baacca9938734d677fac774aaa1e4da80cea5e8de14e58c2487 category: main optional: false - name: aws-c-cal @@ -744,17 +729,17 @@ package: category: main 
optional: false - name: aws-c-cal - version: 0.7.4 + version: 0.8.0 manager: conda platform: osx-arm64 dependencies: __osx: '>=11.0' - aws-c-common: '>=0.9.28,<0.9.29.0a0' + aws-c-common: '>=0.9.31,<0.9.32.0a0' openssl: '>=3.3.1,<4.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-cal-0.7.4-h41dd001_1.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-cal-0.8.0-hfd083d3_0.conda hash: - md5: 3f2c1743ed973b58fd187b0c31861dd8 - sha256: 2167b44bc879fb9cb7aaf2ca8418c2f8764c82c8732a41c08616e3f70fc92224 + md5: d970a184e605231ea7a2a409252492c7 + sha256: 4a95a22cef111662b7f514a907df2fcb6af1c8156cb9bad405bca0f0591c12e3 category: main optional: false - name: aws-c-common @@ -770,15 +755,15 @@ package: category: main optional: false - name: aws-c-common - version: 0.9.28 + version: 0.9.31 manager: conda platform: osx-arm64 dependencies: __osx: '>=11.0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-common-0.9.28-hd74edd7_0.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-common-0.9.31-h7ab814d_0.conda hash: - md5: 8dc8711c903ab57ead8ce99b65625a95 - sha256: 4081ada22148dc500765aac106ed224829810fd5e5d6f942a842b0a40f53783e + md5: 37eded160015046030d7a68cb44fb3d2 + sha256: b79d2bccd06dec9a54243d617fb6e2436a930707666ba186bbbe047c46b84064 category: main optional: false - name: aws-c-compression @@ -795,16 +780,16 @@ package: category: main optional: false - name: aws-c-compression - version: 0.2.19 + version: 0.3.0 manager: conda platform: osx-arm64 dependencies: __osx: '>=11.0' - aws-c-common: '>=0.9.28,<0.9.29.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-compression-0.2.19-h41dd001_1.conda + aws-c-common: '>=0.9.31,<0.9.32.0a0' + url: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-compression-0.3.0-hfd083d3_0.conda hash: - md5: 98e9d9c62300fd87bee44d2a63792ee5 - sha256: d0a4362beb22aa4da126aab5ddadcb4bbde5032f407d7e4b03969a3d7e5f9cb2 + md5: f99bd4b035da8b98b0f6260f81767c97 + sha256: 
f340831f3ecc3f6a7a068933c518d092d22e05738f9bbc13d794886bc4059af2 category: main optional: false - name: aws-c-event-stream @@ -824,19 +809,19 @@ package: category: main optional: false - name: aws-c-event-stream - version: 0.4.3 + version: 0.5.0 manager: conda platform: osx-arm64 dependencies: __osx: '>=11.0' - aws-c-common: '>=0.9.28,<0.9.29.0a0' - aws-c-io: '>=0.14.18,<0.14.19.0a0' - aws-checksums: '>=0.1.20,<0.1.21.0a0' + aws-c-common: '>=0.9.31,<0.9.32.0a0' + aws-c-io: '>=0.15.0,<0.15.1.0a0' + aws-checksums: '>=0.2.0,<0.2.1.0a0' libcxx: '>=17' - url: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-event-stream-0.4.3-h40a8fc1_2.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-event-stream-0.5.0-h159f268_2.conda hash: - md5: f3d15e195e0b4dc6db749398eb925ffe - sha256: 63c903dc4b708c0054287dbb5411de62067a181886657a515d96c0e6add173c1 + md5: 6b2f144e2205f4425b73232959a932c8 + sha256: 087cab48c19961c3ce59310f32d8eb87f77991612a9f58deead6d1ea911a1062 category: main optional: false - name: aws-c-http @@ -856,19 +841,19 @@ package: category: main optional: false - name: aws-c-http - version: 0.8.10 + version: 0.9.0 manager: conda platform: osx-arm64 dependencies: __osx: '>=11.0' - aws-c-cal: '>=0.7.4,<0.7.5.0a0' - aws-c-common: '>=0.9.28,<0.9.29.0a0' - aws-c-compression: '>=0.2.19,<0.2.20.0a0' - aws-c-io: '>=0.14.18,<0.14.19.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-http-0.8.10-hf5a2c8c_0.conda + aws-c-cal: '>=0.8.0,<0.8.1.0a0' + aws-c-common: '>=0.9.31,<0.9.32.0a0' + aws-c-compression: '>=0.3.0,<0.3.1.0a0' + aws-c-io: '>=0.15.0,<0.15.1.0a0' + url: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-http-0.9.0-h8d4912c_3.conda hash: - md5: e4ba8aa0fb7dac95b0ea398a3229bf56 - sha256: dfdec013bf7c2e87c49bc61a4cb8b1e3b8bf21e7f592326e958f0bf224de21b7 + md5: dcbdd1db10775dfdc9eea1a8a85e48ed + sha256: ecdf54709d2f10f8e1d00956b9ccd08b2554f889a413e3c94befcdd6d2cd0c8b category: main optional: false - name: aws-c-io @@ -887,17 
+872,17 @@ package: category: main optional: false - name: aws-c-io - version: 0.14.18 + version: 0.15.0 manager: conda platform: osx-arm64 dependencies: __osx: '>=11.0' - aws-c-cal: '>=0.7.4,<0.7.5.0a0' - aws-c-common: '>=0.9.28,<0.9.29.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-io-0.14.18-hc3cb426_12.conda + aws-c-cal: '>=0.8.0,<0.8.1.0a0' + aws-c-common: '>=0.9.31,<0.9.32.0a0' + url: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-io-0.15.0-h1e7b4f6_2.conda hash: - md5: efdd67503fa663c31d51b399c8f4cc2e - sha256: 59c510b61aad4da05f17756d84e3b138c51a5f27a8466021587504368818f159 + md5: 2f0774e6aec67a0139de5a74ae0762f5 + sha256: 610dfbd6d37f9c64c8d1c88cafa8d0cbd577381bb65bb2a886f00d1d990de23e category: main optional: false - name: aws-c-mqtt @@ -916,18 +901,18 @@ package: category: main optional: false - name: aws-c-mqtt - version: 0.10.7 + version: 0.11.0 manager: conda platform: osx-arm64 dependencies: __osx: '>=11.0' - aws-c-common: '>=0.9.28,<0.9.29.0a0' - aws-c-http: '>=0.8.10,<0.8.11.0a0' - aws-c-io: '>=0.14.18,<0.14.19.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-mqtt-0.10.7-h3acc7b9_0.conda + aws-c-common: '>=0.9.31,<0.9.32.0a0' + aws-c-http: '>=0.9.0,<0.9.1.0a0' + aws-c-io: '>=0.15.0,<0.15.1.0a0' + url: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-mqtt-0.11.0-h27f15a1_2.conda hash: - md5: 832123f8f88fc311b0eb86b06890aff4 - sha256: ffb9600b4fa37dbee242eb300b22757b092943a82b56b9c0e3940ff3a0358809 + md5: 4fe86291476a548f63c19b39cc834566 + sha256: 7225d609b1626cf236192d5c694788f75bae806f6b51682965927aa3575dcca5 category: main optional: false - name: aws-c-s3 @@ -950,21 +935,21 @@ package: category: main optional: false - name: aws-c-s3 - version: 0.6.6 + version: 0.7.0 manager: conda platform: osx-arm64 dependencies: __osx: '>=11.0' - aws-c-auth: '>=0.7.31,<0.7.32.0a0' - aws-c-cal: '>=0.7.4,<0.7.5.0a0' - aws-c-common: '>=0.9.28,<0.9.29.0a0' - aws-c-http: '>=0.8.10,<0.8.11.0a0' - aws-c-io: 
'>=0.14.18,<0.14.19.0a0' - aws-checksums: '>=0.1.20,<0.1.21.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-s3-0.6.6-hd16c091_0.conda + aws-c-auth: '>=0.8.0,<0.8.1.0a0' + aws-c-cal: '>=0.8.0,<0.8.1.0a0' + aws-c-common: '>=0.9.31,<0.9.32.0a0' + aws-c-http: '>=0.9.0,<0.9.1.0a0' + aws-c-io: '>=0.15.0,<0.15.1.0a0' + aws-checksums: '>=0.2.0,<0.2.1.0a0' + url: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-s3-0.7.0-hd60ad1a_5.conda hash: - md5: a4406babaa217f4d965c6cc52ef6520f - sha256: 0b3e2a1e4189faea5edaeb480d9ddcf6878efdc06f66ba6910dee4b4fb386b43 + md5: 2aaca2773a9f6c551858567e46f69adc + sha256: 380da5699b5f3a4a9714edddcac0ea8764ed24e576e0d40315f363ec8d36d4ca category: main optional: false - name: aws-c-sdkutils @@ -981,16 +966,16 @@ package: category: main optional: false - name: aws-c-sdkutils - version: 0.1.19 + version: 0.2.0 manager: conda platform: osx-arm64 dependencies: __osx: '>=11.0' - aws-c-common: '>=0.9.28,<0.9.29.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-sdkutils-0.1.19-h41dd001_3.conda + aws-c-common: '>=0.9.31,<0.9.32.0a0' + url: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-sdkutils-0.2.0-hfd083d3_0.conda hash: - md5: 53bd7f3e6723288f531387a892d01635 - sha256: b320a08973f22468fd816bb957947369381913ae045d33bd872d03ebabaa355f + md5: f70ebdc61d1fbf373cbe0e76befe54f7 + sha256: e2a0922dbf822a9357a5f0bd92bbec021cea704bfa3326abf613300828784955 category: main optional: false - name: aws-checksums @@ -1007,16 +992,16 @@ package: category: main optional: false - name: aws-checksums - version: 0.1.20 + version: 0.2.0 manager: conda platform: osx-arm64 dependencies: __osx: '>=11.0' - aws-c-common: '>=0.9.28,<0.9.29.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/aws-checksums-0.1.20-h41dd001_0.conda + aws-c-common: '>=0.9.31,<0.9.32.0a0' + url: https://conda.anaconda.org/conda-forge/osx-arm64/aws-checksums-0.2.0-hfd083d3_0.conda hash: - md5: 7ba57aa81224959beb6235f46bd05338 - sha256: 
23c99722a3b3fac35d78c70731d333e85332e86a0ffce8bf48a9223478d5ffea + md5: 442144f196dbd40a37d82a8e5c54cde5 + sha256: 70e563643c657a0cbcab3781180abfd9c60adef7d87da35e7669b03e7f9b7df0 category: main optional: false - name: aws-crt-cpp @@ -1042,25 +1027,25 @@ package: category: main optional: false - name: aws-crt-cpp - version: 0.28.3 + version: 0.29.0 manager: conda platform: osx-arm64 dependencies: __osx: '>=11.0' - aws-c-auth: '>=0.7.31,<0.7.32.0a0' - aws-c-cal: '>=0.7.4,<0.7.5.0a0' - aws-c-common: '>=0.9.28,<0.9.29.0a0' - aws-c-event-stream: '>=0.4.3,<0.4.4.0a0' - aws-c-http: '>=0.8.10,<0.8.11.0a0' - aws-c-io: '>=0.14.18,<0.14.19.0a0' - aws-c-mqtt: '>=0.10.7,<0.10.8.0a0' - aws-c-s3: '>=0.6.6,<0.6.7.0a0' - aws-c-sdkutils: '>=0.1.19,<0.1.20.0a0' + aws-c-auth: '>=0.8.0,<0.8.1.0a0' + aws-c-cal: '>=0.8.0,<0.8.1.0a0' + aws-c-common: '>=0.9.31,<0.9.32.0a0' + aws-c-event-stream: '>=0.5.0,<0.5.1.0a0' + aws-c-http: '>=0.9.0,<0.9.1.0a0' + aws-c-io: '>=0.15.0,<0.15.1.0a0' + aws-c-mqtt: '>=0.11.0,<0.11.1.0a0' + aws-c-s3: '>=0.7.0,<0.7.1.0a0' + aws-c-sdkutils: '>=0.2.0,<0.2.1.0a0' libcxx: '>=17' - url: https://conda.anaconda.org/conda-forge/osx-arm64/aws-crt-cpp-0.28.3-h433f80b_6.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/aws-crt-cpp-0.29.0-h871d450_6.conda hash: - md5: e410ea6979eb3a603eb778cb4ba4ee19 - sha256: 88f08fae202172df62b0ffc370deb464098d9a4aff63039d71189421750455de + md5: ebf1cd2c8835053997dc907867ea0e9e + sha256: 8f70ab35880c7d9e7444069f6d5f626af162b887cf280a95c28bb7f1f66ca823 category: main optional: false - name: aws-sdk-cpp @@ -1088,127 +1073,125 @@ package: platform: osx-arm64 dependencies: __osx: '>=11.0' - aws-c-common: '>=0.9.28,<0.9.29.0a0' - aws-c-event-stream: '>=0.4.3,<0.4.4.0a0' - aws-checksums: '>=0.1.20,<0.1.21.0a0' - aws-crt-cpp: '>=0.28.3,<0.28.4.0a0' - libcurl: '>=8.10.0,<9.0a0' + aws-c-common: '>=0.9.31,<0.9.32.0a0' + aws-c-event-stream: '>=0.5.0,<0.5.1.0a0' + aws-checksums: '>=0.2.0,<0.2.1.0a0' + aws-crt-cpp: 
'>=0.29.0,<0.29.1.0a0' + libcurl: '>=8.10.1,<9.0a0' libcxx: '>=17' libzlib: '>=1.3.1,<2.0a0' openssl: '>=3.3.2,<4.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/aws-sdk-cpp-1.11.407-h0455a66_0.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/aws-sdk-cpp-1.11.407-h19709bb_6.conda hash: - md5: e189085758424fa0222292c98decb68f - sha256: a753df57869eb6814113fe4ae71b99965acf4f2fafc9237067ba84bb18b39933 + md5: d252877bc14d50e43cf2b349d0f70da4 + sha256: cb43b7d1145b9482f6e99f1a2e17e9eb9f46900ae0d51d748d6025b1649c0e0e category: main optional: false - name: azure-core-cpp - version: 1.13.0 + version: 1.14.0 manager: conda platform: osx-arm64 dependencies: __osx: '>=11.0' - libcurl: '>=8.8.0,<9.0a0' - libcxx: '>=16' - openssl: '>=3.3.1,<4.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/azure-core-cpp-1.13.0-hd01fc5c_0.conda + libcurl: '>=8.10.1,<9.0a0' + libcxx: '>=17' + openssl: '>=3.3.2,<4.0a0' + url: https://conda.anaconda.org/conda-forge/osx-arm64/azure-core-cpp-1.14.0-hd50102c_0.conda hash: - md5: 2083f6313e623079db6ee67af00e6b27 - sha256: aff4af38416cf7a81c79e5a3b071ce5aa13ec48da28db0312bc1ebe62cf7273d + md5: f093a11dcf3cdcca010b20a818fcc6dc + sha256: f5b91329ed59ffc0be8747784c6e4cc7e56250c54032883a83bc11808ef6a87e category: main optional: false - name: azure-identity-cpp - version: 1.8.0 + version: 1.10.0 manager: conda platform: osx-arm64 dependencies: __osx: '>=11.0' - azure-core-cpp: '>=1.13.0,<1.13.1.0a0' - libcxx: '>=16' - openssl: '>=3.3.1,<4.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/azure-identity-cpp-1.8.0-h13ea094_2.conda + azure-core-cpp: '>=1.14.0,<1.14.1.0a0' + libcxx: '>=17' + openssl: '>=3.3.2,<4.0a0' + url: https://conda.anaconda.org/conda-forge/osx-arm64/azure-identity-cpp-1.10.0-hc602bab_0.conda hash: - md5: 383b72f2ee009992b21f4db08a708510 - sha256: 11b01715cae19390890f29ebb56d36d895feafd787ba929aa10b6ce712f3f4b9 + md5: d7b71593a937459f2d4b67e1a4727dc2 + sha256: 
bde446b916fff5150606f8ed3e6058ffc55a3aa72381e46f1ab346590b1ae40a category: main optional: false - name: azure-storage-blobs-cpp - version: 12.12.0 + version: 12.13.0 manager: conda platform: osx-arm64 dependencies: __osx: '>=11.0' - azure-core-cpp: '>=1.13.0,<1.13.1.0a0' - azure-storage-common-cpp: '>=12.7.0,<12.7.1.0a0' - libcxx: '>=16' - url: https://conda.anaconda.org/conda-forge/osx-arm64/azure-storage-blobs-cpp-12.12.0-hfde595f_0.conda + azure-core-cpp: '>=1.14.0,<1.14.1.0a0' + azure-storage-common-cpp: '>=12.8.0,<12.8.1.0a0' + libcxx: '>=17' + url: https://conda.anaconda.org/conda-forge/osx-arm64/azure-storage-blobs-cpp-12.13.0-h7585a09_1.conda hash: - md5: f2c935764fdacd0fafc05f975fd347e0 - sha256: f733f4acedd8bf1705c780e0828f0b83242ae7e72963aef60d12a7c5b3a8640d + md5: 704238ef05d46144dae2e6b5853df8bc + sha256: 08d52d130addc0fb55d5ba10d9fa483e39be25d69bac7f4c676c2c3069207590 category: main optional: false - name: azure-storage-common-cpp - version: 12.7.0 + version: 12.8.0 manager: conda platform: osx-arm64 dependencies: __osx: '>=11.0' - azure-core-cpp: '>=1.13.0,<1.13.1.0a0' - libcxx: '>=16' + azure-core-cpp: '>=1.14.0,<1.14.1.0a0' + libcxx: '>=17' libxml2: '>=2.12.7,<3.0a0' - openssl: '>=3.3.1,<4.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/azure-storage-common-cpp-12.7.0-hcf3b6fd_1.conda + openssl: '>=3.3.2,<4.0a0' + url: https://conda.anaconda.org/conda-forge/osx-arm64/azure-storage-common-cpp-12.8.0-h9ca1f76_1.conda hash: - md5: df7e01bcf8f3a9bfb0ab06778f915f29 - sha256: 3fdf9c0337c48706cffe2e4c761cdea4132fb6dbd1f144d969c28afd903cf256 + md5: 7a187cd7b1445afc80253bb186a607cc + sha256: 77ab04e8fe5636a2de9c718f72a43645f7502cd208868c8a91ffba385547d585 category: main optional: false - name: azure-storage-files-datalake-cpp - version: 12.11.0 + version: 12.12.0 manager: conda platform: osx-arm64 dependencies: __osx: '>=11.0' - azure-core-cpp: '>=1.13.0,<1.13.1.0a0' - azure-storage-blobs-cpp: '>=12.12.0,<12.12.1.0a0' - 
azure-storage-common-cpp: '>=12.7.0,<12.7.1.0a0' - libcxx: '>=16' - url: https://conda.anaconda.org/conda-forge/osx-arm64/azure-storage-files-datalake-cpp-12.11.0-h082e32e_1.conda + azure-core-cpp: '>=1.14.0,<1.14.1.0a0' + azure-storage-blobs-cpp: '>=12.13.0,<12.13.1.0a0' + azure-storage-common-cpp: '>=12.8.0,<12.8.1.0a0' + libcxx: '>=17' + url: https://conda.anaconda.org/conda-forge/osx-arm64/azure-storage-files-datalake-cpp-12.12.0-hcdd55da_1.conda hash: - md5: 16b05d31f626717668f01c01a970115f - sha256: 3c288dc1ae6bff9a1e21ab5196d13ab486850f61ec649a743a87bf9726901abf + md5: c49fbc5233fcbaa86391162ff1adef38 + sha256: f48523f8aa0b5b80f45a92f0556b388dd96f44ac2dc2f44a01d08c1822eec97d category: main optional: false - name: babel - version: 2.14.0 + version: 2.16.0 manager: conda platform: linux-64 dependencies: - python: '>=3.7' - pytz: '' - setuptools: '' - url: https://conda.anaconda.org/conda-forge/noarch/babel-2.14.0-pyhd8ed1ab_0.conda + python: '>=3.9' + pytz: '>=2015.7' + url: https://conda.anaconda.org/conda-forge/noarch/babel-2.16.0-pyhd8ed1ab_1.conda hash: - md5: 9669586875baeced8fc30c0826c3270e - sha256: 8584e3da58e92b72641c89ff9b98c51f0d5dbe76e527867804cbdf03ac91d8e6 + md5: 3e23f7db93ec14c80525257d8affac28 + sha256: f6205d3a62e87447e06e98d911559be0208d824976d77ab092796c9176611fcb category: main optional: false - name: babel - version: 2.14.0 + version: 2.16.0 manager: conda platform: osx-arm64 dependencies: - python: '>=3.7' - pytz: '' - setuptools: '' - url: https://conda.anaconda.org/conda-forge/noarch/babel-2.14.0-pyhd8ed1ab_0.conda + python: '>=3.9' + pytz: '>=2015.7' + url: https://conda.anaconda.org/conda-forge/noarch/babel-2.16.0-pyhd8ed1ab_1.conda hash: - md5: 9669586875baeced8fc30c0826c3270e - sha256: 8584e3da58e92b72641c89ff9b98c51f0d5dbe76e527867804cbdf03ac91d8e6 + md5: 3e23f7db93ec14c80525257d8affac28 + sha256: f6205d3a62e87447e06e98d911559be0208d824976d77ab092796c9176611fcb category: main optional: false - name: bcrypt - version: 4.2.0 + 
version: 4.2.1 manager: conda platform: linux-64 dependencies: @@ -1216,24 +1199,24 @@ package: libgcc: '>=13' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* - url: https://conda.anaconda.org/conda-forge/linux-64/bcrypt-4.2.0-py39he612d8f_1.conda + url: https://conda.anaconda.org/conda-forge/linux-64/bcrypt-4.2.1-py39he612d8f_0.conda hash: - md5: 757f465c02344418d605d36eb36d29f3 - sha256: 853c9032cef7c08c3678388e950e0e75941b2e992a749087926e0700c697beb7 + md5: 542399b1319079c3387e150067210433 + sha256: fe527dc33e72918366d7512dc46b4be315f1025216a1a34ec3f0453bdafd2b43 category: main optional: false - name: bcrypt - version: 4.2.0 + version: 4.2.1 manager: conda platform: osx-arm64 dependencies: __osx: '>=11.0' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* - url: https://conda.anaconda.org/conda-forge/osx-arm64/bcrypt-4.2.0-py39h9c3e640_1.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/bcrypt-4.2.1-py39hc40b5db_0.conda hash: - md5: 1b71595e78e633ac6eb415d3188b6eb1 - sha256: 2b40ee4482fd68ceaa0066fdca57554945de3ef9db4dae6b6326438d9db58d1e + md5: cb235e47ed590db604e581a673ec0359 + sha256: 5dc98760352be2fa4f23fba9e9036e73ec2475c59c9a2180641e28cd2999d198 category: main optional: false - name: beautifulsoup4 @@ -1241,12 +1224,12 @@ package: manager: conda platform: linux-64 dependencies: - python: '>=3.6' + python: '>=3.9' soupsieve: '>=1.2' - url: https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.3-pyha770c72_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.3-pyha770c72_1.conda hash: - md5: 332493000404d8411859539a5a630865 - sha256: 7b05b2d0669029326c623b9df7a29fa49d1982a9e7e31b2fea34b4c9a4a72317 + md5: d48f7e9fdec44baf6d1da416fe402b04 + sha256: fca842ab7be052eea1037ebee17ac25cc79c626382dd2187b5c6e007b9d9f65f category: main optional: false - name: beautifulsoup4 @@ -1254,12 +1237,12 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=3.6' + python: '>=3.9' soupsieve: '>=1.2' - url: 
https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.3-pyha770c72_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.3-pyha770c72_1.conda hash: - md5: 332493000404d8411859539a5a630865 - sha256: 7b05b2d0669029326c623b9df7a29fa49d1982a9e7e31b2fea34b4c9a4a72317 + md5: d48f7e9fdec44baf6d1da416fe402b04 + sha256: fca842ab7be052eea1037ebee17ac25cc79c626382dd2187b5c6e007b9d9f65f category: main optional: false - name: bigquery-magics @@ -1289,17 +1272,17 @@ package: manager: conda platform: osx-arm64 dependencies: - db-dtypes: '>=0.3.0,<2.0.0dev' - google-cloud-bigquery-core: '>=3.13.0,<4.0.0dev' - ipykernel: '>=5.5.6' + python: '>=3.7' + pandas: '>=1.1.0' ipython: '>=7.23.1' + pyarrow: '>=3.0.0' ipywidgets: '>=7.7.1' + db-dtypes: '>=0.3.0,<2.0.0dev' packaging: '>=20.0.0' - pandas: '>=1.1.0' - pyarrow: '>=3.0.0' pydata-google-auth: '>=1.5.0' - python: '>=3.7' + google-cloud-bigquery-core: '>=3.13.0,<4.0.0dev' tqdm: '>=4.7.4,<5.0.0dev' + ipykernel: '>=5.5.6' url: https://conda.anaconda.org/conda-forge/noarch/bigquery-magics-0.4.0-pyhd8ed1ab_0.conda hash: md5: 5de6290744dda05d26ed5b40f09c97e4 @@ -1336,59 +1319,53 @@ package: category: main optional: false - name: bleach - version: 6.1.0 + version: 6.2.0 manager: conda platform: linux-64 dependencies: - packaging: '' - python: '>=3.6' - setuptools: '' - six: '>=1.9.0' + python: '>=3.9' webencodings: '' - url: https://conda.anaconda.org/conda-forge/noarch/bleach-6.1.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/bleach-6.2.0-pyhd8ed1ab_1.conda hash: - md5: 0ed9d7c0e9afa7c025807a9a8136ea3e - sha256: 845e77ef495376c5c3c328ccfd746ca0ef1978150cae8eae61a300fe7755fb08 + md5: 707af59db75b066217403a8f00c1d826 + sha256: ffc8e4e53cd92aec0f0ea0bc9e28f5fd1b1e67bde46b0b298170e6fb78eecce1 category: main optional: false - name: bleach - version: 6.1.0 + version: 6.2.0 manager: conda platform: osx-arm64 dependencies: - packaging: '' - python: '>=3.6' - setuptools: '' - six: 
'>=1.9.0' webencodings: '' - url: https://conda.anaconda.org/conda-forge/noarch/bleach-6.1.0-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/bleach-6.2.0-pyhd8ed1ab_1.conda hash: - md5: 0ed9d7c0e9afa7c025807a9a8136ea3e - sha256: 845e77ef495376c5c3c328ccfd746ca0ef1978150cae8eae61a300fe7755fb08 + md5: 707af59db75b066217403a8f00c1d826 + sha256: ffc8e4e53cd92aec0f0ea0bc9e28f5fd1b1e67bde46b0b298170e6fb78eecce1 category: main optional: false - name: blinker - version: 1.8.2 + version: 1.9.0 manager: conda platform: linux-64 dependencies: - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/blinker-1.8.2-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/blinker-1.9.0-pyhff2d567_0.conda hash: - md5: cf85c002319c15e9721934104aaa1137 - sha256: 8ca3cd8f78d0607df28c9f76adb9800348f8f2dc8aa49d188a995a0acdc4477d + md5: 42834439227a4551b939beeeb8a4b085 + sha256: f7efd22b5c15b400ed84a996d777b6327e5c402e79e3c534a7e086236f1eb2dc category: main optional: false - name: blinker - version: 1.8.2 + version: 1.9.0 manager: conda platform: osx-arm64 dependencies: - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/blinker-1.8.2-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/blinker-1.9.0-pyhff2d567_0.conda hash: - md5: cf85c002319c15e9721934104aaa1137 - sha256: 8ca3cd8f78d0607df28c9f76adb9800348f8f2dc8aa49d188a995a0acdc4477d + md5: 42834439227a4551b939beeeb8a4b085 + sha256: f7efd22b5c15b400ed84a996d777b6327e5c402e79e3c534a7e086236f1eb2dc category: main optional: false - name: blosc @@ -1451,16 +1428,16 @@ package: manager: conda platform: osx-arm64 dependencies: - contourpy: '>=1.2' - jinja2: '>=2.9' + python: '>=3.9' numpy: '>=1.16' - packaging: '>=16.8' + pyyaml: '>=3.10' pandas: '>=1.2' pillow: '>=7.1.0' - python: '>=3.9' - pyyaml: '>=3.10' + jinja2: '>=2.9' + packaging: '>=16.8' tornado: '>=6.2' xyzservices: '>=2021.09.1' + 
contourpy: '>=1.2' url: https://conda.anaconda.org/conda-forge/noarch/bokeh-3.4.2-pyhd8ed1ab_0.conda hash: md5: e704d0474c0155db9632bd740b6c9d17 @@ -1468,37 +1445,37 @@ package: category: main optional: false - name: boto3 - version: 1.35.40 + version: 1.35.77 manager: conda platform: linux-64 dependencies: - botocore: '>=1.35.40,<1.36.0' + botocore: '>=1.35.77,<1.36.0' jmespath: '>=0.7.1,<2.0.0' - python: '>=3.8' + python: '>=3.9' s3transfer: '>=0.10.0,<0.11.0' - url: https://conda.anaconda.org/conda-forge/noarch/boto3-1.35.40-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/boto3-1.35.77-pyhd8ed1ab_0.conda hash: - md5: daf559311bbe42d4cd1fe3bf6f2ea4f2 - sha256: f5857681cb2fc77957cc8459da979b2c3b9cd30a761b9728e8ecdaede39ed949 + md5: f61c24493a04473b7825a3e004fe565f + sha256: 577d44a447b36a6f9c025c7782b9e86a86975504161af38f0261cdc72a471a41 category: main optional: false - name: boto3 - version: 1.35.40 + version: 1.35.77 manager: conda platform: osx-arm64 dependencies: - botocore: '>=1.35.40,<1.36.0' + python: '>=3.9' jmespath: '>=0.7.1,<2.0.0' - python: '>=3.8' s3transfer: '>=0.10.0,<0.11.0' - url: https://conda.anaconda.org/conda-forge/noarch/boto3-1.35.40-pyhd8ed1ab_0.conda + botocore: '>=1.35.77,<1.36.0' + url: https://conda.anaconda.org/conda-forge/noarch/boto3-1.35.77-pyhd8ed1ab_0.conda hash: - md5: daf559311bbe42d4cd1fe3bf6f2ea4f2 - sha256: f5857681cb2fc77957cc8459da979b2c3b9cd30a761b9728e8ecdaede39ed949 + md5: f61c24493a04473b7825a3e004fe565f + sha256: 577d44a447b36a6f9c025c7782b9e86a86975504161af38f0261cdc72a471a41 category: main optional: false - name: botocore - version: 1.35.40 + version: 1.35.78 manager: conda platform: linux-64 dependencies: @@ -1506,25 +1483,25 @@ package: python: '>=3.8' python-dateutil: '>=2.1,<3.0.0' urllib3: '>=1.25.4,<1.27' - url: https://conda.anaconda.org/conda-forge/noarch/botocore-1.35.40-pyge38_1234567_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/botocore-1.35.78-pyge38_1234567_0.conda 
hash: - md5: e021a01a5d84d3b13b1f5d505f4137ba - sha256: 8dcf9ad28de988b78cd34a27a77db2e7e323d2e6a9baae6ae9240e623ef6eee6 + md5: acedd4b2ab648ca77f61b6b23c8cbc79 + sha256: d05973b719f733dd641ef525cef800ad50706e07c3a2ac961a94e5584e5825e8 category: main optional: false - name: botocore - version: 1.35.40 + version: 1.35.78 manager: conda platform: osx-arm64 dependencies: - jmespath: '>=0.7.1,<2.0.0' python: '>=3.8' python-dateutil: '>=2.1,<3.0.0' + jmespath: '>=0.7.1,<2.0.0' urllib3: '>=1.25.4,<1.27' - url: https://conda.anaconda.org/conda-forge/noarch/botocore-1.35.40-pyge38_1234567_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/botocore-1.35.78-pyge38_1234567_0.conda hash: - md5: e021a01a5d84d3b13b1f5d505f4137ba - sha256: 8dcf9ad28de988b78cd34a27a77db2e7e323d2e6a9baae6ae9240e623ef6eee6 + md5: acedd4b2ab648ca77f61b6b23c8cbc79 + sha256: d05973b719f733dd641ef525cef800ad50706e07c3a2ac961a94e5584e5825e8 category: main optional: false - name: branca @@ -1533,11 +1510,11 @@ package: platform: linux-64 dependencies: jinja2: '>=3' - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/branca-0.7.2-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/branca-0.7.2-pyhd8ed1ab_1.conda hash: - md5: 5f1c719f1cac0aee5e6bd6ca7d54a7fa - sha256: 9f7df349cb5a8852804d5bb1f5f49e3076a55ac7229b9c114bb5f7461f497ba7 + md5: cb693b0e0836b9f92988b2c8ef371a5d + sha256: 2e4288e90b27b11e1e766c7a9fd4f307e047a7f771e4e6c8c1add7dbbae1a56c category: main optional: false - name: branca @@ -1545,12 +1522,12 @@ package: manager: conda platform: osx-arm64 dependencies: + python: '>=3.9' jinja2: '>=3' - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/branca-0.7.2-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/branca-0.7.2-pyhd8ed1ab_1.conda hash: - md5: 5f1c719f1cac0aee5e6bd6ca7d54a7fa - sha256: 9f7df349cb5a8852804d5bb1f5f49e3076a55ac7229b9c114bb5f7461f497ba7 + md5: 
cb693b0e0836b9f92988b2c8ef371a5d + sha256: 2e4288e90b27b11e1e766c7a9fd4f307e047a7f771e4e6c8c1add7dbbae1a56c category: main optional: false - name: bravado @@ -1579,16 +1556,16 @@ package: manager: conda platform: osx-arm64 dependencies: - bravado-core: '>=5.16.1' - monotonic: '' - msgpack-python: '' - python: '>=3.6' - python-dateutil: '' pyyaml: '' - requests: '>=2.17' - simplejson: '' six: '' typing-extensions: '' + python-dateutil: '' + msgpack-python: '' + simplejson: '' + monotonic: '' + python: '>=3.6' + requests: '>=2.17' + bravado-core: '>=5.16.1' url: https://conda.anaconda.org/conda-forge/noarch/bravado-11.0.3-pyhd8ed1ab_0.tar.bz2 hash: md5: 1b39872834da607183326c0617c0538f @@ -1623,18 +1600,18 @@ package: manager: conda platform: osx-arm64 dependencies: - jsonref: '' - jsonschema: '>=2.5.1' - msgpack-python: '>=0.5.2' - python: '>=3.6' + requests: '' + pyyaml: '' + six: '' python-dateutil: '' pytz: '' - pyyaml: '' - requests: '' simplejson: '' - six: '' - swagger-spec-validator: '>=2.0.1' typing: '' + jsonref: '' + python: '>=3.6' + jsonschema: '>=2.5.1' + msgpack-python: '>=0.5.2' + swagger-spec-validator: '>=2.0.1' url: https://conda.anaconda.org/conda-forge/noarch/bravado-core-5.17.1-pyhd8ed1ab_0.tar.bz2 hash: md5: 7bfb3b4d72f85f1dbec1d230882991b9 @@ -1755,28 +1732,28 @@ package: category: main optional: false - name: c-ares - version: 1.34.2 + version: 1.34.3 manager: conda platform: linux-64 dependencies: - __glibc: '>=2.28,<3.0.a0' + __glibc: '>=2.17,<3.0.a0' libgcc: '>=13' - url: https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.34.2-heb4867d_0.conda + url: https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.34.3-hb9d3cd8_1.conda hash: - md5: 2b780c0338fc0ffa678ac82c54af51fd - sha256: c2a515e623ac3e17a56027c06098fbd5ab47afefefbd386b4c21289f2ec55139 + md5: ee228789a85f961d14567252a03e725f + sha256: 732571ba6286dbccbf4c6450078a581b7a5620204faf876ff0ef282d77a6bfa8 category: main optional: false - name: c-ares - version: 1.34.2 + version: 
1.34.3 manager: conda platform: osx-arm64 dependencies: __osx: '>=11.0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/c-ares-1.34.2-h7ab814d_0.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/c-ares-1.34.3-h5505292_1.conda hash: - md5: 8a8cfc11064b521bc54bd2d8591cb137 - sha256: 24d53d27397f9c2f0c168992690b5ec1bd62593fb4fc1f1e906ab91b10fd06c3 + md5: fb72102e8a8f9bcd38e40af09ff41c42 + sha256: 6dfa83cbd9acc8671d439fe9c745a5716faf6cbadf2f1e18c841bcf86cbba5f2 category: main optional: false - name: ca-certificates @@ -1854,11 +1831,11 @@ package: manager: conda platform: linux-64 dependencies: - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/cachetools-5.5.0-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/cachetools-5.5.0-pyhd8ed1ab_1.conda hash: - md5: 5bad039db72bd8f134a5cff3ebaa190d - sha256: 0abdbbfc2e9c21079a943f42a2dcd950b1a8093ec474fc017e83da0ec4e6cbf4 + md5: 0a99af03ccbd19113c1de1c7f16a53d2 + sha256: d67d8683b34d7074bf99683aa1d71b2e42cee44da70ccd02a2863fb2c3df005b category: main optional: false - name: cachetools @@ -1866,11 +1843,11 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/cachetools-5.5.0-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/cachetools-5.5.0-pyhd8ed1ab_1.conda hash: - md5: 5bad039db72bd8f134a5cff3ebaa190d - sha256: 0abdbbfc2e9c21079a943f42a2dcd950b1a8093ec474fc017e83da0ec4e6cbf4 + md5: 0a99af03ccbd19113c1de1c7f16a53d2 + sha256: d67d8683b34d7074bf99683aa1d71b2e42cee44da70ccd02a2863fb2c3df005b category: main optional: false - name: cairo @@ -1902,25 +1879,25 @@ package: category: main optional: false - name: cairo - version: 1.18.0 + version: 1.18.2 manager: conda platform: osx-arm64 dependencies: __osx: '>=11.0' - fontconfig: '>=2.14.2,<3.0a0' + fontconfig: '>=2.15.0,<3.0a0' fonts-conda-ecosystem: '' freetype: '>=2.12.1,<3.0a0' icu: 
'>=75.1,<76.0a0' - libcxx: '>=16' - libglib: '>=2.80.3,<3.0a0' - libpng: '>=1.6.43,<1.7.0a0' + libcxx: '>=18' + libexpat: '>=2.6.4,<3.0a0' + libglib: '>=2.82.2,<3.0a0' + libpng: '>=1.6.44,<1.7.0a0' libzlib: '>=1.3.1,<2.0a0' - pixman: '>=0.43.4,<1.0a0' - zlib: '' - url: https://conda.anaconda.org/conda-forge/osx-arm64/cairo-1.18.0-hb4a6bf7_3.conda + pixman: '>=0.44.2,<1.0a0' + url: https://conda.anaconda.org/conda-forge/osx-arm64/cairo-1.18.2-h6a3b0d2_1.conda hash: - md5: 08bd0752f3de8a2d8a35fd012f09531f - sha256: f7603b7f6ee7c6e07c23d77302420194f4ec1b8e8facfff2b6aab17c7988a102 + md5: 8e3666c3f6e2c3e57aa261ab103a3600 + sha256: 9a28344e806b89c87fda0cdabd2fb961e5d2ff97107dba25bac9f5dc57220cc3 category: main optional: false - name: certifi @@ -2021,33 +1998,16 @@ package: sha256: b91003bff71351a0132c84d69fbb5afcfa90e57d83f76a180c6a5a0289099fb1 category: main optional: false -- name: cfitsio - version: 4.4.1 +- name: charset-normalizer + version: 3.4.0 manager: conda - platform: osx-arm64 + platform: linux-64 dependencies: - __osx: '>=11.0' - bzip2: '>=1.0.8,<2.0a0' - libcurl: '>=8.10.1,<9.0a0' - libgfortran: 5.* - libgfortran5: '>=13.2.0' - libzlib: '>=1.3.1,<2.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/cfitsio-4.4.1-hd313823_1.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.0-pyhd8ed1ab_1.conda hash: - md5: d87f4a6fb494463885683859648c9e3a - sha256: 1c3ca3b98086c276d0480549366a6695b7df4a7a98bf82942cb5d687bb3b1952 - category: main - optional: false -- name: charset-normalizer - version: 3.4.0 - manager: conda - platform: linux-64 - dependencies: - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.0-pyhd8ed1ab_0.conda - hash: - md5: a374efa97290b8799046df7c5ca17164 - sha256: 1873ac45ea61f95750cb0b4e5e675d1c5b3def937e80c7eebb19297f76810be8 + md5: 6581a17bba6b948bb60130026404a9d6 + sha256: 63022ee2c6a157a9f980250a66f54bdcdf5abee817348d0f9a74c2441a6fbf0e 
category: main optional: false - name: charset-normalizer @@ -2055,11 +2015,11 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.0-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.0-pyhd8ed1ab_1.conda hash: - md5: a374efa97290b8799046df7c5ca17164 - sha256: 1873ac45ea61f95750cb0b4e5e675d1c5b3def937e80c7eebb19297f76810be8 + md5: 6581a17bba6b948bb60130026404a9d6 + sha256: 63022ee2c6a157a9f980250a66f54bdcdf5abee817348d0f9a74c2441a6fbf0e category: main optional: false - name: click @@ -2068,11 +2028,11 @@ package: platform: linux-64 dependencies: __unix: '' - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-unix_pyh707e725_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-unix_pyh707e725_1.conda hash: - md5: f3ad426304898027fc619827ff428eca - sha256: f0016cbab6ac4138a429e28dbcb904a90305b34b3fe41a9b89d697c90401caec + md5: cb8e52f28f5e592598190c562e7b5bf1 + sha256: 1cd5fc6ccdd5141378e51252a7a3810b07fd5a7e6934a5b4a7eccba66566224b category: main optional: false - name: click @@ -2081,11 +2041,11 @@ package: platform: osx-arm64 dependencies: __unix: '' - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-unix_pyh707e725_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-unix_pyh707e725_1.conda hash: - md5: f3ad426304898027fc619827ff428eca - sha256: f0016cbab6ac4138a429e28dbcb904a90305b34b3fe41a9b89d697c90401caec + md5: cb8e52f28f5e592598190c562e7b5bf1 + sha256: 1cd5fc6ccdd5141378e51252a7a3810b07fd5a7e6934a5b4a7eccba66566224b category: main optional: false - name: click-plugins @@ -2094,11 +2054,11 @@ package: platform: linux-64 dependencies: click: '>=3.0' - python: '' - url: https://conda.anaconda.org/conda-forge/noarch/click-plugins-1.1.1-py_0.tar.bz2 + python: 
'>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/click-plugins-1.1.1-pyhd8ed1ab_1.conda hash: - md5: 4fd2c6b53934bd7d96d1f3fdaf99b79f - sha256: ddef6e559dde6673ee504b0e29dd814d36e22b6b9b1f519fa856ee268905bf92 + md5: 82bea35e4dac4678ba623cf10e95e375 + sha256: e7e2371a2561fbda9d50deb895d56fb16ccefe54f6d81b35ba8f1d33d3cc6957 category: main optional: false - name: click-plugins @@ -2106,12 +2066,12 @@ package: manager: conda platform: osx-arm64 dependencies: + python: '>=3.9' click: '>=3.0' - python: '' - url: https://conda.anaconda.org/conda-forge/noarch/click-plugins-1.1.1-py_0.tar.bz2 + url: https://conda.anaconda.org/conda-forge/noarch/click-plugins-1.1.1-pyhd8ed1ab_1.conda hash: - md5: 4fd2c6b53934bd7d96d1f3fdaf99b79f - sha256: ddef6e559dde6673ee504b0e29dd814d36e22b6b9b1f519fa856ee268905bf92 + md5: 82bea35e4dac4678ba623cf10e95e375 + sha256: e7e2371a2561fbda9d50deb895d56fb16ccefe54f6d81b35ba8f1d33d3cc6957 category: main optional: false - name: cligj @@ -2120,11 +2080,11 @@ package: platform: linux-64 dependencies: click: '>=4.0' - python: <4.0 - url: https://conda.anaconda.org/conda-forge/noarch/cligj-0.7.2-pyhd8ed1ab_1.tar.bz2 + python: '>=3.9,<4.0' + url: https://conda.anaconda.org/conda-forge/noarch/cligj-0.7.2-pyhd8ed1ab_2.conda hash: - md5: a29b7c141d6b2de4bb67788a5f107734 - sha256: 97bd58f0cfcff56a0bcda101e26f7d936625599325beba3e3a1fa512dd7fc174 + md5: 55c7804f428719241a90b152016085a1 + sha256: 1a52ae1febfcfb8f56211d1483a1ac4419b0028b7c3e9e61960a298978a42396 category: main optional: false - name: cligj @@ -2132,12 +2092,12 @@ package: manager: conda platform: osx-arm64 dependencies: + python: '>=3.9,<4.0' click: '>=4.0' - python: <4.0 - url: https://conda.anaconda.org/conda-forge/noarch/cligj-0.7.2-pyhd8ed1ab_1.tar.bz2 + url: https://conda.anaconda.org/conda-forge/noarch/cligj-0.7.2-pyhd8ed1ab_2.conda hash: - md5: a29b7c141d6b2de4bb67788a5f107734 - sha256: 97bd58f0cfcff56a0bcda101e26f7d936625599325beba3e3a1fa512dd7fc174 + md5: 
55c7804f428719241a90b152016085a1 + sha256: 1a52ae1febfcfb8f56211d1483a1ac4419b0028b7c3e9e61960a298978a42396 category: main optional: false - name: cloudpickle @@ -2146,10 +2106,10 @@ package: platform: linux-64 dependencies: python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.1.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.1.0-pyhd8ed1ab_1.conda hash: - md5: d1e8704eb346e1d4b86b5cc1a6fe99f2 - sha256: f29f75c793c3acb6df8565d77e4c3b23436e3647c9e1c562c55d1cb2ddaeaf05 + md5: c88ca2bb7099167912e3b26463fff079 + sha256: 5a33d0d3ef33121c546eaf78b3dac2141fc4d30bbaeb3959bbc66fcd5e99ced6 category: main optional: false - name: cloudpickle @@ -2158,10 +2118,10 @@ package: platform: osx-arm64 dependencies: python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.1.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.1.0-pyhd8ed1ab_1.conda hash: - md5: d1e8704eb346e1d4b86b5cc1a6fe99f2 - sha256: f29f75c793c3acb6df8565d77e4c3b23436e3647c9e1c562c55d1cb2ddaeaf05 + md5: c88ca2bb7099167912e3b26463fff079 + sha256: 5a33d0d3ef33121c546eaf78b3dac2141fc4d30bbaeb3959bbc66fcd5e99ced6 category: main optional: false - name: codespell @@ -2193,11 +2153,11 @@ package: manager: conda platform: linux-64 dependencies: - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2 + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_1.conda hash: - md5: 3faab06a954c2a04039983f2c4a50d99 - sha256: 2c1b2e9755ce3102bca8d69e8f26e4f087ece73f50418186aee7c74bef8e1698 + md5: 962b9857ee8e7018c22f2776ffa0b2d7 + sha256: ab29d57dc70786c1269633ba3dff20288b81664d3ff8d21af995742e2bb03287 category: main optional: false - name: colorama @@ -2205,11 +2165,11 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=3.7' - url: 
https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2 + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_1.conda hash: - md5: 3faab06a954c2a04039983f2c4a50d99 - sha256: 2c1b2e9755ce3102bca8d69e8f26e4f087ece73f50418186aee7c74bef8e1698 + md5: 962b9857ee8e7018c22f2776ffa0b2d7 + sha256: ab29d57dc70786c1269633ba3dff20288b81664d3ff8d21af995742e2bb03287 category: main optional: false - name: comm @@ -2217,12 +2177,12 @@ package: manager: conda platform: linux-64 dependencies: - python: '>=3.6' + python: '>=3.9' traitlets: '>=5.3' - url: https://conda.anaconda.org/conda-forge/noarch/comm-0.2.2-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/comm-0.2.2-pyhd8ed1ab_1.conda hash: - md5: 948d84721b578d426294e17a02e24cbb - sha256: e923acf02708a8a0b591f3bce4bdc11c8e63b73198b99b35fe6cd96bfb6a0dbe + md5: 74673132601ec2b7fc592755605f4c1b + sha256: 7e87ef7c91574d9fac19faedaaee328a70f718c9b4ddadfdc0ba9ac021bd64af category: main optional: false - name: comm @@ -2230,12 +2190,12 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=3.6' + python: '>=3.9' traitlets: '>=5.3' - url: https://conda.anaconda.org/conda-forge/noarch/comm-0.2.2-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/comm-0.2.2-pyhd8ed1ab_1.conda hash: - md5: 948d84721b578d426294e17a02e24cbb - sha256: e923acf02708a8a0b591f3bce4bdc11c8e63b73198b99b35fe6cd96bfb6a0dbe + md5: 74673132601ec2b7fc592755605f4c1b + sha256: 7e87ef7c91574d9fac19faedaaee328a70f718c9b4ddadfdc0ba9ac021bd64af category: main optional: false - name: commonmark @@ -2256,8 +2216,8 @@ package: manager: conda platform: osx-arm64 dependencies: - future: '>=0.14.0' python: '' + future: '>=0.14.0' url: https://conda.anaconda.org/conda-forge/noarch/commonmark-0.9.1-py_0.tar.bz2 hash: md5: 6aa0173c14befcd577ded130cf6f22f5 @@ -2291,69 +2251,69 @@ package: numpy: '>=1.23' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* - url: 
https://conda.anaconda.org/conda-forge/osx-arm64/contourpy-1.3.0-py39h157d57c_2.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/contourpy-1.3.0-py39h85b62ae_2.conda hash: - md5: 70d9f24ec6ac32a99c510e4e5e41abbb - sha256: 2949c62240be9a451ee69ee77682ffc2b05c485663c87b0c99278e91c3e43d03 + md5: 78be56565acee571fc0f1343afde6306 + sha256: f35a6359e0e33f4df03558c1523b91e4c06dcb8a29e40ea35192dfa10fbae1b2 category: main optional: false - name: cpython - version: 3.9.20 + version: 3.9.21 manager: conda platform: linux-64 dependencies: - python: 3.9.20.* + python: 3.9.21.* python_abi: '*' - url: https://conda.anaconda.org/conda-forge/noarch/cpython-3.9.20-py39hd8ed1ab_1.conda + url: https://conda.anaconda.org/conda-forge/noarch/cpython-3.9.21-py39hd8ed1ab_1.conda hash: - md5: 972072c1613928f0bca7c4b1f6f43d40 - sha256: 8a7bf5edfe02aad7b50802788ac042643b196cdeaf47d4fa12b3bfef46ee6e5b + md5: 88c825b761db70b42004d12a14f125bf + sha256: e6550736e44b800cf7cbb5d4570a08b3b96efa02b90dbd499a26a0698d677436 category: main optional: false - name: cpython - version: 3.9.20 + version: 3.9.21 manager: conda platform: osx-arm64 dependencies: - python: 3.9.20.* python_abi: '*' - url: https://conda.anaconda.org/conda-forge/noarch/cpython-3.9.20-py39hd8ed1ab_1.conda + python: 3.9.21.* + url: https://conda.anaconda.org/conda-forge/noarch/cpython-3.9.21-py39hd8ed1ab_1.conda hash: - md5: 972072c1613928f0bca7c4b1f6f43d40 - sha256: 8a7bf5edfe02aad7b50802788ac042643b196cdeaf47d4fa12b3bfef46ee6e5b + md5: 88c825b761db70b42004d12a14f125bf + sha256: e6550736e44b800cf7cbb5d4570a08b3b96efa02b90dbd499a26a0698d677436 category: main optional: false - name: cryptography - version: 43.0.1 + version: 44.0.0 manager: conda platform: linux-64 dependencies: __glibc: '>=2.17,<3.0.a0' cffi: '>=1.12' libgcc: '>=13' - openssl: '>=3.3.2,<4.0a0' + openssl: '>=3.4.0,<4.0a0' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* - url: 
https://conda.anaconda.org/conda-forge/linux-64/cryptography-43.0.1-py39h7170ec2_0.conda + url: https://conda.anaconda.org/conda-forge/linux-64/cryptography-44.0.0-py39h7170ec2_0.conda hash: - md5: 2c010b221b0caff001b51ef4ba740361 - sha256: 9c104c64a65383788372f3c708dc600bf2c3c43e2d759dbafbbcf05411f24f36 + md5: 777c9cd2d41d5f17823e5f50c938d36b + sha256: 01110d7a14d51d40b37479aeb71bdfac38503c2d6e3d5f0a08b600e2823ab170 category: main optional: false - name: cryptography - version: 43.0.1 + version: 44.0.0 manager: conda platform: osx-arm64 dependencies: __osx: '>=11.0' cffi: '>=1.12' - openssl: '>=3.3.2,<4.0a0' + openssl: '>=3.4.0,<4.0a0' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* - url: https://conda.anaconda.org/conda-forge/osx-arm64/cryptography-43.0.1-py39hc182a1d_0.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/cryptography-44.0.0-py39h558b0be_0.conda hash: - md5: cde481e6e97753f7aed995651a18f900 - sha256: 8c5ffbf102df90561df550931ee373488a029d333b0aeed4d9aabe5482a17c69 + md5: d578ca2b11c860a090f42580832fa59d + sha256: fc19111014635a46aa75cde01c98321a1b0d8f8b551836cdcdd18fdf02a1319b category: main optional: false - name: cuda-version @@ -2403,11 +2363,11 @@ package: manager: conda platform: linux-64 dependencies: - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_1.conda hash: - md5: 5cd86562580f274031ede6aa6aa24441 - sha256: f221233f21b1d06971792d491445fd548224641af9443739b4b7b6d5d72954a8 + md5: 44600c4667a319d67dbe0681fc0bc833 + sha256: 9827efa891e507a91a8a2acf64e210d2aff394e1cde432ad08e1f8c66b12293c category: main optional: false - name: cycler @@ -2415,11 +2375,11 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_0.conda + python: '>=3.9' + url: 
https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_1.conda hash: - md5: 5cd86562580f274031ede6aa6aa24441 - sha256: f221233f21b1d06971792d491445fd548224641af9443739b4b7b6d5d72954a8 + md5: 44600c4667a319d67dbe0681fc0bc833 + sha256: 9827efa891e507a91a8a2acf64e210d2aff394e1cde432ad08e1f8c66b12293c category: main optional: false - name: cyrus-sasl @@ -2496,18 +2456,18 @@ package: manager: conda platform: osx-arm64 dependencies: - bokeh: '>=2.4.2,!=3.0.*' - cytoolz: '>=0.11.0' - dask-core: '>=2024.8.0,<2024.8.1.0a0' - dask-expr: '>=1.1,<1.2' - distributed: '>=2024.8.0,<2024.8.1.0a0' - jinja2: '>=2.10.3' - lz4: '>=4.3.2' + pyarrow-hotfix: '' + python: '>=3.9' numpy: '>=1.21' + jinja2: '>=2.10.3' pandas: '>=2.0' pyarrow: '>=7.0' - pyarrow-hotfix: '' - python: '>=3.9' + cytoolz: '>=0.11.0' + lz4: '>=4.3.2' + bokeh: '>=2.4.2,!=3.0.*' + dask-expr: '>=1.1,<1.2' + dask-core: '>=2024.8.0,<2024.8.1.0a0' + distributed: '>=2024.8.0,<2024.8.1.0a0' url: https://conda.anaconda.org/conda-forge/noarch/dask-2024.8.0-pyhd8ed1ab_0.conda hash: md5: 795f3557b117402208fe1e0e20d943ed @@ -2539,15 +2499,15 @@ package: manager: conda platform: osx-arm64 dependencies: - click: '>=8.1' - cloudpickle: '>=1.5.0' - fsspec: '>=2021.09.0' - importlib_metadata: '>=4.13.0' - packaging: '>=20.0' - partd: '>=1.4.0' python: '>=3.9' + packaging: '>=20.0' pyyaml: '>=5.3.1' + cloudpickle: '>=1.5.0' toolz: '>=0.10.0' + click: '>=8.1' + importlib_metadata: '>=4.13.0' + fsspec: '>=2021.09.0' + partd: '>=1.4.0' url: https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.8.0-pyhd8ed1ab_0.conda hash: md5: bf68bf9ff9a18f1b17aa8c817225aee0 @@ -2574,10 +2534,10 @@ package: manager: conda platform: osx-arm64 dependencies: - dask-core: 2024.8.0 - pandas: '>=2' pyarrow: '' python: '>=3.9' + pandas: '>=2' + dask-core: 2024.8.0 url: https://conda.anaconda.org/conda-forge/noarch/dask-expr-1.1.10-pyhd8ed1ab_0.conda hash: md5: 88efd31bf04d9f7a2ac7d02ab568d37d @@ -2585,31 +2545,31 @@ package: category: 
main optional: false - name: databricks-sdk - version: 0.34.0 + version: 0.38.0 manager: conda platform: linux-64 dependencies: google-auth: '>=2.0,<3' python: '>=3.7' requests: '>=2.28.1,<3' - url: https://conda.anaconda.org/conda-forge/noarch/databricks-sdk-0.34.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/databricks-sdk-0.38.0-pyhd8ed1ab_0.conda hash: - md5: fd4135092dfb5b0735c9ebdf61b6d9b7 - sha256: 6c4bc53d7c38a3d928e24327a43403e633fdad62dfdb44a81f38d9c2dce553eb + md5: 1a76130b86eceedd30f5f192afa629b8 + sha256: f8d1155a2104d614fa88d3c3ac0aa8f54937b5fa7ee0266c464e67c2243e8f9f category: main optional: false - name: databricks-sdk - version: 0.34.0 + version: 0.38.0 manager: conda platform: osx-arm64 dependencies: - google-auth: '>=2.0,<3' python: '>=3.7' requests: '>=2.28.1,<3' - url: https://conda.anaconda.org/conda-forge/noarch/databricks-sdk-0.34.0-pyhd8ed1ab_0.conda + google-auth: '>=2.0,<3' + url: https://conda.anaconda.org/conda-forge/noarch/databricks-sdk-0.38.0-pyhd8ed1ab_0.conda hash: - md5: fd4135092dfb5b0735c9ebdf61b6d9b7 - sha256: 6c4bc53d7c38a3d928e24327a43403e633fdad62dfdb44a81f38d9c2dce553eb + md5: 1a76130b86eceedd30f5f192afa629b8 + sha256: f8d1155a2104d614fa88d3c3ac0aa8f54937b5fa7ee0266c464e67c2243e8f9f category: main optional: false - name: datasets @@ -2643,29 +2603,40 @@ package: manager: conda platform: osx-arm64 dependencies: - aiohttp: '' - dill: '>=0.3.0,<0.3.8' - fsspec: '>=2021.11.1' - huggingface_hub: '>=0.14.0,<1.0.0' + pandas: '' + packaging: '' importlib-metadata: '' + aiohttp: '' + python-xxhash: '' multiprocess: '' + pyyaml: '>=5.1' numpy: '>=1.17' - packaging: '' - pandas: '' pyarrow: '>=8.0.0' python: '>=3.8.0' - python-xxhash: '' - pyyaml: '>=5.1' requests: '>=2.19.0' tqdm: '>=4.62.1' + fsspec: '>=2021.11.1' + dill: '>=0.3.0,<0.3.8' + huggingface_hub: '>=0.14.0,<1.0.0' url: https://conda.anaconda.org/conda-forge/noarch/datasets-2.14.4-pyhd8ed1ab_0.conda hash: md5: 3e087f072ce03c43a9b60522f5d0ca2f 
sha256: 7e09bd083a609138b780fcc4535924cb96814d2c908a36d4c64a2ba9ee3efe7f category: main optional: false +- name: dav1d + version: 1.2.1 + manager: conda + platform: osx-arm64 + dependencies: {} + url: https://conda.anaconda.org/conda-forge/osx-arm64/dav1d-1.2.1-hb547adb_0.conda + hash: + md5: 5a74cdee497e6b65173e10d94582fae6 + sha256: 93e077b880a85baec8227e8c72199220c7f87849ad32d02c14fb3807368260b8 + category: main + optional: false - name: db-dtypes - version: 1.2.0 + version: 1.3.1 manager: conda platform: linux-64 dependencies: @@ -2673,31 +2644,31 @@ package: packaging: '>=17.0' pandas: '>=0.24.2' pyarrow: '>=3.0.0' - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/db-dtypes-1.2.0-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/db-dtypes-1.3.1-pyhff2d567_0.conda hash: - md5: d7dbb7a600bb820b5b7874b3a2a87990 - sha256: f96091a81a3dbef3ef27e9860dd220c4f87ed6b1791b56f0096b7054c4130d7a + md5: ed9b006a5aa3797aa8b53c807f94c55b + sha256: c1ad0ad07a66564178fb3b8fdd4aac397c59be5509ca82df5e10e945ff5ebf77 category: main optional: false - name: db-dtypes - version: 1.2.0 + version: 1.3.1 manager: conda platform: osx-arm64 dependencies: - numpy: '>=1.16.6' - packaging: '>=17.0' + python: '>=3.9' pandas: '>=0.24.2' + packaging: '>=17.0' pyarrow: '>=3.0.0' - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/db-dtypes-1.2.0-pyhd8ed1ab_0.conda + numpy: '>=1.16.6' + url: https://conda.anaconda.org/conda-forge/noarch/db-dtypes-1.3.1-pyhff2d567_0.conda hash: - md5: d7dbb7a600bb820b5b7874b3a2a87990 - sha256: f96091a81a3dbef3ef27e9860dd220c4f87ed6b1791b56f0096b7054c4130d7a + md5: ed9b006a5aa3797aa8b53c807f94c55b + sha256: c1ad0ad07a66564178fb3b8fdd4aac397c59be5509ca82df5e10e945ff5ebf77 category: main optional: false - name: debugpy - version: 1.8.7 + version: 1.8.9 manager: conda platform: linux-64 dependencies: @@ -2706,25 +2677,25 @@ package: libstdcxx: '>=13' python: '>=3.9,<3.10.0a0' python_abi: 
3.9.* - url: https://conda.anaconda.org/conda-forge/linux-64/debugpy-1.8.7-py39hf88036b_0.conda + url: https://conda.anaconda.org/conda-forge/linux-64/debugpy-1.8.9-py39hf88036b_0.conda hash: - md5: 0eb24d05b0dc1b6e386804a975c3cd76 - sha256: 23163dcf53d5651662625773777744e613696caf16a81fe38f1c4e3476f9437e + md5: 4635fc2ad95f09017ce0ad09824f5c52 + sha256: faee96f5916355b992bd1da2269da9a0fdbe123c0048371fd93ef9600bbc4559 category: main optional: false - name: debugpy - version: 1.8.7 + version: 1.8.9 manager: conda platform: osx-arm64 dependencies: __osx: '>=11.0' - libcxx: '>=17' + libcxx: '>=18' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* - url: https://conda.anaconda.org/conda-forge/osx-arm64/debugpy-1.8.7-py39hfa9831e_0.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/debugpy-1.8.9-py39h941272d_0.conda hash: - md5: 9b28055826ed435d772de27b4a8ee631 - sha256: 54e8d22abc692f5da271fa803d687ab8bf9092f5f0fde585f0a664621cc51a89 + md5: 786d113be847312a953b089432e83ea9 + sha256: 752ee6991626b8c6f2e1b2558bf55d03e23fd3341347b0295cb233cafcf7a4a4 category: main optional: false - name: decorator @@ -2732,11 +2703,11 @@ package: manager: conda platform: linux-64 dependencies: - python: '>=3.5' - url: https://conda.anaconda.org/conda-forge/noarch/decorator-5.1.1-pyhd8ed1ab_0.tar.bz2 + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/decorator-5.1.1-pyhd8ed1ab_1.conda hash: - md5: 43afe5ab04e35e17ba28649471dd7364 - sha256: 328a6a379f9bdfd0230e51de291ce858e6479411ea4b0545fb377c71662ef3e2 + md5: d622d8d7ee8868870f9cbe259f381181 + sha256: 84e5120c97502a3785e8c3241c3bf51f64b4d445f13b4d2445db00d9816fe479 category: main optional: false - name: decorator @@ -2744,11 +2715,11 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=3.5' - url: https://conda.anaconda.org/conda-forge/noarch/decorator-5.1.1-pyhd8ed1ab_0.tar.bz2 + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/decorator-5.1.1-pyhd8ed1ab_1.conda hash: - 
md5: 43afe5ab04e35e17ba28649471dd7364 - sha256: 328a6a379f9bdfd0230e51de291ce858e6479411ea4b0545fb377c71662ef3e2 + md5: d622d8d7ee8868870f9cbe259f381181 + sha256: 84e5120c97502a3785e8c3241c3bf51f64b4d445f13b4d2445db00d9816fe479 category: main optional: false - name: defusedxml @@ -2776,29 +2747,29 @@ package: category: main optional: false - name: deprecated - version: 1.2.14 + version: 1.2.15 manager: conda platform: linux-64 dependencies: - python: '>=2.7' + python: '>=3.9' wrapt: <2,>=1.10 - url: https://conda.anaconda.org/conda-forge/noarch/deprecated-1.2.14-pyh1a96a4e_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/deprecated-1.2.15-pyhd8ed1ab_1.conda hash: - md5: 4e4c4236e1ca9bcd8816b921a4805882 - sha256: 8f61539b00ea315c99f8b6f9e2408caa6894593617676741214cc0280e875ca0 + md5: eaef2e94d5bd76f758545d172c1fda67 + sha256: a20ebf2c9b02a6eb32412ceb5c4cffaae49417db7e75414a76417538293a9402 category: main optional: false - name: deprecated - version: 1.2.14 + version: 1.2.15 manager: conda platform: osx-arm64 dependencies: - python: '>=2.7' + python: '>=3.9' wrapt: <2,>=1.10 - url: https://conda.anaconda.org/conda-forge/noarch/deprecated-1.2.14-pyh1a96a4e_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/deprecated-1.2.15-pyhd8ed1ab_1.conda hash: - md5: 4e4c4236e1ca9bcd8816b921a4805882 - sha256: 8f61539b00ea315c99f8b6f9e2408caa6894593617676741214cc0280e875ca0 + md5: eaef2e94d5bd76f758545d172c1fda67 + sha256: a20ebf2c9b02a6eb32412ceb5c4cffaae49417db7e75414a76417538293a9402 category: main optional: false - name: dill @@ -2830,11 +2801,11 @@ package: manager: conda platform: linux-64 dependencies: - python: 2.7|>=3.6 - url: https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.9-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.9-pyhd8ed1ab_1.conda hash: - md5: fe521c1608280cc2803ebd26dc252212 - sha256: 300b2e714f59403df0560174f5ef6c19db8b4a3b74a7244862cf771f07dee8fb + md5: 
8d88f4a2242e6b96f9ecff9a6a05b2f1 + sha256: 0e160c21776bd881b79ce70053e59736f51036784fa43a50da10a04f0c1b9c45 category: main optional: false - name: distlib @@ -2842,11 +2813,11 @@ package: manager: conda platform: osx-arm64 dependencies: - python: 2.7|>=3.6 - url: https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.9-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.9-pyhd8ed1ab_1.conda hash: - md5: fe521c1608280cc2803ebd26dc252212 - sha256: 300b2e714f59403df0560174f5ef6c19db8b4a3b74a7244862cf771f07dee8fb + md5: 8d88f4a2242e6b96f9ecff9a6a05b2f1 + sha256: 0e160c21776bd881b79ce70053e59736f51036784fa43a50da10a04f0c1b9c45 category: main optional: false - name: distributed @@ -2882,23 +2853,23 @@ package: manager: conda platform: osx-arm64 dependencies: - click: '>=8.0' - cloudpickle: '>=1.5.0' - cytoolz: '>=0.10.1' - dask-core: '>=2024.8.0,<2024.8.1.0a0' - jinja2: '>=2.10.3' - locket: '>=1.0.0' - msgpack-python: '>=1.0.0' - packaging: '>=20.0' - psutil: '>=5.7.2' python: '>=3.9' + packaging: '>=20.0' pyyaml: '>=5.3.1' - sortedcontainers: '>=2.0.5' - tblib: '>=1.6.0' + cloudpickle: '>=1.5.0' + click: '>=8.0' + msgpack-python: '>=1.0.0' toolz: '>=0.10.0' - tornado: '>=6.0.4' + jinja2: '>=2.10.3' urllib3: '>=1.24.3' + tblib: '>=1.6.0' + locket: '>=1.0.0' + tornado: '>=6.0.4' + sortedcontainers: '>=2.0.5' + psutil: '>=5.7.2' + cytoolz: '>=0.10.1' zict: '>=3.0.0' + dask-core: '>=2024.8.0,<2024.8.1.0a0' url: https://conda.anaconda.org/conda-forge/noarch/distributed-2024.8.0-pyhd8ed1ab_0.conda hash: md5: f9a7fbaeb79d4b57d1ed742930b4eec4 @@ -2951,12 +2922,12 @@ package: manager: conda platform: osx-arm64 dependencies: - paramiko: '>=2.4.3' - python: '>=3.8' pywin32-on-windows: '' + python: '>=3.8' requests: '>=2.26.0' urllib3: '>=1.26.0' websocket-client: '>=0.32.0' + paramiko: '>=2.4.3' url: https://conda.anaconda.org/conda-forge/noarch/docker-py-7.1.0-pyhd8ed1ab_0.conda hash: md5: 3e547e36de765ca8f28a7623fb3f255a @@ 
-2995,10 +2966,10 @@ package: platform: linux-64 dependencies: python: '>=3.9' - url: https://conda.anaconda.org/conda-forge/noarch/docutils-0.21.2-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/docutils-0.21.2-pyhd8ed1ab_1.conda hash: - md5: e8cd5d629f65bdf0f3bb312cde14659e - sha256: 362bfe3afaac18298c48c0c6a935641544077ce5105a42a2d8ebe750ad07c574 + md5: 24c1ca34138ee57de72a943237cde4cc + sha256: fa5966bb1718bbf6967a85075e30e4547901410cc7cb7b16daf68942e9a94823 category: main optional: false - name: docutils @@ -3007,10 +2978,10 @@ package: platform: osx-arm64 dependencies: python: '>=3.9' - url: https://conda.anaconda.org/conda-forge/noarch/docutils-0.21.2-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/docutils-0.21.2-pyhd8ed1ab_1.conda hash: - md5: e8cd5d629f65bdf0f3bb312cde14659e - sha256: 362bfe3afaac18298c48c0c6a935641544077ce5105a42a2d8ebe750ad07c574 + md5: 24c1ca34138ee57de72a943237cde4cc + sha256: fa5966bb1718bbf6967a85075e30e4547901410cc7cb7b16daf68942e9a94823 category: main optional: false - name: durationpy @@ -3042,11 +3013,11 @@ package: manager: conda platform: linux-64 dependencies: - python: '>=3.6' - url: https://conda.anaconda.org/conda-forge/noarch/entrypoints-0.4-pyhd8ed1ab_0.tar.bz2 + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/entrypoints-0.4-pyhd8ed1ab_1.conda hash: - md5: 3cf04868fee0a029769bd41f4b2fbf2d - sha256: 2ec4a0900a4a9f42615fc04d0fb3286b796abe56590e8e042f6ec25e102dd5af + md5: 3366592d3c219f2731721f11bc93755c + sha256: 80f579bfc71b3dab5bef74114b89e26c85cb0df8caf4c27ab5ffc16363d57ee7 category: main optional: false - name: entrypoints @@ -3054,11 +3025,35 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=3.6' - url: https://conda.anaconda.org/conda-forge/noarch/entrypoints-0.4-pyhd8ed1ab_0.tar.bz2 + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/entrypoints-0.4-pyhd8ed1ab_1.conda + hash: + md5: 
3366592d3c219f2731721f11bc93755c + sha256: 80f579bfc71b3dab5bef74114b89e26c85cb0df8caf4c27ab5ffc16363d57ee7 + category: main + optional: false +- name: eval_type_backport + version: 0.2.0 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.7' + url: https://conda.anaconda.org/conda-forge/noarch/eval_type_backport-0.2.0-pyha770c72_0.conda + hash: + md5: 710a87253b84b8e6e7f00d071182e43c + sha256: 23b9fd894570b2bbd0d979e6065dbd9633e62e456d22e526725706d7bd4977e1 + category: main + optional: false +- name: eval_type_backport + version: 0.2.0 + manager: conda + platform: osx-arm64 + dependencies: + python: '>=3.7' + url: https://conda.anaconda.org/conda-forge/noarch/eval_type_backport-0.2.0-pyha770c72_0.conda hash: - md5: 3cf04868fee0a029769bd41f4b2fbf2d - sha256: 2ec4a0900a4a9f42615fc04d0fb3286b796abe56590e8e042f6ec25e102dd5af + md5: 710a87253b84b8e6e7f00d071182e43c + sha256: 23b9fd894570b2bbd0d979e6065dbd9633e62e456d22e526725706d7bd4977e1 category: main optional: false - name: exceptiongroup @@ -3066,11 +3061,11 @@ package: manager: conda platform: linux-64 dependencies: - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.2-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.2-pyhd8ed1ab_1.conda hash: - md5: d02ae936e42063ca46af6cdad2dbd1e0 - sha256: e0edd30c4b7144406bb4da975e6bb97d6bc9c0e999aa4efe66ae108cada5d5b5 + md5: a16662747cdeb9abbac74d0057cc976e + sha256: cbde2c64ec317118fc06b223c5fd87c8a680255e7348dd60e7b292d2e103e701 category: main optional: false - name: exceptiongroup @@ -3078,11 +3073,11 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.2-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.2-pyhd8ed1ab_1.conda hash: - md5: d02ae936e42063ca46af6cdad2dbd1e0 - sha256: 
e0edd30c4b7144406bb4da975e6bb97d6bc9c0e999aa4efe66ae108cada5d5b5 + md5: a16662747cdeb9abbac74d0057cc976e + sha256: cbde2c64ec317118fc06b223c5fd87c8a680255e7348dd60e7b292d2e103e701 category: main optional: false - name: executing @@ -3090,11 +3085,11 @@ package: manager: conda platform: linux-64 dependencies: - python: '>=2.7' - url: https://conda.anaconda.org/conda-forge/noarch/executing-2.1.0-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/executing-2.1.0-pyhd8ed1ab_1.conda hash: - md5: d0441db20c827c11721889a241df1220 - sha256: a52d7516e2e11d3eb10908e10d3eb3f8ef267fea99ed9b09d52d96c4db3441b8 + md5: ef8b5fca76806159fc25b4f48d8737eb + sha256: 28d25ea375ebab4bf7479228f8430db20986187b04999136ff5c722ebd32eb60 category: main optional: false - name: executing @@ -3102,38 +3097,25 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=2.7' - url: https://conda.anaconda.org/conda-forge/noarch/executing-2.1.0-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/executing-2.1.0-pyhd8ed1ab_1.conda hash: - md5: d0441db20c827c11721889a241df1220 - sha256: a52d7516e2e11d3eb10908e10d3eb3f8ef267fea99ed9b09d52d96c4db3441b8 + md5: ef8b5fca76806159fc25b4f48d8737eb + sha256: 28d25ea375ebab4bf7479228f8430db20986187b04999136ff5c722ebd32eb60 category: main optional: false - name: expat - version: 2.6.3 + version: 2.6.4 manager: conda platform: linux-64 dependencies: __glibc: '>=2.17,<3.0.a0' - libexpat: 2.6.3 + libexpat: 2.6.4 libgcc: '>=13' - url: https://conda.anaconda.org/conda-forge/linux-64/expat-2.6.3-h5888daf_0.conda - hash: - md5: 6595440079bed734b113de44ffd3cd0a - sha256: 65bd479c75ce876f26600cb230d6ebc474086e31fa384af9b4282b36842ed7e2 - category: main - optional: false -- name: expat - version: 2.6.3 - manager: conda - platform: osx-arm64 - dependencies: - __osx: '>=11.0' - libexpat: 2.6.3 - url: https://conda.anaconda.org/conda-forge/osx-arm64/expat-2.6.3-hf9b8971_0.conda + url: 
https://conda.anaconda.org/conda-forge/linux-64/expat-2.6.4-h5888daf_0.conda hash: - md5: 726bbcf3549fe22b4556285d946fed2d - sha256: 4d52ad7a7eb39f71a38bbf2b6377183024bd3bf4cfb5dcd33b31636a6f9a7abc + md5: 1d6afef758879ef5ee78127eb4cd2c4a + sha256: 1848c7db9e264e3b8036ee133d570dd880422983cd20dd9585a505289606d276 category: main optional: false - name: fastavro @@ -3145,10 +3127,10 @@ package: libgcc: '>=13' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* - url: https://conda.anaconda.org/conda-forge/linux-64/fastavro-1.9.7-py39h8cd3c5a_1.conda + url: https://conda.anaconda.org/conda-forge/linux-64/fastavro-1.9.7-py39h8cd3c5a_3.conda hash: - md5: a8247f20f35f24945dbc10a96236835a - sha256: 999b730a1093324c5a5092fe3d71d8c2a9a8a59750cf18c9acf47d7d79557e78 + md5: 4db59fb76f45e1cc839628a43b779be8 + sha256: 2b7bdf0d17aa8d96aac9fb5a206442dbd0540cb8f1d707d4d657763f7fb96184 category: main optional: false - name: fastavro @@ -3159,10 +3141,10 @@ package: __osx: '>=11.0' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* - url: https://conda.anaconda.org/conda-forge/osx-arm64/fastavro-1.9.7-py39h57695bc_1.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/fastavro-1.9.7-py39h57695bc_3.conda hash: - md5: 38d4b97b1be68c54b48be090c28b7926 - sha256: cf8ebabede71428f8528267292ad28de00c2d8e76d87895aab696d5e0dba4f22 + md5: 3fd231f9ae4d4d72f3836760f738719a + sha256: 1a9c25ae03e08fcb4c376ee02f2be556f2f2d0051abb7bbc404f457be9202388 category: main optional: false - name: filelock @@ -3170,11 +3152,11 @@ package: manager: conda platform: linux-64 dependencies: - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/filelock-3.16.1-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/filelock-3.16.1-pyhd8ed1ab_1.conda hash: - md5: 916f8ec5dd4128cd5f207a3c4c07b2c6 - sha256: 1da766da9dba05091af87977922fe60dc7464091a9ccffb3765d403189d39be4 + md5: d692e9ba6f92dc51484bf3477e36ce7c + sha256: 
18dca6e2194732df7ebf824abaefe999e4765ebe8e8a061269406ab88fc418b9 category: main optional: false - name: filelock @@ -3182,11 +3164,11 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/filelock-3.16.1-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/filelock-3.16.1-pyhd8ed1ab_1.conda hash: - md5: 916f8ec5dd4128cd5f207a3c4c07b2c6 - sha256: 1da766da9dba05091af87977922fe60dc7464091a9ccffb3765d403189d39be4 + md5: d692e9ba6f92dc51484bf3477e36ce7c + sha256: 18dca6e2194732df7ebf824abaefe999e4765ebe8e8a061269406ab88fc418b9 category: main optional: false - name: fiona @@ -3225,55 +3207,53 @@ package: click: '>=8.0,<9.dev0' click-plugins: '>=1.0' cligj: '>=0.5' - gdal: '' importlib-metadata: '' - libcxx: '>=17' - libgdal: '>=3.9.2,<3.10.0a0' - libgdal-core: '>=3.9.2,<3.10.0a0' + libcxx: '>=18' + libgdal-core: '>=3.10.0,<3.11.0a0' pyparsing: '' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* shapely: '' - url: https://conda.anaconda.org/conda-forge/osx-arm64/fiona-1.10.1-py39h5942dda_1.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/fiona-1.10.1-py39h77209e7_3.conda hash: - md5: 1f5d5a75da0a002e8203889b168b5a36 - sha256: a2e7c300d61edaddb605189f520e1ad7794ac1e1bae02e504b60604aea6d3d86 + md5: 843f2f43f1c94c1c844d15dbc7023392 + sha256: 8d3ef517a5b7d20d64b3dc9e44bf9f2259f3e7a3e67b69c1ec35a75c3239aa44 category: main optional: false - name: flask - version: 3.0.3 + version: 3.1.0 manager: conda platform: linux-64 dependencies: - blinker: '>=1.6.2' + blinker: '>=1.9' click: '>=8.1.3' - importlib-metadata: '>=3.6.0' - itsdangerous: '>=2.1.2' + importlib-metadata: '>=3.6' + itsdangerous: '>=2.2' jinja2: '>=3.1.2' - python: '>=3.8' - werkzeug: '>=3.0.0' - url: https://conda.anaconda.org/conda-forge/noarch/flask-3.0.3-pyhd8ed1ab_0.conda + python: '>=3.9' + werkzeug: '>=3.1' + url: https://conda.anaconda.org/conda-forge/noarch/flask-3.1.0-pyhff2d567_0.conda 
hash: - md5: dcdb937144fa20d7757bf512db1ea769 - sha256: 2fc508f656fe52cb2f9a69c9c62077934d6a81510256dbe85f95beb7d9620238 + md5: 3963487fb67f4deb3e16728ad101da7c + sha256: 5eb604e7993c519d8ac5bfe9ce0a50709d4c502bafda4d38f0d4d54da2411a36 category: main optional: false - name: flask - version: 3.0.3 + version: 3.1.0 manager: conda platform: osx-arm64 dependencies: - blinker: '>=1.6.2' + python: '>=3.9' + importlib-metadata: '>=3.6' click: '>=8.1.3' - importlib-metadata: '>=3.6.0' - itsdangerous: '>=2.1.2' jinja2: '>=3.1.2' - python: '>=3.8' - werkzeug: '>=3.0.0' - url: https://conda.anaconda.org/conda-forge/noarch/flask-3.0.3-pyhd8ed1ab_0.conda + blinker: '>=1.9' + itsdangerous: '>=2.2' + werkzeug: '>=3.1' + url: https://conda.anaconda.org/conda-forge/noarch/flask-3.1.0-pyhff2d567_0.conda hash: - md5: dcdb937144fa20d7757bf512db1ea769 - sha256: 2fc508f656fe52cb2f9a69c9c62077934d6a81510256dbe85f95beb7d9620238 + md5: 3963487fb67f4deb3e16728ad101da7c + sha256: 5eb604e7993c519d8ac5bfe9ce0a50709d4c502bafda4d38f0d4d54da2411a36 category: main optional: false - name: flatbuffers @@ -3301,51 +3281,38 @@ package: sha256: c95467f1ef83f358518cea13de8e00e3998427fc7f0dad5885f47c18aeb95ad4 category: main optional: false -- name: fmt - version: 11.0.2 - manager: conda - platform: osx-arm64 - dependencies: - __osx: '>=11.0' - libcxx: '>=16' - url: https://conda.anaconda.org/conda-forge/osx-arm64/fmt-11.0.2-h420ef59_0.conda - hash: - md5: 0e44849fd4764e9f85ed8caa9f24c118 - sha256: 62e6508d5bbde4aa36f7b7658ce2d8fdd0e509c0d1661735c1bd1bed00e070c4 - category: main - optional: false - name: folium - version: 0.17.0 + version: 0.19.0 manager: conda platform: linux-64 dependencies: branca: '>=0.6.0' jinja2: '>=2.9' numpy: '' - python: '>=3.8' + python: '>=3.9' requests: '' xyzservices: '' - url: https://conda.anaconda.org/conda-forge/noarch/folium-0.17.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/folium-0.19.0-pyhd8ed1ab_0.conda hash: - md5: 
9b96a3e6e0473b5722fa4fbefcefcded - sha256: d5c4153cad0154112daf0db648afe82ad7930523e2cb9f7379bb2d148fac0537 + md5: a8919db0d4a4b2b8ee62c0a32822c75d + sha256: 7c47132134a69c7275e9d524d361c31545ceac4058a2337418b4a626899db8bb category: main optional: false - name: folium - version: 0.17.0 + version: 0.19.0 manager: conda platform: osx-arm64 dependencies: - branca: '>=0.6.0' - jinja2: '>=2.9' numpy: '' - python: '>=3.8' requests: '' xyzservices: '' - url: https://conda.anaconda.org/conda-forge/noarch/folium-0.17.0-pyhd8ed1ab_0.conda + python: '>=3.9' + jinja2: '>=2.9' + branca: '>=0.6.0' + url: https://conda.anaconda.org/conda-forge/noarch/folium-0.19.0-pyhd8ed1ab_0.conda hash: - md5: 9b96a3e6e0473b5722fa4fbefcefcded - sha256: d5c4153cad0154112daf0db648afe82ad7930523e2cb9f7379bb2d148fac0537 + md5: a8919db0d4a4b2b8ee62c0a32822c75d + sha256: 7c47132134a69c7275e9d524d361c31545ceac4058a2337418b4a626899db8bb category: main optional: false - name: font-ttf-dejavu-sans-mono @@ -3437,33 +3404,35 @@ package: category: main optional: false - name: fontconfig - version: 2.14.2 + version: 2.15.0 manager: conda platform: linux-64 dependencies: - expat: '>=2.5.0,<3.0a0' + __glibc: '>=2.17,<3.0.a0' freetype: '>=2.12.1,<3.0a0' - libgcc-ng: '>=12' - libuuid: '>=2.32.1,<3.0a0' - libzlib: '>=1.2.13,<2.0.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda + libexpat: '>=2.6.3,<3.0a0' + libgcc: '>=13' + libuuid: '>=2.38.1,<3.0a0' + libzlib: '>=1.3.1,<2.0a0' + url: https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.15.0-h7e30c49_1.conda hash: - md5: 0f69b688f52ff6da70bccb7ff7001d1d - sha256: 155d534c9037347ea7439a2c6da7c24ffec8e5dd278889b4c57274a1d91e0a83 + md5: 8f5b0b297b59e1ac160ad4beec99dbee + sha256: 7093aa19d6df5ccb6ca50329ef8510c6acb6b0d8001191909397368b65b02113 category: main optional: false - name: fontconfig - version: 2.14.2 + version: 2.15.0 manager: conda platform: osx-arm64 dependencies: - expat: '>=2.5.0,<3.0a0' + __osx: 
'>=11.0' freetype: '>=2.12.1,<3.0a0' - libzlib: '>=1.2.13,<2.0.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/fontconfig-2.14.2-h82840c6_0.conda + libexpat: '>=2.6.3,<3.0a0' + libzlib: '>=1.3.1,<2.0a0' + url: https://conda.anaconda.org/conda-forge/osx-arm64/fontconfig-2.15.0-h1383a14_1.conda hash: - md5: f77d47ddb6d3cc5b39b9bdf65635afbb - sha256: 7094917fc6758186e17c61d8ee8fd2bbbe9f303b4addac61d918fa415c497e2b + md5: 7b29f48742cea5d1ccb5edd839cb5621 + sha256: f79d3d816fafbd6a2b0f75ebc3251a30d3294b08af9bb747194121f5efa364bc category: main optional: false - name: fonts-conda-ecosystem @@ -3510,10 +3479,10 @@ package: manager: conda platform: osx-arm64 dependencies: - font-ttf-dejavu-sans-mono: '' + font-ttf-ubuntu: '' font-ttf-inconsolata: '' font-ttf-source-code-pro: '' - font-ttf-ubuntu: '' + font-ttf-dejavu-sans-mono: '' url: https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2 hash: md5: f766549260d6815b0c52253f1fb1bb29 @@ -3521,7 +3490,7 @@ package: category: main optional: false - name: fonttools - version: 4.54.1 + version: 4.55.3 manager: conda platform: linux-64 dependencies: @@ -3531,15 +3500,15 @@ package: munkres: '' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* - unicodedata2: '>=14.0.0' - url: https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.54.1-py39h8cd3c5a_0.conda + unicodedata2: '>=15.1.0' + url: https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.55.3-py39h9399b63_0.conda hash: - md5: a0987ca1f47be10976b5b53bc1f2db74 - sha256: eeefbc9e26df467d87cce669dd2c4b32448451ddc546b618a0f16f258462e97d + md5: 5f2545dc0944d6ffb9ce7750ab2a702f + sha256: 2c9c6a90720933406f164f468f15aad466fa633e0a7a9b673db7c148dfd91294 category: main optional: false - name: fonttools - version: 4.54.1 + version: 4.55.3 manager: conda platform: osx-arm64 dependencies: @@ -3548,11 +3517,11 @@ package: munkres: '' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* - unicodedata2: '>=14.0.0' - url: 
https://conda.anaconda.org/conda-forge/osx-arm64/fonttools-4.54.1-py39h06df861_0.conda + unicodedata2: '>=15.1.0' + url: https://conda.anaconda.org/conda-forge/osx-arm64/fonttools-4.55.3-py39hefdd603_0.conda hash: - md5: 3151e3977f7ccede7769f939926a0039 - sha256: 16f92c5596aafaa521e092665142479c6c442331259808a6238bfee45053c4b7 + md5: ada896a1fcda6813a8d4ae33e7990ba3 + sha256: 479b497892a47a616f9de1a1911891839e8d2aaf3814dd1a06be7cc0389484ba category: main optional: false - name: fqdn @@ -3561,11 +3530,11 @@ package: platform: linux-64 dependencies: cached-property: '>=1.3.0' - python: '>=2.7,<4' - url: https://conda.anaconda.org/conda-forge/noarch/fqdn-1.5.1-pyhd8ed1ab_0.tar.bz2 + python: '>=3.9,<4' + url: https://conda.anaconda.org/conda-forge/noarch/fqdn-1.5.1-pyhd8ed1ab_1.conda hash: - md5: 642d35437078749ef23a5dca2c9bb1f3 - sha256: 6cfd1f9bcd2358a69fb571f4b3af049b630d52647d906822dbedac03e84e4f63 + md5: d3549fd50d450b6d9e7dddff25dd2110 + sha256: 2509992ec2fd38ab27c7cdb42cf6cadc566a1cc0d1021a2673475d9fa87c6276 category: main optional: false - name: fqdn @@ -3573,12 +3542,12 @@ package: manager: conda platform: osx-arm64 dependencies: + python: '>=3.9,<4' cached-property: '>=1.3.0' - python: '>=2.7,<4' - url: https://conda.anaconda.org/conda-forge/noarch/fqdn-1.5.1-pyhd8ed1ab_0.tar.bz2 + url: https://conda.anaconda.org/conda-forge/noarch/fqdn-1.5.1-pyhd8ed1ab_1.conda hash: - md5: 642d35437078749ef23a5dca2c9bb1f3 - sha256: 6cfd1f9bcd2358a69fb571f4b3af049b630d52647d906822dbedac03e84e4f63 + md5: d3549fd50d450b6d9e7dddff25dd2110 + sha256: 2509992ec2fd38ab27c7cdb42cf6cadc566a1cc0d1021a2673475d9fa87c6276 category: main optional: false - name: freetype @@ -3690,7 +3659,7 @@ package: category: main optional: false - name: frozenlist - version: 1.4.1 + version: 1.5.0 manager: conda platform: linux-64 dependencies: @@ -3698,48 +3667,48 @@ package: libgcc: '>=13' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* - url: 
https://conda.anaconda.org/conda-forge/linux-64/frozenlist-1.4.1-py39h8cd3c5a_1.conda + url: https://conda.anaconda.org/conda-forge/linux-64/frozenlist-1.5.0-py39h8cd3c5a_0.conda hash: - md5: 6fed2734d2a0b3d4bdaadfb4e55d1da2 - sha256: ec6730aac26c054a72e2aa080a504fa6ceac1e2ef44fcf80ec0848bcdae0b4c9 + md5: 3d08c8dd785c2f57e4591ce00a144603 + sha256: 85186524d89e59067a79f4c8f7674d5f8c68ba36587f1d9dd0134e568282ae79 category: main optional: false - name: frozenlist - version: 1.4.1 + version: 1.5.0 manager: conda platform: osx-arm64 dependencies: __osx: '>=11.0' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* - url: https://conda.anaconda.org/conda-forge/osx-arm64/frozenlist-1.4.1-py39h06df861_1.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/frozenlist-1.5.0-py39h57695bc_0.conda hash: - md5: a3e9bc1641ddec94fde543ded6d332f4 - sha256: 7cf9ab6a581a932f84330f3014ce373377afbea5d353ef8f42cf35794f6b3d8d + md5: 7dd1f42b97b0e01a626a9cdbd314aea5 + sha256: 0cd8904189a3ca9664a5d0635c36ac6630a18548d935ae60f57cc7da936fbcdc category: main optional: false - name: fsspec - version: 2024.9.0 + version: 2024.10.0 manager: conda platform: linux-64 dependencies: - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/fsspec-2024.9.0-pyhff2d567_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/fsspec-2024.10.0-pyhd8ed1ab_1.conda hash: - md5: ace4329fbff4c69ab0309db6da182987 - sha256: 8f4e9805b4ec223dea0d99f9e7e57c391d9026455eb9f0d6e0784c5d1a1200dc + md5: 906fe13095e734cb413b57a49116cdc8 + sha256: 790a50b4f94042951518f911a914a886a837c926094c6a14ed1d9d03ce336807 category: main optional: false - name: fsspec - version: 2024.9.0 + version: 2024.10.0 manager: conda platform: osx-arm64 dependencies: - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/fsspec-2024.9.0-pyhff2d567_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/fsspec-2024.10.0-pyhd8ed1ab_1.conda hash: - md5: 
ace4329fbff4c69ab0309db6da182987 - sha256: 8f4e9805b4ec223dea0d99f9e7e57c391d9026455eb9f0d6e0784c5d1a1200dc + md5: 906fe13095e734cb413b57a49116cdc8 + sha256: 790a50b4f94042951518f911a914a886a837c926094c6a14ed1d9d03ce336807 category: main optional: false - name: furo @@ -3764,10 +3733,10 @@ package: platform: osx-arm64 dependencies: beautifulsoup4: '' - pygments: '>=2.7' + sphinx-basic-ng: '' python: '>=3.7' + pygments: '>=2.7' sphinx: '>=6.0,<9.0' - sphinx-basic-ng: '' url: https://conda.anaconda.org/conda-forge/noarch/furo-2024.8.6-pyhd8ed1ab_1.conda hash: md5: 1de9286f68ce577064262b0071ac9b4e @@ -3779,11 +3748,11 @@ package: manager: conda platform: linux-64 dependencies: - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/future-1.0.0-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/future-1.0.0-pyhd8ed1ab_1.conda hash: - md5: 650a7807e689642dddd3590eb817beed - sha256: 8c918a63595ae01575b738ddf0bff10dc23a5002d4af4c8b445d1179a76a8efd + md5: e75df25fe5ddec19b2f6ac8dfd33b838 + sha256: 8af9609ae02895c7550965eee8d3f0e3a0dd2882397693bc6f0798f37e4c9333 category: main optional: false - name: future @@ -3791,35 +3760,35 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/future-1.0.0-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/future-1.0.0-pyhd8ed1ab_1.conda hash: - md5: 650a7807e689642dddd3590eb817beed - sha256: 8c918a63595ae01575b738ddf0bff10dc23a5002d4af4c8b445d1179a76a8efd + md5: e75df25fe5ddec19b2f6ac8dfd33b838 + sha256: 8af9609ae02895c7550965eee8d3f0e3a0dd2882397693bc6f0798f37e4c9333 category: main optional: false - name: gast - version: 0.5.5 + version: 0.6.0 manager: conda platform: linux-64 dependencies: - python: '>=3.4' - url: https://conda.anaconda.org/conda-forge/noarch/gast-0.5.5-pyhd8ed1ab_0.conda + python: '>=3.9' + url: 
https://conda.anaconda.org/conda-forge/noarch/gast-0.6.0-pyhd8ed1ab_0.conda hash: - md5: ebc1dc871c48673a0a922023a2e1eee2 - sha256: b0527039bb19aeb5636ecb1512378e4109b945bc99f409977bda3022485c526f + md5: 3fb76e88cbe6f96dfdaff277268bded9 + sha256: bab273e55303409508a67f8e8e34664a1219d322364ba4fbed67b5516c882986 category: main optional: false - name: gast - version: 0.5.5 + version: 0.6.0 manager: conda platform: osx-arm64 dependencies: - python: '>=3.4' - url: https://conda.anaconda.org/conda-forge/noarch/gast-0.5.5-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/gast-0.6.0-pyhd8ed1ab_0.conda hash: - md5: ebc1dc871c48673a0a922023a2e1eee2 - sha256: b0527039bb19aeb5636ecb1512378e4109b945bc99f409977bda3022485c526f + md5: 3fb76e88cbe6f96dfdaff277268bded9 + sha256: bab273e55303409508a67f8e8e34664a1219d322364ba4fbed67b5516c882986 category: main optional: false - name: gdal @@ -3842,25 +3811,6 @@ package: sha256: e8307f25b414df4c6e2de46b2ed8b72fed934e953c219b8b7aa115f103d3a5d9 category: main optional: false -- name: gdal - version: 3.9.2 - manager: conda - platform: osx-arm64 - dependencies: - __osx: '>=11.0' - libcxx: '>=17' - libgdal-core: 3.9.2.* - libkml: '>=1.3.0,<1.4.0a0' - libxml2: '>=2.12.7,<3.0a0' - numpy: '>=1.19,<3' - python: '>=3.9,<3.10.0a0' - python_abi: 3.9.* - url: https://conda.anaconda.org/conda-forge/osx-arm64/gdal-3.9.2-py39hd9310e5_7.conda - hash: - md5: 59dfc9ab35ed42e6c6e117d110fb8dbe - sha256: 80e3333f16c2e3af87989c32065a56c89a1a5dfecbb701090c283f899da78fe0 - category: main - optional: false - name: gdk-pixbuf version: 2.42.10 manager: conda @@ -3917,14 +3867,14 @@ package: manager: conda platform: osx-arm64 dependencies: - fiona: '>=1.8.21' - folium: '' - geopandas-base: 0.14.4 - mapclassify: '>=2.4.0' matplotlib-base: '' - python: '>=3.9' rtree: '' xyzservices: '' + folium: '' + python: '>=3.9' + mapclassify: '>=2.4.0' + fiona: '>=1.8.21' + geopandas-base: 0.14.4 url: 
https://conda.anaconda.org/conda-forge/noarch/geopandas-0.14.4-pyhd8ed1ab_0.conda hash: md5: acc01facf6f915b6289a064957a58cc1 @@ -3953,9 +3903,9 @@ package: platform: osx-arm64 dependencies: packaging: '' + python: '>=3.9' pandas: '>=1.4.0' pyproj: '>=3.3.0' - python: '>=3.9' shapely: '>=1.8.0' url: https://conda.anaconda.org/conda-forge/noarch/geopandas-base-0.14.4-pyha770c72_0.conda hash: @@ -4112,12 +4062,12 @@ package: manager: conda platform: linux-64 dependencies: - python: '>=3.7' + python: '>=3.9' smmap: '>=3.0.1,<6' - url: https://conda.anaconda.org/conda-forge/noarch/gitdb-4.0.11-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/gitdb-4.0.11-pyhd8ed1ab_1.conda hash: - md5: 623b19f616f2ca0c261441067e18ae40 - sha256: 52ab2798be31b8f509eeec458712f447ced4f96ecb672c6c9a42778f47e07b1b + md5: 9d3a3c39dd982332dab2aac113492013 + sha256: a5150ca4103c3ded9f7664bd5176cf0a6f3da86886552bfd3d519826518b2a3d category: main optional: false - name: gitdb @@ -4125,12 +4075,12 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=3.7' + python: '>=3.9' smmap: '>=3.0.1,<6' - url: https://conda.anaconda.org/conda-forge/noarch/gitdb-4.0.11-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/gitdb-4.0.11-pyhd8ed1ab_1.conda hash: - md5: 623b19f616f2ca0c261441067e18ae40 - sha256: 52ab2798be31b8f509eeec458712f447ced4f96ecb672c6c9a42778f47e07b1b + md5: 9d3a3c39dd982332dab2aac113492013 + sha256: a5150ca4103c3ded9f7664bd5176cf0a6f3da86886552bfd3d519826518b2a3d category: main optional: false - name: gitpython @@ -4139,12 +4089,12 @@ package: platform: linux-64 dependencies: gitdb: '>=4.0.1,<5' - python: '>=3.7' + python: '>=3.9' typing_extensions: '>=3.7.4.3' - url: https://conda.anaconda.org/conda-forge/noarch/gitpython-3.1.43-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/gitpython-3.1.43-pyhff2d567_1.conda hash: - md5: 0b2154c1818111e17381b1df5b4b0176 - sha256: 
cbb2802641a009ce9bcc2a047e817fd8816f9c842036a42f4730398d8e4cda2a + md5: 23867f6f9fcd2fb9e9ce6427addf01ae + sha256: eb4bc75fe20aa0404ef698e08cf8864149300d96740268763b4c829baf8af571 category: main optional: false - name: gitpython @@ -4152,13 +4102,13 @@ package: manager: conda platform: osx-arm64 dependencies: - gitdb: '>=4.0.1,<5' - python: '>=3.7' + python: '>=3.9' typing_extensions: '>=3.7.4.3' - url: https://conda.anaconda.org/conda-forge/noarch/gitpython-3.1.43-pyhd8ed1ab_0.conda + gitdb: '>=4.0.1,<5' + url: https://conda.anaconda.org/conda-forge/noarch/gitpython-3.1.43-pyhff2d567_1.conda hash: - md5: 0b2154c1818111e17381b1df5b4b0176 - sha256: cbb2802641a009ce9bcc2a047e817fd8816f9c842036a42f4730398d8e4cda2a + md5: 23867f6f9fcd2fb9e9ce6427addf01ae + sha256: eb4bc75fe20aa0404ef698e08cf8864149300d96740268763b4c829baf8af571 category: main optional: false - name: glog @@ -4227,10 +4177,10 @@ package: mpfr: '>=4.2.1,<5.0a0' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* - url: https://conda.anaconda.org/conda-forge/linux-64/gmpy2-2.1.5-py39h7196dd7_2.conda + url: https://conda.anaconda.org/conda-forge/linux-64/gmpy2-2.1.5-py39h7196dd7_3.conda hash: - md5: fe02313bcacbe84ca5f783b077873182 - sha256: c66367439a3cbac72f886789b02f20105343740f76bf52f7bee3073b63b0b42b + md5: 43d0fdcd098fb84338640451fb46ca93 + sha256: b69a0968b56413c070743a644a95bb917959bfe52f378d1539db658b00cdd97a category: main optional: false - name: gmpy2 @@ -4244,76 +4194,76 @@ package: mpfr: '>=4.2.1,<5.0a0' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* - url: https://conda.anaconda.org/conda-forge/osx-arm64/gmpy2-2.1.5-py39h0bbb021_2.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/gmpy2-2.1.5-py39h478d0be_3.conda hash: - md5: 776626754e7df54694dbc26953ad7b04 - sha256: 77e432cd429f8bc6b623ed7a1f6549836a111e29d2827bd0698d11ab80d2ccf8 + md5: e1681d7e704cbc065d66e436af1cc215 + sha256: 2fb5e2def36867bc8af159bac15330d6af4cb543bf8928eccab3cb36b4a3023f category: main optional: false - name: 
google-api-core - version: 2.21.0 + version: 2.24.0 manager: conda platform: linux-64 dependencies: google-auth: '>=2.14.1,<3.0.dev0' googleapis-common-protos: '>=1.56.2,<2.0.dev0' - proto-plus: '>=1.22.3,<2.0.0dev' + proto-plus: '>=1.25.0,<2.0.0dev' protobuf: '>=3.19.5,<6.0.0.dev0,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5' - python: '>=3.7' + python: '>=3.9' requests: '>=2.18.0,<3.0.0.dev0' - url: https://conda.anaconda.org/conda-forge/noarch/google-api-core-2.21.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/google-api-core-2.24.0-pyhd8ed1ab_0.conda hash: - md5: edeea37b608c49ffa472e03ecb54e026 - sha256: 34558e083b9caa0af561e7ef0428f33ae7453488e41bba1b4af055aef67425cd + md5: 66f12c39effbbac96a88f722f7153790 + sha256: 20b9a3968f14fa0836b4f787b6929d82955af89de75287e933cd7ed520041ab4 category: main optional: false - name: google-api-core - version: 2.21.0 + version: 2.24.0 manager: conda platform: osx-arm64 dependencies: + python: '>=3.9' + proto-plus: '>=1.25.0,<2.0.0dev' google-auth: '>=2.14.1,<3.0.dev0' googleapis-common-protos: '>=1.56.2,<2.0.dev0' - proto-plus: '>=1.22.3,<2.0.0dev' - protobuf: '>=3.19.5,<6.0.0.dev0,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5' - python: '>=3.7' requests: '>=2.18.0,<3.0.0.dev0' - url: https://conda.anaconda.org/conda-forge/noarch/google-api-core-2.21.0-pyhd8ed1ab_0.conda + protobuf: '>=3.19.5,<6.0.0.dev0,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5' + url: https://conda.anaconda.org/conda-forge/noarch/google-api-core-2.24.0-pyhd8ed1ab_0.conda hash: - md5: edeea37b608c49ffa472e03ecb54e026 - sha256: 34558e083b9caa0af561e7ef0428f33ae7453488e41bba1b4af055aef67425cd + md5: 66f12c39effbbac96a88f722f7153790 + sha256: 20b9a3968f14fa0836b4f787b6929d82955af89de75287e933cd7ed520041ab4 category: main optional: false - name: google-api-core-grpc - version: 2.21.0 + version: 2.24.0 manager: conda platform: linux-64 dependencies: - 
google-api-core: 2.21.0 + google-api-core: 2.24.0 grpcio: '>=1.49.1,<2.0.dev0' grpcio-status: '>=1.49.1,<2.0.dev0' - url: https://conda.anaconda.org/conda-forge/noarch/google-api-core-grpc-2.21.0-hd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/google-api-core-grpc-2.24.0-hd8ed1ab_0.conda hash: - md5: 7645014b522396905fda031043ff744a - sha256: a116f6c6d3ff450e8d8fb2772af83fc73a7a325b9fc4161b474feafa7f77dc42 + md5: f5690937308ccaccd23ca200b93dbee3 + sha256: f08e6174220ff15a89371951454ca3446466638e7a33e0fec56b5fc4b0c11746 category: main optional: false - name: google-api-core-grpc - version: 2.21.0 + version: 2.24.0 manager: conda platform: osx-arm64 dependencies: - google-api-core: 2.21.0 grpcio: '>=1.49.1,<2.0.dev0' grpcio-status: '>=1.49.1,<2.0.dev0' - url: https://conda.anaconda.org/conda-forge/noarch/google-api-core-grpc-2.21.0-hd8ed1ab_0.conda + google-api-core: 2.24.0 + url: https://conda.anaconda.org/conda-forge/noarch/google-api-core-grpc-2.24.0-hd8ed1ab_0.conda hash: - md5: 7645014b522396905fda031043ff744a - sha256: a116f6c6d3ff450e8d8fb2772af83fc73a7a325b9fc4161b474feafa7f77dc42 + md5: f5690937308ccaccd23ca200b93dbee3 + sha256: f08e6174220ff15a89371951454ca3446466638e7a33e0fec56b5fc4b0c11746 category: main optional: false - name: google-auth - version: 2.35.0 + version: 2.36.0 manager: conda platform: linux-64 dependencies: @@ -4322,34 +4272,34 @@ package: cryptography: '>=38.0.3' pyasn1-modules: '>=0.2.1' pyopenssl: '>=20.0.0' - python: '>=3.7' + python: '>=3.9' pyu2f: '>=0.1.5' requests: '>=2.20.0,<3.0.0' rsa: '>=3.1.4,<5' - url: https://conda.anaconda.org/conda-forge/noarch/google-auth-2.35.0-pyhff2d567_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/google-auth-2.36.0-pyhd8ed1ab_1.conda hash: - md5: 7a6b4c81d9062a9e92b9ef0548aebc06 - sha256: 62533066d372fd2f5bb9f38e8a44465f404a116210703ec75b88d34c28cc4caa + md5: 8d2b87bb99fadba6b6a5bbfa77dfb5b8 + sha256: 9af482ce34b026012fc4b507c330dbaf82ecf3c52652cc7fac52d26084aa15d7 
category: main optional: false - name: google-auth - version: 2.35.0 + version: 2.36.0 manager: conda platform: osx-arm64 dependencies: - aiohttp: '>=3.6.2,<4.0.0' - cachetools: '>=2.0.0,<6.0' - cryptography: '>=38.0.3' + python: '>=3.9' pyasn1-modules: '>=0.2.1' + rsa: '>=3.1.4,<5' + requests: '>=2.20.0,<3.0.0' pyopenssl: '>=20.0.0' - python: '>=3.7' pyu2f: '>=0.1.5' - requests: '>=2.20.0,<3.0.0' - rsa: '>=3.1.4,<5' - url: https://conda.anaconda.org/conda-forge/noarch/google-auth-2.35.0-pyhff2d567_0.conda + cachetools: '>=2.0.0,<6.0' + aiohttp: '>=3.6.2,<4.0.0' + cryptography: '>=38.0.3' + url: https://conda.anaconda.org/conda-forge/noarch/google-auth-2.36.0-pyhd8ed1ab_1.conda hash: - md5: 7a6b4c81d9062a9e92b9ef0548aebc06 - sha256: 62533066d372fd2f5bb9f38e8a44465f404a116210703ec75b88d34c28cc4caa + md5: 8d2b87bb99fadba6b6a5bbfa77dfb5b8 + sha256: 9af482ce34b026012fc4b507c330dbaf82ecf3c52652cc7fac52d26084aa15d7 category: main optional: false - name: google-auth-oauthlib @@ -4372,10 +4322,10 @@ package: manager: conda platform: osx-arm64 dependencies: - click: '>=6.0.0' - google-auth: '>=2.15.0' python: '>=3.6' requests-oauthlib: '>=0.7.0' + click: '>=6.0.0' + google-auth: '>=2.15.0' url: https://conda.anaconda.org/conda-forge/noarch/google-auth-oauthlib-1.2.1-pyhd8ed1ab_0.conda hash: md5: b252850143cd2080d87060f891d3b288 @@ -4383,14 +4333,14 @@ package: category: main optional: false - name: google-cloud-bigquery - version: 3.26.0 + version: 3.27.0 manager: conda platform: linux-64 dependencies: bigquery-magics: '>=0.1.0' db-dtypes: '>=0.3.0,<2.0.0dev' geopandas: '>=0.9.0,<1.0dev' - google-cloud-bigquery-core: 3.26.0 + google-cloud-bigquery-core: 3.27.0 google-cloud-bigquery-storage: '>=2.6.0,<3.0.0dev' grpcio: '>=1.49.1,<2.0dev' ipykernel: '>=6.0.0' @@ -4402,40 +4352,40 @@ package: python: '>=3.8' shapely: '>=1.8.4,<3.0.0dev' tqdm: '>=4.7.4,<=5.0.0dev' - url: https://conda.anaconda.org/conda-forge/noarch/google-cloud-bigquery-3.26.0-pyhd8ed1ab_0.conda + url: 
https://conda.anaconda.org/conda-forge/noarch/google-cloud-bigquery-3.27.0-pyhd8ed1ab_0.conda hash: - md5: aa1416b8a545513e663271f32b2793f5 - sha256: 4d3581edcdfc6206ecbcd2ce01b67ca9c8025ca365f95d582c3c3835bee08e74 + md5: 226cb7c6a7e1fa1ed0a167618c6364fe + sha256: cbbd6dbd99beac2ff55258283a98091acba32b8c6f25ce9f7145991ad6fa4927 category: main optional: false - name: google-cloud-bigquery - version: 3.26.0 + version: 3.27.0 manager: conda platform: osx-arm64 dependencies: - bigquery-magics: '>=0.1.0' - db-dtypes: '>=0.3.0,<2.0.0dev' - geopandas: '>=0.9.0,<1.0dev' - google-cloud-bigquery-core: 3.26.0 - google-cloud-bigquery-storage: '>=2.6.0,<3.0.0dev' - grpcio: '>=1.49.1,<2.0dev' - ipykernel: '>=6.0.0' - ipywidgets: '>=7.7.0' + python: '>=3.8' pandas: '>=1.1.0' proto-plus: '>=1.22.3,<2.0.0dev' + tqdm: '>=4.7.4,<=5.0.0dev' protobuf: '>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5' + geopandas: '>=0.9.0,<1.0dev' pyarrow: '>=3.0.0' - python: '>=3.8' + db-dtypes: '>=0.3.0,<2.0.0dev' + grpcio: '>=1.49.1,<2.0dev' + ipywidgets: '>=7.7.0' + ipykernel: '>=6.0.0' + google-cloud-bigquery-storage: '>=2.6.0,<3.0.0dev' shapely: '>=1.8.4,<3.0.0dev' - tqdm: '>=4.7.4,<=5.0.0dev' - url: https://conda.anaconda.org/conda-forge/noarch/google-cloud-bigquery-3.26.0-pyhd8ed1ab_0.conda + bigquery-magics: '>=0.1.0' + google-cloud-bigquery-core: 3.27.0 + url: https://conda.anaconda.org/conda-forge/noarch/google-cloud-bigquery-3.27.0-pyhd8ed1ab_0.conda hash: - md5: aa1416b8a545513e663271f32b2793f5 - sha256: 4d3581edcdfc6206ecbcd2ce01b67ca9c8025ca365f95d582c3c3835bee08e74 + md5: 226cb7c6a7e1fa1ed0a167618c6364fe + sha256: cbbd6dbd99beac2ff55258283a98091acba32b8c6f25ce9f7145991ad6fa4927 category: main optional: false - name: google-cloud-bigquery-core - version: 3.26.0 + version: 3.27.0 manager: conda platform: linux-64 dependencies: @@ -4447,29 +4397,29 @@ package: python: '>=3.8' python-dateutil: '>=2.7.3,<3.0dev' requests: '>=2.21.0,<3.0.0dev' - url: 
https://conda.anaconda.org/conda-forge/noarch/google-cloud-bigquery-core-3.26.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/google-cloud-bigquery-core-3.27.0-pyhd8ed1ab_0.conda hash: - md5: e78dd7693402206546c6fab226fed609 - sha256: 3a842f7ed820549e0d9406970a994248fe08c8604bc9e9682efeaa4f29e60268 + md5: 92eb20d3933ec7001d3ef41b6689f6f4 + sha256: 9e6199ba98ae2df14a09f5a0bc69beffd3e8485bc05e5937bec432150e458246 category: main optional: false - name: google-cloud-bigquery-core - version: 3.26.0 + version: 3.27.0 manager: conda platform: osx-arm64 dependencies: - google-api-core-grpc: '>=2.11.1,<3.0.0dev' + python: '>=3.8' google-auth: '>=2.14.1,<3.0.0dev' + packaging: '>=20.0.0' + requests: '>=2.21.0,<3.0.0dev' + google-api-core-grpc: '>=2.11.1,<3.0.0dev' google-cloud-core: '>=2.4.1,<3.0.0dev' google-resumable-media: '>=2.0.0,<3.0dev' - packaging: '>=20.0.0' - python: '>=3.8' python-dateutil: '>=2.7.3,<3.0dev' - requests: '>=2.21.0,<3.0.0dev' - url: https://conda.anaconda.org/conda-forge/noarch/google-cloud-bigquery-core-3.26.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/google-cloud-bigquery-core-3.27.0-pyhd8ed1ab_0.conda hash: - md5: e78dd7693402206546c6fab226fed609 - sha256: 3a842f7ed820549e0d9406970a994248fe08c8604bc9e9682efeaa4f29e60268 + md5: 92eb20d3933ec7001d3ef41b6689f6f4 + sha256: 9e6199ba98ae2df14a09f5a0bc69beffd3e8485bc05e5937bec432150e458246 category: main optional: false - name: google-cloud-bigquery-storage @@ -4489,19 +4439,19 @@ package: category: main optional: false - name: google-cloud-bigquery-storage - version: 2.27.0 + version: 2.11.0 manager: conda platform: osx-arm64 dependencies: + python: '' + pyarrow: '>=0.15.0' fastavro: '>=0.21.2' - google-cloud-bigquery-storage-core: 2.27.0.* pandas: '>=0.21.1' - pyarrow: '>=0.15.0' - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/google-cloud-bigquery-storage-2.27.0-pyhff2d567_0.conda + google-cloud-bigquery-storage-core: 
2.11.0.* + url: https://conda.anaconda.org/conda-forge/noarch/google-cloud-bigquery-storage-2.11.0-pyh6c4a22f_0.tar.bz2 hash: - md5: 35d2f945bf888a3612b75a73ace59152 - sha256: f712295ba2c4006fd006635caba75ee940e268655754431e5265e02828194e94 + md5: 28ef07a4101ddea28bf766b95fc25128 + sha256: 26ac57129328803602c6e199c8b98f31dca7407d4bb7c42d3876baa98ac9f90c category: main optional: false - name: google-cloud-bigquery-storage-core @@ -4521,19 +4471,18 @@ package: category: main optional: false - name: google-cloud-bigquery-storage-core - version: 2.27.0 + version: 2.11.0 manager: conda platform: osx-arm64 dependencies: - google-api-core-grpc: '>=1.34.0,<3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*' - google-auth: '>=2.14.1,<3' - proto-plus: '>=1.22.0,<2.0.0dev' - protobuf: '>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5' - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/google-cloud-bigquery-storage-core-2.27.0-pyhff2d567_0.conda + python: '>=3.6' + libcst: '>=0.2.5' + google-api-core-grpc: '>=1.28.0,<3.0.0dev' + proto-plus: '>=1.18.0' + url: https://conda.anaconda.org/conda-forge/noarch/google-cloud-bigquery-storage-core-2.11.0-pyh6c4a22f_0.tar.bz2 hash: - md5: 9ea2bb1ebc301c01ee1d04a645af6b14 - sha256: fb9269c2426aab919cd0b3bb5e45e84a3bb0347240faa5be20f36053f867eebe + md5: 4e756999f22c7c02dc9f004ad1906b74 + sha256: 9597b1af93ef3182112c140fa26832568a6753f95b1c00371597fe3e7d72b672 category: main optional: false - name: google-cloud-core @@ -4544,11 +4493,11 @@ package: google-api-core: '>=1.31.6,<3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.0' google-auth: '>=1.25.0,<3.0dev' grpcio: '>=1.38.0,<2.0.0dev' - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/google-cloud-core-2.4.1-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/google-cloud-core-2.4.1-pyhd8ed1ab_1.conda hash: - md5: 
1853cdebbfe25fb6ee253855a44945a6 - sha256: d01b787bad2ec4da9536ce2cedb3e53ed092fe6a4a596c043ab358bb9b2fbcdd + md5: 574cda1b3e3c74cab4632659d39fdf07 + sha256: 28af1a03f9debc80d6dddebfd50c45b75579f02095a44b583e5fecea7b347626 category: main optional: false - name: google-cloud-core @@ -4556,14 +4505,14 @@ package: manager: conda platform: osx-arm64 dependencies: - google-api-core: '>=1.31.6,<3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.0' + python: '>=3.9' google-auth: '>=1.25.0,<3.0dev' + google-api-core: '>=1.31.6,<3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.0' grpcio: '>=1.38.0,<2.0.0dev' - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/google-cloud-core-2.4.1-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/google-cloud-core-2.4.1-pyhd8ed1ab_1.conda hash: - md5: 1853cdebbfe25fb6ee253855a44945a6 - sha256: d01b787bad2ec4da9536ce2cedb3e53ed092fe6a4a596c043ab358bb9b2fbcdd + md5: 574cda1b3e3c74cab4632659d39fdf07 + sha256: 28af1a03f9debc80d6dddebfd50c45b75579f02095a44b583e5fecea7b347626 category: main optional: false - name: google-crc32c @@ -4606,12 +4555,12 @@ package: manager: conda platform: linux-64 dependencies: - python: '>=3.7' + python: '>=3.9' six: '' - url: https://conda.anaconda.org/conda-forge/noarch/google-pasta-0.2.0-pyhd8ed1ab_1.conda + url: https://conda.anaconda.org/conda-forge/noarch/google-pasta-0.2.0-pyhd8ed1ab_2.conda hash: - md5: 5257b8fdee0c88e6bd3a10d38bc3892a - sha256: 264b830bfa9cfe41a4bfbc18a7bba2cebc9b2f6ee711b873e49133c6e4a304e8 + md5: 005b9749218cb8c9e94ac2a77ca3c8c0 + sha256: 9f668fe562a9cf71a5d1f348645ac041af3f2e4bc634b18d6374e838e1c55dd8 category: main optional: false - name: google-pasta @@ -4619,12 +4568,12 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=3.7' six: '' - url: https://conda.anaconda.org/conda-forge/noarch/google-pasta-0.2.0-pyhd8ed1ab_1.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/google-pasta-0.2.0-pyhd8ed1ab_2.conda hash: - md5: 
5257b8fdee0c88e6bd3a10d38bc3892a - sha256: 264b830bfa9cfe41a4bfbc18a7bba2cebc9b2f6ee711b873e49133c6e4a304e8 + md5: 005b9749218cb8c9e94ac2a77ca3c8c0 + sha256: 9f668fe562a9cf71a5d1f348645ac041af3f2e4bc634b18d6374e838e1c55dd8 category: main optional: false - name: google-resumable-media @@ -4633,11 +4582,11 @@ package: platform: linux-64 dependencies: google-crc32c: '>=1.0,<2.0dev' - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/google-resumable-media-2.7.2-pyhd8ed1ab_1.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/google-resumable-media-2.7.2-pyhd8ed1ab_2.conda hash: - md5: 357cb6c361778650356349769e4c834b - sha256: 2ff33f5e48df03d86c2ca839afc3168b641106fa57603f7b39431524a595b661 + md5: 1792ca195c71d1304b3f7c783a3d7419 + sha256: 53f613ff22203c9d8a81ac9eb2351d0b9dea44e92922e62cdd2d45a676582cc7 category: main optional: false - name: google-resumable-media @@ -4645,68 +4594,70 @@ package: manager: conda platform: osx-arm64 dependencies: + python: '>=3.9' google-crc32c: '>=1.0,<2.0dev' - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/google-resumable-media-2.7.2-pyhd8ed1ab_1.conda + url: https://conda.anaconda.org/conda-forge/noarch/google-resumable-media-2.7.2-pyhd8ed1ab_2.conda hash: - md5: 357cb6c361778650356349769e4c834b - sha256: 2ff33f5e48df03d86c2ca839afc3168b641106fa57603f7b39431524a595b661 + md5: 1792ca195c71d1304b3f7c783a3d7419 + sha256: 53f613ff22203c9d8a81ac9eb2351d0b9dea44e92922e62cdd2d45a676582cc7 category: main optional: false - name: googleapis-common-protos - version: 1.65.0 + version: 1.66.0 manager: conda platform: linux-64 dependencies: protobuf: '>=3.20.2,<6.0.0.dev0,!=3.20.0,!=3.20.1,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5' - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/googleapis-common-protos-1.65.0-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/googleapis-common-protos-1.66.0-pyhff2d567_0.conda hash: 
- md5: f5bdd5dd4ad1fd075a6f25670bdda1b6 - sha256: 093e899196b6bedb761c707677a3bc7161a04371084eb26f489327e8aa8d6f25 + md5: 4861e30ff0cd566ea6fb4593e3b7c22a + sha256: d8d19575a827f2c62500949b9536efdd6b5406c9f546a73b6a87ac90b03a5875 category: main optional: false - name: googleapis-common-protos - version: 1.65.0 + version: 1.66.0 manager: conda platform: osx-arm64 dependencies: + python: '>=3.9' protobuf: '>=3.20.2,<6.0.0.dev0,!=3.20.0,!=3.20.1,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5' - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/googleapis-common-protos-1.65.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/googleapis-common-protos-1.66.0-pyhff2d567_0.conda hash: - md5: f5bdd5dd4ad1fd075a6f25670bdda1b6 - sha256: 093e899196b6bedb761c707677a3bc7161a04371084eb26f489327e8aa8d6f25 + md5: 4861e30ff0cd566ea6fb4593e3b7c22a + sha256: d8d19575a827f2c62500949b9536efdd6b5406c9f546a73b6a87ac90b03a5875 category: main optional: false - name: graphene - version: '3.3' + version: 3.4.3 manager: conda platform: linux-64 dependencies: - aniso8601: '>=8,<10' graphql-core: '>=3.1,<3.3' graphql-relay: '>=3.1,<3.3' - python: '>=3.6' - url: https://conda.anaconda.org/conda-forge/noarch/graphene-3.3-pyhd8ed1ab_0.conda + python: '>=3.8' + python-dateutil: '>=2.7.0,<3' + typing_extensions: '>=4.7.1,<5' + url: https://conda.anaconda.org/conda-forge/noarch/graphene-3.4.3-pyhd8ed1ab_0.conda hash: - md5: ed2ae94977dfd96566e6eaf373216728 - sha256: 8b4e2c1d326849c0094f9e96a9833addb10f638be67bb0590836720879697ec6 + md5: b5afb060743a524f5a5d83abd8938097 + sha256: dee3301981bc767793597ea81ecac71c4f6c9f64621513674d2f577910c9760d category: main optional: false - name: graphene - version: '3.3' + version: 3.4.3 manager: conda platform: osx-arm64 dependencies: - aniso8601: '>=8,<10' + python: '>=3.8' + python-dateutil: '>=2.7.0,<3' graphql-core: '>=3.1,<3.3' graphql-relay: '>=3.1,<3.3' - python: '>=3.6' - url: 
https://conda.anaconda.org/conda-forge/noarch/graphene-3.3-pyhd8ed1ab_0.conda + typing_extensions: '>=4.7.1,<5' + url: https://conda.anaconda.org/conda-forge/noarch/graphene-3.4.3-pyhd8ed1ab_0.conda hash: - md5: ed2ae94977dfd96566e6eaf373216728 - sha256: 8b4e2c1d326849c0094f9e96a9833addb10f638be67bb0590836720879697ec6 + md5: b5afb060743a524f5a5d83abd8938097 + sha256: dee3301981bc767793597ea81ecac71c4f6c9f64621513674d2f577910c9760d category: main optional: false - name: graphite2 @@ -4779,8 +4730,8 @@ package: manager: conda platform: osx-arm64 dependencies: - graphql-core: '>=3.2,<3.3' python: '>=3.6' + graphql-core: '>=3.2,<3.3' typing_extensions: '>=4.1,<5' url: https://conda.anaconda.org/conda-forge/noarch/graphql-relay-3.2.0-pyhd8ed1ab_0.tar.bz2 hash: @@ -4883,35 +4834,35 @@ package: manager: conda platform: osx-arm64 dependencies: - altair: '>=4.2.1,<5.0.0' - click: '>=7.1.2' + packaging: '' + python: '>=3.8' + requests: '>=2.20' + python-dateutil: '>=2.8.1' + jinja2: '>=2.10' + pandas: '>=1.1.0' colorama: '>=0.4.3' - cryptography: '>=3.2' - ipython: '>=7.16.3' + click: '>=7.1.2' ipywidgets: '>=7.5.1' - jinja2: '>=2.10' - jsonpatch: '>=1.22' jsonschema: '>=2.5.1' - makefun: '>=1.7.0,<2' - marshmallow: '>=3.7.1,<4.0.0' + scipy: '>=1.6.0' + tqdm: '>=4.59.0' + tzlocal: '>=1.2' + jsonpatch: '>=1.22' mistune: '>=0.8.4' + typing-extensions: '>=3.10.0.0' + pytz: '>=2021.3' nbformat: '>=5.0' + urllib3: '>=1.26' + ipython: '>=7.16.3' notebook: '>=6.4.10' + cryptography: '>=3.2' + pyparsing: '>=2.4' + makefun: '>=1.7.0,<2' + marshmallow: '>=3.7.1,<4.0.0' + altair: '>=4.2.1,<5.0.0' numpy: '>=1.20.3,<2' - packaging: '' - pandas: '>=1.1.0' pydantic: '>=1.9.2' - pyparsing: '>=2.4' - python: '>=3.8' - python-dateutil: '>=2.8.1' - pytz: '>=2021.3' - requests: '>=2.20' ruamel.yaml: '>=0.16,<0.18' - scipy: '>=1.6.0' - tqdm: '>=4.59.0' - typing-extensions: '>=3.10.0.0' - tzlocal: '>=1.2' - urllib3: '>=1.26' url: 
https://conda.anaconda.org/conda-forge/noarch/great-expectations-0.18.19-pyhd8ed1ab_0.conda hash: md5: 44e23f86f4f1e5b8b7073ff879d875f6 @@ -4966,19 +4917,20 @@ package: category: main optional: false - name: grpcio - version: 1.62.2 + version: 1.65.5 manager: conda platform: osx-arm64 dependencies: - libcxx: '>=16' - libgrpc: 1.62.2 - libzlib: '>=1.2.13,<2.0.0a0' + __osx: '>=11.0' + libcxx: '>=17' + libgrpc: 1.65.5 + libzlib: '>=1.3.1,<2.0a0' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* - url: https://conda.anaconda.org/conda-forge/osx-arm64/grpcio-1.62.2-py39h047a24b_0.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/grpcio-1.65.5-py39h05480be_0.conda hash: - md5: 87c43107cd3a14cb6510a04cf5db7d4a - sha256: 168d5f11d351a39d740ff5e4301e2590672ed695c7220182007980b5ae2e038f + md5: 736cf911aaa47705664ef788b441984a + sha256: 9f977fc0b58894108bfee8241c22d99e11bd2df409fa48968afa1fc2a7110a17 category: main optional: false - name: grpcio-status @@ -4997,18 +4949,18 @@ package: category: main optional: false - name: grpcio-status - version: 1.62.2 + version: 1.65.5 manager: conda platform: osx-arm64 dependencies: + python: '>=3.8' googleapis-common-protos: '>=1.5.5' - grpcio: '>=1.62.2' - protobuf: '>=4.21.6' - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/grpcio-status-1.62.2-pyhd8ed1ab_0.conda + protobuf: '>=5.26.1,<6.0dev' + grpcio: '>=1.65.5' + url: https://conda.anaconda.org/conda-forge/noarch/grpcio-status-1.65.5-pyhd8ed1ab_0.conda hash: - md5: 1fa7b310dbed89a6ab1e8000b161799c - sha256: fe0d64018146b2dfc3ded035ba3f7d55672df21f3a9e5ba3d37a09a02aeff773 + md5: 27057d76c2a4a614ee4f6c9147df094c + sha256: a07d26e3918330dace23ea5b3db51a4b6a7c05df8deaf8a3d0cdd52d013c01f8 category: main optional: false - name: gtk2 @@ -5115,12 +5067,12 @@ package: manager: conda platform: linux-64 dependencies: - python: '>=3' + python: '>=3.9' typing_extensions: '' - url: https://conda.anaconda.org/conda-forge/noarch/h11-0.14.0-pyhd8ed1ab_0.tar.bz2 + url: 
https://conda.anaconda.org/conda-forge/noarch/h11-0.14.0-pyhd8ed1ab_1.conda hash: - md5: b21ed0883505ba1910994f1df031a428 - sha256: 817d2c77d53afe3f3d9cf7f6eb8745cdd8ea76c7adaa9d7ced75c455a2c2c085 + md5: 7ee49e89531c0dcbba9466f6d115d585 + sha256: 622516185a7c740d5c7f27016d0c15b45782c1501e5611deec63fd70344ce7c8 category: main optional: false - name: h11 @@ -5128,12 +5080,12 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=3' typing_extensions: '' - url: https://conda.anaconda.org/conda-forge/noarch/h11-0.14.0-pyhd8ed1ab_0.tar.bz2 + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/h11-0.14.0-pyhd8ed1ab_1.conda hash: - md5: b21ed0883505ba1910994f1df031a428 - sha256: 817d2c77d53afe3f3d9cf7f6eb8745cdd8ea76c7adaa9d7ced75c455a2c2c085 + md5: 7ee49e89531c0dcbba9466f6d115d585 + sha256: 622516185a7c740d5c7f27016d0c15b45782c1501e5611deec63fd70344ce7c8 category: main optional: false - name: h2 @@ -5143,11 +5095,11 @@ package: dependencies: hpack: '>=4.0,<5' hyperframe: '>=6.0,<7' - python: '>=3.6.1' - url: https://conda.anaconda.org/conda-forge/noarch/h2-4.1.0-pyhd8ed1ab_0.tar.bz2 + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/h2-4.1.0-pyhd8ed1ab_1.conda hash: - md5: b748fbf7060927a6e82df7cb5ee8f097 - sha256: bfc6a23849953647f4e255c782e74a0e18fe16f7e25c7bb0bc57b83bb6762c7a + md5: 825927dc7b0f287ef8d4d0011bb113b1 + sha256: 843ddad410c370672a8250470697027618f104153612439076d4d7b91eeb7b5c category: main optional: false - name: h2 @@ -5155,17 +5107,17 @@ package: manager: conda platform: osx-arm64 dependencies: + python: '>=3.9' hpack: '>=4.0,<5' hyperframe: '>=6.0,<7' - python: '>=3.6.1' - url: https://conda.anaconda.org/conda-forge/noarch/h2-4.1.0-pyhd8ed1ab_0.tar.bz2 + url: https://conda.anaconda.org/conda-forge/noarch/h2-4.1.0-pyhd8ed1ab_1.conda hash: - md5: b748fbf7060927a6e82df7cb5ee8f097 - sha256: bfc6a23849953647f4e255c782e74a0e18fe16f7e25c7bb0bc57b83bb6762c7a + md5: 825927dc7b0f287ef8d4d0011bb113b1 + 
sha256: 843ddad410c370672a8250470697027618f104153612439076d4d7b91eeb7b5c category: main optional: false - name: h5py - version: 3.11.0 + version: 3.12.1 manager: conda platform: linux-64 dependencies: @@ -5176,14 +5128,14 @@ package: numpy: '>=1.19,<3' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* - url: https://conda.anaconda.org/conda-forge/linux-64/h5py-3.11.0-nompi_py39h30a5a8d_103.conda + url: https://conda.anaconda.org/conda-forge/linux-64/h5py-3.12.1-nompi_py39h30a5a8d_102.conda hash: - md5: 875851870752d93655c848dafab4bc0d - sha256: a1abdf04c5cd10569dc19c98d97baad2864bf42cb16290ec1c83826fb3a1c5e3 + md5: 0f00a43e136434f644db1863b5c6b1b2 + sha256: 2b550f99b27e650be3106ef7da27a717745fff672e3f34a4f1c6205c99a12895 category: main optional: false - name: h5py - version: 3.11.0 + version: 3.12.1 manager: conda platform: osx-arm64 dependencies: @@ -5193,10 +5145,10 @@ package: numpy: '>=1.19,<3' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* - url: https://conda.anaconda.org/conda-forge/osx-arm64/h5py-3.11.0-nompi_py39h5dd549c_103.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/h5py-3.12.1-nompi_py39h5dd549c_102.conda hash: - md5: 27843e4b147c39b85dcf3744418b45d6 - sha256: 8aaff2990bcb2ef8a03d36852d1e8934a6f2a88b019190f1bcab35dd559874d9 + md5: f335eff00f6c7c4cfe2ef53262e026eb + sha256: 8595e53f764f7be29afa0be37b99b2fa33405ae15cf087491dd9a90b9d4d444c category: main optional: false - name: harfbuzz @@ -5250,37 +5202,24 @@ package: sha256: 0d09b6dc1ce5c4005ae1c6a19dc10767932ef9a5e9c755cfdbb5189ac8fb0684 category: main optional: false -- name: hdf4 - version: 4.2.15 - manager: conda - platform: osx-arm64 - dependencies: - libcxx: '>=15.0.7' - libjpeg-turbo: '>=3.0.0,<4.0a0' - libzlib: '>=1.2.13,<2.0.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/hdf4-4.2.15-h2ee6834_7.conda - hash: - md5: ff5d749fd711dc7759e127db38005924 - sha256: c3b01e3c3fe4ca1c4d28c287eaa5168a4f2fd3ffd76690082ac919244c22fa90 - category: main - optional: false - name: hdf5 
version: 1.14.3 manager: conda platform: linux-64 dependencies: + __glibc: '>=2.17,<3.0.a0' libaec: '>=1.1.3,<2.0a0' - libcurl: '>=8.8.0,<9.0a0' - libgcc-ng: '>=12' - libgfortran-ng: '' - libgfortran5: '>=12.3.0' - libstdcxx-ng: '>=12' - libzlib: '>=1.2.13,<2.0a0' - openssl: '>=3.3.1,<4.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.3-nompi_hdf9ad27_105.conda + libcurl: '>=8.10.1,<9.0a0' + libgcc: '>=13' + libgfortran: '' + libgfortran5: '>=13.3.0' + libstdcxx: '>=13' + libzlib: '>=1.3.1,<2.0a0' + openssl: '>=3.4.0,<4.0a0' + url: https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.3-nompi_h2d575fe_108.conda hash: - md5: 7e1729554e209627636a0f6fabcdd115 - sha256: 2278fa07da6f96e807d402cd55480624d67d2dee202191aaaf278ce5ab23605a + md5: b74598031529dafb2a66f9e90f26f2dc + sha256: 340b997d57eb89c058d8f2e80d426e4716661a51efcd1d857afb2b29f59177a4 category: main optional: false - name: hdf5 @@ -5290,16 +5229,16 @@ package: dependencies: __osx: '>=11.0' libaec: '>=1.1.3,<2.0a0' - libcurl: '>=8.8.0,<9.0a0' - libcxx: '>=16' + libcurl: '>=8.10.1,<9.0a0' + libcxx: '>=18' libgfortran: 5.* libgfortran5: '>=13.2.0' - libzlib: '>=1.2.13,<2.0a0' - openssl: '>=3.3.1,<4.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/hdf5-1.14.3-nompi_hec07895_105.conda + libzlib: '>=1.3.1,<2.0a0' + openssl: '>=3.4.0,<4.0a0' + url: https://conda.anaconda.org/conda-forge/osx-arm64/hdf5-1.14.3-nompi_ha698983_108.conda hash: - md5: f9c8c7304d52c8846eab5d6c34219812 - sha256: 5d87a1b63862e7da78c7bd9c17dea3526c0462c11df9004943cfa4569cc25dd3 + md5: 5c9753c3ecfb975480ead5aa07903085 + sha256: b080cf87687bfb0be6f73ecf95f92525ec6fc03527b1cad3fdcedad3d9ef87d5 category: main optional: false - name: hpack @@ -5307,11 +5246,11 @@ package: manager: conda platform: linux-64 dependencies: - python: '' - url: https://conda.anaconda.org/conda-forge/noarch/hpack-4.0.0-pyh9f0ad1d_0.tar.bz2 + python: '>=3.9' + url: 
https://conda.anaconda.org/conda-forge/noarch/hpack-4.0.0-pyhd8ed1ab_1.conda hash: - md5: 914d6646c4dbb1fd3ff539830a12fd71 - sha256: 5dec948932c4f740674b1afb551223ada0c55103f4c7bf86a110454da3d27cb8 + md5: 2aa5ff7fa34a81b9196532c84c10d865 + sha256: ec89b7e5b8aa2f0219f666084446e1fb7b54545861e9caa892acb24d125761b5 category: main optional: false - name: hpack @@ -5319,49 +5258,49 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '' - url: https://conda.anaconda.org/conda-forge/noarch/hpack-4.0.0-pyh9f0ad1d_0.tar.bz2 + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/hpack-4.0.0-pyhd8ed1ab_1.conda hash: - md5: 914d6646c4dbb1fd3ff539830a12fd71 - sha256: 5dec948932c4f740674b1afb551223ada0c55103f4c7bf86a110454da3d27cb8 + md5: 2aa5ff7fa34a81b9196532c84c10d865 + sha256: ec89b7e5b8aa2f0219f666084446e1fb7b54545861e9caa892acb24d125761b5 category: main optional: false - name: httpcore - version: 1.0.6 + version: 1.0.7 manager: conda platform: linux-64 dependencies: - anyio: '>=3.0,<5.0' - certifi: '' + python: '>=3.8' h11: '>=0.13,<0.15' h2: '>=3,<5' - python: '>=3.8' sniffio: 1.* - url: https://conda.anaconda.org/conda-forge/noarch/httpcore-1.0.6-pyhd8ed1ab_0.conda + anyio: '>=3.0,<5.0' + certifi: '' + url: https://conda.anaconda.org/conda-forge/noarch/httpcore-1.0.7-pyh29332c3_1.conda hash: - md5: b8e1901ef9a215fc41ecfb6bef7e0943 - sha256: 8952c3f1eb18bf4d7e813176c3b23e0af4e863e8b05087e73f74f371d73077ca + md5: 2ca8e6dbc86525c8b95e3c0ffa26442e + sha256: c84d012a245171f3ed666a8bf9319580c269b7843ffa79f26468842da3abd5df category: main optional: false - name: httpcore - version: 1.0.6 + version: 1.0.7 manager: conda platform: osx-arm64 dependencies: - anyio: '>=3.0,<5.0' certifi: '' - h11: '>=0.13,<0.15' - h2: '>=3,<5' python: '>=3.8' sniffio: 1.* - url: https://conda.anaconda.org/conda-forge/noarch/httpcore-1.0.6-pyhd8ed1ab_0.conda + h2: '>=3,<5' + anyio: '>=3.0,<5.0' + h11: '>=0.13,<0.15' + url: 
https://conda.anaconda.org/conda-forge/noarch/httpcore-1.0.7-pyh29332c3_1.conda hash: - md5: b8e1901ef9a215fc41ecfb6bef7e0943 - sha256: 8952c3f1eb18bf4d7e813176c3b23e0af4e863e8b05087e73f74f371d73077ca + md5: 2ca8e6dbc86525c8b95e3c0ffa26442e + sha256: c84d012a245171f3ed666a8bf9319580c269b7843ffa79f26468842da3abd5df category: main optional: false - name: httpx - version: 0.27.2 + version: 0.28.1 manager: conda platform: linux-64 dependencies: @@ -5369,69 +5308,67 @@ package: certifi: '' httpcore: 1.* idna: '' - python: '>=3.8' - sniffio: '' - url: https://conda.anaconda.org/conda-forge/noarch/httpx-0.27.2-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/httpx-0.28.1-pyhd8ed1ab_0.conda hash: - md5: 7e9ac3faeebdbd7b53b462c41891e7f7 - sha256: 1a33f160548bf447e15c0273899d27e4473f1d5b7ca1441232ec2d9d07c56d03 + md5: d6989ead454181f4f9bc987d3dc4e285 + sha256: cd0f1de3697b252df95f98383e9edb1d00386bfdd03fdf607fa42fe5fcb09950 category: main optional: false - name: httpx - version: 0.27.2 + version: 0.28.1 manager: conda platform: osx-arm64 dependencies: - anyio: '' certifi: '' - httpcore: 1.* idna: '' - python: '>=3.8' - sniffio: '' - url: https://conda.anaconda.org/conda-forge/noarch/httpx-0.27.2-pyhd8ed1ab_0.conda + anyio: '' + python: '>=3.9' + httpcore: 1.* + url: https://conda.anaconda.org/conda-forge/noarch/httpx-0.28.1-pyhd8ed1ab_0.conda hash: - md5: 7e9ac3faeebdbd7b53b462c41891e7f7 - sha256: 1a33f160548bf447e15c0273899d27e4473f1d5b7ca1441232ec2d9d07c56d03 + md5: d6989ead454181f4f9bc987d3dc4e285 + sha256: cd0f1de3697b252df95f98383e9edb1d00386bfdd03fdf607fa42fe5fcb09950 category: main optional: false - name: huggingface_hub - version: 0.25.2 + version: 0.26.5 manager: conda platform: linux-64 dependencies: filelock: '' fsspec: '>=2023.5.0' packaging: '>=20.9' - python: '>=3.8' + python: '>=3.9' pyyaml: '>=5.1' requests: '' tqdm: '>=4.42.1' typing-extensions: '>=3.7.4.3' typing_extensions: '>=3.7.4.3' - url: 
https://conda.anaconda.org/conda-forge/noarch/huggingface_hub-0.25.2-pyh0610db2_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/huggingface_hub-0.26.5-pyhd8ed1ab_1.conda hash: - md5: 84958b2d58dbe7402a3879325792a145 - sha256: abbf94a1044f20e93f69d2532b22069aa93ce04b334f2971ba9c7385d7ef6271 + md5: 73937038e21117fe401f8ea64fbaeacc + sha256: 0c75532d914a04c73222be298ed2c6868739dd475b1b1a9137c52abe79873952 category: main optional: false - name: huggingface_hub - version: 0.25.2 + version: 0.26.5 manager: conda platform: osx-arm64 dependencies: + requests: '' filelock: '' - fsspec: '>=2023.5.0' + python: '>=3.9' + pyyaml: '>=5.1' packaging: '>=20.9' - python: '>=3.8' - pyyaml: '>=5.1' - requests: '' - tqdm: '>=4.42.1' + fsspec: '>=2023.5.0' typing-extensions: '>=3.7.4.3' typing_extensions: '>=3.7.4.3' - url: https://conda.anaconda.org/conda-forge/noarch/huggingface_hub-0.25.2-pyh0610db2_0.conda + tqdm: '>=4.42.1' + url: https://conda.anaconda.org/conda-forge/noarch/huggingface_hub-0.26.5-pyhd8ed1ab_1.conda hash: - md5: 84958b2d58dbe7402a3879325792a145 - sha256: abbf94a1044f20e93f69d2532b22069aa93ce04b334f2971ba9c7385d7ef6271 + md5: 73937038e21117fe401f8ea64fbaeacc + sha256: 0c75532d914a04c73222be298ed2c6868739dd475b1b1a9137c52abe79873952 category: main optional: false - name: hyperframe @@ -5439,11 +5376,11 @@ package: manager: conda platform: linux-64 dependencies: - python: '>=3.6' - url: https://conda.anaconda.org/conda-forge/noarch/hyperframe-6.0.1-pyhd8ed1ab_0.tar.bz2 + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/hyperframe-6.0.1-pyhd8ed1ab_1.conda hash: - md5: 9f765cbfab6870c8435b9eefecd7a1f4 - sha256: e374a9d0f53149328134a8d86f5d72bca4c6dcebed3c0ecfa968c02996289330 + md5: 566e75c90c1d0c8c459eb0ad9833dc7a + sha256: e91c6ef09d076e1d9a02819cd00fa7ee18ecf30cdd667605c853980216584d1b category: main optional: false - name: hyperframe @@ -5451,11 +5388,11 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=3.6' 
- url: https://conda.anaconda.org/conda-forge/noarch/hyperframe-6.0.1-pyhd8ed1ab_0.tar.bz2 + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/hyperframe-6.0.1-pyhd8ed1ab_1.conda hash: - md5: 9f765cbfab6870c8435b9eefecd7a1f4 - sha256: e374a9d0f53149328134a8d86f5d72bca4c6dcebed3c0ecfa968c02996289330 + md5: 566e75c90c1d0c8c459eb0ad9833dc7a + sha256: e91c6ef09d076e1d9a02819cd00fa7ee18ecf30cdd667605c853980216584d1b category: main optional: false - name: icu @@ -5484,29 +5421,29 @@ package: category: main optional: false - name: identify - version: 2.6.1 + version: 2.6.3 manager: conda platform: linux-64 dependencies: python: '>=3.6' ukkonen: '' - url: https://conda.anaconda.org/conda-forge/noarch/identify-2.6.1-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/identify-2.6.3-pyhd8ed1ab_0.conda hash: - md5: 43f629202f9eec21be5f71171fb5daf8 - sha256: dc752392f327e64e32bc3122758b2d8951aec9d6e6aa888463c73d18a10e3c56 + md5: dd3acd023fc358afab730866a0e5e3f5 + sha256: 2350107285349caad1a5c5c5296a1335b8649d6b1b0e8f2bde18127c404471c5 category: main optional: false - name: identify - version: 2.6.1 + version: 2.6.3 manager: conda platform: osx-arm64 dependencies: - python: '>=3.6' ukkonen: '' - url: https://conda.anaconda.org/conda-forge/noarch/identify-2.6.1-pyhd8ed1ab_0.conda + python: '>=3.6' + url: https://conda.anaconda.org/conda-forge/noarch/identify-2.6.3-pyhd8ed1ab_0.conda hash: - md5: 43f629202f9eec21be5f71171fb5daf8 - sha256: dc752392f327e64e32bc3122758b2d8951aec9d6e6aa888463c73d18a10e3c56 + md5: dd3acd023fc358afab730866a0e5e3f5 + sha256: 2350107285349caad1a5c5c5296a1335b8649d6b1b0e8f2bde18127c404471c5 category: main optional: false - name: idna @@ -5514,11 +5451,11 @@ package: manager: conda platform: linux-64 dependencies: - python: '>=3.6' - url: https://conda.anaconda.org/conda-forge/noarch/idna-3.10-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/idna-3.10-pyhd8ed1ab_1.conda hash: - 
md5: 7ba2ede0e7c795ff95088daf0dc59753 - sha256: 8c57fd68e6be5eecba4462e983aed7e85761a519aab80e834bbd7794d4b545b2 + md5: 39a4f67be3286c86d696df570b1201b7 + sha256: d7a472c9fd479e2e8dcb83fb8d433fce971ea369d704ece380e876f9c3494e87 category: main optional: false - name: idna @@ -5526,11 +5463,11 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=3.6' - url: https://conda.anaconda.org/conda-forge/noarch/idna-3.10-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/idna-3.10-pyhd8ed1ab_1.conda hash: - md5: 7ba2ede0e7c795ff95088daf0dc59753 - sha256: 8c57fd68e6be5eecba4462e983aed7e85761a519aab80e834bbd7794d4b545b2 + md5: 39a4f67be3286c86d696df570b1201b7 + sha256: d7a472c9fd479e2e8dcb83fb8d433fce971ea369d704ece380e876f9c3494e87 category: main optional: false - name: imagesize @@ -5562,12 +5499,12 @@ package: manager: conda platform: linux-64 dependencies: - python: '>=3.8' + python: '>=3.9' zipp: '>=0.5' - url: https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.5.0-pyha770c72_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.5.0-pyha770c72_1.conda hash: - md5: 54198435fce4d64d8a89af22573012a8 - sha256: 7194700ce1a5ad2621fd68e894dd8c1ceaff9a38723e6e0e5298fdef13017b1c + md5: 315607a3030ad5d5227e76e0733798ff + sha256: 13766b88fc5b23581530d3a0287c0c58ad82f60401afefab283bf158d2be55a9 category: main optional: false - name: importlib-metadata @@ -5575,12 +5512,12 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=3.8' + python: '>=3.9' zipp: '>=0.5' - url: https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.5.0-pyha770c72_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.5.0-pyha770c72_1.conda hash: - md5: 54198435fce4d64d8a89af22573012a8 - sha256: 7194700ce1a5ad2621fd68e894dd8c1ceaff9a38723e6e0e5298fdef13017b1c + md5: 315607a3030ad5d5227e76e0733798ff + sha256: 
13766b88fc5b23581530d3a0287c0c58ad82f60401afefab283bf158d2be55a9 category: main optional: false - name: importlib-resources @@ -5589,11 +5526,11 @@ package: platform: linux-64 dependencies: importlib_resources: '>=6.4.5,<6.4.6.0a0' - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/importlib-resources-6.4.5-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/importlib-resources-6.4.5-pyhd8ed1ab_1.conda hash: - md5: 67f4772681cf86652f3e2261794cf045 - sha256: b5a63a3e2bc2c8d3e5978a6ef4efaf2d6b02803c1bce3c2eb42e238dd91afe0b + md5: 59561d9b70f9df3b884c29910eba6593 + sha256: 6f0dd1966593ac8b9c9cc86a6c38febd1001048cc911c1cad0838d6297b5711d category: main optional: false - name: importlib-resources @@ -5601,12 +5538,12 @@ package: manager: conda platform: osx-arm64 dependencies: + python: '>=3.9' importlib_resources: '>=6.4.5,<6.4.6.0a0' - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/importlib-resources-6.4.5-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/importlib-resources-6.4.5-pyhd8ed1ab_1.conda hash: - md5: 67f4772681cf86652f3e2261794cf045 - sha256: b5a63a3e2bc2c8d3e5978a6ef4efaf2d6b02803c1bce3c2eb42e238dd91afe0b + md5: 59561d9b70f9df3b884c29910eba6593 + sha256: 6f0dd1966593ac8b9c9cc86a6c38febd1001048cc911c1cad0838d6297b5711d category: main optional: false - name: importlib_metadata @@ -5615,10 +5552,10 @@ package: platform: linux-64 dependencies: importlib-metadata: '>=8.5.0,<8.5.1.0a0' - url: https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-8.5.0-hd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-8.5.0-hd8ed1ab_1.conda hash: - md5: 2a92e152208121afadf85a5e1f3a5f4d - sha256: 313b8a05211bacd6b15ab2621cb73d7f41ea5c6cae98db53367d47833f03fef1 + md5: c70dd0718dbccdcc6d5828de3e71399d + sha256: 204fc7f02be8acda93073f5126b9707b8847b673d4c6558db208973c92f9af3c category: main optional: false - name: importlib_metadata 
@@ -5627,10 +5564,10 @@ package: platform: osx-arm64 dependencies: importlib-metadata: '>=8.5.0,<8.5.1.0a0' - url: https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-8.5.0-hd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-8.5.0-hd8ed1ab_1.conda hash: - md5: 2a92e152208121afadf85a5e1f3a5f4d - sha256: 313b8a05211bacd6b15ab2621cb73d7f41ea5c6cae98db53367d47833f03fef1 + md5: c70dd0718dbccdcc6d5828de3e71399d + sha256: 204fc7f02be8acda93073f5126b9707b8847b673d4c6558db208973c92f9af3c category: main optional: false - name: importlib_resources @@ -5638,12 +5575,12 @@ package: manager: conda platform: linux-64 dependencies: - python: '>=3.8' + python: '>=3.9' zipp: '>=3.1.0' - url: https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.4.5-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.4.5-pyhd8ed1ab_1.conda hash: - md5: c808991d29b9838fb4d96ce8267ec9ec - sha256: 2cb9db3e40033c3df72d3defc678a012840378fd55a67e4351363d4b321a0dc1 + md5: 15798fa69312d433af690c8c42b3fb36 + sha256: 461199e429a3db01f0a673f8beaac5e0be75b88895952fb9183f2ab01c5c3c24 category: main optional: false - name: importlib_resources @@ -5651,12 +5588,12 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=3.8' + python: '>=3.9' zipp: '>=3.1.0' - url: https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.4.5-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.4.5-pyhd8ed1ab_1.conda hash: - md5: c808991d29b9838fb4d96ce8267ec9ec - sha256: 2cb9db3e40033c3df72d3defc678a012840378fd55a67e4351363d4b321a0dc1 + md5: 15798fa69312d433af690c8c42b3fb36 + sha256: 461199e429a3db01f0a673f8beaac5e0be75b88895952fb9183f2ab01c5c3c24 category: main optional: false - name: ipykernel @@ -5689,20 +5626,20 @@ package: manager: conda platform: osx-arm64 dependencies: + packaging: '' + psutil: '' + nest-asyncio: '' __osx: '' appnope: '' - comm: 
'>=0.1.1' - debugpy: '>=1.6.5' - ipython: '>=7.23.1' + python: '>=3.8' + tornado: '>=6.1' jupyter_client: '>=6.1.12' jupyter_core: '>=4.12,!=5.0.*' + ipython: '>=7.23.1' matplotlib-inline: '>=0.1' - nest-asyncio: '' - packaging: '' - psutil: '' - python: '>=3.8' + debugpy: '>=1.6.5' pyzmq: '>=24' - tornado: '>=6.1' + comm: '>=0.1.1' traitlets: '>=5.4.0' url: https://conda.anaconda.org/conda-forge/noarch/ipykernel-6.29.5-pyh57ce528_0.conda hash: @@ -5739,19 +5676,19 @@ package: manager: conda platform: osx-arm64 dependencies: + typing_extensions: '' __unix: '' decorator: '' exceptiongroup: '' - jedi: '>=0.16' matplotlib-inline: '' - pexpect: '>4.3' + stack_data: '' pickleshare: '' - prompt-toolkit: '>=3.0.41,<3.1.0' - pygments: '>=2.4.0' python: '>=3.9' - stack_data: '' + pygments: '>=2.4.0' + jedi: '>=0.16' traitlets: '>=5' - typing_extensions: '' + pexpect: '>4.3' + prompt-toolkit: '>=3.0.41,<3.1.0' url: https://conda.anaconda.org/conda-forge/noarch/ipython-8.18.1-pyh707e725_3.conda hash: md5: 15c6f45a45f7ac27f6d60b0b084f6761 @@ -5766,13 +5703,13 @@ package: comm: '>=0.1.3' ipython: '>=6.1.0' jupyterlab_widgets: '>=3.0.13,<3.1.0' - python: '>=3.7' + python: '>=3.9' traitlets: '>=4.3.1' widgetsnbextension: '>=4.0.13,<4.1.0' - url: https://conda.anaconda.org/conda-forge/noarch/ipywidgets-8.1.5-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/ipywidgets-8.1.5-pyhd8ed1ab_1.conda hash: - md5: a022d34163147d16b27de86dc53e93fc - sha256: ae27447f300c85a184d5d4fa08674eaa93931c12275daca981eb986f5d7795b3 + md5: bb19ad65196475ab6d0bb3532d7f8d96 + sha256: f419657566e3d9bea85b288a0ce3a8e42d76cd82ac1697c6917891df3ae149ab category: main optional: false - name: ipywidgets @@ -5780,16 +5717,16 @@ package: manager: conda platform: osx-arm64 dependencies: + python: '>=3.9' + traitlets: '>=4.3.1' comm: '>=0.1.3' ipython: '>=6.1.0' jupyterlab_widgets: '>=3.0.13,<3.1.0' - python: '>=3.7' - traitlets: '>=4.3.1' widgetsnbextension: '>=4.0.13,<4.1.0' - url: 
https://conda.anaconda.org/conda-forge/noarch/ipywidgets-8.1.5-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/ipywidgets-8.1.5-pyhd8ed1ab_1.conda hash: - md5: a022d34163147d16b27de86dc53e93fc - sha256: ae27447f300c85a184d5d4fa08674eaa93931c12275daca981eb986f5d7795b3 + md5: bb19ad65196475ab6d0bb3532d7f8d96 + sha256: f419657566e3d9bea85b288a0ce3a8e42d76cd82ac1697c6917891df3ae149ab category: main optional: false - name: isoduration @@ -5798,11 +5735,11 @@ package: platform: linux-64 dependencies: arrow: '>=0.15.0' - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/isoduration-20.11.0-pyhd8ed1ab_0.tar.bz2 + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/isoduration-20.11.0-pyhd8ed1ab_1.conda hash: - md5: 4cb68948e0b8429534380243d063a27a - sha256: 7bb5c4d994361022f47a807b5e7d101b3dce16f7dd8a0af6ffad9f479d346493 + md5: 0b0154421989637d424ccf0f104be51a + sha256: 08e838d29c134a7684bca0468401d26840f41c92267c4126d7b43a6b533b0aed category: main optional: false - name: isoduration @@ -5810,12 +5747,12 @@ package: manager: conda platform: osx-arm64 dependencies: + python: '>=3.9' arrow: '>=0.15.0' - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/isoduration-20.11.0-pyhd8ed1ab_0.tar.bz2 + url: https://conda.anaconda.org/conda-forge/noarch/isoduration-20.11.0-pyhd8ed1ab_1.conda hash: - md5: 4cb68948e0b8429534380243d063a27a - sha256: 7bb5c4d994361022f47a807b5e7d101b3dce16f7dd8a0af6ffad9f479d346493 + md5: 0b0154421989637d424ccf0f104be51a + sha256: 08e838d29c134a7684bca0468401d26840f41c92267c4126d7b43a6b533b0aed category: main optional: false - name: itsdangerous @@ -5823,11 +5760,11 @@ package: manager: conda platform: linux-64 dependencies: - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/itsdangerous-2.2.0-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/itsdangerous-2.2.0-pyhd8ed1ab_1.conda hash: - md5: 
ff7ca04134ee8dde1d7cf491a78ef7c7 - sha256: 4e933e36e9b0401b62ea8fd63393827ebeb4250de77a56687afb387d504523c5 + md5: 7ac5f795c15f288984e32add616cdc59 + sha256: 1684b7b16eec08efef5302ce298c606b163c18272b69a62b666fbaa61516f170 category: main optional: false - name: itsdangerous @@ -5835,37 +5772,37 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/itsdangerous-2.2.0-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/itsdangerous-2.2.0-pyhd8ed1ab_1.conda hash: - md5: ff7ca04134ee8dde1d7cf491a78ef7c7 - sha256: 4e933e36e9b0401b62ea8fd63393827ebeb4250de77a56687afb387d504523c5 + md5: 7ac5f795c15f288984e32add616cdc59 + sha256: 1684b7b16eec08efef5302ce298c606b163c18272b69a62b666fbaa61516f170 category: main optional: false - name: jedi - version: 0.19.1 + version: 0.19.2 manager: conda platform: linux-64 dependencies: parso: '>=0.8.3,<0.9.0' - python: '>=3.6' - url: https://conda.anaconda.org/conda-forge/noarch/jedi-0.19.1-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/jedi-0.19.2-pyhd8ed1ab_1.conda hash: - md5: 81a3be0b2023e1ea8555781f0ad904a2 - sha256: 362f0936ef37dfd1eaa860190e42a6ebf8faa094eaa3be6aa4d9ace95f40047a + md5: a4f4c5dc9b80bc50e0d3dc4e6e8f1bd9 + sha256: 92c4d217e2dc68983f724aa983cca5464dcb929c566627b26a2511159667dba8 category: main optional: false - name: jedi - version: 0.19.1 + version: 0.19.2 manager: conda platform: osx-arm64 dependencies: + python: '>=3.9' parso: '>=0.8.3,<0.9.0' - python: '>=3.6' - url: https://conda.anaconda.org/conda-forge/noarch/jedi-0.19.1-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/jedi-0.19.2-pyhd8ed1ab_1.conda hash: - md5: 81a3be0b2023e1ea8555781f0ad904a2 - sha256: 362f0936ef37dfd1eaa860190e42a6ebf8faa094eaa3be6aa4d9ace95f40047a + md5: a4f4c5dc9b80bc50e0d3dc4e6e8f1bd9 + sha256: 92c4d217e2dc68983f724aa983cca5464dcb929c566627b26a2511159667dba8 
category: main optional: false - name: jinja2 @@ -5874,11 +5811,11 @@ package: platform: linux-64 dependencies: markupsafe: '>=2.0' - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.4-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.4-pyhd8ed1ab_1.conda hash: - md5: 7b86ecb7d3557821c649b3c31e3eb9f2 - sha256: 27380d870d42d00350d2d52598cddaf02f9505fb24be09488da0c9b8d1428f2d + md5: 08cce3151bde4ecad7885bd9fb647532 + sha256: 85a7169c078b8065bd9d121b0e7b99c8b88c42a411314b6ae5fcd81c48c4710a category: main optional: false - name: jinja2 @@ -5886,16 +5823,16 @@ package: manager: conda platform: osx-arm64 dependencies: + python: '>=3.9' markupsafe: '>=2.0' - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.4-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.4-pyhd8ed1ab_1.conda hash: - md5: 7b86ecb7d3557821c649b3c31e3eb9f2 - sha256: 27380d870d42d00350d2d52598cddaf02f9505fb24be09488da0c9b8d1428f2d + md5: 08cce3151bde4ecad7885bd9fb647532 + sha256: 85a7169c078b8065bd9d121b0e7b99c8b88c42a411314b6ae5fcd81c48c4710a category: main optional: false - name: jiter - version: 0.6.1 + version: 0.8.0 manager: conda platform: linux-64 dependencies: @@ -5903,24 +5840,24 @@ package: libgcc: '>=13' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* - url: https://conda.anaconda.org/conda-forge/linux-64/jiter-0.6.1-py39he612d8f_0.conda + url: https://conda.anaconda.org/conda-forge/linux-64/jiter-0.8.0-py39he612d8f_0.conda hash: - md5: a868abb5a18a9d5c2675c6e592e28147 - sha256: 40ccf8cacbb98edd9c26aeab81a5b59c98cfb3f3e4d4f21202158998436aa8eb + md5: 51a1737fc83de7ba465a7f8dffa6c0f6 + sha256: f9551d69ee492e9e387a3e121dff9d8d9e9288632a12dbb7f0875d1a8641f2a5 category: main optional: false - name: jiter - version: 0.6.1 + version: 0.8.0 manager: conda platform: osx-arm64 dependencies: __osx: '>=11.0' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* - url: 
https://conda.anaconda.org/conda-forge/osx-arm64/jiter-0.6.1-py39h9c3e640_0.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/jiter-0.8.0-py39hc40b5db_0.conda hash: - md5: 044c3a4e9c53b651b74a81f6d063df46 - sha256: 1dfddbcda3fce14ca2f8c7c523353989eebbaf072627482c4fb9f55baa8fda8f + md5: d8e7b830449fa2bb2746e3208517641f + sha256: a7856fb4314811b073748cf948a3fbefabb54816f64cda7b0f4e5c862409985c category: main optional: false - name: jmespath @@ -5928,11 +5865,11 @@ package: manager: conda platform: linux-64 dependencies: - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/jmespath-1.0.1-pyhd8ed1ab_0.tar.bz2 + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/jmespath-1.0.1-pyhd8ed1ab_1.conda hash: - md5: 2cfa3e1cf3fb51bb9b17acc5b5e9ea11 - sha256: 95ac5f9ee95fd4e34dc051746fc86016d3d4f6abefed113e2ede049d59ec2991 + md5: 972bdca8f30147135f951847b30399ea + sha256: 3d2f20ee7fd731e3ff55c189db9c43231bc8bde957875817a609c227bcb295c6 category: main optional: false - name: jmespath @@ -5940,11 +5877,11 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/jmespath-1.0.1-pyhd8ed1ab_0.tar.bz2 + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/jmespath-1.0.1-pyhd8ed1ab_1.conda hash: - md5: 2cfa3e1cf3fb51bb9b17acc5b5e9ea11 - sha256: 95ac5f9ee95fd4e34dc051746fc86016d3d4f6abefed113e2ede049d59ec2991 + md5: 972bdca8f30147135f951847b30399ea + sha256: 3d2f20ee7fd731e3ff55c189db9c43231bc8bde957875817a609c227bcb295c6 category: main optional: false - name: joblib @@ -5952,12 +5889,12 @@ package: manager: conda platform: linux-64 dependencies: - python: '>=3.8' + python: '>=3.9' setuptools: '' - url: https://conda.anaconda.org/conda-forge/noarch/joblib-1.4.2-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/joblib-1.4.2-pyhd8ed1ab_1.conda hash: - md5: 25df261d4523d9f9783bcdb7208d872f - sha256: 
8ad719524b1039510fcbd75eb776123189d75e2c09228189257ddbcab86f5b64 + md5: bf8243ee348f3a10a14ed0cae323e0c1 + sha256: 51cc2dc491668af0c4d9299b0ab750f16ccf413ec5e2391b924108c1fbacae9b category: main optional: false - name: joblib @@ -5965,12 +5902,12 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=3.8' setuptools: '' - url: https://conda.anaconda.org/conda-forge/noarch/joblib-1.4.2-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/joblib-1.4.2-pyhd8ed1ab_1.conda hash: - md5: 25df261d4523d9f9783bcdb7208d872f - sha256: 8ad719524b1039510fcbd75eb776123189d75e2c09228189257ddbcab86f5b64 + md5: bf8243ee348f3a10a14ed0cae323e0c1 + sha256: 51cc2dc491668af0c4d9299b0ab750f16ccf413ec5e2391b924108c1fbacae9b category: main optional: false - name: json-c @@ -5999,27 +5936,27 @@ package: category: main optional: false - name: json5 - version: 0.9.25 + version: 0.10.0 manager: conda platform: linux-64 dependencies: - python: '>=3.7,<4.0' - url: https://conda.anaconda.org/conda-forge/noarch/json5-0.9.25-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/json5-0.10.0-pyhd8ed1ab_1.conda hash: - md5: 5d8c241a9261e720a34a07a3e1ac4109 - sha256: 0c75e428970e8bb72ba1dd3a6dc32b8d68f6534b4fe16b38e53364963fdc8e38 + md5: cd170f82d8e5b355dfdea6adab23e4af + sha256: 61bca2dac194c44603446944745566d7b4e55407280f6f6cea8bbe4de26b558f category: main optional: false - name: json5 - version: 0.9.25 + version: 0.10.0 manager: conda platform: osx-arm64 dependencies: - python: '>=3.7,<4.0' - url: https://conda.anaconda.org/conda-forge/noarch/json5-0.9.25-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/json5-0.10.0-pyhd8ed1ab_1.conda hash: - md5: 5d8c241a9261e720a34a07a3e1ac4109 - sha256: 0c75e428970e8bb72ba1dd3a6dc32b8d68f6534b4fe16b38e53364963fdc8e38 + md5: cd170f82d8e5b355dfdea6adab23e4af + sha256: 61bca2dac194c44603446944745566d7b4e55407280f6f6cea8bbe4de26b558f 
category: main optional: false - name: jsonpatch @@ -6028,11 +5965,11 @@ package: platform: linux-64 dependencies: jsonpointer: '>=1.9' - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/jsonpatch-1.33-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/jsonpatch-1.33-pyhd8ed1ab_1.conda hash: - md5: bfdb7c5c6ad1077c82a69a8642c87aff - sha256: fbb17e33ace3225c6416d1604637c1058906b8223da968cc015128985336b2b4 + md5: cb60ae9cf02b9fcb8004dec4089e5691 + sha256: 304955757d1fedbe344af43b12b5467cca072f83cce6109361ba942e186b3993 category: main optional: false - name: jsonpatch @@ -6040,12 +5977,12 @@ package: manager: conda platform: osx-arm64 dependencies: + python: '>=3.9' jsonpointer: '>=1.9' - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/jsonpatch-1.33-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/jsonpatch-1.33-pyhd8ed1ab_1.conda hash: - md5: bfdb7c5c6ad1077c82a69a8642c87aff - sha256: fbb17e33ace3225c6416d1604637c1058906b8223da968cc015128985336b2b4 + md5: cb60ae9cf02b9fcb8004dec4089e5691 + sha256: 304955757d1fedbe344af43b12b5467cca072f83cce6109361ba942e186b3993 category: main optional: false - name: jsonpointer @@ -6107,13 +6044,13 @@ package: importlib_resources: '>=1.4.0' jsonschema-specifications: '>=2023.03.6' pkgutil-resolve-name: '>=1.3.10' - python: '>=3.8' + python: '>=3.9' referencing: '>=0.28.4' rpds-py: '>=0.7.1' - url: https://conda.anaconda.org/conda-forge/noarch/jsonschema-4.23.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/jsonschema-4.23.0-pyhd8ed1ab_1.conda hash: - md5: da304c192ad59975202859b367d0f6a2 - sha256: 7d0c4c0346b26be9f220682b7c5c0d84606d48c6dbc36fc238e4452dda733aff + md5: a3cead9264b331b32fe8f0aabc967522 + sha256: be992a99e589146f229c58fe5083e0b60551d774511c494f91fe011931bd7893 category: main optional: false - name: jsonschema @@ -6121,17 +6058,17 @@ package: manager: conda platform: osx-arm64 dependencies: + 
python: '>=3.9' attrs: '>=22.2.0' importlib_resources: '>=1.4.0' - jsonschema-specifications: '>=2023.03.6' pkgutil-resolve-name: '>=1.3.10' - python: '>=3.8' + jsonschema-specifications: '>=2023.03.6' referencing: '>=0.28.4' rpds-py: '>=0.7.1' - url: https://conda.anaconda.org/conda-forge/noarch/jsonschema-4.23.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/jsonschema-4.23.0-pyhd8ed1ab_1.conda hash: - md5: da304c192ad59975202859b367d0f6a2 - sha256: 7d0c4c0346b26be9f220682b7c5c0d84606d48c6dbc36fc238e4452dda733aff + md5: a3cead9264b331b32fe8f0aabc967522 + sha256: be992a99e589146f229c58fe5083e0b60551d774511c494f91fe011931bd7893 category: main optional: false - name: jsonschema-specifications @@ -6139,12 +6076,12 @@ package: manager: conda platform: linux-64 dependencies: - python: '>=3.8' + python: '>=3.9' referencing: '>=0.31.0' - url: https://conda.anaconda.org/conda-forge/noarch/jsonschema-specifications-2024.10.1-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/jsonschema-specifications-2024.10.1-pyhd8ed1ab_1.conda hash: - md5: 720745920222587ef942acfbc578b584 - sha256: 82f8bed0f21dc0b3aff40dd4e39d77e85b93b0417bc5659b001e0109341b8b98 + md5: 3b519bc21bc80e60b456f1e62962a766 + sha256: 37127133837444cf0e6d1a95ff5a505f8214ed4e89e8e9343284840e674c6891 category: main optional: false - name: jsonschema-specifications @@ -6152,12 +6089,12 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=3.8' + python: '>=3.9' referencing: '>=0.31.0' - url: https://conda.anaconda.org/conda-forge/noarch/jsonschema-specifications-2024.10.1-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/jsonschema-specifications-2024.10.1-pyhd8ed1ab_1.conda hash: - md5: 720745920222587ef942acfbc578b584 - sha256: 82f8bed0f21dc0b3aff40dd4e39d77e85b93b0417bc5659b001e0109341b8b98 + md5: 3b519bc21bc80e60b456f1e62962a766 + sha256: 37127133837444cf0e6d1a95ff5a505f8214ed4e89e8e9343284840e674c6891 category: main 
optional: false - name: jsonschema-with-format @@ -6174,10 +6111,10 @@ package: rfc3987: '' uri-template: '' webcolors: '>=24.6.0' - url: https://conda.anaconda.org/conda-forge/noarch/jsonschema-with-format-4.23.0-hd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/jsonschema-with-format-4.23.0-hd8ed1ab_1.conda hash: - md5: 68d6333c2883fbd8939634bd1471bedf - sha256: fa989f078ba036774cb19ebdddbe4e915e1beddff1807bb80b1d34f681e087f5 + md5: fd508f21ddc16e8fd9de0dc980c017fb + sha256: 7856e6bf95aaf4ea5fc5caca3b0b3f796a15079eaaaf95c46623fd97a2833ba2 category: main optional: false - name: jsonschema-with-format @@ -6185,19 +6122,19 @@ package: manager: conda platform: osx-arm64 dependencies: - fqdn: '' idna: '' + rfc3339-validator: '' + uri-template: '' + fqdn: '' isoduration: '' + rfc3987: '' jsonpointer: '>1.13' jsonschema: '>=4.23.0,<4.23.1.0a0' - rfc3339-validator: '' - rfc3987: '' - uri-template: '' webcolors: '>=24.6.0' - url: https://conda.anaconda.org/conda-forge/noarch/jsonschema-with-format-4.23.0-hd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/jsonschema-with-format-4.23.0-hd8ed1ab_1.conda hash: - md5: 68d6333c2883fbd8939634bd1471bedf - sha256: fa989f078ba036774cb19ebdddbe4e915e1beddff1807bb80b1d34f681e087f5 + md5: fd508f21ddc16e8fd9de0dc980c017fb + sha256: 7856e6bf95aaf4ea5fc5caca3b0b3f796a15079eaaaf95c46623fd97a2833ba2 category: main optional: false - name: jsonschema-with-format-nongpl @@ -6214,10 +6151,10 @@ package: rfc3986-validator: '>0.1.0' uri-template: '' webcolors: '>=24.6.0' - url: https://conda.anaconda.org/conda-forge/noarch/jsonschema-with-format-nongpl-4.23.0-hd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/jsonschema-with-format-nongpl-4.23.0-hd8ed1ab_1.conda hash: - md5: 16b37612b3a2fd77f409329e213b530c - sha256: 007a0a506a0d1805b099629cb0ee743ad0afe7d9749e57339f32c168119e0139 + md5: a5b1a8065857cc4bd8b7a38d063bb728 + sha256: 
6e0184530011961a0802fda100ecdfd4b0eca634ed94c37e553b72e21c26627d category: main optional: false - name: jsonschema-with-format-nongpl @@ -6225,19 +6162,19 @@ package: manager: conda platform: osx-arm64 dependencies: - fqdn: '' idna: '' + rfc3339-validator: '' + uri-template: '' + fqdn: '' isoduration: '' jsonpointer: '>1.13' - jsonschema: '>=4.23.0,<4.23.1.0a0' - rfc3339-validator: '' rfc3986-validator: '>0.1.0' - uri-template: '' + jsonschema: '>=4.23.0,<4.23.1.0a0' webcolors: '>=24.6.0' - url: https://conda.anaconda.org/conda-forge/noarch/jsonschema-with-format-nongpl-4.23.0-hd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/jsonschema-with-format-nongpl-4.23.0-hd8ed1ab_1.conda hash: - md5: 16b37612b3a2fd77f409329e213b530c - sha256: 007a0a506a0d1805b099629cb0ee743ad0afe7d9749e57339f32c168119e0139 + md5: a5b1a8065857cc4bd8b7a38d063bb728 + sha256: 6e0184530011961a0802fda100ecdfd4b0eca634ed94c37e553b72e21c26627d category: main optional: false - name: jupyter @@ -6251,11 +6188,11 @@ package: jupyterlab: '' nbconvert-core: '' notebook: '' - python: '>=3.6' - url: https://conda.anaconda.org/conda-forge/noarch/jupyter-1.1.1-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/jupyter-1.1.1-pyhd8ed1ab_1.conda hash: - md5: 255a8fe52d1c57a6b46d0d16851883db - sha256: 5d92eb46552af180cd27a5e916206eb3f6725a0ae3d4bafa7a5f44adfada4332 + md5: 9453512288d20847de4356327d0e1282 + sha256: b538e15067d05768d1c0532a6d9b0625922a1cce751dd6a2af04f7233a1a70e9 category: main optional: false - name: jupyter @@ -6263,21 +6200,21 @@ package: manager: conda platform: osx-arm64 dependencies: - ipykernel: '' ipywidgets: '' - jupyter_console: '' jupyterlab: '' - nbconvert-core: '' notebook: '' - python: '>=3.6' - url: https://conda.anaconda.org/conda-forge/noarch/jupyter-1.1.1-pyhd8ed1ab_0.conda + ipykernel: '' + nbconvert-core: '' + jupyter_console: '' + python: '>=3.9' + url: 
https://conda.anaconda.org/conda-forge/noarch/jupyter-1.1.1-pyhd8ed1ab_1.conda hash: - md5: 255a8fe52d1c57a6b46d0d16851883db - sha256: 5d92eb46552af180cd27a5e916206eb3f6725a0ae3d4bafa7a5f44adfada4332 + md5: 9453512288d20847de4356327d0e1282 + sha256: b538e15067d05768d1c0532a6d9b0625922a1cce751dd6a2af04f7233a1a70e9 category: main optional: false - name: jupyter-cache - version: 1.0.0 + version: 1.0.1 manager: conda platform: linux-64 dependencies: @@ -6290,30 +6227,30 @@ package: pyyaml: '' sqlalchemy: '>=1.3.12,<3' tabulate: '' - url: https://conda.anaconda.org/conda-forge/noarch/jupyter-cache-1.0.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/jupyter-cache-1.0.1-pyhff2d567_0.conda hash: - md5: b667cf7b57baa559f628d374f017fa32 - sha256: 16dd4d3601d0532bbe755267486d62f7c77e099d0f0517e20ef635f836425d57 + md5: b0ee650829b8974202a7abe7f8b81e5a + sha256: 054d397dd45ed08bffb0976702e553dfb0d0b0a477da9cff36e2ea702e928f48 category: main optional: false - name: jupyter-cache - version: 1.0.0 + version: 1.0.1 manager: conda platform: osx-arm64 dependencies: - attrs: '' + pyyaml: '' click: '' + tabulate: '' importlib-metadata: '' - nbclient: '>=0.2' + attrs: '' nbformat: '' python: '>=3.9' - pyyaml: '' sqlalchemy: '>=1.3.12,<3' - tabulate: '' - url: https://conda.anaconda.org/conda-forge/noarch/jupyter-cache-1.0.0-pyhd8ed1ab_0.conda + nbclient: '>=0.2' + url: https://conda.anaconda.org/conda-forge/noarch/jupyter-cache-1.0.1-pyhff2d567_0.conda hash: - md5: b667cf7b57baa559f628d374f017fa32 - sha256: 16dd4d3601d0532bbe755267486d62f7c77e099d0f0517e20ef635f836425d57 + md5: b0ee650829b8974202a7abe7f8b81e5a + sha256: 054d397dd45ed08bffb0976702e553dfb0d0b0a477da9cff36e2ea702e928f48 category: main optional: false - name: jupyter-lsp @@ -6323,11 +6260,11 @@ package: dependencies: importlib-metadata: '>=4.8.3' jupyter_server: '>=1.1.2' - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/jupyter-lsp-2.2.5-pyhd8ed1ab_0.conda + python: '>=3.9' + 
url: https://conda.anaconda.org/conda-forge/noarch/jupyter-lsp-2.2.5-pyhd8ed1ab_1.conda hash: - md5: 885867f6adab3d7ecdf8ab6ca0785f51 - sha256: 2151c2c63e0442a4c69ee0ad8a634195eedab10b7b74c0ec8266471842239a93 + md5: 0b4c3908e5a38ea22ebb98ee5888c768 + sha256: 1565c8b1423a37fca00fe0ab2a17cd8992c2ecf23e7867a1c9f6f86a9831c196 category: main optional: false - name: jupyter-lsp @@ -6335,13 +6272,13 @@ package: manager: conda platform: osx-arm64 dependencies: + python: '>=3.9' importlib-metadata: '>=4.8.3' jupyter_server: '>=1.1.2' - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/jupyter-lsp-2.2.5-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/jupyter-lsp-2.2.5-pyhd8ed1ab_1.conda hash: - md5: 885867f6adab3d7ecdf8ab6ca0785f51 - sha256: 2151c2c63e0442a4c69ee0ad8a634195eedab10b7b74c0ec8266471842239a93 + md5: 0b4c3908e5a38ea22ebb98ee5888c768 + sha256: 1565c8b1423a37fca00fe0ab2a17cd8992c2ecf23e7867a1c9f6f86a9831c196 category: main optional: false - name: jupyter_client @@ -6351,15 +6288,15 @@ package: dependencies: importlib-metadata: '>=4.8.3' jupyter_core: '>=4.12,!=5.0.*' - python: '>=3.8' + python: '>=3.9' python-dateutil: '>=2.8.2' pyzmq: '>=23.0' tornado: '>=6.2' traitlets: '>=5.3' - url: https://conda.anaconda.org/conda-forge/noarch/jupyter_client-8.6.3-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/jupyter_client-8.6.3-pyhd8ed1ab_1.conda hash: - md5: a14218cfb29662b4a19ceb04e93e298e - sha256: 4419c85e209a715f551a5c9bead746f29ee9d0fc41e772a76db3868622795671 + md5: 4ebae00eae9705b0c3d6d1018a81d047 + sha256: 19d8bd5bb2fde910ec59e081eeb59529491995ce0d653a5209366611023a0b3a category: main optional: false - name: jupyter_client @@ -6367,17 +6304,17 @@ package: manager: conda platform: osx-arm64 dependencies: - importlib-metadata: '>=4.8.3' - jupyter_core: '>=4.12,!=5.0.*' - python: '>=3.8' + python: '>=3.9' python-dateutil: '>=2.8.2' - pyzmq: '>=23.0' - tornado: '>=6.2' + jupyter_core: '>=4.12,!=5.0.*' + 
importlib-metadata: '>=4.8.3' traitlets: '>=5.3' - url: https://conda.anaconda.org/conda-forge/noarch/jupyter_client-8.6.3-pyhd8ed1ab_0.conda + tornado: '>=6.2' + pyzmq: '>=23.0' + url: https://conda.anaconda.org/conda-forge/noarch/jupyter_client-8.6.3-pyhd8ed1ab_1.conda hash: - md5: a14218cfb29662b4a19ceb04e93e298e - sha256: 4419c85e209a715f551a5c9bead746f29ee9d0fc41e772a76db3868622795671 + md5: 4ebae00eae9705b0c3d6d1018a81d047 + sha256: 19d8bd5bb2fde910ec59e081eeb59529491995ce0d653a5209366611023a0b3a category: main optional: false - name: jupyter_console @@ -6391,13 +6328,13 @@ package: jupyter_core: '>=4.12,!=5.0.*' prompt_toolkit: '>=3.0.30' pygments: '' - python: '>=3.7' + python: '>=3.9' pyzmq: '>=17' traitlets: '>=5.4' - url: https://conda.anaconda.org/conda-forge/noarch/jupyter_console-6.6.3-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/jupyter_console-6.6.3-pyhd8ed1ab_1.conda hash: - md5: 7cf6f52a66f8e3cd9d8b6c231262dcab - sha256: 4e51764d5fe2f6e43d83bcfbcf8b4da6569721bf82eaf4d647be8717cd6be75a + md5: 801dbf535ec26508fac6d4b24adfb76e + sha256: aee0cdd0cb2b9321d28450aec4e0fd43566efcd79e862d70ce49a68bf0539bcd category: main optional: false - name: jupyter_console @@ -6405,19 +6342,19 @@ package: manager: conda platform: osx-arm64 dependencies: - ipykernel: '>=6.14' ipython: '' - jupyter_client: '>=7.0.0' - jupyter_core: '>=4.12,!=5.0.*' - prompt_toolkit: '>=3.0.30' pygments: '' - python: '>=3.7' + python: '>=3.9' pyzmq: '>=17' + jupyter_core: '>=4.12,!=5.0.*' + jupyter_client: '>=7.0.0' + ipykernel: '>=6.14' traitlets: '>=5.4' - url: https://conda.anaconda.org/conda-forge/noarch/jupyter_console-6.6.3-pyhd8ed1ab_0.conda + prompt_toolkit: '>=3.0.30' + url: https://conda.anaconda.org/conda-forge/noarch/jupyter_console-6.6.3-pyhd8ed1ab_1.conda hash: - md5: 7cf6f52a66f8e3cd9d8b6c231262dcab - sha256: 4e51764d5fe2f6e43d83bcfbcf8b4da6569721bf82eaf4d647be8717cd6be75a + md5: 801dbf535ec26508fac6d4b24adfb76e + sha256: 
aee0cdd0cb2b9321d28450aec4e0fd43566efcd79e862d70ce49a68bf0539bcd category: main optional: false - name: jupyter_core @@ -6441,9 +6378,9 @@ package: platform: osx-arm64 dependencies: __unix: '' - platformdirs: '>=2.5' python: '>=3.8' traitlets: '>=5.3' + platformdirs: '>=2.5' url: https://conda.anaconda.org/conda-forge/noarch/jupyter_core-5.7.2-pyh31011fe_1.conda hash: md5: 0a2980dada0dd7fd0998f0342308b1b1 @@ -6456,17 +6393,17 @@ package: platform: linux-64 dependencies: jsonschema-with-format-nongpl: '>=4.18.0' - python: '>=3.8' + python: '>=3.9' python-json-logger: '>=2.0.4' pyyaml: '>=5.3' referencing: '' rfc3339-validator: '' rfc3986-validator: '>=0.1.1' traitlets: '>=5.3' - url: https://conda.anaconda.org/conda-forge/noarch/jupyter_events-0.10.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/jupyter_events-0.10.0-pyhd8ed1ab_1.conda hash: - md5: ed45423c41b3da15ea1df39b1f80c2ca - sha256: cd3f41dc093162a41d4bae171e40a1b9b115c4d488e9bb837a8fa9d084931fb9 + md5: 62186e6383f38cc6a3466f0fadde3f2e + sha256: d7fa4c627d56ce8dc02f09f358757f8fd49eb6137216dc99340a6b4efc7e0491 category: main optional: false - name: jupyter_events @@ -6474,18 +6411,18 @@ package: manager: conda platform: osx-arm64 dependencies: - jsonschema-with-format-nongpl: '>=4.18.0' - python: '>=3.8' - python-json-logger: '>=2.0.4' - pyyaml: '>=5.3' referencing: '' rfc3339-validator: '' + python: '>=3.9' + pyyaml: '>=5.3' rfc3986-validator: '>=0.1.1' traitlets: '>=5.3' - url: https://conda.anaconda.org/conda-forge/noarch/jupyter_events-0.10.0-pyhd8ed1ab_0.conda + python-json-logger: '>=2.0.4' + jsonschema-with-format-nongpl: '>=4.18.0' + url: https://conda.anaconda.org/conda-forge/noarch/jupyter_events-0.10.0-pyhd8ed1ab_1.conda hash: - md5: ed45423c41b3da15ea1df39b1f80c2ca - sha256: cd3f41dc093162a41d4bae171e40a1b9b115c4d488e9bb837a8fa9d084931fb9 + md5: 62186e6383f38cc6a3466f0fadde3f2e + sha256: d7fa4c627d56ce8dc02f09f358757f8fd49eb6137216dc99340a6b4efc7e0491 category: main 
optional: false - name: jupyter_server @@ -6505,17 +6442,17 @@ package: overrides: '>=5.0' packaging: '>=22.0' prometheus_client: '>=0.9' - python: '>=3.8' + python: '>=3.9' pyzmq: '>=24' send2trash: '>=1.8.2' terminado: '>=0.8.3' tornado: '>=6.2.0' traitlets: '>=5.6.0' websocket-client: '>=1.7' - url: https://conda.anaconda.org/conda-forge/noarch/jupyter_server-2.14.2-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/jupyter_server-2.14.2-pyhd8ed1ab_1.conda hash: - md5: ca23c71f70a7c7935b3d03f0f1a5801d - sha256: edab71a05feceac54bdb90e755a257545af7832b9911607c1a70f09be44ba985 + md5: 81ea84b3212287f926e35b9036192963 + sha256: 082d3517455339c8baea245a257af249758ccec26b8832d969ac928901c234cc category: main optional: false - name: jupyter_server @@ -6523,29 +6460,29 @@ package: manager: conda platform: osx-arm64 dependencies: - anyio: '>=3.1.0' - argon2-cffi: '>=21.1' + python: '>=3.9' + terminado: '>=0.8.3' + jupyter_core: '>=4.12,!=5.0.*' + tornado: '>=6.2.0' jinja2: '>=3.0.3' + packaging: '>=22.0' + pyzmq: '>=24' + nbconvert-core: '>=6.4.4' jupyter_client: '>=7.4.4' - jupyter_core: '>=4.12,!=5.0.*' + nbformat: '>=5.3.0' + traitlets: '>=5.6.0' + anyio: '>=3.1.0' + send2trash: '>=1.8.2' jupyter_events: '>=0.9.0' + argon2-cffi: '>=21.1' jupyter_server_terminals: '>=0.4.4' - nbconvert-core: '>=6.4.4' - nbformat: '>=5.3.0' overrides: '>=5.0' - packaging: '>=22.0' prometheus_client: '>=0.9' - python: '>=3.8' - pyzmq: '>=24' - send2trash: '>=1.8.2' - terminado: '>=0.8.3' - tornado: '>=6.2.0' - traitlets: '>=5.6.0' websocket-client: '>=1.7' - url: https://conda.anaconda.org/conda-forge/noarch/jupyter_server-2.14.2-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/jupyter_server-2.14.2-pyhd8ed1ab_1.conda hash: - md5: ca23c71f70a7c7935b3d03f0f1a5801d - sha256: edab71a05feceac54bdb90e755a257545af7832b9911607c1a70f09be44ba985 + md5: 81ea84b3212287f926e35b9036192963 + sha256: 
082d3517455339c8baea245a257af249758ccec26b8832d969ac928901c234cc category: main optional: false - name: jupyter_server_terminals @@ -6553,12 +6490,12 @@ package: manager: conda platform: linux-64 dependencies: - python: '>=3.8' + python: '>=3.9' terminado: '>=0.8.3' - url: https://conda.anaconda.org/conda-forge/noarch/jupyter_server_terminals-0.5.3-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/jupyter_server_terminals-0.5.3-pyhd8ed1ab_1.conda hash: - md5: 219b3833aa8ed91d47d1be6ca03f30be - sha256: 038efbc7e4b2e72d49ed193cfb2bbbe9fbab2459786ce9350301f466a32567db + md5: 2d983ff1b82a1ccb6f2e9d8784bdd6bd + sha256: 0890fc79422191bc29edf17d7b42cff44ba254aa225d31eb30819f8772b775b8 category: main optional: false - name: jupyter_server_terminals @@ -6566,23 +6503,22 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=3.8' + python: '>=3.9' terminado: '>=0.8.3' - url: https://conda.anaconda.org/conda-forge/noarch/jupyter_server_terminals-0.5.3-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/jupyter_server_terminals-0.5.3-pyhd8ed1ab_1.conda hash: - md5: 219b3833aa8ed91d47d1be6ca03f30be - sha256: 038efbc7e4b2e72d49ed193cfb2bbbe9fbab2459786ce9350301f466a32567db + md5: 2d983ff1b82a1ccb6f2e9d8784bdd6bd + sha256: 0890fc79422191bc29edf17d7b42cff44ba254aa225d31eb30819f8772b775b8 category: main optional: false - name: jupyterlab - version: 4.2.5 + version: 4.3.3 manager: conda platform: linux-64 dependencies: async-lru: '>=1.0.0' httpx: '>=0.25.0' - importlib_metadata: '>=4.8.3' - importlib_resources: '>=1.4' + importlib-metadata: '>=4.8.3' ipykernel: '>=6.5.0' jinja2: '>=3.0.3' jupyter-lsp: '>=2.0.0' @@ -6591,43 +6527,42 @@ package: jupyterlab_server: '>=2.27.1,<3' notebook-shim: '>=0.2' packaging: '' - python: '>=3.8' - setuptools: '>=40.1.0' + python: '>=3.9' + setuptools: '>=40.8.0' tomli: '>=1.2.2' tornado: '>=6.2.0' traitlets: '' - url: 
https://conda.anaconda.org/conda-forge/noarch/jupyterlab-4.2.5-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/jupyterlab-4.3.3-pyhd8ed1ab_0.conda hash: - md5: 594762eddc55b82feac6097165a88e3c - sha256: db08036a6fd846c178ebdce7327be1130bda10ac96113c17b04bce2bc4d67dda + md5: 0707e62d944a89c365ba11da4898f8af + sha256: 63aa00427abd4a3e7c1738257b8e296f5e0ba04a4a1ab9ff3bc186440c8b9fdc category: main optional: false - name: jupyterlab - version: 4.2.5 + version: 4.3.3 manager: conda platform: osx-arm64 dependencies: - async-lru: '>=1.0.0' - httpx: '>=0.25.0' - importlib_metadata: '>=4.8.3' - importlib_resources: '>=1.4' - ipykernel: '>=6.5.0' - jinja2: '>=3.0.3' - jupyter-lsp: '>=2.0.0' + packaging: '' + traitlets: '' jupyter_core: '' + python: '>=3.9' + tornado: '>=6.2.0' + tomli: '>=1.2.2' + jinja2: '>=3.0.3' + importlib-metadata: '>=4.8.3' jupyter_server: '>=2.4.0,<3' - jupyterlab_server: '>=2.27.1,<3' + jupyter-lsp: '>=2.0.0' + async-lru: '>=1.0.0' notebook-shim: '>=0.2' - packaging: '' - python: '>=3.8' - setuptools: '>=40.1.0' - tomli: '>=1.2.2' - tornado: '>=6.2.0' - traitlets: '' - url: https://conda.anaconda.org/conda-forge/noarch/jupyterlab-4.2.5-pyhd8ed1ab_0.conda + httpx: '>=0.25.0' + jupyterlab_server: '>=2.27.1,<3' + ipykernel: '>=6.5.0' + setuptools: '>=40.8.0' + url: https://conda.anaconda.org/conda-forge/noarch/jupyterlab-4.3.3-pyhd8ed1ab_0.conda hash: - md5: 594762eddc55b82feac6097165a88e3c - sha256: db08036a6fd846c178ebdce7327be1130bda10ac96113c17b04bce2bc4d67dda + md5: 0707e62d944a89c365ba11da4898f8af + sha256: 63aa00427abd4a3e7c1738257b8e296f5e0ba04a4a1ab9ff3bc186440c8b9fdc category: main optional: false - name: jupyterlab_pygments @@ -6636,11 +6571,11 @@ package: platform: linux-64 dependencies: pygments: '>=2.4.1,<3' - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/jupyterlab_pygments-0.3.0-pyhd8ed1ab_1.conda + python: '>=3.9' + url: 
https://conda.anaconda.org/conda-forge/noarch/jupyterlab_pygments-0.3.0-pyhd8ed1ab_2.conda hash: - md5: afcd1b53bcac8844540358e33f33d28f - sha256: 4aa622bbcf97e44cd1adf0100b7ff71b7e20268f043bdf6feae4d16152f1f242 + md5: fd312693df06da3578383232528c468d + sha256: dc24b900742fdaf1e077d9a3458fd865711de80bca95fe3c6d46610c532c6ef0 category: main optional: false - name: jupyterlab_pygments @@ -6648,12 +6583,12 @@ package: manager: conda platform: osx-arm64 dependencies: + python: '>=3.9' pygments: '>=2.4.1,<3' - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/jupyterlab_pygments-0.3.0-pyhd8ed1ab_1.conda + url: https://conda.anaconda.org/conda-forge/noarch/jupyterlab_pygments-0.3.0-pyhd8ed1ab_2.conda hash: - md5: afcd1b53bcac8844540358e33f33d28f - sha256: 4aa622bbcf97e44cd1adf0100b7ff71b7e20268f043bdf6feae4d16152f1f242 + md5: fd312693df06da3578383232528c468d + sha256: dc24b900742fdaf1e077d9a3458fd865711de80bca95fe3c6d46610c532c6ef0 category: main optional: false - name: jupyterlab_server @@ -6668,12 +6603,12 @@ package: jsonschema: '>=4.18' jupyter_server: '>=1.21,<3' packaging: '>=21.3' - python: '>=3.8' + python: '>=3.9' requests: '>=2.31' - url: https://conda.anaconda.org/conda-forge/noarch/jupyterlab_server-2.27.3-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/jupyterlab_server-2.27.3-pyhd8ed1ab_1.conda hash: - md5: af8239bf1ba7e8c69b689f780f653488 - sha256: a23b26d1a35bccdb91b9232119e5f402624e1e1a252b0e64cc20c6eb5b87cefb + md5: 9dc4b2b0f41f0de41d27f3293e319357 + sha256: d03d0b7e23fa56d322993bc9786b3a43b88ccc26e58b77c756619a921ab30e86 category: main optional: false - name: jupyterlab_server @@ -6681,19 +6616,19 @@ package: manager: conda platform: osx-arm64 dependencies: - babel: '>=2.10' - importlib-metadata: '>=4.8.3' + python: '>=3.9' + packaging: '>=21.3' jinja2: '>=3.0.3' - json5: '>=0.9.0' - jsonschema: '>=4.18' - jupyter_server: '>=1.21,<3' - packaging: '>=21.3' - python: '>=3.8' + importlib-metadata: '>=4.8.3' 
requests: '>=2.31' - url: https://conda.anaconda.org/conda-forge/noarch/jupyterlab_server-2.27.3-pyhd8ed1ab_0.conda + jupyter_server: '>=1.21,<3' + jsonschema: '>=4.18' + babel: '>=2.10' + json5: '>=0.9.0' + url: https://conda.anaconda.org/conda-forge/noarch/jupyterlab_server-2.27.3-pyhd8ed1ab_1.conda hash: - md5: af8239bf1ba7e8c69b689f780f653488 - sha256: a23b26d1a35bccdb91b9232119e5f402624e1e1a252b0e64cc20c6eb5b87cefb + md5: 9dc4b2b0f41f0de41d27f3293e319357 + sha256: d03d0b7e23fa56d322993bc9786b3a43b88ccc26e58b77c756619a921ab30e86 category: main optional: false - name: jupyterlab_widgets @@ -6701,11 +6636,11 @@ package: manager: conda platform: linux-64 dependencies: - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/jupyterlab_widgets-3.0.13-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/jupyterlab_widgets-3.0.13-pyhd8ed1ab_1.conda hash: - md5: ccea946e6dce9f330fbf7fca97fe8de7 - sha256: 0e7ec7936d766f39d5a0a8eafc63f5543f488883ad3645246bc22db6d632566e + md5: b26e487434032d7f486277beb0cead3a + sha256: 206489e417408d2ffc2a7b245008b4735a8beb59df6c9109d4f77e7bc5969d5d category: main optional: false - name: jupyterlab_widgets @@ -6713,11 +6648,11 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/jupyterlab_widgets-3.0.13-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/jupyterlab_widgets-3.0.13-pyhd8ed1ab_1.conda hash: - md5: ccea946e6dce9f330fbf7fca97fe8de7 - sha256: 0e7ec7936d766f39d5a0a8eafc63f5543f488883ad3645246bc22db6d632566e + md5: b26e487434032d7f486277beb0cead3a + sha256: 206489e417408d2ffc2a7b245008b4735a8beb59df6c9109d4f77e7bc5969d5d category: main optional: false - name: jupytext @@ -6743,13 +6678,13 @@ package: manager: conda platform: osx-arm64 dependencies: - markdown-it-py: '>=1.0' - mdit-py-plugins: '' - nbformat: '' - packaging: '' - python: '>=3.8' pyyaml: '' + 
packaging: '' tomli: '' + nbformat: '' + mdit-py-plugins: '' + python: '>=3.8' + markdown-it-py: '>=1.0' url: https://conda.anaconda.org/conda-forge/noarch/jupytext-1.16.4-pyh80e38bb_0.conda hash: md5: 1df7fd1594a7f2f6496ff23834a099bf @@ -6771,20 +6706,6 @@ package: sha256: a45cb038fce2b6fa154cf0c71485a75b59cb1d8d6b0465bdcb23736aca6bf2ac category: main optional: false -- name: kealib - version: 1.5.3 - manager: conda - platform: osx-arm64 - dependencies: - __osx: '>=11.0' - hdf5: '>=1.14.3,<1.14.4.0a0' - libcxx: '>=17' - url: https://conda.anaconda.org/conda-forge/osx-arm64/kealib-1.5.3-h8edbb62_2.conda - hash: - md5: d5c581103f5433dd862acbf24facdf9b - sha256: 29fef9ff99514a34d8026da4be5289bc4d2526974df459b63e92445fca7fd55e - category: main - optional: false - name: keras version: 2.14.0 manager: conda @@ -6798,23 +6719,23 @@ package: category: main optional: false - name: keras - version: 3.6.0 + version: 3.7.0 manager: conda platform: osx-arm64 dependencies: - absl-py: '' + numpy: '' + packaging: '' h5py: '' - ml_dtypes: '' + rich: '' + absl-py: '' namex: '' - numpy: '' + ml_dtypes: '' optree: '' - packaging: '' python: '>=3.9' - rich: '' - url: https://conda.anaconda.org/conda-forge/noarch/keras-3.6.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/keras-3.7.0-pyh753f3f9_1.conda hash: - md5: d33abb538e5e53920649f1708fc8fa82 - sha256: 55299050aab040677de3950260b7587c8e7ab0cf9a5429bf9143bc72d8250420 + md5: a57cc257120ce3f63448d212b89890f2 + sha256: 9974f55453878ba95eb2d0b6655a0fdca2f37a528ac571e09145b67a98357ab5 category: main optional: false - name: keyutils @@ -6892,17 +6813,17 @@ package: category: main optional: false - name: kubernetes - version: 1.31.1 + version: 1.31.4 manager: conda platform: linux-64 dependencies: - kubernetes-client: 1.31.1 - kubernetes-node: 1.31.1 - kubernetes-server: 1.31.1 - url: https://conda.anaconda.org/conda-forge/linux-64/kubernetes-1.31.1-ha770c72_0.conda + kubernetes-client: 1.31.4 + kubernetes-node: 
1.31.4 + kubernetes-server: 1.31.4 + url: https://conda.anaconda.org/conda-forge/linux-64/kubernetes-1.31.4-ha770c72_0.conda hash: - md5: 7c17707723c70a19460cd9519a7da9d3 - sha256: cc32b4d24bd4db8d17036dbdf4ec6f0c45c4f9fd089fd2d6d09513dfd11dc8af + md5: 28ab588985184e4eade3c641367c6aa5 + sha256: 92dc15ff333feb780292629abbd178cb917a5997933627aa58d6dfceedd696e9 category: main optional: false - name: kubernetes @@ -6920,16 +6841,16 @@ package: category: main optional: false - name: kubernetes-client - version: 1.31.1 + version: 1.31.4 manager: conda platform: linux-64 dependencies: __glibc: '>=2.17,<3.0.a0' libgcc: '>=13' - url: https://conda.anaconda.org/conda-forge/linux-64/kubernetes-client-1.31.1-h90cef28_0.conda + url: https://conda.anaconda.org/conda-forge/linux-64/kubernetes-client-1.31.4-h6d84b8b_0.conda hash: - md5: 62b304726442456d966866e3e1237aa6 - sha256: a0bd9304a947ead43b7ed0dfd401667e438216d26dca0538c021a96a058533f7 + md5: 2dc5c96f804c4d2229dd2f9f9e544522 + sha256: 88cc0f8eec6115f9139d5d30ebad3f3e1c67480bb327ace3e45e1853258b5a56 category: main optional: false - name: kubernetes-client @@ -6944,16 +6865,16 @@ package: category: main optional: false - name: kubernetes-node - version: 1.31.1 + version: 1.31.4 manager: conda platform: linux-64 dependencies: __glibc: '>=2.17,<3.0.a0' libgcc: '>=13' - url: https://conda.anaconda.org/conda-forge/linux-64/kubernetes-node-1.31.1-h90cef28_0.conda + url: https://conda.anaconda.org/conda-forge/linux-64/kubernetes-node-1.31.4-h6d84b8b_0.conda hash: - md5: e97df03e7544fb98c4dce733151752d5 - sha256: 4fc65c5d92f02834621cbd2077f7fe7d3f7af23b6babb898a26d10c310a40756 + md5: 81595b895ac4057e0f17537fa05b3aa6 + sha256: 9e274292d2ea18eafc4901e1343a4efd36bb8ed441d2cdd24498a3ec4d421006 category: main optional: false - name: kubernetes-node @@ -6968,17 +6889,17 @@ package: category: main optional: false - name: kubernetes-server - version: 1.31.1 + version: 1.31.4 manager: conda platform: linux-64 dependencies: __glibc: 
'>=2.17,<3.0.a0' - kubernetes-node: 1.31.1 + kubernetes-node: 1.31.4 libgcc: '>=13' - url: https://conda.anaconda.org/conda-forge/linux-64/kubernetes-server-1.31.1-h90cef28_0.conda + url: https://conda.anaconda.org/conda-forge/linux-64/kubernetes-server-1.31.4-h6d84b8b_0.conda hash: - md5: d77e1404e12f94be5333bec9f51ff6d4 - sha256: 87caf1e7ccc72cf55e11d3c03ef43048d7285c1480df557c27776d48f18e78e9 + md5: d722eb9b53bdb65d029fd90f643456e5 + sha256: ff4ff75dcfa1b8642d89129c31eca5b10898364553d1e4a8815766a5e8a636f8 category: main optional: false - name: kubernetes-server @@ -7026,10 +6947,10 @@ package: platform: linux-64 dependencies: __glibc: '>=2.17,<3.0.a0' - url: https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.43-h712a8e2_1.conda + url: https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.43-h712a8e2_2.conda hash: - md5: 83e1364586ceb8d0739fbc85b5c95837 - sha256: 0c21387f9a411e3d1f7f2969026bacfece133c8f1e72faea9cde29c0c19e1f3a + md5: 048b02e3962f066da18efe3a21b77672 + sha256: 7c91cea91b13f4314d125d1bedb9d03a29ebbd5080ccdea70260363424646dbe category: main optional: false - name: lerc @@ -7071,16 +6992,16 @@ package: category: main optional: false - name: libabseil - version: '20240116.2' + version: '20240722.0' manager: conda platform: osx-arm64 dependencies: __osx: '>=11.0' - libcxx: '>=16' - url: https://conda.anaconda.org/conda-forge/osx-arm64/libabseil-20240116.2-cxx17_h00cdb27_1.conda + libcxx: '>=17' + url: https://conda.anaconda.org/conda-forge/osx-arm64/libabseil-20240722.0-cxx17_hf9b8971_1.conda hash: - md5: f16963d88aed907af8b90878b8d8a05c - sha256: a9517c8683924f4b3b9380cdaa50fdd2009cd8d5f3918c92f64394238189d3cb + md5: 706da5e791c569a7b9814877098a6a0a + sha256: 90bf08a75506dfcf28a70977da8ab050bcf594cd02abd3a9d84a22c9e8161724 category: main optional: false - name: libaec @@ -7129,24 +7050,24 @@ package: category: main optional: false - name: libarchive - version: 3.7.4 + version: 3.7.7 manager: conda platform: osx-arm64 
dependencies: __osx: '>=11.0' bzip2: '>=1.0.8,<2.0a0' libiconv: '>=1.17,<2.0a0' - libxml2: '>=2.12.7,<3.0a0' - libzlib: '>=1.2.13,<2.0.0a0' + libxml2: '>=2.13.5,<3.0a0' + libzlib: '>=1.3.1,<2.0a0' lz4-c: '>=1.9.3,<1.10.0a0' lzo: '>=2.10,<3.0a0' - openssl: '>=3.3.0,<4.0a0' + openssl: '>=3.4.0,<4.0a0' xz: '>=5.2.6,<6.0a0' zstd: '>=1.5.6,<1.6.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/libarchive-3.7.4-h83d404f_0.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/libarchive-3.7.7-h7c07d2a_0.conda hash: - md5: 8b604ee634caafd92f2ff2fab6a1f75a - sha256: 5301d7dc52c2e1f87b229606033c475caf87cd94ef5a5efb3af565a62b88127e + md5: 49b28e291693b70cf8a7e70f290834d8 + sha256: 10d5c755761c6823d20c6ddbd42292ef91f34e271b6ba3e78d0c5fa81c22b3ed category: main optional: false - name: libarrow @@ -7167,7 +7088,7 @@ package: libprotobuf: '>=3.21.12,<3.22.0a0' libstdcxx-ng: '>=12' libthrift: '>=0.18.1,<0.18.2.0a0' - libutf8proc: '>=2.8.0,<3.0a0' + libutf8proc: <2.9 libzlib: '>=1.2.13,<2.0.0a0' lz4-c: '>=1.9.3,<1.10.0a0' openssl: '>=3.1.2,<4.0a0' @@ -7183,85 +7104,85 @@ package: category: main optional: false - name: libarrow - version: 17.0.0 + version: 18.0.0 manager: conda platform: osx-arm64 dependencies: __osx: '>=11.0' - aws-crt-cpp: '>=0.28.3,<0.28.4.0a0' + aws-crt-cpp: '>=0.29.0,<0.29.1.0a0' aws-sdk-cpp: '>=1.11.407,<1.11.408.0a0' - azure-core-cpp: '>=1.13.0,<1.13.1.0a0' - azure-identity-cpp: '>=1.8.0,<1.8.1.0a0' - azure-storage-blobs-cpp: '>=12.12.0,<12.12.1.0a0' - azure-storage-files-datalake-cpp: '>=12.11.0,<12.11.1.0a0' + azure-core-cpp: '>=1.14.0,<1.14.1.0a0' + azure-identity-cpp: '>=1.10.0,<1.10.1.0a0' + azure-storage-blobs-cpp: '>=12.13.0,<12.13.1.0a0' + azure-storage-files-datalake-cpp: '>=12.12.0,<12.12.1.0a0' bzip2: '>=1.0.8,<2.0a0' glog: '>=0.7.1,<0.8.0a0' - libabseil: '>=20240116.2,<20240117.0a0' + libabseil: '>=20240722.0,<20240723.0a0' libbrotlidec: '>=1.1.0,<1.2.0a0' libbrotlienc: '>=1.1.0,<1.2.0a0' - libcxx: '>=17' - libgoogle-cloud: 
'>=2.29.0,<2.30.0a0' - libgoogle-cloud-storage: '>=2.29.0,<2.30.0a0' - libre2-11: '>=2023.9.1' - libutf8proc: '>=2.8.0,<3.0a0' + libcxx: '>=18' + libgoogle-cloud: '>=2.30.0,<2.31.0a0' + libgoogle-cloud-storage: '>=2.30.0,<2.31.0a0' + libre2-11: '>=2024.7.2' + libutf8proc: <2.9 libzlib: '>=1.3.1,<2.0a0' lz4-c: '>=1.9.3,<1.10.0a0' orc: '>=2.0.2,<2.0.3.0a0' re2: '' snappy: '>=1.2.1,<1.3.0a0' zstd: '>=1.5.6,<1.6.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/libarrow-17.0.0-hc6a7651_16_cpu.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/libarrow-18.0.0-h6fea68a_0_cpu.conda hash: - md5: 05fecc4ae5930dc548327980a4bc7a83 - sha256: 1facd5aa7140031be0f68733ab5e413ea1505da40548e27a173b2407046f36b5 + md5: 64ff84a32d9fa037380459f0440f3d8e + sha256: ddd556d066216a1e3f157eaa0cedd811105bae706f98feaeef064569e889f40f category: main optional: false - name: libarrow-acero - version: 17.0.0 + version: 18.0.0 manager: conda platform: osx-arm64 dependencies: __osx: '>=11.0' - libarrow: 17.0.0 - libcxx: '>=17' - url: https://conda.anaconda.org/conda-forge/osx-arm64/libarrow-acero-17.0.0-hf9b8971_16_cpu.conda + libarrow: 18.0.0 + libcxx: '>=18' + url: https://conda.anaconda.org/conda-forge/osx-arm64/libarrow-acero-18.0.0-h286801f_0_cpu.conda hash: - md5: 319bd2a8c30dffa54d6ad69847f16de1 - sha256: c9ff43babc0acbd864584ed1720cf063715589e31e9e2024b90d2094d4f20d38 + md5: deab7a5984465e46176d289377025757 + sha256: 93014da94788f24710be8e457c49609cf8dc17cd91e5fb80285ce28cefce6b57 category: main optional: false - name: libarrow-dataset - version: 17.0.0 + version: 18.0.0 manager: conda platform: osx-arm64 dependencies: __osx: '>=11.0' - libarrow: 17.0.0 - libarrow-acero: 17.0.0 - libcxx: '>=17' - libparquet: 17.0.0 - url: https://conda.anaconda.org/conda-forge/osx-arm64/libarrow-dataset-17.0.0-hf9b8971_16_cpu.conda + libarrow: 18.0.0 + libarrow-acero: 18.0.0 + libcxx: '>=18' + libparquet: 18.0.0 + url: 
https://conda.anaconda.org/conda-forge/osx-arm64/libarrow-dataset-18.0.0-h286801f_0_cpu.conda hash: - md5: 67ea0ef775de4c394c3c7db991297ffa - sha256: e77d3c6825384c232f61fd3602a32507b66410dbe8879cd69a89b0fc49489533 + md5: 719055efe1941ef666b3882e6a85a9bb + sha256: b204bb8d3c5d5a2ab74b9375086ebee91c0a500e2146aed01e8915a4eae2f140 category: main optional: false - name: libarrow-substrait - version: 17.0.0 + version: 18.0.0 manager: conda platform: osx-arm64 dependencies: __osx: '>=11.0' - libabseil: '>=20240116.2,<20240117.0a0' - libarrow: 17.0.0 - libarrow-acero: 17.0.0 - libarrow-dataset: 17.0.0 - libcxx: '>=17' - libprotobuf: '>=4.25.3,<4.25.4.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/libarrow-substrait-17.0.0-hbf8b706_16_cpu.conda + libabseil: '>=20240722.0,<20240723.0a0' + libarrow: 18.0.0 + libarrow-acero: 18.0.0 + libarrow-dataset: 18.0.0 + libcxx: '>=18' + libprotobuf: '>=5.27.5,<5.27.6.0a0' + url: https://conda.anaconda.org/conda-forge/osx-arm64/libarrow-substrait-18.0.0-hdcc9e87_0_cpu.conda hash: - md5: b739c909163c38f85f40f5650ab2aeb2 - sha256: 6880b3c8fb88ee6c0bbae34b0efea86567ccec1b8cd8a3662b8b8c6dfeb5e87a + md5: dd51b0ba8e9dc24f04362cca5a93569d + sha256: 6ea9df616248191a06fb4d078486f282b1807bd8eab3e4f380f04df46264cea2 category: main optional: false - name: libasprintf @@ -7292,16 +7213,32 @@ package: sha256: ccc7967e298ddf3124c8ad9741c7180dc6f778ae4135ec87978214f7b3c64dc2 category: main optional: false +- name: libavif16 + version: 1.1.1 + manager: conda + platform: osx-arm64 + dependencies: + __osx: '>=11.0' + aom: '>=3.9.1,<3.10.0a0' + dav1d: '>=1.2.1,<1.2.2.0a0' + rav1e: '>=0.6.6,<1.0a0' + svt-av1: '>=2.3.0,<2.3.1.0a0' + url: https://conda.anaconda.org/conda-forge/osx-arm64/libavif16-1.1.1-h45b7238_2.conda + hash: + md5: 7571064a60bc193ff5c25f36ed23394a + sha256: c671365e8c822d29b53f20c4573fdbc70f18b50ff9a4b5b2b6b3c8f7ad2ac2a9 + category: main + optional: false - name: libblas version: 3.9.0 manager: conda platform: linux-64 
dependencies: - libopenblas: '>=0.3.27,<1.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-24_linux64_openblas.conda + libopenblas: '>=0.3.28,<1.0a0' + url: https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-25_linux64_openblas.conda hash: - md5: 80aea6603a6813b16ec119d00382b772 - sha256: 3097f7913bda527d4fe9f824182b314e130044e582455037fca6f4e97965d83c + md5: 8ea26d42ca88ec5258802715fe1ee10b + sha256: d6d12dc437d060f838820e9e61bf73baab651f91935ac594cf10beb9ef1b4450 category: main optional: false - name: libblas @@ -7309,11 +7246,11 @@ package: manager: conda platform: osx-arm64 dependencies: - libopenblas: '>=0.3.27,<1.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/libblas-3.9.0-24_osxarm64_openblas.conda + libopenblas: '>=0.3.28,<1.0a0' + url: https://conda.anaconda.org/conda-forge/osx-arm64/libblas-3.9.0-25_osxarm64_openblas.conda hash: - md5: 35cb711e7bc46ee5f3dd67af99ad1986 - sha256: 4739f7463efb12e6d71536d8b0285a8de5aaadcc442bfedb9d92d1b4cbc47847 + md5: f8cf4d920ff36ce471619010eff59cac + sha256: f1fb9a11af0b2878bd8804b4c77d3733c40076218bcbdb35f575b1c0c9fddf11 category: main optional: false - name: libbrotlicommon @@ -7398,10 +7335,10 @@ package: platform: linux-64 dependencies: libblas: 3.9.0 - url: https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-24_linux64_openblas.conda + url: https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-25_linux64_openblas.conda hash: - md5: f5b8822297c9c790cec0795ca1fc9be6 - sha256: 2a52bccc5b03cdf014d856d0b85dbd591faa335ab337d620cd6aded121d7153c + md5: 5dbd1b0fc0d01ec5e0e1fbe667281a11 + sha256: ab87b0477078837c91d9cda62a9faca18fba7c57cc77aa779ae24b3ac783b5dd category: main optional: false - name: libcblas @@ -7410,10 +7347,10 @@ package: platform: osx-arm64 dependencies: libblas: 3.9.0 - url: https://conda.anaconda.org/conda-forge/osx-arm64/libcblas-3.9.0-24_osxarm64_openblas.conda + url: 
https://conda.anaconda.org/conda-forge/osx-arm64/libcblas-3.9.0-25_osxarm64_openblas.conda hash: - md5: c8977086a19233153e454bb2b332a920 - sha256: 40dc3f7c44af5cd5a2020386cb30f92943a9d8f7f54321b4d6ae32b2e54af9a4 + md5: 4df0fae81f0b5bf47d48c882b086da11 + sha256: d9fa5b6b11252132a3383bbf87bd2f1b9d6248bef1b7e113c2a8ae41b0376218 category: main optional: false - name: libcrc32c @@ -7441,6 +7378,23 @@ package: sha256: 58477b67cc719060b5b069ba57161e20ba69b8695d154a719cb4b60caf577929 category: main optional: false +- name: libcst + version: 1.5.1 + manager: conda + platform: osx-arm64 + dependencies: + __osx: '>=11.0' + python: '>=3.9,<3.10.0a0' + python_abi: 3.9.* + pyyaml: '>=5.2' + typing_extensions: '>=3.7.4.2' + typing_inspect: '>=0.4.0' + url: https://conda.anaconda.org/conda-forge/osx-arm64/libcst-1.5.1-py39h15bf049_0.conda + hash: + md5: 1a5c4a5ba089bc860305e79d6a596d63 + sha256: 05a9e1aa483b54d0dd53849863312091411f5ae64b4bfd958c0fda33dd3f5c26 + category: main + optional: false - name: libcurl version: 8.10.1 manager: conda @@ -7479,15 +7433,27 @@ package: category: main optional: false - name: libcxx - version: 19.1.1 + version: 19.1.5 manager: conda platform: osx-arm64 dependencies: __osx: '>=11.0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/libcxx-19.1.1-ha82da77_0.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/libcxx-19.1.5-ha82da77_0.conda hash: - md5: 4ed0a90fd6a5bdda4ecf98912329993f - sha256: bc2f7cca206fa8a1dfe801c90362a1b6ec2967a75ef60d26e7c7114884c120c0 + md5: 3c7be0df28ccda1d193ea6de56dcb5ff + sha256: 7918cc0bb7a6554cdd3eee634c3dc414a1ab8ec49faeca1567367bb92118f9d7 + category: main + optional: false +- name: libde265 + version: 1.0.15 + manager: conda + platform: osx-arm64 + dependencies: + libcxx: '>=15' + url: https://conda.anaconda.org/conda-forge/osx-arm64/libde265-1.0.15-h2ffa867_0.conda + hash: + md5: 7c718ee6d8497702145612fa0898a12d + sha256: 13747fa634f7f16d7f222b7d3869e3c1aab9d3a2791edeb2fc632a87663950e0 category: main 
optional: false - name: libdeflate @@ -7588,28 +7554,28 @@ package: category: main optional: false - name: libexpat - version: 2.6.3 + version: 2.6.4 manager: conda platform: linux-64 dependencies: __glibc: '>=2.17,<3.0.a0' libgcc: '>=13' - url: https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.6.3-h5888daf_0.conda + url: https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.6.4-h5888daf_0.conda hash: - md5: 59f4c43bb1b5ef1c71946ff2cbf59524 - sha256: 4bb47bb2cd09898737a5211e2992d63c555d63715a07ba56eae0aff31fb89c22 + md5: db833e03127376d461e1e13e76f09b6c + sha256: 56541b98447b58e52d824bd59d6382d609e11de1f8adf20b23143e353d2b8d26 category: main optional: false - name: libexpat - version: 2.6.3 + version: 2.6.4 manager: conda platform: osx-arm64 dependencies: __osx: '>=11.0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/libexpat-2.6.3-hf9b8971_0.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/libexpat-2.6.4-h286801f_0.conda hash: - md5: 5f22f07c2ab2dea8c66fe9585a062c96 - sha256: 5cbe5a199fba14ade55457a468ce663aac0b54832c39aa54470b3889b4c75c4a + md5: 38d2656dd914feb0cab8c629370768bf + sha256: e42ab5ace927ee7c84e3f0f7d813671e1cf3529f5f06ee5899606630498c2745 category: main optional: false - name: libffi @@ -7636,28 +7602,28 @@ package: category: main optional: false - name: libgcc - version: 14.1.0 + version: 14.2.0 manager: conda platform: linux-64 dependencies: _libgcc_mutex: '0.1' _openmp_mutex: '>=4.5' - url: https://conda.anaconda.org/conda-forge/linux-64/libgcc-14.1.0-h77fa898_1.conda + url: https://conda.anaconda.org/conda-forge/linux-64/libgcc-14.2.0-h77fa898_1.conda hash: - md5: 002ef4463dd1e2b44a94a4ace468f5d2 - sha256: 10fa74b69266a2be7b96db881e18fa62cfa03082b65231e8d652e897c4b335a3 + md5: 3cb76c3f10d3bc7f1105b2fc9db984df + sha256: 53eb8a79365e58849e7b1a068d31f4f9e718dc938d6f2c03e960345739a03569 category: main optional: false - name: libgcc-ng - version: 14.1.0 + version: 14.2.0 manager: conda platform: linux-64 
dependencies: - libgcc: 14.1.0 - url: https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-14.1.0-h69a702a_1.conda + libgcc: 14.2.0 + url: https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-14.2.0-h69a702a_1.conda hash: - md5: 1efc0ad219877a73ef977af7dbb51f17 - sha256: b91f7021e14c3d5c840fbf0dc75370d6e1f7c7ff4482220940eaafb9c64613b7 + md5: e39480b9ca41323497b05492a63bc35b + sha256: 3a76969c80e9af8b6e7a55090088bc41da4cffcde9e2c71b17f44d37b7cb87f7 category: main optional: false - name: libgd @@ -7675,356 +7641,132 @@ package: libjpeg-turbo: '>=3.0.0,<4.0a0' libpng: '>=1.6.39,<1.7.0a0' libtiff: '>=4.6.0,<4.8.0a0' - libwebp: '' - libwebp-base: '>=1.3.2,<2.0a0' - libzlib: '>=1.2.13,<2.0.0a0' - zlib: '' - url: https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h119a65a_9.conda - hash: - md5: cfebc557e54905dadc355c0e9f003004 - sha256: b74f95a6e1f3b31a74741b39cba83ed99fc82d17243c0fd3b5ab16ddd48ab89d - category: main - optional: false -- name: libgd - version: 2.3.3 - manager: conda - platform: osx-arm64 - dependencies: - __osx: '>=11.0' - fontconfig: '>=2.14.2,<3.0a0' - fonts-conda-ecosystem: '' - freetype: '>=2.12.1,<3.0a0' - icu: '>=75.1,<76.0a0' - libexpat: '>=2.6.2,<3.0a0' - libiconv: '>=1.17,<2.0a0' - libjpeg-turbo: '>=3.0.0,<4.0a0' - libpng: '>=1.6.43,<1.7.0a0' - libtiff: '>=4.6.0,<4.8.0a0' - libwebp-base: '>=1.4.0,<2.0a0' - libzlib: '>=1.3.1,<2.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/libgd-2.3.3-hac1b3a8_10.conda - hash: - md5: c9e450ce5ced76f107c494fbd37325f5 - sha256: d15beaa2e862a09526e704f22f7d0b7fa73b114b868106dd686e167b9d65558e - category: main - optional: false -- name: libgdal - version: 3.8.1 - manager: conda - platform: linux-64 - dependencies: - __glibc: '>=2.17,<3.0.a0' - blosc: '>=1.21.5,<2.0a0' - cfitsio: '>=4.3.1,<4.3.2.0a0' - freexl: '>=2.0.0,<3.0a0' - geos: '>=3.12.1,<3.12.2.0a0' - geotiff: '>=1.7.1,<1.8.0a0' - giflib: '>=5.2.1,<5.3.0a0' - hdf4: '>=4.2.15,<4.2.16.0a0' - hdf5: '>=1.14.3,<1.14.4.0a0' - json-c: 
'>=0.17,<0.18.0a0' - kealib: '>=1.5.2,<1.6.0a0' - lerc: '>=4.0.0,<5.0a0' - libaec: '>=1.1.2,<2.0a0' - libarchive: '>=3.7.2,<3.8.0a0' - libcurl: '>=8.5.0,<9.0a0' - libdeflate: '>=1.19,<1.20.0a0' - libexpat: '>=2.5.0,<3.0a0' - libgcc-ng: '>=12' - libiconv: '>=1.17,<2.0a0' - libjpeg-turbo: '>=3.0.0,<4.0a0' - libkml: '>=1.3.0,<1.4.0a0' - libnetcdf: '>=4.9.2,<4.9.3.0a0' - libpng: '>=1.6.39,<1.7.0a0' - libpq: '>=16.1,<17.0a0' - libspatialite: '>=5.1.0,<5.2.0a0' - libsqlite: '>=3.44.2,<4.0a0' - libstdcxx-ng: '>=12' - libtiff: '>=4.6.0,<4.8.0a0' - libuuid: '>=2.38.1,<3.0a0' - libwebp-base: '>=1.3.2,<2.0a0' - libxml2: '>=2.12.2,<3.0.0a0' - libzlib: '>=1.2.13,<2.0.0a0' - lz4-c: '>=1.9.3,<1.10.0a0' - openjpeg: '>=2.5.0,<3.0a0' - openssl: '>=3.2.0,<4.0a0' - pcre2: '>=10.42,<10.43.0a0' - poppler: '>=23.12.0,<23.13.0a0' - postgresql: '' - proj: '>=9.3.1,<9.3.2.0a0' - tiledb: '>=2.18.2,<2.19.0a0' - xerces-c: '>=3.2.4,<3.3.0a0' - xz: '>=5.2.6,<6.0a0' - zstd: '>=1.5.5,<1.6.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/libgdal-3.8.1-hed8bd54_4.conda - hash: - md5: 32e453fb234a3534069396da161b145b - sha256: 8461bd176f6b526d856c28ad42c0899683211104c0609bbba5b897466597a34c - category: main - optional: false -- name: libgdal - version: 3.9.2 - manager: conda - platform: osx-arm64 - dependencies: - libgdal-core: 3.9.2.* - libgdal-fits: 3.9.2.* - libgdal-grib: 3.9.2.* - libgdal-hdf4: 3.9.2.* - libgdal-hdf5: 3.9.2.* - libgdal-jp2openjpeg: 3.9.2.* - libgdal-kea: 3.9.2.* - libgdal-netcdf: 3.9.2.* - libgdal-pdf: 3.9.2.* - libgdal-pg: 3.9.2.* - libgdal-postgisraster: 3.9.2.* - libgdal-tiledb: 3.9.2.* - libgdal-xls: 3.9.2.* - url: https://conda.anaconda.org/conda-forge/osx-arm64/libgdal-3.9.2-hce30654_7.conda - hash: - md5: b9ff370534f04743fea9a532bb1cb967 - sha256: a8344237821a6a71c5f0b415df44fea61faed86afc09dd18d2a311cb3a2593b9 - category: main - optional: false -- name: libgdal-core - version: 3.9.2 - manager: conda - platform: osx-arm64 - dependencies: - __osx: '>=11.0' - 
blosc: '>=1.21.6,<2.0a0' - geos: '>=3.13.0,<3.13.1.0a0' - geotiff: '>=1.7.3,<1.8.0a0' - giflib: '>=5.2.2,<5.3.0a0' - json-c: '>=0.18,<0.19.0a0' - lerc: '>=4.0.0,<5.0a0' - libarchive: '>=3.7.4,<3.8.0a0' - libcurl: '>=8.10.1,<9.0a0' - libcxx: '>=17' - libdeflate: '>=1.22,<1.23.0a0' - libexpat: '>=2.6.3,<3.0a0' - libiconv: '>=1.17,<2.0a0' - libjpeg-turbo: '>=3.0.0,<4.0a0' - libkml: '>=1.3.0,<1.4.0a0' - libpng: '>=1.6.44,<1.7.0a0' - libspatialite: '>=5.1.0,<5.2.0a0' - libsqlite: '>=3.46.1,<4.0a0' - libtiff: '>=4.7.0,<4.8.0a0' - libwebp-base: '>=1.4.0,<2.0a0' - libxml2: '>=2.12.7,<3.0a0' - libzlib: '>=1.3.1,<2.0a0' - lz4-c: '>=1.9.3,<1.10.0a0' - openssl: '>=3.3.2,<4.0a0' - pcre2: '>=10.44,<10.45.0a0' - proj: '>=9.5.0,<9.6.0a0' - xerces-c: '>=3.2.5,<3.3.0a0' - xz: '>=5.2.6,<6.0a0' - zstd: '>=1.5.6,<1.6.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/libgdal-core-3.9.2-hfd0b032_7.conda - hash: - md5: b553800429e5682120428772324184f6 - sha256: 243f081ad166e32a614d02293a4fa2ba773ab8e4ba01e5945d64536b68414c71 - category: main - optional: false -- name: libgdal-fits - version: 3.9.2 - manager: conda - platform: osx-arm64 - dependencies: - __osx: '>=11.0' - cfitsio: '>=4.4.1,<4.4.2.0a0' - libcxx: '>=17' - libgdal-core: '>=3.9' - libkml: '>=1.3.0,<1.4.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/libgdal-fits-3.9.2-h248c7bc_7.conda - hash: - md5: f6fddae38163fff25a99adef1765496c - sha256: 2795e2d484722cbc3381920982da0250d3dcc3f3556b8bcdf1ed1c134a7d2f1b - category: main - optional: false -- name: libgdal-grib - version: 3.9.2 - manager: conda - platform: osx-arm64 - dependencies: - __osx: '>=11.0' - libaec: '>=1.1.3,<2.0a0' - libcxx: '>=17' - libgdal-core: '>=3.9' - libkml: '>=1.3.0,<1.4.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/libgdal-grib-3.9.2-h6d3d72d_7.conda - hash: - md5: f8794c6cd7aaa4cd18ebde3fe10fba07 - sha256: d9eb5d2a428da6d057c84c0902692e73ce77993b5dbced725dc0b814d382d23d - category: main - optional: false -- name: 
libgdal-hdf4 - version: 3.9.2 - manager: conda - platform: osx-arm64 - dependencies: - __osx: '>=11.0' - hdf4: '>=4.2.15,<4.2.16.0a0' - libaec: '>=1.1.3,<2.0a0' - libcxx: '>=17' - libgdal-core: '>=3.9' - libkml: '>=1.3.0,<1.4.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/libgdal-hdf4-3.9.2-h3847bb8_7.conda - hash: - md5: 0ff2c29987702b8f7b61c865d951cd90 - sha256: 2431fbe2e19007c61093052ce021963313446472a5bfd148da546c388c9409be - category: main - optional: false -- name: libgdal-hdf5 - version: 3.9.2 - manager: conda - platform: osx-arm64 - dependencies: - __osx: '>=11.0' - hdf5: '>=1.14.3,<1.14.4.0a0' - libcxx: '>=17' - libgdal-core: '>=3.9' - libkml: '>=1.3.0,<1.4.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/libgdal-hdf5-3.9.2-h2def128_7.conda - hash: - md5: 6bbc7e8df9ef22139bc1bab39ba3dd56 - sha256: 3c298f5da6f445637deba5bd3bd48389e84740060f565fcc889912de7eeccd12 - category: main - optional: false -- name: libgdal-jp2openjpeg - version: 3.9.2 - manager: conda - platform: osx-arm64 - dependencies: - __osx: '>=11.0' - libcxx: '>=17' - libgdal-core: '>=3.9' - libkml: '>=1.3.0,<1.4.0a0' - openjpeg: '>=2.5.2,<3.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/libgdal-jp2openjpeg-3.9.2-hd61e619_7.conda - hash: - md5: 3114191129246e6571d739289bb8083f - sha256: abcbbe2d98a6eb471ac620aef4d687ad6acdcc61188063dc42e9e598a90d7868 - category: main - optional: false -- name: libgdal-kea - version: 3.9.2 - manager: conda - platform: osx-arm64 - dependencies: - __osx: '>=11.0' - hdf5: '>=1.14.3,<1.14.4.0a0' - kealib: '>=1.5.3,<1.6.0a0' - libcxx: '>=17' - libgdal-core: '>=3.9' - libgdal-hdf5: 3.9.2.* - libkml: '>=1.3.0,<1.4.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/libgdal-kea-3.9.2-h7b2de0b_7.conda - hash: - md5: 47c89ca8baab301fb54f3b1faa166e4d - sha256: 8ba32b0e3654b221f3dc902ddfb3ad1e74777220c5b4ea30331e80fe801c5bef - category: main - optional: false -- name: libgdal-netcdf - version: 3.9.2 - manager: conda - 
platform: osx-arm64 - dependencies: - __osx: '>=11.0' - hdf4: '>=4.2.15,<4.2.16.0a0' - hdf5: '>=1.14.3,<1.14.4.0a0' - libcxx: '>=17' - libgdal-core: '>=3.9' - libgdal-hdf4: 3.9.2.* - libgdal-hdf5: 3.9.2.* - libkml: '>=1.3.0,<1.4.0a0' - libnetcdf: '>=4.9.2,<4.9.3.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/libgdal-netcdf-3.9.2-h5e0d008_7.conda - hash: - md5: 438cf785fe8b4d9acabbae8ce6e39cb6 - sha256: eb093b7e72a9374c421fa92128282a676a54bb37ca5960a8132dd6326306a1a8 - category: main - optional: false -- name: libgdal-pdf - version: 3.9.2 - manager: conda - platform: osx-arm64 - dependencies: - __osx: '>=11.0' - libcxx: '>=17' - libgdal-core: '>=3.9' - libkml: '>=1.3.0,<1.4.0a0' - poppler: '' - url: https://conda.anaconda.org/conda-forge/osx-arm64/libgdal-pdf-3.9.2-h587d690_7.conda + libwebp: '' + libwebp-base: '>=1.3.2,<2.0a0' + libzlib: '>=1.2.13,<2.0.0a0' + zlib: '' + url: https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h119a65a_9.conda hash: - md5: 4323634089f1156bd69a77ad48f53d0d - sha256: 68c1a57552963982a1a703b85a42bbd8a15bb253d9acce13332d1ff911078de4 + md5: cfebc557e54905dadc355c0e9f003004 + sha256: b74f95a6e1f3b31a74741b39cba83ed99fc82d17243c0fd3b5ab16ddd48ab89d category: main optional: false -- name: libgdal-pg - version: 3.9.2 +- name: libgd + version: 2.3.3 manager: conda platform: osx-arm64 dependencies: __osx: '>=11.0' - libcxx: '>=17' - libgdal-core: '>=3.9' - libkml: '>=1.3.0,<1.4.0a0' - libpq: '>=17.0,<18.0a0' - postgresql: '' - url: https://conda.anaconda.org/conda-forge/osx-arm64/libgdal-pg-3.9.2-h6a0b679_7.conda + fontconfig: '>=2.14.2,<3.0a0' + fonts-conda-ecosystem: '' + freetype: '>=2.12.1,<3.0a0' + icu: '>=75.1,<76.0a0' + libexpat: '>=2.6.2,<3.0a0' + libiconv: '>=1.17,<2.0a0' + libjpeg-turbo: '>=3.0.0,<4.0a0' + libpng: '>=1.6.43,<1.7.0a0' + libtiff: '>=4.6.0,<4.8.0a0' + libwebp-base: '>=1.4.0,<2.0a0' + libzlib: '>=1.3.1,<2.0a0' + url: https://conda.anaconda.org/conda-forge/osx-arm64/libgd-2.3.3-hac1b3a8_10.conda 
hash: - md5: 596b2a38085a9352856af7ab3bdefe41 - sha256: f0b0d93eb7e4d99c5581978adab99b4b930be40b610e858d642af36c9ef00793 + md5: c9e450ce5ced76f107c494fbd37325f5 + sha256: d15beaa2e862a09526e704f22f7d0b7fa73b114b868106dd686e167b9d65558e category: main optional: false -- name: libgdal-postgisraster - version: 3.9.2 +- name: libgdal + version: 3.8.1 manager: conda - platform: osx-arm64 + platform: linux-64 dependencies: - __osx: '>=11.0' - libcxx: '>=17' - libgdal-core: '>=3.9' + __glibc: '>=2.17,<3.0.a0' + blosc: '>=1.21.5,<2.0a0' + cfitsio: '>=4.3.1,<4.3.2.0a0' + freexl: '>=2.0.0,<3.0a0' + geos: '>=3.12.1,<3.12.2.0a0' + geotiff: '>=1.7.1,<1.8.0a0' + giflib: '>=5.2.1,<5.3.0a0' + hdf4: '>=4.2.15,<4.2.16.0a0' + hdf5: '>=1.14.3,<1.14.4.0a0' + json-c: '>=0.17,<0.18.0a0' + kealib: '>=1.5.2,<1.6.0a0' + lerc: '>=4.0.0,<5.0a0' + libaec: '>=1.1.2,<2.0a0' + libarchive: '>=3.7.2,<3.8.0a0' + libcurl: '>=8.5.0,<9.0a0' + libdeflate: '>=1.19,<1.20.0a0' + libexpat: '>=2.5.0,<3.0a0' + libgcc-ng: '>=12' + libiconv: '>=1.17,<2.0a0' + libjpeg-turbo: '>=3.0.0,<4.0a0' libkml: '>=1.3.0,<1.4.0a0' - libpq: '>=17.0,<18.0a0' + libnetcdf: '>=4.9.2,<4.9.3.0a0' + libpng: '>=1.6.39,<1.7.0a0' + libpq: '>=16.1,<17.0a0' + libspatialite: '>=5.1.0,<5.2.0a0' + libsqlite: '>=3.44.2,<4.0a0' + libstdcxx-ng: '>=12' + libtiff: '>=4.6.0,<4.8.0a0' + libuuid: '>=2.38.1,<3.0a0' + libwebp-base: '>=1.3.2,<2.0a0' + libxml2: '>=2.12.2,<3.0.0a0' + libzlib: '>=1.2.13,<2.0.0a0' + lz4-c: '>=1.9.3,<1.10.0a0' + openjpeg: '>=2.5.0,<3.0a0' + openssl: '>=3.2.0,<4.0a0' + pcre2: '>=10.42,<10.43.0a0' + poppler: '>=23.12.0,<23.13.0a0' postgresql: '' - url: https://conda.anaconda.org/conda-forge/osx-arm64/libgdal-postgisraster-3.9.2-h6a0b679_7.conda - hash: - md5: f044c31cdd36806e627e23329c6089b0 - sha256: d9b9dfece530a470e957c188c8452082d387a6b5666bafa640aed6694e4b4265 - category: main - optional: false -- name: libgdal-tiledb - version: 3.9.2 - manager: conda - platform: osx-arm64 - dependencies: - __osx: '>=11.0' - libcxx: 
'>=17' - libgdal-core: '>=3.9' - libkml: '>=1.3.0,<1.4.0a0' - tiledb: '>=2.26.1,<2.27.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/libgdal-tiledb-3.9.2-h27a95ea_5.conda + proj: '>=9.3.1,<9.3.2.0a0' + tiledb: '>=2.18.2,<2.19.0a0' + xerces-c: '>=3.2.4,<3.3.0a0' + xz: '>=5.2.6,<6.0a0' + zstd: '>=1.5.5,<1.6.0a0' + url: https://conda.anaconda.org/conda-forge/linux-64/libgdal-3.8.1-hed8bd54_4.conda hash: - md5: 7dae135e8015efc11f75c8c37fd13035 - sha256: a3a782d78068693f9f9de2c2be4eb87e5f89fd976888ecd46125e2c96a0ff688 + md5: 32e453fb234a3534069396da161b145b + sha256: 8461bd176f6b526d856c28ad42c0899683211104c0609bbba5b897466597a34c category: main optional: false -- name: libgdal-xls - version: 3.9.2 +- name: libgdal-core + version: 3.10.0 manager: conda platform: osx-arm64 dependencies: __osx: '>=11.0' - freexl: '>=2.0.0,<3.0a0' - libcxx: '>=17' - libgdal-core: '>=3.9' + blosc: '>=1.21.6,<2.0a0' + geos: '>=3.13.0,<3.13.1.0a0' + geotiff: '>=1.7.3,<1.8.0a0' + giflib: '>=5.2.2,<5.3.0a0' + json-c: '>=0.18,<0.19.0a0' + lerc: '>=4.0.0,<5.0a0' + libarchive: '>=3.7.7,<3.8.0a0' + libcurl: '>=8.10.1,<9.0a0' + libcxx: '>=18' + libdeflate: '>=1.22,<1.23.0a0' + libexpat: '>=2.6.4,<3.0a0' + libheif: '>=1.18.2,<1.19.0a0' + libiconv: '>=1.17,<2.0a0' + libjpeg-turbo: '>=3.0.0,<4.0a0' libkml: '>=1.3.0,<1.4.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/libgdal-xls-3.9.2-habc1c91_7.conda + liblzma: '>=5.6.3,<6.0a0' + libpng: '>=1.6.44,<1.7.0a0' + libspatialite: '>=5.1.0,<5.2.0a0' + libsqlite: '>=3.47.2,<4.0a0' + libtiff: '>=4.7.0,<4.8.0a0' + libwebp-base: '>=1.4.0,<2.0a0' + libxml2: '>=2.13.5,<3.0a0' + libzlib: '>=1.3.1,<2.0a0' + lz4-c: '>=1.9.3,<1.10.0a0' + openssl: '>=3.4.0,<4.0a0' + pcre2: '>=10.44,<10.45.0a0' + proj: '>=9.5.1,<9.6.0a0' + xerces-c: '>=3.2.5,<3.3.0a0' + zstd: '>=1.5.6,<1.6.0a0' + url: https://conda.anaconda.org/conda-forge/osx-arm64/libgdal-core-3.10.0-h9ccd308_6.conda hash: - md5: 09290c8b53af1b977967ad9a4734a0e2 - sha256: 
ad62f074cd24ebf915b2e715e2d2a1e315795672444b7be1be0c6ddd7f51b0e4 + md5: cd753bb7543fc897ceaedcdabe8d580f + sha256: d44ed8fa3feff35d7f8213046b686942e087dc026dfb5d8dc2484d968d0843df category: main optional: false - name: libgettextpo @@ -8055,15 +7797,15 @@ package: category: main optional: false - name: libgfortran - version: 14.1.0 + version: 14.2.0 manager: conda platform: linux-64 dependencies: - libgfortran5: 14.1.0 - url: https://conda.anaconda.org/conda-forge/linux-64/libgfortran-14.1.0-h69a702a_1.conda + libgfortran5: 14.2.0 + url: https://conda.anaconda.org/conda-forge/linux-64/libgfortran-14.2.0-h69a702a_1.conda hash: - md5: 591e631bc1ae62c64f2ab4f66178c097 - sha256: ed77f04f873e43a26e24d443dd090631eedc7d0ace3141baaefd96a123e47535 + md5: f1fd30127802683586f768875127a987 + sha256: fc9e7f22a17faf74da904ebfc4d88699013d2992e55505e4aa0eb01770290977 category: main optional: false - name: libgfortran @@ -8079,27 +7821,27 @@ package: category: main optional: false - name: libgfortran-ng - version: 14.1.0 + version: 14.2.0 manager: conda platform: linux-64 dependencies: - libgfortran: 14.1.0 - url: https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-14.1.0-h69a702a_1.conda + libgfortran: 14.2.0 + url: https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-14.2.0-h69a702a_1.conda hash: - md5: 16cec94c5992d7f42ae3f9fa8b25df8d - sha256: a2dc35cb7f87bb5beebf102d4085574c6a740e1df58e743185d4434cc5e4e0ae + md5: 0a7f4cd238267c88e5d69f7826a407eb + sha256: 423f1e2403f0c665748e42d335e421e53fd03c08d457cfb6f360d329d9459851 category: main optional: false - name: libgfortran5 - version: 14.1.0 + version: 14.2.0 manager: conda platform: linux-64 dependencies: - libgcc: '>=14.1.0' - url: https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-14.1.0-hc5f4f2c_1.conda + libgcc: '>=14.2.0' + url: https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-14.2.0-hd5240d6_1.conda hash: - md5: 10a0cef64b784d6ab6da50ebca4e984d - sha256: 
c40d7db760296bf9c776de12597d2f379f30e890b9ae70c1de962ff2aa1999f6 + md5: 9822b874ea29af082e5d36098d25427d + sha256: d149a37ca73611e425041f33b9d8dbed6e52ec506fe8cc1fc0ee054bddeb6d5d category: main optional: false - name: libgfortran5 @@ -8133,7 +7875,7 @@ package: category: main optional: false - name: libglib - version: 2.82.1 + version: 2.82.2 manager: conda platform: osx-arm64 dependencies: @@ -8143,10 +7885,10 @@ package: libintl: '>=0.22.5,<1.0a0' libzlib: '>=1.3.1,<2.0a0' pcre2: '>=10.44,<10.45.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/libglib-2.82.1-h4821c08_1.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/libglib-2.82.2-h07bd6cf_0.conda hash: - md5: 277cf745965bba2d70dbeec422cbff40 - sha256: 5494aefb97f3e0f7cbc10ab3573e227dcb436c77d104ecd3c29e6d7543c32eb5 + md5: 890783f64502fa6bfcdc723cfbf581b4 + sha256: 101fb31c509d6a69ac5d612b51d4088ddbc675fca18cf0c3589cfee26cd01ca0 category: main optional: false - name: libgoogle-cloud @@ -8169,25 +7911,25 @@ package: category: main optional: false - name: libgoogle-cloud - version: 2.29.0 + version: 2.30.0 manager: conda platform: osx-arm64 dependencies: __osx: '>=11.0' - libabseil: '>=20240116.2,<20240117.0a0' - libcurl: '>=8.9.1,<9.0a0' + libabseil: '>=20240722.0,<20240723.0a0' + libcurl: '>=8.10.1,<9.0a0' libcxx: '>=17' - libgrpc: '>=1.62.2,<1.63.0a0' - libprotobuf: '>=4.25.3,<4.25.4.0a0' + libgrpc: '>=1.65.5,<1.66.0a0' + libprotobuf: '>=5.27.5,<5.27.6.0a0' openssl: '>=3.3.2,<4.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/libgoogle-cloud-2.29.0-hfa33a2f_0.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/libgoogle-cloud-2.30.0-h2e6cea1_0.conda hash: - md5: f78c7bd435ee45f4661daae9e81ddf13 - sha256: 1f42048702d773a355d276d24313ac63781a331959fc3662c6be36e979d7845c + md5: be857dc2a7d747d9aa191ed6c701bde7 + sha256: 2c58299d8275cfcf575166ba59baa9ac2b32c0c5a2677ee7a51e1d67b2d28f92 category: main optional: false - name: libgoogle-cloud-storage - version: 2.29.0 + 
version: 2.30.0 manager: conda platform: osx-arm64 dependencies: @@ -8196,13 +7938,13 @@ package: libcrc32c: '>=1.1.2,<1.2.0a0' libcurl: '' libcxx: '>=17' - libgoogle-cloud: 2.29.0 + libgoogle-cloud: 2.30.0 libzlib: '>=1.3.1,<2.0a0' openssl: '' - url: https://conda.anaconda.org/conda-forge/osx-arm64/libgoogle-cloud-storage-2.29.0-h90fd6fa_0.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/libgoogle-cloud-storage-2.30.0-h90fd6fa_0.conda hash: - md5: baee0b9cb1c5319f370a534ca5a16267 - sha256: ec80383fbb6fae95d2ff7d04ba46b282ab48219b7ce85b3cd5ee7d0d8bae74e1 + md5: 34381339cf47d7af329026d1474f30ff + sha256: 1c531f3f5867c5ec9d3d8a7f0babee5ca106f6bf39510b277503d9aea55afeae category: main optional: false - name: libgrpc @@ -8226,37 +7968,56 @@ package: category: main optional: false - name: libgrpc - version: 1.62.2 + version: 1.65.5 manager: conda platform: osx-arm64 dependencies: - c-ares: '>=1.28.1,<2.0a0' - libabseil: '>=20240116.1,<20240117.0a0' - libcxx: '>=16' - libprotobuf: '>=4.25.3,<4.25.4.0a0' + __osx: '>=11.0' + c-ares: '>=1.33.1,<2.0a0' + libabseil: '>=20240722.0,<20240723.0a0' + libcxx: '>=17' + libprotobuf: '>=5.27.5,<5.27.6.0a0' libre2-11: '>=2023.9.1' - libzlib: '>=1.2.13,<2.0.0a0' - openssl: '>=3.2.1,<4.0a0' + libzlib: '>=1.3.1,<2.0a0' + openssl: '>=3.3.2,<4.0a0' re2: '' - url: https://conda.anaconda.org/conda-forge/osx-arm64/libgrpc-1.62.2-h9c18a4f_0.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/libgrpc-1.65.5-h3d9cf25_0.conda + hash: + md5: b829a3509f5d89b21fa481ebc8edd953 + sha256: a92096af0fa67bb03fe2d40dfb11e7746603842a78fddce9f06e3ced9d93b61e + category: main + optional: false +- name: libheif + version: 1.18.2 + manager: conda + platform: osx-arm64 + dependencies: + __osx: '>=11.0' + aom: '>=3.9.1,<3.10.0a0' + dav1d: '>=1.2.1,<1.2.2.0a0' + libavif16: '>=1.1.1,<2.0a0' + libcxx: '>=16' + libde265: '>=1.0.15,<1.0.16.0a0' + x265: '>=3.5,<3.6.0a0' + url: 
https://conda.anaconda.org/conda-forge/osx-arm64/libheif-1.18.2-gpl_he913df3_100.conda hash: - md5: e624fc11026dbb84c549435eccd08623 - sha256: d2c5b5a828f6f1242c11e8c91968f48f64446f7dd5cbfa1197545e465eb7d47a + md5: 29911afbc2ec42a42914d5255dea52e6 + sha256: 34a70c5889989013b199c6266a30362539af9e24211a6963a0cb0d7ba786f12d category: main optional: false - name: libhwloc - version: 2.11.1 + version: 2.11.2 manager: conda platform: linux-64 dependencies: __glibc: '>=2.17,<3.0.a0' - libgcc-ng: '>=12' - libstdcxx-ng: '>=12' + libgcc: '>=13' + libstdcxx: '>=13' libxml2: '>=2.12.7,<3.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/libhwloc-2.11.1-default_hecaa2ac_1000.conda + url: https://conda.anaconda.org/conda-forge/linux-64/libhwloc-2.11.2-default_he43201b_1000.conda hash: - md5: f54aeebefb5c5ff84eca4fb05ca8aa3a - sha256: 8473a300e10b79557ce0ac81602506b47146aff3df4cc3568147a7dd07f480a2 + md5: 36247217c4e1018085bd9db41eb3526a + sha256: 75be8732e6f94ff2faa129f44ec4970275e1d977559b0c2fb75b7baa5347e16b category: main optional: false - name: libiconv @@ -8357,10 +8118,10 @@ package: platform: linux-64 dependencies: libblas: 3.9.0 - url: https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-24_linux64_openblas.conda + url: https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-25_linux64_openblas.conda hash: - md5: fd540578678aefe025705f4b58b36b2e - sha256: a15da20c3c0fb5f356e5b4e2f1e87b0da11b9a46805a7f2609bf30f23453831a + md5: 4dc03a53fc69371a6158d0ed37214cd3 + sha256: 9d1ff017714edb2d84868f0f931a4a0e7c289a971062b2ac66cfc8145df7e20e category: main optional: false - name: liblapack @@ -8369,10 +8130,62 @@ package: platform: osx-arm64 dependencies: libblas: 3.9.0 - url: https://conda.anaconda.org/conda-forge/osx-arm64/liblapack-3.9.0-24_osxarm64_openblas.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/liblapack-3.9.0-25_osxarm64_openblas.conda hash: - md5: 49a3241f76cdbe705e346204a328f66c - sha256: 
67fbfd0466eee443cda9596ed22daabedc96b7b4d1b31f49b1c1b0983dd1dd2c + md5: 19bbddfec972d401838330453186108d + sha256: fdd742407672a9af20e70764550cf18b3ab67f12e48bf04163b90492fbc401e7 + category: main + optional: false +- name: liblzma + version: 5.6.3 + manager: conda + platform: linux-64 + dependencies: + __glibc: '>=2.17,<3.0.a0' + libgcc: '>=13' + url: https://conda.anaconda.org/conda-forge/linux-64/liblzma-5.6.3-hb9d3cd8_1.conda + hash: + md5: 2ecf2f1c7e4e21fcfe6423a51a992d84 + sha256: e6e425252f3839e2756e4af1ea2074dffd3396c161bf460629f9dfd6a65f15c6 + category: main + optional: false +- name: liblzma + version: 5.6.3 + manager: conda + platform: osx-arm64 + dependencies: + __osx: '>=11.0' + url: https://conda.anaconda.org/conda-forge/osx-arm64/liblzma-5.6.3-h39f12f2_1.conda + hash: + md5: b2553114a7f5e20ccd02378a77d836aa + sha256: d863b8257406918ffdc50ae65502f2b2d6cede29404d09a094f59509d6a0aaf1 + category: main + optional: false +- name: liblzma-devel + version: 5.6.3 + manager: conda + platform: linux-64 + dependencies: + __glibc: '>=2.17,<3.0.a0' + libgcc: '>=13' + liblzma: 5.6.3 + url: https://conda.anaconda.org/conda-forge/linux-64/liblzma-devel-5.6.3-hb9d3cd8_1.conda + hash: + md5: cc4687e1814ed459f3bd6d8e05251ab2 + sha256: ca17f037a0a7137874597866a171166677e4812a9a8a853007f0f582e3ff6d1d + category: main + optional: false +- name: liblzma-devel + version: 5.6.3 + manager: conda + platform: osx-arm64 + dependencies: + __osx: '>=11.0' + liblzma: 5.6.3 + url: https://conda.anaconda.org/conda-forge/osx-arm64/liblzma-devel-5.6.3-h39f12f2_1.conda + hash: + md5: 692ccac07529215d42c051c6a60bc5a5 + sha256: c785d43d4758e18153b502c7d7d3a9181f3c95b2ae64a389fe49af5bf3a53f05 category: main optional: false - name: libmagma @@ -8437,63 +8250,39 @@ package: sha256: 055572a4c8a1c3f9ac60071ee678f5ea49cfd7ac60a636d817988a6f9d6de6ae category: main optional: false -- name: libnetcdf - version: 4.9.2 - manager: conda - platform: osx-arm64 - dependencies: - __osx: '>=11.0' - blosc: 
'>=1.21.5,<2.0a0' - bzip2: '>=1.0.8,<2.0a0' - hdf4: '>=4.2.15,<4.2.16.0a0' - hdf5: '>=1.14.3,<1.14.4.0a0' - libaec: '>=1.1.3,<2.0a0' - libcurl: '>=8.8.0,<9.0a0' - libcxx: '>=16' - libxml2: '>=2.12.7,<3.0a0' - libzip: '>=1.10.1,<2.0a0' - libzlib: '>=1.2.13,<2.0a0' - openssl: '>=3.3.1,<4.0a0' - zlib: '' - zstd: '>=1.5.6,<1.6.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/libnetcdf-4.9.2-nompi_he469be0_114.conda - hash: - md5: 8fd3ce6d910ed831c130c391c4364d3f - sha256: aeac591ba859f9cf775993e8b7f21e50803405d41ef363dc4981d114e8df88a8 - category: main - optional: false - name: libnghttp2 - version: 1.58.0 + version: 1.64.0 manager: conda platform: linux-64 dependencies: - c-ares: '>=1.23.0,<2.0a0' + __glibc: '>=2.17,<3.0.a0' + c-ares: '>=1.32.3,<2.0a0' libev: '>=4.33,<5.0a0' - libgcc-ng: '>=12' - libstdcxx-ng: '>=12' - libzlib: '>=1.2.13,<2.0.0a0' - openssl: '>=3.2.0,<4.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.58.0-h47da74e_1.conda + libgcc: '>=13' + libstdcxx: '>=13' + libzlib: '>=1.3.1,<2.0a0' + openssl: '>=3.3.2,<4.0a0' + url: https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.64.0-h161d5f1_0.conda hash: - md5: 700ac6ea6d53d5510591c4344d5c989a - sha256: 1910c5306c6aa5bcbd623c3c930c440e9c77a5a019008e1487810e3c1d3716cb + md5: 19e57602824042dfd0446292ef90488b + sha256: b0f2b3695b13a989f75d8fd7f4778e1c7aabe3b36db83f0fe80b2cd812c0e975 category: main optional: false - name: libnghttp2 - version: 1.58.0 + version: 1.64.0 manager: conda platform: osx-arm64 dependencies: - __osx: '>=10.9' - c-ares: '>=1.23.0,<2.0a0' - libcxx: '>=16.0.6' + __osx: '>=11.0' + c-ares: '>=1.34.2,<2.0a0' + libcxx: '>=17' libev: '>=4.33,<5.0a0' - libzlib: '>=1.2.13,<2.0.0a0' - openssl: '>=3.2.0,<4.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/libnghttp2-1.58.0-ha4dd798_1.conda + libzlib: '>=1.3.1,<2.0a0' + openssl: '>=3.3.2,<4.0a0' + url: https://conda.anaconda.org/conda-forge/osx-arm64/libnghttp2-1.64.0-h6d7220d_0.conda hash: - 
md5: 1813e066bfcef82de579a0be8a766df4 - sha256: fc97aaaf0c6d0f508be313d86c2705b490998d382560df24be918b8e977802cd + md5: 3408c02539cee5f1141f9f11450b6a51 + sha256: 00cc685824f39f51be5233b54e19f45abd60de5d8847f1a56906f8936648b72f category: main optional: false - name: libnsl @@ -8532,48 +8321,49 @@ package: category: main optional: false - name: libopenblas - version: 0.3.27 + version: 0.3.28 manager: conda platform: linux-64 dependencies: - libgcc-ng: '>=12' - libgfortran-ng: '' - libgfortran5: '>=12.3.0' - url: https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.27-pthreads_hac2b453_1.conda + __glibc: '>=2.17,<3.0.a0' + libgcc: '>=14' + libgfortran: '' + libgfortran5: '>=14.2.0' + url: https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.28-pthreads_h94d23a6_1.conda hash: - md5: ae05ece66d3924ac3d48b4aa3fa96cec - sha256: 714cb82d7c4620ea2635a92d3df263ab841676c9b183d0c01992767bb2451c39 + md5: 62857b389e42b36b686331bec0922050 + sha256: 99ba271d8a80a1af2723f2e124ffd91d850074c0389c067e6d96d72a2dbfeabe category: main optional: false - name: libopenblas - version: 0.3.27 + version: 0.3.28 manager: conda platform: osx-arm64 dependencies: __osx: '>=11.0' libgfortran: 5.* - libgfortran5: '>=12.3.0' - llvm-openmp: '>=16.0.6' - url: https://conda.anaconda.org/conda-forge/osx-arm64/libopenblas-0.3.27-openmp_h517c56d_1.conda + libgfortran5: '>=13.2.0' + llvm-openmp: '>=18.1.8' + url: https://conda.anaconda.org/conda-forge/osx-arm64/libopenblas-0.3.28-openmp_hf332438_1.conda hash: - md5: 71b8a34d70aa567a990162f327e81505 - sha256: 46cfcc592b5255262f567cd098be3c61da6bca6c24d640e878dc8342b0f6d069 + md5: 40803a48d947c8639da6704e9a44d3ce + sha256: 62bb669c37a845129096f73d446cdb6bb170e4927f2fea2b661329680dbbc373 category: main optional: false - name: libparquet - version: 17.0.0 + version: 18.0.0 manager: conda platform: osx-arm64 dependencies: __osx: '>=11.0' - libarrow: 17.0.0 - libcxx: '>=17' - libthrift: '>=0.20.0,<0.20.1.0a0' + libarrow: 18.0.0 + libcxx: 
'>=18' + libthrift: '>=0.21.0,<0.21.1.0a0' openssl: '>=3.3.2,<4.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/libparquet-17.0.0-hf0ba9ef_16_cpu.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/libparquet-18.0.0-hda0ea68_0_cpu.conda hash: - md5: 517ecf2ee0c2822e6120c258f3acd383 - sha256: 6ed28f06409b02a9f521ee5e8cf2f4d3fb63a7633c11f2ee7ec2880e78e184e5 + md5: b24b66fb60eacddddaa69532a7f37776 + sha256: 2b691ea4f0150dd1abbbd0321d3ec92315be9ad07d1e9f575175f042fbdddbe1 category: main optional: false - name: libpng @@ -8612,26 +8402,26 @@ package: krb5: '>=1.21.3,<1.22.0a0' libgcc: '>=13' openssl: '>=3.3.2,<4.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/libpq-16.4-h2d7952a_2.conda + url: https://conda.anaconda.org/conda-forge/linux-64/libpq-16.4-h2d7952a_3.conda hash: - md5: 76c891962472b55544b51c52bae15587 - sha256: 5d2c8b777175b2c3afbebffe94dbce7fc07198e324201cf693c560f23c9e4be6 + md5: 50e2dddb3417a419cbc2388d0b1c06f7 + sha256: 51dddb6e5879960a1b9b3c5de0eb970373903977c0fa68a42f86bb7197c695cf category: main optional: false - name: libpq - version: '17.0' + version: '17.2' manager: conda platform: osx-arm64 dependencies: __osx: '>=11.0' icu: '>=75.1,<76.0a0' krb5: '>=1.21.3,<1.22.0a0' - openldap: '>=2.6.8,<2.7.0a0' - openssl: '>=3.3.2,<4.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/libpq-17.0-h9fd3c6c_3.conda + openldap: '>=2.6.9,<2.7.0a0' + openssl: '>=3.4.0,<4.0a0' + url: https://conda.anaconda.org/conda-forge/osx-arm64/libpq-17.2-ha9b7db8_1.conda hash: - md5: 166c7f2d33bbbf9afb5bd5ae03a06230 - sha256: e314a678eb74ecc3d0625ed7be0ae68ba188d758419c4d3c6cb37ef685a88093 + md5: 59375b0b03548aee1d4d1a2c8a7348b3 + sha256: 364058029fec7f8bd27607359fa97773476cc9a7f798a3f9398efd682b5ffb8b category: main optional: false - name: libprotobuf @@ -8649,31 +8439,32 @@ package: category: main optional: false - name: libprotobuf - version: 4.25.3 + version: 5.27.5 manager: conda platform: osx-arm64 dependencies: __osx: '>=11.0' - 
libabseil: '>=20240116.2,<20240117.0a0' + libabseil: '>=20240722.0,<20240723.0a0' libcxx: '>=17' libzlib: '>=1.3.1,<2.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/libprotobuf-4.25.3-hc39d83c_1.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/libprotobuf-5.27.5-h53f8970_2.conda hash: - md5: fa77986d9170450c014586ab87e144f8 - sha256: f51bde2dfe73968ab3090c1098f520b65a8d8f11e945cb13bf74d19e30966b61 + md5: e9d021f82c48bb08b0b2c321b2f7778c + sha256: 787d86c041c03d33b24e28df5f881f47c74c3fe9053b791f14616dc51f32a687 category: main optional: false - name: libre2-11 - version: 2023.09.01 + version: 2024.07.02 manager: conda platform: osx-arm64 dependencies: - libabseil: '>=20240116.1,<20240117.0a0' - libcxx: '>=16' - url: https://conda.anaconda.org/conda-forge/osx-arm64/libre2-11-2023.09.01-h7b2c953_2.conda + __osx: '>=11.0' + libabseil: '>=20240722.0,<20240723.0a0' + libcxx: '>=17' + url: https://conda.anaconda.org/conda-forge/osx-arm64/libre2-11-2024.07.02-h2348fd5_1.conda hash: - md5: 0b7b2ced046d6b5fe6e9d46b1ee0324c - sha256: c8a0a6e7a627dc9c66ffb8858f8f6d499f67fd269b6636b25dc5169760610f05 + md5: 5a7065309a66097738be6a06fd04b7ef + sha256: 6facca42cfc85a05b33e484a8b0df7857cc092db34806946d022270098d8d20f category: main optional: false - name: librsvg @@ -8836,81 +8627,82 @@ package: category: main optional: false - name: libsqlite - version: 3.46.1 + version: 3.47.2 manager: conda platform: linux-64 dependencies: __glibc: '>=2.17,<3.0.a0' libgcc: '>=13' libzlib: '>=1.3.1,<2.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.46.1-hadc24fc_0.conda + url: https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.47.2-hee588c1_0.conda hash: - md5: 36f79405ab16bf271edb55b213836dac - sha256: 9851c049abafed3ee329d6c7c2033407e2fc269d33a75c071110ab52300002b0 + md5: b58da17db24b6e08bcbf8fed2fb8c915 + sha256: 48af21ebc2cbf358976f1e0f4a0ab9e91dfc83d0ef337cf3837c6f5bc22fb352 category: main optional: false - name: libsqlite - version: 
3.46.1 + version: 3.47.2 manager: conda platform: osx-arm64 dependencies: __osx: '>=11.0' libzlib: '>=1.3.1,<2.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/libsqlite-3.46.1-hc14010f_0.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/libsqlite-3.47.2-h3f77e49_0.conda hash: - md5: 58050ec1724e58668d0126a1615553fa - sha256: 3725f962f490c5d44dae326d5f5b2e3c97f71a6322d914ccc85b5ddc2e50d120 + md5: 122d6f29470f1a991e85608e77e56a8a + sha256: f192f3c8973de9ec4c214990715f13b781965247a5cedf9162e7f9e699cfc3c4 category: main optional: false - name: libssh2 - version: 1.11.0 + version: 1.11.1 manager: conda platform: linux-64 dependencies: - libgcc-ng: '>=12' - libzlib: '>=1.2.13,<2.0.0a0' - openssl: '>=3.1.1,<4.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.0-h0841786_0.conda + __glibc: '>=2.17,<3.0.a0' + libgcc: '>=13' + libzlib: '>=1.3.1,<2.0a0' + openssl: '>=3.4.0,<4.0a0' + url: https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.1-hf672d98_0.conda hash: - md5: 1f5a58e686b13bcfde88b93f547d23fe - sha256: 50e47fd9c4f7bf841a11647ae7486f65220cfc988ec422a4475fe8d5a823824d + md5: be2de152d8073ef1c01b7728475f2fe7 + sha256: 0407ac9fda2bb67e11e357066eff144c845801d00b5f664efbc48813af1e7bb9 category: main optional: false - name: libssh2 - version: 1.11.0 + version: 1.11.1 manager: conda platform: osx-arm64 dependencies: - libzlib: '>=1.2.13,<2.0.0a0' - openssl: '>=3.1.1,<4.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/libssh2-1.11.0-h7a5bd25_0.conda + libzlib: '>=1.3.1,<2.0a0' + openssl: '>=3.4.0,<4.0a0' + url: https://conda.anaconda.org/conda-forge/osx-arm64/libssh2-1.11.1-h9cc3647_0.conda hash: - md5: 029f7dc931a3b626b94823bc77830b01 - sha256: bb57d0c53289721fff1eeb3103a1c6a988178e88d8a8f4345b0b91a35f0e0015 + md5: ddc7194676c285513706e5fc64f214d7 + sha256: f7047c6ed44bcaeb04432e8c74da87591940d091b0a3940c0d884b7faa8062e9 category: main optional: false - name: libstdcxx - version: 14.1.0 + version: 14.2.0 
manager: conda platform: linux-64 dependencies: - libgcc: 14.1.0 - url: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-14.1.0-hc0a3c3a_1.conda + libgcc: 14.2.0 + url: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-14.2.0-hc0a3c3a_1.conda hash: - md5: 9dbb9699ea467983ba8a4ba89b08b066 - sha256: 44decb3d23abacf1c6dd59f3c152a7101b7ca565b4ef8872804ceaedcc53a9cd + md5: 234a5554c53625688d51062645337328 + sha256: 4661af0eb9bdcbb5fb33e5d0023b001ad4be828fccdcc56500059d56f9869462 category: main optional: false - name: libstdcxx-ng - version: 14.1.0 + version: 14.2.0 manager: conda platform: linux-64 dependencies: - libstdcxx: 14.1.0 - url: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-14.1.0-h4852527_1.conda + libstdcxx: 14.2.0 + url: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-14.2.0-h4852527_1.conda hash: - md5: bd2598399a70bb86d8218e95548d735e - sha256: a2dc44f97290740cc187bfe94ce543e6eb3c2ea8964d99f189a1d8c97b419b8c + md5: 8371ac6457591af2cf6159439c1fd051 + sha256: 25bb30b827d4f6d6f0522cc0579e431695503822f144043b93c50237017fffd8 category: main optional: false - name: libthrift @@ -8930,7 +8722,7 @@ package: category: main optional: false - name: libthrift - version: 0.20.0 + version: 0.21.0 manager: conda platform: osx-arm64 dependencies: @@ -8938,11 +8730,11 @@ package: libcxx: '>=17' libevent: '>=2.1.12,<2.1.13.0a0' libzlib: '>=1.3.1,<2.0a0' - openssl: '>=3.3.1,<4.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/libthrift-0.20.0-h64651cc_1.conda + openssl: '>=3.3.2,<4.0a0' + url: https://conda.anaconda.org/conda-forge/osx-arm64/libthrift-0.21.0-h64651cc_0.conda hash: - md5: 4cf2e5233320648397184415f380c891 - sha256: b6afcbc934258e0474e0f1059bc7b23865723b902062f2f2910e0370e6495401 + md5: 7ce2bd2f650f8c31ad7ba4c7bfea61b7 + sha256: 7a6c7d5f58cbbc2ccd6493b4b821639fdb0701b9b04c737a949e8cb6adf1c9ad category: main optional: false - name: libtiff @@ -8972,17 +8764,17 @@ package: dependencies: __osx: '>=11.0' 
lerc: '>=4.0.0,<5.0a0' - libcxx: '>=17' + libcxx: '>=18' libdeflate: '>=1.22,<1.23.0a0' libjpeg-turbo: '>=3.0.0,<4.0a0' + liblzma: '>=5.6.3,<6.0a0' libwebp-base: '>=1.4.0,<2.0a0' libzlib: '>=1.3.1,<2.0a0' - xz: '>=5.2.6,<6.0a0' zstd: '>=1.5.6,<1.6.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/libtiff-4.7.0-hfce79cd_1.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/libtiff-4.7.0-ha962b0a_2.conda hash: - md5: b9abf45f7c64caf3303725f1aa0e9a4d - sha256: 97ba24c74750b6e731b3fe0d2a751cda6148b4937d2cc3f72d43bf7b3885c39d + md5: 8e14b5225c593f099a21971568e6d7b4 + sha256: d9e6835fd189b85eb90dbfdcc51f5375decbf5bb53130042f49bbd6bfb0b24be category: main optional: false - name: libtorch @@ -8991,21 +8783,21 @@ package: platform: osx-arm64 dependencies: __osx: '>=11.0' - libabseil: '>=20240116.2,<20240117.0a0' + libabseil: '>=20240722.0,<20240723.0a0' libcblas: '>=3.9.0,<4.0a0' libcxx: '>=17' liblapack: '>=3.9.0,<4.0a0' - libprotobuf: '>=4.25.3,<4.25.4.0a0' - libuv: '>=1.49.0,<2.0a0' + libprotobuf: '>=5.27.5,<5.27.6.0a0' + libuv: '>=1.49.1,<2.0a0' llvm-openmp: '>=17.0.6' numpy: '>=1.19,<3' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* sleef: '>=3.7,<4.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/libtorch-2.4.1-cpu_generic_hd749476_0.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/libtorch-2.4.1-cpu_generic_hd6e7f6f_2.conda hash: - md5: d45112f7b12cfcc35134a07fcfdba219 - sha256: 929288dc1f15c5f7b52639e8bcd5125c6dc10e5e343ca5f7282ab80599e10a33 + md5: 29c831ebe3918eb3f6e0a1167595f470 + sha256: 7d12b77c94eb082225f46e2a59a4594a9560aee4d74bca61c8f5c613fa14fac0 category: main optional: false - name: libutf8proc @@ -9013,22 +8805,24 @@ package: manager: conda platform: linux-64 dependencies: - libgcc-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/libutf8proc-2.8.0-h166bdaf_0.tar.bz2 + __glibc: '>=2.17,<3.0.a0' + libgcc: '>=13' + url: 
https://conda.anaconda.org/conda-forge/linux-64/libutf8proc-2.8.0-hf23e847_1.conda hash: - md5: ede4266dc02e875fe1ea77b25dd43747 - sha256: 49082ee8d01339b225f7f8c60f32a2a2c05fe3b16f31b554b4fb2c1dea237d1c + md5: b1aa0faa95017bca11369bd080487ec4 + sha256: 104cf5b427fc914fec63e55f685a39480abeb4beb34bdbc77dea084c8f5a55cb category: main optional: false - name: libutf8proc version: 2.8.0 manager: conda platform: osx-arm64 - dependencies: {} - url: https://conda.anaconda.org/conda-forge/osx-arm64/libutf8proc-2.8.0-h1a8c8d9_0.tar.bz2 + dependencies: + __osx: '>=11.0' + url: https://conda.anaconda.org/conda-forge/osx-arm64/libutf8proc-2.8.0-hc098a78_1.conda hash: - md5: f8c9c41a122ab3abdf8943b13f4957ee - sha256: a3faddac08efd930fa3a1cc254b5053b4ed9428c49a888d437bf084d403c931a + md5: ed89b8bf0d74d23ce47bcf566dd36608 + sha256: 7807a98522477a8bf12460402845224f607ab6e1e73ac316b667169f5143cfe5 category: main optional: false - name: libuuid @@ -9044,15 +8838,15 @@ package: category: main optional: false - name: libuv - version: 1.49.1 + version: 1.49.2 manager: conda platform: osx-arm64 dependencies: __osx: '>=11.0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/libuv-1.49.1-h7ab814d_0.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/libuv-1.49.2-h7ab814d_0.conda hash: - md5: e3efd5e5ca0eaa06fd7619f9f1c80e9c - sha256: 47c3d7fad65258d13aa30967609310f8ff8b27b414bb8087fa60343b7e9fc400 + md5: 4bc348e3a1a74d20a3f9beb866d75e0a + sha256: 0e5176af1e788ad5006cf261c4ea5a288a935fda48993b0240ddd2e562dc3d02 category: main optional: false - name: libwebp @@ -9155,62 +8949,35 @@ package: category: main optional: false - name: libxml2 - version: 2.12.7 + version: 2.13.5 manager: conda platform: osx-arm64 dependencies: __osx: '>=11.0' icu: '>=75.1,<76.0a0' libiconv: '>=1.17,<2.0a0' + liblzma: '>=5.6.3,<6.0a0' libzlib: '>=1.3.1,<2.0a0' - xz: '>=5.2.6,<6.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/libxml2-2.12.7-h01dff8b_4.conda - hash: - md5: 
1265488dc5035457b729583119ad4a1b - sha256: a9a76cdc6e93c0182bc2ac58b1ea0152be1a16a5d23f4dc7b8df282a7aef8d20 - category: main - optional: false -- name: libxslt - version: 1.1.39 - manager: conda - platform: osx-arm64 - dependencies: - libxml2: '>=2.12.1,<3.0.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/libxslt-1.1.39-h223e5b9_0.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/libxml2-2.13.5-h178c5d8_1.conda hash: - md5: 560c9cacc33e927f55b998eaa0cb1732 - sha256: 2f1d99ef3fb960f23a63f06cf65ee621a5594a8b4616f35d9805be44617a92af + md5: 3dc3cff0eca1640a6acbbfab2f78139e + sha256: d7af3f25a4cece170502acd38f2dafbea4521f373f46dcb28a37fbe6ac2da544 category: main optional: false - name: libzip - version: 1.11.1 + version: 1.11.2 manager: conda platform: linux-64 dependencies: __glibc: '>=2.17,<3.0.a0' bzip2: '>=1.0.8,<2.0a0' - libgcc: '>=13' - libzlib: '>=1.3.1,<2.0a0' - openssl: '>=3.3.2,<4.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/libzip-1.11.1-hf83b1b0_0.conda - hash: - md5: e8536ec89df2aec5f65fefcf4ccd58ba - sha256: d2b20d0a307beef9d313f56cfcf3ce74d1a53b728124cecee0b3bea657bbf30b - category: main - optional: false -- name: libzip - version: 1.11.1 - manager: conda - platform: osx-arm64 - dependencies: - __osx: '>=11.0' - bzip2: '>=1.0.8,<2.0a0' + libgcc: '>=13' libzlib: '>=1.3.1,<2.0a0' openssl: '>=3.3.2,<4.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/libzip-1.11.1-hfc4440f_0.conda + url: https://conda.anaconda.org/conda-forge/linux-64/libzip-1.11.2-h6991a6a_0.conda hash: - md5: 5651a1c56eeaf4237d80aef6e9def33a - sha256: bd7f60bc8c31c9f61b1852703e129eeef6adb8c2c55ecd47ca4c50a24043c99f + md5: a7b27c075c9b7f459f1c022090697cba + sha256: 991e7348b0f650d495fb6d8aa9f8c727bdf52dabf5853c0cc671439b160dce48 category: main optional: false - name: libzlib @@ -9239,27 +9006,27 @@ package: category: main optional: false - name: llvm-openmp - version: 19.1.1 + version: 19.1.5 manager: conda platform: linux-64 dependencies: 
__glibc: '>=2.17,<3.0.a0' - url: https://conda.anaconda.org/conda-forge/linux-64/llvm-openmp-19.1.1-h024ca30_1.conda + url: https://conda.anaconda.org/conda-forge/linux-64/llvm-openmp-19.1.5-h024ca30_0.conda hash: - md5: ea889be010d5d66a7e6dd5e1b04c70d7 - sha256: 780739b625ce1836fde67884b34abb6e193402de297d25aab81c21467210fd74 + md5: dc90d15c25a57f641f0b84c271e4761e + sha256: e319db1e18dabe23ddeb4a1e04ff1ab5e331069a5a558891ffeb60c8b76d5e6a category: main optional: false - name: llvm-openmp - version: 19.1.1 + version: 19.1.5 manager: conda platform: osx-arm64 dependencies: __osx: '>=11.0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/llvm-openmp-19.1.1-hb52a8e5_1.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/llvm-openmp-19.1.5-hdb05f8b_0.conda hash: - md5: 6eab363cb011e739cf6f3bb92b763525 - sha256: bac90d68cd6a1b5f0ae21e900715d425b02a3be8f6199a5e2dbcb126d8525a6e + md5: f2c2e187a1d2637d282e34dc92021a70 + sha256: e7ba0d8b718925efdcf1309f5e776e3264cc172d3af8d4048b39627c50a1abc0 category: main optional: false - name: locket @@ -9403,31 +9170,31 @@ package: category: main optional: false - name: mako - version: 1.3.5 + version: 1.3.8 manager: conda platform: linux-64 dependencies: importlib-metadata: '' markupsafe: '>=0.9.2' - python: '>=3.6' - url: https://conda.anaconda.org/conda-forge/noarch/mako-1.3.5-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/mako-1.3.8-pyhd8ed1ab_0.conda hash: - md5: 29fddbfa0e2361636a98de4f46ead2ac - sha256: f0b982e18e31ad373dd8f22ef5ffa0ae112fc13c573a5eb614814b4081c3ddcb + md5: f34282b4bb9259eeee2a8a6f1c5ec235 + sha256: 0b4477623f78f15ec0521970a3f18c6308ed72f33dc44bbf8c3ccbfaaf7137a2 category: main optional: false - name: mako - version: 1.3.5 + version: 1.3.8 manager: conda platform: osx-arm64 dependencies: importlib-metadata: '' + python: '>=3.9' markupsafe: '>=0.9.2' - python: '>=3.6' - url: https://conda.anaconda.org/conda-forge/noarch/mako-1.3.5-pyhd8ed1ab_0.conda + 
url: https://conda.anaconda.org/conda-forge/noarch/mako-1.3.8-pyhd8ed1ab_0.conda hash: - md5: 29fddbfa0e2361636a98de4f46ead2ac - sha256: f0b982e18e31ad373dd8f22ef5ffa0ae112fc13c573a5eb614814b4081c3ddcb + md5: f34282b4bb9259eeee2a8a6f1c5ec235 + sha256: 0b4477623f78f15ec0521970a3f18c6308ed72f33dc44bbf8c3ccbfaaf7137a2 category: main optional: false - name: mapclassify @@ -9441,10 +9208,10 @@ package: python: '>=3.9' scikit-learn: '>=1.0' scipy: '>=1.8' - url: https://conda.anaconda.org/conda-forge/noarch/mapclassify-2.8.1-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/mapclassify-2.8.1-pyhd8ed1ab_1.conda hash: - md5: e75920f936efb86f64517d144d610107 - sha256: ce49505ac5c1d2d0bab6543b057c7cf698b0135ef92cd0eb151a41ea09d24c8c + md5: c48bbb2bcc3f9f46741a7915d67e6839 + sha256: c498a016b233be5a7defee443733a82d5fe41b83016ca8a136876a64fd15564b category: main optional: false - name: mapclassify @@ -9452,16 +9219,16 @@ package: manager: conda platform: osx-arm64 dependencies: - networkx: '>=2.7' - numpy: '>=1.23' - pandas: '>=1.4,!=1.5.0' python: '>=3.9' - scikit-learn: '>=1.0' + numpy: '>=1.23' scipy: '>=1.8' - url: https://conda.anaconda.org/conda-forge/noarch/mapclassify-2.8.1-pyhd8ed1ab_0.conda + scikit-learn: '>=1.0' + networkx: '>=2.7' + pandas: '>=1.4,!=1.5.0' + url: https://conda.anaconda.org/conda-forge/noarch/mapclassify-2.8.1-pyhd8ed1ab_1.conda hash: - md5: e75920f936efb86f64517d144d610107 - sha256: ce49505ac5c1d2d0bab6543b057c7cf698b0135ef92cd0eb151a41ea09d24c8c + md5: c48bbb2bcc3f9f46741a7915d67e6839 + sha256: c498a016b233be5a7defee443733a82d5fe41b83016ca8a136876a64fd15564b category: main optional: false - name: markdown @@ -9482,8 +9249,8 @@ package: manager: conda platform: osx-arm64 dependencies: - importlib-metadata: '>=4.4' python: '>=3.6' + importlib-metadata: '>=4.4' url: https://conda.anaconda.org/conda-forge/noarch/markdown-3.6-pyhd8ed1ab_0.conda hash: md5: 06e9bebf748a0dea03ecbe1f0e27e909 @@ -9496,11 +9263,11 @@ package: platform: 
linux-64 dependencies: mdurl: '>=0.1,<1' - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/markdown-it-py-3.0.0-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/markdown-it-py-3.0.0-pyhd8ed1ab_1.conda hash: - md5: 93a8e71256479c62074356ef6ebf501b - sha256: c041b0eaf7a6af3344d5dd452815cdc148d6284fec25a4fa3f4263b3a021e962 + md5: fee3164ac23dfca50cfcc8b85ddefb81 + sha256: 0fbacdfb31e55964152b24d5567e9a9996e1e7902fb08eb7d91b5fd6ce60803a category: main optional: false - name: markdown-it-py @@ -9508,16 +9275,16 @@ package: manager: conda platform: osx-arm64 dependencies: + python: '>=3.9' mdurl: '>=0.1,<1' - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/markdown-it-py-3.0.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/markdown-it-py-3.0.0-pyhd8ed1ab_1.conda hash: - md5: 93a8e71256479c62074356ef6ebf501b - sha256: c041b0eaf7a6af3344d5dd452815cdc148d6284fec25a4fa3f4263b3a021e962 + md5: fee3164ac23dfca50cfcc8b85ddefb81 + sha256: 0fbacdfb31e55964152b24d5567e9a9996e1e7902fb08eb7d91b5fd6ce60803a category: main optional: false - name: markupsafe - version: 3.0.1 + version: 3.0.2 manager: conda platform: linux-64 dependencies: @@ -9525,54 +9292,54 @@ package: libgcc: '>=13' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* - url: https://conda.anaconda.org/conda-forge/linux-64/markupsafe-3.0.1-py39h9399b63_1.conda + url: https://conda.anaconda.org/conda-forge/linux-64/markupsafe-3.0.2-py39h9399b63_1.conda hash: - md5: 0782842622e8dc374909a8c39bafe9f3 - sha256: 7e5408dafa67ea3c3e5657f5f4b90f086c1cdb9da17c1d18fd578b98bfb5e45e + md5: 7821f0938aa629b9f17efd98c300a487 + sha256: a8bce47de4572f46da0713f54bdf54a3ca7bb65d0fa3f5d94dd967f6db43f2e9 category: main optional: false - name: markupsafe - version: 3.0.1 + version: 3.0.2 manager: conda platform: osx-arm64 dependencies: __osx: '>=11.0' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* - url: 
https://conda.anaconda.org/conda-forge/osx-arm64/markupsafe-3.0.1-py39hf992724_1.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/markupsafe-3.0.2-py39hefdd603_1.conda hash: - md5: 4a06f43a6a4c54c73d22a43742e68c69 - sha256: f704c0393b7d9ae26e921430f1fe22bf7d9f10eb550daf84455fa3f5334cc6fd + md5: 4ab96cbd1bca81122f08b758397201b2 + sha256: a289c9f1ea3af6248c714f55b99382ecc78bc2a2a0bd55730fa25eaea6bc5d4a category: main optional: false - name: marshmallow - version: 3.22.0 + version: 3.23.1 manager: conda platform: linux-64 dependencies: packaging: '>=17.0' - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/marshmallow-3.22.0-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/marshmallow-3.23.1-pyhd8ed1ab_1.conda hash: - md5: 8fa11f6581dccf45bc0c6e664bd34268 - sha256: 7f29d717fd7a356a22ed1cd1b2c8c2324b4c442367001a4ab1d9bf467656e4fe + md5: 25e5de7b1a0da24f64dc0b8d5e5f1f2f + sha256: eeff31a9247dc1321bc8f3f4c77b5daaab508de96154ac3ae207288a450adcb7 category: main optional: false - name: marshmallow - version: 3.22.0 + version: 3.23.1 manager: conda platform: osx-arm64 dependencies: + python: '>=3.9' packaging: '>=17.0' - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/marshmallow-3.22.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/marshmallow-3.23.1-pyhd8ed1ab_1.conda hash: - md5: 8fa11f6581dccf45bc0c6e664bd34268 - sha256: 7f29d717fd7a356a22ed1cd1b2c8c2324b4c442367001a4ab1d9bf467656e4fe + md5: 25e5de7b1a0da24f64dc0b8d5e5f1f2f + sha256: eeff31a9247dc1321bc8f3f4c77b5daaab508de96154ac3ae207288a450adcb7 category: main optional: false - name: matplotlib-base - version: 3.9.2 + version: 3.9.3 manager: conda platform: linux-64 dependencies: @@ -9595,14 +9362,14 @@ package: python_abi: 3.9.* qhull: '>=2020.2,<2020.3.0a0' tk: '>=8.6.13,<8.7.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.9.2-py39h16632d1_1.conda + url: 
https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.9.3-py39h16632d1_0.conda hash: - md5: 83d48ae12dfd01615013e2e8ace6ff86 - sha256: 275e3828fe05e4dab34b9c478ee1145622b383425391b3470e2510869d4bb3af + md5: 93aa7d8c91f38dd494134f009cd0860c + sha256: 946b885997411dc7057305fc403926173831ddd5f30c2040acd338b7f006ca0d category: main optional: false - name: matplotlib-base - version: 3.9.2 + version: 3.9.3 manager: conda platform: osx-arm64 dependencies: @@ -9614,7 +9381,7 @@ package: freetype: '>=2.12.1,<3.0a0' importlib-resources: '>=3.2.0' kiwisolver: '>=1.3.1' - libcxx: '>=17' + libcxx: '>=18' numpy: '>=1.23' packaging: '>=20.0' pillow: '>=8' @@ -9623,10 +9390,10 @@ package: python-dateutil: '>=2.7' python_abi: 3.9.* qhull: '>=2020.2,<2020.3.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/matplotlib-base-3.9.2-py39hc57f556_1.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/matplotlib-base-3.9.3-py39h7251d6c_0.conda hash: - md5: 7c3fdc4315776fab67558692495da548 - sha256: 74487502ac95d2390bf01a68ce419648ee213af680aea340c2039dc2cd056d8d + md5: 2713040239f1411f78a553d8eb2736cc + sha256: 6206dde98ad72f86b5b5ef7645f1e0b0b56931f5411ec7bed98772b7dc2d57de category: main optional: false - name: matplotlib-inline @@ -9634,12 +9401,12 @@ package: manager: conda platform: linux-64 dependencies: - python: '>=3.6' + python: '>=3.9' traitlets: '' - url: https://conda.anaconda.org/conda-forge/noarch/matplotlib-inline-0.1.7-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/matplotlib-inline-0.1.7-pyhd8ed1ab_1.conda hash: - md5: 779345c95648be40d22aaa89de7d4254 - sha256: 7ea68676ea35fbb095420bbcc1c82c4767b8be7bb56abb6989b7f89d957a3bab + md5: af6ab708897df59bd6e7283ceab1b56b + sha256: 69b7dc7131703d3d60da9b0faa6dd8acbf6f6c396224cf6aef3e855b8c0c41c6 category: main optional: false - name: matplotlib-inline @@ -9647,12 +9414,12 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=3.6' traitlets: '' - url: 
https://conda.anaconda.org/conda-forge/noarch/matplotlib-inline-0.1.7-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/matplotlib-inline-0.1.7-pyhd8ed1ab_1.conda hash: - md5: 779345c95648be40d22aaa89de7d4254 - sha256: 7ea68676ea35fbb095420bbcc1c82c4767b8be7bb56abb6989b7f89d957a3bab + md5: af6ab708897df59bd6e7283ceab1b56b + sha256: 69b7dc7131703d3d60da9b0faa6dd8acbf6f6c396224cf6aef3e855b8c0c41c6 category: main optional: false - name: mdit-py-plugins @@ -9661,11 +9428,11 @@ package: platform: linux-64 dependencies: markdown-it-py: '>=1.0.0,<4.0.0' - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/mdit-py-plugins-0.4.2-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/mdit-py-plugins-0.4.2-pyhd8ed1ab_1.conda hash: - md5: 5387f2cfa28f8a3afa3368bb4ba201e8 - sha256: 5cedc99412278b37e9596f1f991d49f5a1663fe79767cf814a288134a1400ba9 + md5: af2060041d4f3250a7eb6ab3ec0e549b + sha256: c63ed79d9745109c0a70397713b0c07f06e7d3561abcb122cfc80a141ab3b449 category: main optional: false - name: mdit-py-plugins @@ -9673,12 +9440,12 @@ package: manager: conda platform: osx-arm64 dependencies: + python: '>=3.9' markdown-it-py: '>=1.0.0,<4.0.0' - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/mdit-py-plugins-0.4.2-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/mdit-py-plugins-0.4.2-pyhd8ed1ab_1.conda hash: - md5: 5387f2cfa28f8a3afa3368bb4ba201e8 - sha256: 5cedc99412278b37e9596f1f991d49f5a1663fe79767cf814a288134a1400ba9 + md5: af2060041d4f3250a7eb6ab3ec0e549b + sha256: c63ed79d9745109c0a70397713b0c07f06e7d3561abcb122cfc80a141ab3b449 category: main optional: false - name: mdurl @@ -9686,11 +9453,11 @@ package: manager: conda platform: linux-64 dependencies: - python: '>=3.6' - url: https://conda.anaconda.org/conda-forge/noarch/mdurl-0.1.2-pyhd8ed1ab_0.conda + python: '>=3.9' + url: 
https://conda.anaconda.org/conda-forge/noarch/mdurl-0.1.2-pyhd8ed1ab_1.conda hash: - md5: 776a8dd9e824f77abac30e6ef43a8f7a - sha256: 64073dfb6bb429d52fff30891877b48c7ec0f89625b1bf844905b66a81cce6e1 + md5: 592132998493b3ff25fd7479396e8351 + sha256: 78c1bbe1723449c52b7a9df1af2ee5f005209f67e40b6e1d3c7619127c43b1c7 category: main optional: false - name: mdurl @@ -9698,11 +9465,11 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=3.6' - url: https://conda.anaconda.org/conda-forge/noarch/mdurl-0.1.2-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/mdurl-0.1.2-pyhd8ed1ab_1.conda hash: - md5: 776a8dd9e824f77abac30e6ef43a8f7a - sha256: 64073dfb6bb429d52fff30891877b48c7ec0f89625b1bf844905b66a81cce6e1 + md5: 592132998493b3ff25fd7479396e8351 + sha256: 78c1bbe1723449c52b7a9df1af2ee5f005209f67e40b6e1d3c7619127c43b1c7 category: main optional: false - name: minizip @@ -9710,18 +9477,19 @@ package: manager: conda platform: linux-64 dependencies: + __glibc: '>=2.17,<3.0.a0' bzip2: '>=1.0.8,<2.0a0' - libgcc-ng: '>=12' + libgcc: '>=13' libiconv: '>=1.17,<2.0a0' - libstdcxx-ng: '>=12' + liblzma: '>=5.6.3,<6.0a0' + libstdcxx: '>=13' libzlib: '>=1.3.1,<2.0a0' - openssl: '>=3.3.1,<4.0a0' - xz: '>=5.2.6,<6.0a0' + openssl: '>=3.4.0,<4.0a0' zstd: '>=1.5.6,<1.6.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/minizip-4.0.7-h401b404_0.conda + url: https://conda.anaconda.org/conda-forge/linux-64/minizip-4.0.7-h05a5f5f_1.conda hash: - md5: 4474532a312b2245c5c77f1176989b46 - sha256: 6315ea87d094418e744deb79a22331718b36a0e6e107cd7fc3c52c7922bc8133 + md5: 0fe4805c6f7f7a2b9d6a705f1477b49f + sha256: 70a1e5fd0023b8783c27a4946c0031b82cbca9b02c30bd7ceda2edf1da8184c9 category: main optional: false - name: minizip @@ -9731,16 +9499,16 @@ package: dependencies: __osx: '>=11.0' bzip2: '>=1.0.8,<2.0a0' - libcxx: '>=16' + libcxx: '>=18' libiconv: '>=1.17,<2.0a0' + liblzma: '>=5.6.3,<6.0a0' libzlib: '>=1.3.1,<2.0a0' - openssl: 
'>=3.3.1,<4.0a0' - xz: '>=5.2.6,<6.0a0' + openssl: '>=3.4.0,<4.0a0' zstd: '>=1.5.6,<1.6.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/minizip-4.0.7-h27ee973_0.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/minizip-4.0.7-hff1a8ea_1.conda hash: - md5: 73dcdab1f21da49048a4f26d648c87a9 - sha256: 8216190bed8462758d1fea34964f4f46e6314e92696d8b6607bde588895663ad + md5: 0c71ae2e8621778b8ddf418826dc55e6 + sha256: 54d1b6bdf55d1783266a81682f55975f89cc222e1860b89147b9282f1c5e8d42 category: main optional: false - name: mistune @@ -9748,11 +9516,11 @@ package: manager: conda platform: linux-64 dependencies: - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/mistune-3.0.2-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/mistune-3.0.2-pyhd8ed1ab_1.conda hash: - md5: 5cbee699846772cc939bef23a0d524ed - sha256: f95cb70007e3cc2ba44e17c29a056b499e6dadf08746706d0c817c8e2f47e05c + md5: c46df05cae629e55426773ac1f85d68f + sha256: 0a9faaf1692b74f321cedbd37a44f108a1ec3f5d9638bc5bbf860cb3b6ff6db4 category: main optional: false - name: mistune @@ -9760,11 +9528,11 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/mistune-3.0.2-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/mistune-3.0.2-pyhd8ed1ab_1.conda hash: - md5: 5cbee699846772cc939bef23a0d524ed - sha256: f95cb70007e3cc2ba44e17c29a056b499e6dadf08746706d0c817c8e2f47e05c + md5: c46df05cae629e55426773ac1f85d68f + sha256: 0a9faaf1692b74f321cedbd37a44f108a1ec3f5d9638bc5bbf860cb3b6ff6db4 category: main optional: false - name: mkl @@ -9814,7 +9582,7 @@ package: category: main optional: false - name: mlflow - version: 2.16.2 + version: 2.18.0 manager: conda platform: linux-64 dependencies: @@ -9826,24 +9594,24 @@ package: jinja2: <4,>=2.11 markdown: <4,>=3.3 matplotlib-base: <4 - mlflow-ui: 2.16.2 + mlflow-ui: 2.18.0 numpy: <3 pandas: 
<3 prometheus_flask_exporter: <1 - pyarrow: <18,>=4.0.0 + pyarrow: <19,>=4.0.0 python_abi: 3.9.* querystring_parser: <2 scikit-learn: <2 scipy: <2 sqlalchemy: '>=1.4.0,<3' - url: https://conda.anaconda.org/conda-forge/linux-64/mlflow-2.16.2-hf3d152e_0.conda + url: https://conda.anaconda.org/conda-forge/linux-64/mlflow-2.18.0-hf3d152e_0.conda hash: - md5: b8a95fccda3c876667da7bf30f8925b5 - sha256: a9f9dff148148f5fda7099b5f287d826d869a0509712935f6bdb312e63b3f53f + md5: b4156c6aa77eb85b4f341c1f8aa7105b + sha256: ace0c883c19623dd4c20a92e5bdab52c460ae409cfe738affcb064bb5e9419cf category: main optional: false - name: mlflow - version: 2.16.2 + version: 2.18.0 manager: conda platform: osx-arm64 dependencies: @@ -9855,25 +9623,25 @@ package: jinja2: <4,>=2.11 markdown: <4,>=3.3 matplotlib-base: <4 - mlflow-ui: 2.16.2 + mlflow-ui: 2.18.0 numpy: <3 pandas: <3 prometheus_flask_exporter: <1 - pyarrow: <18,>=4.0.0 + pyarrow: <19,>=4.0.0 python: '>=3.9,<3.10.0a0' python_abi: 3.9.* querystring_parser: <2 scikit-learn: <2 scipy: <2 sqlalchemy: '>=1.4.0,<3' - url: https://conda.anaconda.org/conda-forge/osx-arm64/mlflow-2.16.2-py39h2804cbe_0.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/mlflow-2.18.0-py39h2804cbe_0.conda hash: - md5: bc23618915cf1a627f1380d5257dd906 - sha256: 196e0b9e441322083a4c7f731e0433c3a2a0eaf7aa30b165b86192eda97890c1 + md5: d374e612dd762d7463e0d8c0b4c98189 + sha256: 029fb3dc3f2c5174a027c57b57409047b6ecf7c2c65f8daf2f7e5ccc08f9f52c category: main optional: false - name: mlflow-skinny - version: 2.16.2 + version: 2.18.0 manager: conda platform: linux-64 dependencies: @@ -9887,21 +9655,21 @@ package: opentelemetry-api: <3,>=1.0.0 opentelemetry-sdk: <3,>=1.0.0 packaging: <25 - protobuf: '>=3.12.0,<5' + protobuf: '>=3.12.0,<6' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* pytz: <2025 pyyaml: '>=5.1,<7' requests: '>=2.17.3,<3' sqlparse: '>=0.4.0,<1' - url: https://conda.anaconda.org/conda-forge/linux-64/mlflow-skinny-2.16.2-py39hf3d152e_0.conda + url: 
https://conda.anaconda.org/conda-forge/linux-64/mlflow-skinny-2.18.0-py39hf3d152e_0.conda hash: - md5: 29170c7dac5865710c5448a206fdb656 - sha256: ea652ca072465c488d21f9bbe789db134ddf4f04727f1e9a7c1a6fc5f3b59b20 + md5: 2d7991d0268d6e6b6414591d97328fb4 + sha256: c9f17cfc07a1f0116a9ef25b3b259c034a18845bd20562fffca3f7dc5e249924 category: main optional: false - name: mlflow-skinny - version: 2.16.2 + version: 2.18.0 manager: conda platform: osx-arm64 dependencies: @@ -9915,51 +9683,51 @@ package: opentelemetry-api: <3,>=1.0.0 opentelemetry-sdk: <3,>=1.0.0 packaging: <25 - protobuf: '>=3.12.0,<5' + protobuf: '>=3.12.0,<6' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* pytz: <2025 pyyaml: '>=5.1,<7' requests: '>=2.17.3,<3' sqlparse: '>=0.4.0,<1' - url: https://conda.anaconda.org/conda-forge/osx-arm64/mlflow-skinny-2.16.2-py39h2804cbe_0.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/mlflow-skinny-2.18.0-py39h2804cbe_0.conda hash: - md5: ecfef958781f78fb0c6bb750fc7d5a16 - sha256: efff259f88b31a8280c20786887b39a4eba87e92e251563cd3d65521d27575c2 + md5: fb7baa1f770773347df555e454bed00f + sha256: 2e101f8c506d290e212e6d00365cef6027e4c7a9c69cd707dd642dd2ba270632 category: main optional: false - name: mlflow-ui - version: 2.16.2 + version: 2.18.0 manager: conda platform: linux-64 dependencies: flask: <4 gunicorn: <23 - mlflow-skinny: 2.16.2 + mlflow-skinny: 2.18.0 python: '>=3.9,<3.10.0a0' python_abi: 3.9.* querystring_parser: <2 - url: https://conda.anaconda.org/conda-forge/linux-64/mlflow-ui-2.16.2-py39hf3d152e_0.conda + url: https://conda.anaconda.org/conda-forge/linux-64/mlflow-ui-2.18.0-py39hf3d152e_0.conda hash: - md5: 375b2f44a689ee62df07816d50ab2811 - sha256: 1be21e5054a68dc56e87c51e6c51f952ea5dc1b3ad631d393d8ae7bafcfbb3dd + md5: c92b43bf3a051160777bdcb7da2a7db7 + sha256: 1686d8137f47e7c1805bfe69294ecf65fc4c5f53e7eded410d360e6174f8bcbc category: main optional: false - name: mlflow-ui - version: 2.16.2 + version: 2.18.0 manager: conda platform: osx-arm64 
dependencies: flask: <4 gunicorn: <23 - mlflow-skinny: 2.16.2 + mlflow-skinny: 2.18.0 python: '>=3.9,<3.10.0a0' python_abi: 3.9.* querystring_parser: <2 - url: https://conda.anaconda.org/conda-forge/osx-arm64/mlflow-ui-2.16.2-py39h2804cbe_0.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/mlflow-ui-2.18.0-py39h2804cbe_0.conda hash: - md5: dce36f858b439b4ea8b76641b5be133a - sha256: aadc581f0c2a50eb1e2bd132bbd6a52caab676a92e5c80da3257ff09b8d30d73 + md5: d8d88742bf5d3d4e3a5c570e485a6f04 + sha256: ae8e1c0050f839998c40f8fe5d9f2e61ecf1b72e051d626b8682851f56a1039c category: main optional: false - name: modin @@ -9969,10 +9737,10 @@ package: dependencies: modin-dask: 0.32.0 python: '>=3.9' - url: https://conda.anaconda.org/conda-forge/noarch/modin-0.32.0-hd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/modin-0.32.0-hd8ed1ab_1.conda hash: - md5: 5ef9b778b155cbb22fd7210e7e51a478 - sha256: 77344425261faa105d80c2911147aaccae62a82e6bb79eb2fe7ec5f5ecc70fe4 + md5: 66aaff37e8519e46d198a85aa0211369 + sha256: 22b7010993a257bbe135a75960c9e6e833a51e8d94aa12bbd76b0e34a8267003 category: main optional: false - name: modin @@ -9980,12 +9748,12 @@ package: manager: conda platform: osx-arm64 dependencies: - modin-dask: 0.32.0 python: '>=3.9' - url: https://conda.anaconda.org/conda-forge/noarch/modin-0.32.0-hd8ed1ab_0.conda + modin-dask: 0.32.0 + url: https://conda.anaconda.org/conda-forge/noarch/modin-0.32.0-hd8ed1ab_1.conda hash: - md5: 5ef9b778b155cbb22fd7210e7e51a478 - sha256: 77344425261faa105d80c2911147aaccae62a82e6bb79eb2fe7ec5f5ecc70fe4 + md5: 66aaff37e8519e46d198a85aa0211369 + sha256: 22b7010993a257bbe135a75960c9e6e833a51e8d94aa12bbd76b0e34a8267003 category: main optional: false - name: modin-core @@ -9999,10 +9767,10 @@ package: pandas: '>=2.2,<2.3' psutil: '>=5.8.0' python: '>=3.9' - url: https://conda.anaconda.org/conda-forge/noarch/modin-core-0.32.0-pyhd8ed1ab_0.conda + url: 
https://conda.anaconda.org/conda-forge/noarch/modin-core-0.32.0-pyhd8ed1ab_1.conda hash: - md5: aeb9a903fa4d493d9800cfb0275c95b7 - sha256: e5b688ce428cd09779ba0979eae33aebb59f1874dea5a747ca89df14a2c5807a + md5: ca2f5839673cb36d96212ff42cea7462 + sha256: 563c528f51a82a5a77a3f22427b98fd0c64c143f5c849c7eb2a90ca5e2f23a36 category: main optional: false - name: modin-core @@ -10010,16 +9778,16 @@ package: manager: conda platform: osx-arm64 dependencies: - fsspec: '>=2022.11.0' - numpy: '>=1.22.4,<2' + python: '>=3.9' packaging: '>=21.0' - pandas: '>=2.2,<2.3' psutil: '>=5.8.0' - python: '>=3.9' - url: https://conda.anaconda.org/conda-forge/noarch/modin-core-0.32.0-pyhd8ed1ab_0.conda + fsspec: '>=2022.11.0' + pandas: '>=2.2,<2.3' + numpy: '>=1.22.4,<2' + url: https://conda.anaconda.org/conda-forge/noarch/modin-core-0.32.0-pyhd8ed1ab_1.conda hash: - md5: aeb9a903fa4d493d9800cfb0275c95b7 - sha256: e5b688ce428cd09779ba0979eae33aebb59f1874dea5a747ca89df14a2c5807a + md5: ca2f5839673cb36d96212ff42cea7462 + sha256: 563c528f51a82a5a77a3f22427b98fd0c64c143f5c849c7eb2a90ca5e2f23a36 category: main optional: false - name: modin-dask @@ -10031,10 +9799,10 @@ package: distributed: '>=2.22.0' modin-core: 0.32.0 python: '>=3.9' - url: https://conda.anaconda.org/conda-forge/noarch/modin-dask-0.32.0-hd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/modin-dask-0.32.0-hd8ed1ab_1.conda hash: - md5: 68a14fb5f46045e60c6f4d8af8b2ecc0 - sha256: 12a85e1aa7afef2c0311ff03ce21d13d1cb33af58096a868ee2535d6ede12c96 + md5: 6909f33b0eb81be397e756a2ea9db032 + sha256: 07441e0c03871c7a976dc0fd29c9496fa32d6d3729b39e9268de3ef3ecfb51e0 category: main optional: false - name: modin-dask @@ -10042,14 +9810,14 @@ package: manager: conda platform: osx-arm64 dependencies: - dask: '>=2.22.0' + python: '>=3.9' distributed: '>=2.22.0' + dask: '>=2.22.0' modin-core: 0.32.0 - python: '>=3.9' - url: https://conda.anaconda.org/conda-forge/noarch/modin-dask-0.32.0-hd8ed1ab_0.conda + url: 
https://conda.anaconda.org/conda-forge/noarch/modin-dask-0.32.0-hd8ed1ab_1.conda hash: - md5: 68a14fb5f46045e60c6f4d8af8b2ecc0 - sha256: 12a85e1aa7afef2c0311ff03ce21d13d1cb33af58096a868ee2535d6ede12c96 + md5: 6909f33b0eb81be397e756a2ea9db032 + sha256: 07441e0c03871c7a976dc0fd29c9496fa32d6d3729b39e9268de3ef3ecfb51e0 category: main optional: false - name: monotonic @@ -10137,11 +9905,11 @@ package: manager: conda platform: linux-64 dependencies: - python: '>=3.6' - url: https://conda.anaconda.org/conda-forge/noarch/mpmath-1.3.0-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/mpmath-1.3.0-pyhd8ed1ab_1.conda hash: - md5: dbf6e2d89137da32fa6670f3bffc024e - sha256: a4f025c712ec1502a55c471b56a640eaeebfce38dd497d5a1a33729014cac47a + md5: 3585aa87c43ab15b167b574cd73b057b + sha256: 7d7aa3fcd6f42b76bd711182f3776a02bef09a68c5f117d66b712a6d81368692 category: main optional: false - name: mpmath @@ -10149,11 +9917,11 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=3.6' - url: https://conda.anaconda.org/conda-forge/noarch/mpmath-1.3.0-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/mpmath-1.3.0-pyhd8ed1ab_1.conda hash: - md5: dbf6e2d89137da32fa6670f3bffc024e - sha256: a4f025c712ec1502a55c471b56a640eaeebfce38dd497d5a1a33729014cac47a + md5: 3585aa87c43ab15b167b574cd73b057b + sha256: 7d7aa3fcd6f42b76bd711182f3776a02bef09a68c5f117d66b712a6d81368692 category: main optional: false - name: msgpack-python @@ -10197,10 +9965,10 @@ package: python: '>=3.9,<3.10.0a0' python_abi: 3.9.* typing-extensions: '' - url: https://conda.anaconda.org/conda-forge/linux-64/multidict-6.1.0-py39h8cd3c5a_0.conda + url: https://conda.anaconda.org/conda-forge/linux-64/multidict-6.1.0-py39h9399b63_2.conda hash: - md5: aee790a18fe2312124edb118e6cea2c0 - sha256: ccc30a354da887270d994eac3927eb81692bd8b617faedc258387a9bb3b55168 + md5: 4d59f2dd00df802a4825900be4402ea3 + sha256: 
f19e5e122311714532993be82b85a859084e2693978838698035044f7ac7f94a category: main optional: false - name: multidict @@ -10212,10 +9980,10 @@ package: python: '>=3.9,<3.10.0a0' python_abi: 3.9.* typing-extensions: '' - url: https://conda.anaconda.org/conda-forge/osx-arm64/multidict-6.1.0-py39ha5f49b9_0.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/multidict-6.1.0-py39hafbbd28_1.conda hash: - md5: 9f8ccc7e39c1fb27c08ae33273e4efdf - sha256: 31edde1d184f466f073836c79400a2b867475d298b73ea7427dc313491159046 + md5: 86ef19edffe1dd289b698f762de815c7 + sha256: dedfeb8ee259e238444591e1462b70e6d97b69049bbbc0cf0bf664cc8b729865 category: main optional: false - name: multimethod @@ -10300,11 +10068,11 @@ package: manager: conda platform: linux-64 dependencies: - python: '>=3.5' - url: https://conda.anaconda.org/conda-forge/noarch/mypy_extensions-1.0.0-pyha770c72_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/mypy_extensions-1.0.0-pyha770c72_1.conda hash: - md5: 4eccaeba205f0aed9ac3a9ea58568ca3 - sha256: f240217476e148e825420c6bc3a0c0efb08c0718b7042fae960400c02af858a3 + md5: 29097e7ea634a45cc5386b95cac6568f + sha256: 1895f47b7d68581a6facde5cb13ab8c2764c2e53a76bd746f8f98910dc4e08fe category: main optional: false - name: mypy_extensions @@ -10312,11 +10080,11 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=3.5' - url: https://conda.anaconda.org/conda-forge/noarch/mypy_extensions-1.0.0-pyha770c72_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/mypy_extensions-1.0.0-pyha770c72_1.conda hash: - md5: 4eccaeba205f0aed9ac3a9ea58568ca3 - sha256: f240217476e148e825420c6bc3a0c0efb08c0718b7042fae960400c02af858a3 + md5: 29097e7ea634a45cc5386b95cac6568f + sha256: 1895f47b7d68581a6facde5cb13ab8c2764c2e53a76bd746f8f98910dc4e08fe category: main optional: false - name: myst-nb @@ -10346,17 +10114,17 @@ package: manager: conda platform: osx-arm64 dependencies: + pyyaml: '' + typing_extensions: '' + 
ipython: '' importlib-metadata: '' ipykernel: '' - ipython: '' - jupyter-cache: '>=0.5' - myst-parser: '>=1.0.0' nbclient: '' - nbformat: '>=5.0' python: '>=3.9' - pyyaml: '' + nbformat: '>=5.0' sphinx: '>=5' - typing_extensions: '' + myst-parser: '>=1.0.0' + jupyter-cache: '>=0.5' url: https://conda.anaconda.org/conda-forge/noarch/myst-nb-1.1.2-pyhd8ed1ab_0.conda hash: md5: 38e1b2f0f62e9976cf9fe54a54258e3c @@ -10386,13 +10154,13 @@ package: manager: conda platform: osx-arm64 dependencies: - docutils: '>=0.18,<0.22' + pyyaml: '' jinja2: '' + python: '>=3.8' markdown-it-py: '>=3.0.0,<4.0.0' mdit-py-plugins: '>=0.4,<1' - python: '>=3.8' - pyyaml: '' sphinx: '>=6,<8' + docutils: '>=0.18,<0.22' url: https://conda.anaconda.org/conda-forge/noarch/myst-parser-3.0.1-pyhd8ed1ab_0.conda hash: md5: 7a1ab67ee32e0d58ce55134d7a56b8fe @@ -10404,15 +10172,15 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=3.6' - url: https://conda.anaconda.org/conda-forge/noarch/namex-0.0.8-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/namex-0.0.8-pyhd8ed1ab_1.conda hash: - md5: b96883bd4ee5a6aef4636674783a6d57 - sha256: 9ed4061868dfd2b97363ae3ee08f192b822d1f7f999c7c081deed43310b3e5af + md5: 6427624437ca3bcd8634f35dc74716fc + sha256: 6194f52e8ce2853174f3e3702e1d980718473277ed8b6a9f9e7148ceefbf35bf category: main optional: false - name: nbclient - version: 0.10.0 + version: 0.10.1 manager: conda platform: linux-64 dependencies: @@ -10421,26 +10189,26 @@ package: nbformat: '>=5.1' python: '>=3.8' traitlets: '>=5.4' - url: https://conda.anaconda.org/conda-forge/noarch/nbclient-0.10.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/nbclient-0.10.1-pyhd8ed1ab_0.conda hash: - md5: 15b51397e0fe8ea7d7da60d83eb76ebc - sha256: 589d72d36d61a23b39d6fff2c488f93e29e20de4fc6f5d315b5f2c16e81028bf + md5: 3ee79082e59a28e1db11e2a9c3bcd85a + sha256: 564e22c4048f2f00c7ee79417dea364f95cf069a1f2565dc26d5ece1fc3fd779 category: 
main optional: false - name: nbclient - version: 0.10.0 + version: 0.10.1 manager: conda platform: osx-arm64 dependencies: + python: '>=3.8' jupyter_client: '>=6.1.12' jupyter_core: '>=4.12,!=5.0.*' nbformat: '>=5.1' - python: '>=3.8' traitlets: '>=5.4' - url: https://conda.anaconda.org/conda-forge/noarch/nbclient-0.10.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/nbclient-0.10.1-pyhd8ed1ab_0.conda hash: - md5: 15b51397e0fe8ea7d7da60d83eb76ebc - sha256: 589d72d36d61a23b39d6fff2c488f93e29e20de4fc6f5d315b5f2c16e81028bf + md5: 3ee79082e59a28e1db11e2a9c3bcd85a + sha256: 564e22c4048f2f00c7ee79417dea364f95cf069a1f2565dc26d5ece1fc3fd779 category: main optional: false - name: nbconvert-core @@ -10465,10 +10233,10 @@ package: python: '>=3.8' tinycss2: '' traitlets: '>=5.0' - url: https://conda.anaconda.org/conda-forge/noarch/nbconvert-core-7.16.4-pyhd8ed1ab_1.conda + url: https://conda.anaconda.org/conda-forge/noarch/nbconvert-core-7.16.4-pyhff2d567_2.conda hash: - md5: e2d2abb421c13456a9a9f80272fdf543 - sha256: 074d858c5808e0a832acc0da37cd70de1565e8d6e17a62d5a11b3902b5e78319 + md5: 0457fdf55c88e52e0e7b63691eafcc48 + sha256: 03a1303ce135a8214b450e751d93c9048f55edb37f3f9f06c5e9d78ba3ef2a89 category: main optional: false - name: nbconvert-core @@ -10476,27 +10244,27 @@ package: manager: conda platform: osx-arm64 dependencies: + packaging: '' beautifulsoup4: '' - bleach: '' defusedxml: '' - entrypoints: '>=0.2.2' + bleach: '' + tinycss2: '' + jupyterlab_pygments: '' + python: '>=3.8' jinja2: '>=3.0' + entrypoints: '>=0.2.2' jupyter_core: '>=4.7' - jupyterlab_pygments: '' markupsafe: '>=2.0' - mistune: '>=2.0.3,<4' - nbclient: '>=0.5.0' + traitlets: '>=5.0' nbformat: '>=5.1' - packaging: '' pandocfilters: '>=1.4.1' pygments: '>=2.4.1' - python: '>=3.8' - tinycss2: '' - traitlets: '>=5.0' - url: https://conda.anaconda.org/conda-forge/noarch/nbconvert-core-7.16.4-pyhd8ed1ab_1.conda + nbclient: '>=0.5.0' + mistune: '>=2.0.3,<4' + url: 
https://conda.anaconda.org/conda-forge/noarch/nbconvert-core-7.16.4-pyhff2d567_2.conda hash: - md5: e2d2abb421c13456a9a9f80272fdf543 - sha256: 074d858c5808e0a832acc0da37cd70de1565e8d6e17a62d5a11b3902b5e78319 + md5: 0457fdf55c88e52e0e7b63691eafcc48 + sha256: 03a1303ce135a8214b450e751d93c9048f55edb37f3f9f06c5e9d78ba3ef2a89 category: main optional: false - name: nbformat @@ -10506,13 +10274,13 @@ package: dependencies: jsonschema: '>=2.6' jupyter_core: '>=4.12,!=5.0.*' - python: '>=3.8' + python: '>=3.9' python-fastjsonschema: '>=2.15' traitlets: '>=5.1' - url: https://conda.anaconda.org/conda-forge/noarch/nbformat-5.10.4-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/nbformat-5.10.4-pyhd8ed1ab_1.conda hash: - md5: 0b57b5368ab7fc7cdc9e3511fa867214 - sha256: 36fe73da4d37bc7ac2d1540526ecd294fbd09acda04e096181ab8f1ccd2b464c + md5: bbe1963f1e47f594070ffe87cdf612ea + sha256: 7a5bd30a2e7ddd7b85031a5e2e14f290898098dc85bea5b3a5bf147c25122838 category: main optional: false - name: nbformat @@ -10520,15 +10288,15 @@ package: manager: conda platform: osx-arm64 dependencies: - jsonschema: '>=2.6' + python: '>=3.9' jupyter_core: '>=4.12,!=5.0.*' - python: '>=3.8' - python-fastjsonschema: '>=2.15' traitlets: '>=5.1' - url: https://conda.anaconda.org/conda-forge/noarch/nbformat-5.10.4-pyhd8ed1ab_0.conda + jsonschema: '>=2.6' + python-fastjsonschema: '>=2.15' + url: https://conda.anaconda.org/conda-forge/noarch/nbformat-5.10.4-pyhd8ed1ab_1.conda hash: - md5: 0b57b5368ab7fc7cdc9e3511fa867214 - sha256: 36fe73da4d37bc7ac2d1540526ecd294fbd09acda04e096181ab8f1ccd2b464c + md5: bbe1963f1e47f594070ffe87cdf612ea + sha256: 7a5bd30a2e7ddd7b85031a5e2e14f290898098dc85bea5b3a5bf147c25122838 category: main optional: false - name: nccl @@ -10542,10 +10310,10 @@ package: libgcc-ng: '>=12' libstdcxx: '' libstdcxx-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/nccl-2.23.4.1-h03a54cd_0.conda + url: 
https://conda.anaconda.org/conda-forge/linux-64/nccl-2.23.4.1-h03a54cd_3.conda hash: - md5: 84df066b3b35c59a697af6066137b2a6 - sha256: 65129b24f10e70a17990adf374ec8d1eb028474c03402c173d5cad797208c3d6 + md5: 5ea398a88c7271b2e3ec56cd33da424f + sha256: 9a620c1f5c9e31b56e4e7771d9505da52970fd1c93aa9c581e5d008907c41c1f category: main optional: false - name: ncurses @@ -10574,7 +10342,7 @@ package: category: main optional: false - name: neptune - version: 1.12.0 + version: 1.13.0 manager: conda platform: linux-64 dependencies: @@ -10597,40 +10365,40 @@ package: swagger-spec-validator: '>=2.7.4' urllib3: '' websocket-client: '>=0.35.0' - url: https://conda.anaconda.org/conda-forge/noarch/neptune-1.12.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/neptune-1.13.0-pyhd8ed1ab_0.conda hash: - md5: 96fa16a96257d1f72b208b255be42cd3 - sha256: fac32c7a2acbee2eb7b81104732868f87bdc7d1264f2547faa37d82e19deac49 + md5: fcc75f04b6b0c45664ff49670f96f0df + sha256: c86526acf6c168b6740eefe2175de943473380612c929d51e34b3b827f401348 category: main optional: false - name: neptune - version: 1.12.0 + version: 1.13.0 manager: conda platform: osx-arm64 dependencies: - boto3: '>=1.28.0' - bravado: '>=11.0.0,<12.0.0' - click: '>=7.0' - future: '>=0.17.1' - gitpython: '>=2.0.8' - jsonschema-with-format: '' - oauthlib: '>=2.1.0' - packaging: '' pandas: '' - pillow: '>=1.1.6' + packaging: '' psutil: '' + urllib3: '' pyjwt: '' + jsonschema-with-format: '' python: '>=3.6' + click: '>=7.0' requests: '>=2.20.0' - requests-oauthlib: '>=1.0.0' six: '>=1.12.0' - swagger-spec-validator: '>=2.7.4' - urllib3: '' + future: '>=0.17.1' + requests-oauthlib: '>=1.0.0' + gitpython: '>=2.0.8' + oauthlib: '>=2.1.0' + pillow: '>=1.1.6' websocket-client: '>=0.35.0' - url: https://conda.anaconda.org/conda-forge/noarch/neptune-1.12.0-pyhd8ed1ab_0.conda + swagger-spec-validator: '>=2.7.4' + bravado: '>=11.0.0,<12.0.0' + boto3: '>=1.28.0' + url: 
https://conda.anaconda.org/conda-forge/noarch/neptune-1.13.0-pyhd8ed1ab_0.conda hash: - md5: 96fa16a96257d1f72b208b255be42cd3 - sha256: fac32c7a2acbee2eb7b81104732868f87bdc7d1264f2547faa37d82e19deac49 + md5: fcc75f04b6b0c45664ff49670f96f0df + sha256: c86526acf6c168b6740eefe2175de943473380612c929d51e34b3b827f401348 category: main optional: false - name: nest-asyncio @@ -10638,11 +10406,11 @@ package: manager: conda platform: linux-64 dependencies: - python: '>=3.5' - url: https://conda.anaconda.org/conda-forge/noarch/nest-asyncio-1.6.0-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/nest-asyncio-1.6.0-pyhd8ed1ab_1.conda hash: - md5: 6598c056f64dc8800d40add25e4e2c34 - sha256: 30db21d1f7e59b3408b831a7e0417b83b53ee6223afae56482c5f26da3ceb49a + md5: 598fd7d4d0de2455fb74f56063969a97 + sha256: bb7b21d7fd0445ddc0631f64e66d91a179de4ba920b8381f29b9d006a42788c0 category: main optional: false - name: nest-asyncio @@ -10650,11 +10418,11 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=3.5' - url: https://conda.anaconda.org/conda-forge/noarch/nest-asyncio-1.6.0-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/nest-asyncio-1.6.0-pyhd8ed1ab_1.conda hash: - md5: 6598c056f64dc8800d40add25e4e2c34 - sha256: 30db21d1f7e59b3408b831a7e0417b83b53ee6223afae56482c5f26da3ceb49a + md5: 598fd7d4d0de2455fb74f56063969a97 + sha256: bb7b21d7fd0445ddc0631f64e66d91a179de4ba920b8381f29b9d006a42788c0 category: main optional: false - name: networkx @@ -10699,8 +10467,8 @@ package: manager: conda platform: osx-arm64 dependencies: - python: 2.7|>=3.7 setuptools: '' + python: 2.7|>=3.7 url: https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.9.1-pyhd8ed1ab_0.conda hash: md5: dfe0528d0f1c16c1f7c528ea5536ab30 @@ -10719,37 +10487,39 @@ package: category: main optional: false - name: notebook - version: 7.2.2 + version: 7.3.1 manager: conda platform: linux-64 dependencies: + importlib_resources: 
'>=5.0' jupyter_server: '>=2.4.0,<3' - jupyterlab: '>=4.2.0,<4.3' + jupyterlab: '>=4.3.2,<4.4' jupyterlab_server: '>=2.27.1,<3' notebook-shim: '>=0.2,<0.3' - python: '>=3.8' + python: '>=3.9' tornado: '>=6.2.0' - url: https://conda.anaconda.org/conda-forge/noarch/notebook-7.2.2-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/notebook-7.3.1-pyhd8ed1ab_0.conda hash: - md5: c4d5a58f43ce9ffa430e6ecad6c30a42 - sha256: 613242d5151a4d70438bb2d65041c509e4376b7e18c06c3795c52a18176e41dc + md5: f663ab5bcc9a28364b7b80aa976ed00f + sha256: d5bd4e3c27b2fd234c5d79f3749cd6139d5b13a88cb7320f93c239aabc28e576 category: main optional: false - name: notebook - version: 7.2.2 + version: 7.3.1 manager: conda platform: osx-arm64 dependencies: + python: '>=3.9' + tornado: '>=6.2.0' jupyter_server: '>=2.4.0,<3' - jupyterlab: '>=4.2.0,<4.3' jupyterlab_server: '>=2.27.1,<3' notebook-shim: '>=0.2,<0.3' - python: '>=3.8' - tornado: '>=6.2.0' - url: https://conda.anaconda.org/conda-forge/noarch/notebook-7.2.2-pyhd8ed1ab_0.conda + importlib_resources: '>=5.0' + jupyterlab: '>=4.3.2,<4.4' + url: https://conda.anaconda.org/conda-forge/noarch/notebook-7.3.1-pyhd8ed1ab_0.conda hash: - md5: c4d5a58f43ce9ffa430e6ecad6c30a42 - sha256: 613242d5151a4d70438bb2d65041c509e4376b7e18c06c3795c52a18176e41dc + md5: f663ab5bcc9a28364b7b80aa976ed00f + sha256: d5bd4e3c27b2fd234c5d79f3749cd6139d5b13a88cb7320f93c239aabc28e576 category: main optional: false - name: notebook-shim @@ -10758,11 +10528,11 @@ package: platform: linux-64 dependencies: jupyter_server: '>=1.8,<3' - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/notebook-shim-0.2.4-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/notebook-shim-0.2.4-pyhd8ed1ab_1.conda hash: - md5: 3d85618e2c97ab896b5b5e298d32b5b3 - sha256: 9b5fdef9ebe89222baa9da2796ebe7bc02ec6c5a1f61327b651d6b92cf9a0230 + md5: e7f89ea5f7ea9401642758ff50a2d9c1 + sha256: 
7b920e46b9f7a2d2aa6434222e5c8d739021dbc5cc75f32d124a8191d86f9056 category: main optional: false - name: notebook-shim @@ -10770,70 +10540,43 @@ package: manager: conda platform: osx-arm64 dependencies: + python: '>=3.9' jupyter_server: '>=1.8,<3' - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/notebook-shim-0.2.4-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/notebook-shim-0.2.4-pyhd8ed1ab_1.conda hash: - md5: 3d85618e2c97ab896b5b5e298d32b5b3 - sha256: 9b5fdef9ebe89222baa9da2796ebe7bc02ec6c5a1f61327b651d6b92cf9a0230 + md5: e7f89ea5f7ea9401642758ff50a2d9c1 + sha256: 7b920e46b9f7a2d2aa6434222e5c8d739021dbc5cc75f32d124a8191d86f9056 category: main optional: false - name: nspr - version: '4.35' + version: '4.36' manager: conda platform: linux-64 dependencies: - libgcc-ng: '>=12' - libstdcxx-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/nspr-4.35-h27087fc_0.conda - hash: - md5: da0ec11a6454ae19bff5b02ed881a2b1 - sha256: 8fadeebb2b7369a4f3b2c039a980d419f65c7b18267ba0c62588f9f894396d0c - category: main - optional: false -- name: nspr - version: '4.35' - manager: conda - platform: osx-arm64 - dependencies: - libcxx: '>=14.0.6' - url: https://conda.anaconda.org/conda-forge/osx-arm64/nspr-4.35-hb7217d7_0.conda + __glibc: '>=2.17,<3.0.a0' + libgcc: '>=13' + libstdcxx: '>=13' + url: https://conda.anaconda.org/conda-forge/linux-64/nspr-4.36-h5888daf_0.conda hash: - md5: f81b5ec944dbbcff3dd08375eb036efa - sha256: 35959d36ea9e8a2c422db9f113ee0ac91a9b0c19c51b05f75d0793c3827cfa3a + md5: de9cd5bca9e4918527b9b72b6e2e1409 + sha256: a87471d9265a7c02a98c20debac8b13afd80963968ed7b1c1c2ac7b80955ce31 category: main optional: false - name: nss - version: '3.105' + version: '3.107' manager: conda platform: linux-64 dependencies: __glibc: '>=2.17,<3.0.a0' libgcc: '>=13' - libsqlite: '>=3.46.1,<4.0a0' + libsqlite: '>=3.47.0,<4.0a0' libstdcxx: '>=13' libzlib: '>=1.3.1,<2.0a0' - nspr: '>=4.35,<5.0a0' - url: 
https://conda.anaconda.org/conda-forge/linux-64/nss-3.105-hd34e28f_0.conda - hash: - md5: 28d7602527b76052422aaf5d6fd7ad81 - sha256: 4888112f00f46490169e60cd2455af78e53d67d6ca70eb8c4e203d6e990bcfd0 - category: main - optional: false -- name: nss - version: '3.105' - manager: conda - platform: osx-arm64 - dependencies: - __osx: '>=11.0' - libcxx: '>=17' - libsqlite: '>=3.46.1,<4.0a0' - libzlib: '>=1.3.1,<2.0a0' - nspr: '>=4.35,<5.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/nss-3.105-hd1ce637_0.conda + nspr: '>=4.36,<5.0a0' + url: https://conda.anaconda.org/conda-forge/linux-64/nss-3.107-hdf54f9c_0.conda hash: - md5: be138f3b3df410f3f6b6dce66bb17a69 - sha256: fb0209b22117d143daba1ed2a2d66244b271b698197bb7d159fccabeff1757b5 + md5: 294b7009fe9010b35c25bb683f663bc3 + sha256: 4a901b96cc8d371cc71ab5cf1e3184c234ae7e74c4d50b3789d4bdadcd0f3c40 category: main optional: false - name: numpy @@ -10879,11 +10622,11 @@ package: blinker: '' cryptography: '' pyjwt: '>=1.0.0' - python: '>=3.6' - url: https://conda.anaconda.org/conda-forge/noarch/oauthlib-3.2.2-pyhd8ed1ab_0.tar.bz2 + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/oauthlib-3.2.2-pyhd8ed1ab_1.conda hash: - md5: 8f882b197fd9c4941a787926baea4868 - sha256: 0cfd5146a91d3974f4abfc2a45de890371d510a77238fe553e036ec8c031dc5b + md5: bf5f2c90d503d43a8c45cedf766b4b8e + sha256: bec65607d36759e85aab2331ff7f056cb32be0bca92ee2b955aea3306330bd1b category: main optional: false - name: oauthlib @@ -10891,14 +10634,14 @@ package: manager: conda platform: osx-arm64 dependencies: - blinker: '' cryptography: '' + blinker: '' + python: '>=3.9' pyjwt: '>=1.0.0' - python: '>=3.6' - url: https://conda.anaconda.org/conda-forge/noarch/oauthlib-3.2.2-pyhd8ed1ab_0.tar.bz2 + url: https://conda.anaconda.org/conda-forge/noarch/oauthlib-3.2.2-pyhd8ed1ab_1.conda hash: - md5: 8f882b197fd9c4941a787926baea4868 - sha256: 0cfd5146a91d3974f4abfc2a45de890371d510a77238fe553e036ec8c031dc5b + md5: 
bf5f2c90d503d43a8c45cedf766b4b8e + sha256: bec65607d36759e85aab2331ff7f056cb32be0bca92ee2b955aea3306330bd1b category: main optional: false - name: onnx @@ -10921,21 +10664,21 @@ package: category: main optional: false - name: onnx - version: 1.16.2 + version: 1.17.0 manager: conda platform: osx-arm64 dependencies: __osx: '>=11.0' libcxx: '>=17' - libprotobuf: '>=4.25.3,<4.25.4.0a0' + libprotobuf: '>=5.27.5,<5.27.6.0a0' numpy: '>=1.19,<3' protobuf: '' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* - url: https://conda.anaconda.org/conda-forge/osx-arm64/onnx-1.16.2-py39hfe7ccb6_1.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/onnx-1.17.0-py39h071c012_0.conda hash: - md5: 7bf39703d2336d693b3e974784601265 - sha256: d919baf4cd982b8ff4b9e8002bdb9bdae2ada6e79dabb9133481d3d9602fec8e + md5: f51344314de7d861e9e2a272c80b2584 + sha256: 4907a38367fcfe45af4e7305f890a1ecb2def0752ce1f29d6d1b63eae433f2cb category: main optional: false - name: onnxconverter-common @@ -10960,9 +10703,9 @@ package: platform: osx-arm64 dependencies: numpy: '' - onnx: '' packaging: '' protobuf: '' + onnx: '' python: '>=3.6' url: https://conda.anaconda.org/conda-forge/noarch/onnxconverter-common-1.13.0-pyhd8ed1ab_0.tar.bz2 hash: @@ -10971,7 +10714,7 @@ package: category: main optional: false - name: openai - version: 1.51.2 + version: 1.57.2 manager: conda platform: linux-64 dependencies: @@ -10985,31 +10728,31 @@ package: tqdm: '>4' typing-extensions: '>=4.11,<5' typing_extensions: '>=4.11,<5' - url: https://conda.anaconda.org/conda-forge/noarch/openai-1.51.2-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/openai-1.57.2-pyhd8ed1ab_0.conda hash: - md5: a0e16f99e703f961114605f617b894e2 - sha256: cebc3595713b0a6966b1d655a4290bd777a5887f7478751424718a5684ae47ee + md5: 2eefb0a296f6ecb54118a886ec95feeb + sha256: 651a2f8557426bd1f79583569cc10705ac794162aa7728b89ffa31866a7294c5 category: main optional: false - name: openai - version: 1.51.2 + version: 1.57.2 manager: conda 
platform: osx-arm64 dependencies: - anyio: '>=3.5.0,<5' - distro: '>=1.7.0,<2' + sniffio: '' + python: '>=3.7.1' httpx: '>=0.23.0,<1' - jiter: '>=0.4.0,<1' + distro: '>=1.7.0,<2' pydantic: '>=1.9.0,<3' - python: '>=3.7.1' - sniffio: '' + anyio: '>=3.5.0,<5' tqdm: '>4' + jiter: '>=0.4.0,<1' typing-extensions: '>=4.11,<5' typing_extensions: '>=4.11,<5' - url: https://conda.anaconda.org/conda-forge/noarch/openai-1.51.2-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/openai-1.57.2-pyhd8ed1ab_0.conda hash: - md5: a0e16f99e703f961114605f617b894e2 - sha256: cebc3595713b0a6966b1d655a4290bd777a5887f7478751424718a5684ae47ee + md5: 2eefb0a296f6ecb54118a886ec95feeb + sha256: 651a2f8557426bd1f79583569cc10705ac794162aa7728b89ffa31866a7294c5 category: main optional: false - name: openjpeg @@ -11029,61 +10772,62 @@ package: category: main optional: false - name: openjpeg - version: 2.5.2 + version: 2.5.3 manager: conda platform: osx-arm64 dependencies: - libcxx: '>=16' - libpng: '>=1.6.43,<1.7.0a0' - libtiff: '>=4.6.0,<4.8.0a0' - libzlib: '>=1.2.13,<2.0.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/openjpeg-2.5.2-h9f1df11_0.conda + __osx: '>=11.0' + libcxx: '>=18' + libpng: '>=1.6.44,<1.7.0a0' + libtiff: '>=4.7.0,<4.8.0a0' + libzlib: '>=1.3.1,<2.0a0' + url: https://conda.anaconda.org/conda-forge/osx-arm64/openjpeg-2.5.3-h8a3d83b_0.conda hash: - md5: 5029846003f0bc14414b9128a1f7c84b - sha256: 472d6eaffc1996e6af35ec8e91c967f472a536a470079bfa56383cc0dbf4d463 + md5: 4b71d78648dbcf68ce8bf22bb07ff838 + sha256: 1d59bc72ca7faac06d349c1a280f5cfb8a57ee5896f1e24225a997189d7418c7 category: main optional: false - name: openldap - version: 2.6.8 + version: 2.6.9 manager: conda platform: osx-arm64 dependencies: __osx: '>=11.0' cyrus-sasl: '>=2.1.27,<3.0a0' - krb5: '>=1.21.2,<1.22.0a0' - libcxx: '>=16' - openssl: '>=3.3.0,<4.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/openldap-2.6.8-h50f2afc_0.conda + krb5: '>=1.21.3,<1.22.0a0' + libcxx: 
'>=18' + openssl: '>=3.4.0,<4.0a0' + url: https://conda.anaconda.org/conda-forge/osx-arm64/openldap-2.6.9-hbe55e7a_0.conda hash: - md5: d7d7451d23b52d99eadad211de640ff4 - sha256: f04e9522b971b96b306752dd55f8046634cb6d95a2c271672c02e658dc1eb7c8 + md5: 8291e59e1dd136bceecdefbc7207ecd6 + sha256: 5ae85f00a9dcf438e375d4fb5c45c510c7116e32c4b7af608ffd88e9e9dc6969 category: main optional: false - name: openssl - version: 3.3.2 + version: 3.4.0 manager: conda platform: linux-64 dependencies: __glibc: '>=2.17,<3.0.a0' ca-certificates: '' libgcc: '>=13' - url: https://conda.anaconda.org/conda-forge/linux-64/openssl-3.3.2-hb9d3cd8_0.conda + url: https://conda.anaconda.org/conda-forge/linux-64/openssl-3.4.0-hb9d3cd8_0.conda hash: - md5: 4d638782050ab6faa27275bed57e9b4e - sha256: cee91036686419f6dd6086902acf7142b4916e1c4ba042e9ca23e151da012b6d + md5: 23cc74f77eb99315c0360ec3533147a9 + sha256: 814b9dff1847b132c676ee6cc1a8cb2d427320779b93e1b6d76552275c128705 category: main optional: false - name: openssl - version: 3.3.2 + version: 3.4.0 manager: conda platform: osx-arm64 dependencies: __osx: '>=11.0' ca-certificates: '' - url: https://conda.anaconda.org/conda-forge/osx-arm64/openssl-3.3.2-h8359307_0.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/openssl-3.4.0-h39f12f2_0.conda hash: - md5: 1773ebccdc13ec603356e8ff1db9e958 - sha256: 940fa01c4dc6152158fe8943e05e55a1544cab639df0994e3b35937839e4f4d1 + md5: df307bbc703324722df0293c9ca2e418 + sha256: bd1d58ced46e75efa3b842c61642fd12272c69e9fe4d7261078bc082153a1d53 category: main optional: false - name: opentelemetry-api @@ -11105,8 +10849,8 @@ package: manager: conda platform: osx-arm64 dependencies: - deprecated: '>=1.2.6' python: '>=3.7' + deprecated: '>=1.2.6' setuptools: '>=16.0' url: https://conda.anaconda.org/conda-forge/noarch/opentelemetry-api-1.16.0-pyhd8ed1ab_0.conda hash: @@ -11135,11 +10879,11 @@ package: manager: conda platform: osx-arm64 dependencies: - opentelemetry-api: 1.16.0 - 
opentelemetry-semantic-conventions: 0.37b0 python: '>=3.7' - setuptools: '>=16.0' typing-extensions: '>=3.7.4' + setuptools: '>=16.0' + opentelemetry-semantic-conventions: 0.37b0 + opentelemetry-api: 1.16.0 url: https://conda.anaconda.org/conda-forge/noarch/opentelemetry-sdk-1.16.0-pyhd8ed1ab_0.conda hash: md5: 42de278a97f49bebb07fb2cb6c05047c @@ -11175,11 +10919,11 @@ package: manager: conda platform: linux-64 dependencies: - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/opt_einsum-3.4.0-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/opt_einsum-3.4.0-pyhd8ed1ab_1.conda hash: - md5: ff80afedd76f436acddbd1e14f5c2909 - sha256: 69e979dfea8b8d82e51684f77d189e1d00cdcbc5c85868415b879719882e2df4 + md5: 52919815cd35c4e1a0298af658ccda04 + sha256: af71aabb2bfa4b2c89b7b06403e5cec23b418452cae9f9772bd7ac3f9ea1ff44 category: main optional: false - name: opt_einsum @@ -11187,11 +10931,11 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/opt_einsum-3.4.0-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/opt_einsum-3.4.0-pyhd8ed1ab_1.conda hash: - md5: ff80afedd76f436acddbd1e14f5c2909 - sha256: 69e979dfea8b8d82e51684f77d189e1d00cdcbc5c85868415b879719882e2df4 + md5: 52919815cd35c4e1a0298af658ccda04 + sha256: af71aabb2bfa4b2c89b7b06403e5cec23b418452cae9f9772bd7ac3f9ea1ff44 category: main optional: false - name: optree @@ -11234,17 +10978,17 @@ package: platform: osx-arm64 dependencies: __osx: '>=11.0' - libcxx: '>=16' - libprotobuf: '>=4.25.3,<4.25.4.0a0' + libcxx: '>=17' + libprotobuf: '>=5.27.5,<5.27.6.0a0' libzlib: '>=1.3.1,<2.0a0' lz4-c: '>=1.9.3,<1.10.0a0' snappy: '>=1.2.1,<1.3.0a0' tzdata: '' zstd: '>=1.5.6,<1.6.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/orc-2.0.2-h75dedd0_0.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/orc-2.0.2-h4a9587e_1.conda hash: - md5: 
9c89e09cede143716b479c5eacc924fb - sha256: a23f3a88a6b16363bd13f964b4abd12be1576abac460126f3269cbed12d04840 + md5: 47749df556fda8cc1848804bf6011645 + sha256: ee0100b8b449be287d24fffce69444232a47142ca95bbc3d0cdc38ede9d690fb category: main optional: false - name: overrides @@ -11265,8 +11009,8 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=3.6' typing_utils: '' + python: '>=3.6' url: https://conda.anaconda.org/conda-forge/noarch/overrides-7.7.0-pyhd8ed1ab_0.conda hash: md5: 24fba5a9d161ad8103d4e84c0e1a3ed4 @@ -11274,27 +11018,27 @@ package: category: main optional: false - name: packaging - version: '24.1' + version: '24.2' manager: conda platform: linux-64 dependencies: python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/packaging-24.1-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/packaging-24.2-pyhd8ed1ab_2.conda hash: - md5: cbe1bb1f21567018ce595d9c2be0f0db - sha256: 36aca948219e2c9fdd6d80728bcc657519e02f06c2703d8db3446aec67f51d81 + md5: 3bfed7e6228ebf2f7b9eaa47f1b4e2aa + sha256: da157b19bcd398b9804c5c52fc000fcb8ab0525bdb9c70f95beaa0bb42f85af1 category: main optional: false - name: packaging - version: '24.1' + version: '24.2' manager: conda platform: osx-arm64 dependencies: python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/packaging-24.1-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/packaging-24.2-pyhd8ed1ab_2.conda hash: - md5: cbe1bb1f21567018ce595d9c2be0f0db - sha256: 36aca948219e2c9fdd6d80728bcc657519e02f06c2703d8db3446aec67f51d81 + md5: 3bfed7e6228ebf2f7b9eaa47f1b4e2aa + sha256: da157b19bcd398b9804c5c52fc000fcb8ab0525bdb9c70f95beaa0bb42f85af1 category: main optional: false - name: pandas @@ -11337,31 +11081,31 @@ package: category: main optional: false - name: pandera - version: 0.20.4 + version: 0.21.1 manager: conda platform: linux-64 dependencies: - pandera-base: '>=0.20.4,<0.20.5.0a0' - url: 
https://conda.anaconda.org/conda-forge/noarch/pandera-0.20.4-hd8ed1ab_0.conda + pandera-base: '>=0.21.1,<0.21.2.0a0' + url: https://conda.anaconda.org/conda-forge/noarch/pandera-0.21.1-hd8ed1ab_0.conda hash: - md5: 1f5a3edcac6b4ebded70e2bd9871a6a8 - sha256: 62f85c709021bb721101e4baddf1bb293077d70a5ef0f17bb3cc3c1325c868ae + md5: 08577567efea6cd29ca15ecdec90d635 + sha256: fa3e7c43d486d1f453874bf27173c5802af6b78ceb0f75fb9066adde57d2a2fb category: main optional: false - name: pandera - version: 0.20.4 + version: 0.21.1 manager: conda platform: osx-arm64 dependencies: - pandera-base: '>=0.20.4,<0.20.5.0a0' - url: https://conda.anaconda.org/conda-forge/noarch/pandera-0.20.4-hd8ed1ab_0.conda + pandera-base: '>=0.21.1,<0.21.2.0a0' + url: https://conda.anaconda.org/conda-forge/noarch/pandera-0.21.1-hd8ed1ab_0.conda hash: - md5: 1f5a3edcac6b4ebded70e2bd9871a6a8 - sha256: 62f85c709021bb721101e4baddf1bb293077d70a5ef0f17bb3cc3c1325c868ae + md5: 08577567efea6cd29ca15ecdec90d635 + sha256: fa3e7c43d486d1f453874bf27173c5802af6b78ceb0f75fb9066adde57d2a2fb category: main optional: false - name: pandera-base - version: 0.20.4 + version: 0.21.1 manager: conda platform: linux-64 dependencies: @@ -11374,30 +11118,30 @@ package: typeguard: '' typing_inspect: '>=0.6.0' wrapt: '' - url: https://conda.anaconda.org/conda-forge/noarch/pandera-base-0.20.4-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/pandera-base-0.21.1-pyhd8ed1ab_0.conda hash: - md5: c2a59e20377cedcfa4a4dfa98fa7f36e - sha256: 7b23ec2a4cea4fc2adc10828829fcb044c196719d0dd9ca63f4577b1855eb6c8 + md5: 5463b8ba571256dc6664179ecbfb14df + sha256: 15d38bc43ea31f775675b23a3d3ac9eaa66ef37e06304ddc3440ec106f7f6135 category: main optional: false - name: pandera-base - version: 0.20.4 + version: 0.21.1 manager: conda platform: osx-arm64 dependencies: - multimethod: <=1.10.0 - numpy: '>=1.19.0' + wrapt: '' + typeguard: '' + python: '>=3.8' packaging: '>=20.0' + numpy: '>=1.19.0' pandas: '>=1.2.0' - pydantic: 
'>=1.5.0' - python: '>=3.8' - typeguard: '' typing_inspect: '>=0.6.0' - wrapt: '' - url: https://conda.anaconda.org/conda-forge/noarch/pandera-base-0.20.4-pyhd8ed1ab_0.conda + multimethod: <=1.10.0 + pydantic: '>=1.5.0' + url: https://conda.anaconda.org/conda-forge/noarch/pandera-base-0.21.1-pyhd8ed1ab_0.conda hash: - md5: c2a59e20377cedcfa4a4dfa98fa7f36e - sha256: 7b23ec2a4cea4fc2adc10828829fcb044c196719d0dd9ca63f4577b1855eb6c8 + md5: 5463b8ba571256dc6664179ecbfb14df + sha256: 15d38bc43ea31f775675b23a3d3ac9eaa66ef37e06304ddc3440ec106f7f6135 category: main optional: false - name: pandocfilters @@ -11450,18 +11194,19 @@ package: platform: osx-arm64 dependencies: __osx: '>=11.0' - cairo: '>=1.18.0,<2.0a0' - fontconfig: '>=2.14.2,<3.0a0' + cairo: '>=1.18.2,<2.0a0' + fontconfig: '>=2.15.0,<3.0a0' fonts-conda-ecosystem: '' freetype: '>=2.12.1,<3.0a0' fribidi: '>=1.0.10,<2.0a0' harfbuzz: '>=9.0.0,<10.0a0' - libglib: '>=2.80.3,<3.0a0' - libpng: '>=1.6.43,<1.7.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/pango-1.54.0-h9ee27a3_2.conda + libexpat: '>=2.6.4,<3.0a0' + libglib: '>=2.82.2,<3.0a0' + libpng: '>=1.6.44,<1.7.0a0' + url: https://conda.anaconda.org/conda-forge/osx-arm64/pango-1.54.0-h3e3e505_3.conda hash: - md5: af2a2118261adf2d7a350d6767b450f2 - sha256: cfa2d11204bb75f6fbcfe1ff0cc1f6e4fc01185bf07b8eee8f698bfbd3702a79 + md5: 89fb53976952a229a13271272bf8cb10 + sha256: 4264f49cb550b9164c6a570978c3b9b1404215c1279dba592a90391d324a177a category: main optional: false - name: papermill @@ -11491,17 +11236,17 @@ package: manager: conda platform: osx-arm64 dependencies: - aiohttp: '>=3.9,<3.10' - ansicolors: '' + requests: '' + pyyaml: '' click: '' entrypoints: '' - nbclient: '>=0.2.0' - nbformat: '>=5.2.0' + ansicolors: '' python: '>=3.7' - pyyaml: '' - requests: '' - tenacity: '>=5.0.2' + nbformat: '>=5.2.0' tqdm: '>=4.32.2' + nbclient: '>=0.2.0' + aiohttp: '>=3.9,<3.10' + tenacity: '>=5.0.2' url: 
https://conda.anaconda.org/conda-forge/noarch/papermill-2.6.0-pyhd8ed1ab_0.conda hash: md5: 7e2150bca46f713bb6e290ac1b26ed1d @@ -11516,11 +11261,11 @@ package: bcrypt: '>=3.2' cryptography: '>=3.3' pynacl: '>=1.5' - python: '>=3.6' - url: https://conda.anaconda.org/conda-forge/noarch/paramiko-3.5.0-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/paramiko-3.5.0-pyhd8ed1ab_1.conda hash: - md5: 3a359c35a1f9ec2859fbddcabcfd4c4d - sha256: f2c3ac882c1123a71479c15ecec0c632aa004bc8a8c10daf25d69461ea1da38a + md5: 92e18207b16a4e4790cdcb4e0bcdad60 + sha256: b5c2c348ec7ae4ac57422d3499fe611c05b63311d396713ba9125820bf305163 category: main optional: false - name: paramiko @@ -11528,14 +11273,14 @@ package: manager: conda platform: osx-arm64 dependencies: - bcrypt: '>=3.2' + python: '>=3.9' cryptography: '>=3.3' + bcrypt: '>=3.2' pynacl: '>=1.5' - python: '>=3.6' - url: https://conda.anaconda.org/conda-forge/noarch/paramiko-3.5.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/paramiko-3.5.0-pyhd8ed1ab_1.conda hash: - md5: 3a359c35a1f9ec2859fbddcabcfd4c4d - sha256: f2c3ac882c1123a71479c15ecec0c632aa004bc8a8c10daf25d69461ea1da38a + md5: 92e18207b16a4e4790cdcb4e0bcdad60 + sha256: b5c2c348ec7ae4ac57422d3499fe611c05b63311d396713ba9125820bf305163 category: main optional: false - name: parso @@ -11543,11 +11288,11 @@ package: manager: conda platform: linux-64 dependencies: - python: '>=3.6' - url: https://conda.anaconda.org/conda-forge/noarch/parso-0.8.4-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/parso-0.8.4-pyhd8ed1ab_1.conda hash: - md5: 81534b420deb77da8833f2289b8d47ac - sha256: bfe404eebb930cc41782d34f8fc04c0388ea692eeebe2c5fc28df8ec8d4d61ae + md5: 5c092057b6badd30f75b06244ecd01c9 + sha256: 17131120c10401a99205fc6fe436e7903c0fa092f1b3e80452927ab377239bcc category: main optional: false - name: parso @@ -11555,11 +11300,11 @@ package: manager: conda platform: osx-arm64 
dependencies: - python: '>=3.6' - url: https://conda.anaconda.org/conda-forge/noarch/parso-0.8.4-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/parso-0.8.4-pyhd8ed1ab_1.conda hash: - md5: 81534b420deb77da8833f2289b8d47ac - sha256: bfe404eebb930cc41782d34f8fc04c0388ea692eeebe2c5fc28df8ec8d4d61ae + md5: 5c092057b6badd30f75b06244ecd01c9 + sha256: 17131120c10401a99205fc6fe436e7903c0fa092f1b3e80452927ab377239bcc category: main optional: false - name: partd @@ -11581,9 +11326,9 @@ package: manager: conda platform: osx-arm64 dependencies: + toolz: '' locket: '' python: '>=3.9' - toolz: '' url: https://conda.anaconda.org/conda-forge/noarch/partd-1.4.2-pyhd8ed1ab_0.conda hash: md5: 0badf9c54e24cecfb0ad2f99d680c163 @@ -11649,11 +11394,11 @@ package: platform: linux-64 dependencies: ptyprocess: '>=0.5' - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/pexpect-4.9.0-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/pexpect-4.9.0-pyhd8ed1ab_1.conda hash: - md5: 629f3203c99b32e0988910c93e77f3b6 - sha256: 90a09d134a4a43911b716d4d6eb9d169238aff2349056f7323d9db613812667e + md5: d0d408b1f18883a944376da5cf8101ea + sha256: 202af1de83b585d36445dc1fda94266697341994d1a3328fabde4989e1b3d07a category: main optional: false - name: pexpect @@ -11661,12 +11406,12 @@ package: manager: conda platform: osx-arm64 dependencies: + python: '>=3.9' ptyprocess: '>=0.5' - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/pexpect-4.9.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/pexpect-4.9.0-pyhd8ed1ab_1.conda hash: - md5: 629f3203c99b32e0988910c93e77f3b6 - sha256: 90a09d134a4a43911b716d4d6eb9d169238aff2349056f7323d9db613812667e + md5: d0d408b1f18883a944376da5cf8101ea + sha256: 202af1de83b585d36445dc1fda94266697341994d1a3328fabde4989e1b3d07a category: main optional: false - name: pickleshare @@ -11674,11 +11419,11 @@ package: manager: conda platform: 
linux-64 dependencies: - python: '>=3' - url: https://conda.anaconda.org/conda-forge/noarch/pickleshare-0.7.5-py_1003.tar.bz2 + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/pickleshare-0.7.5-pyhd8ed1ab_1004.conda hash: - md5: 415f0ebb6198cc2801c73438a9fb5761 - sha256: a1ed1a094dd0d1b94a09ed85c283a0eb28943f2e6f22161fb45e128d35229738 + md5: 11a9d1d09a3615fc07c3faf79bc0b943 + sha256: e2ac3d66c367dada209fc6da43e645672364b9fd5f9d28b9f016e24b81af475b category: main optional: false - name: pickleshare @@ -11686,11 +11431,11 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=3' - url: https://conda.anaconda.org/conda-forge/noarch/pickleshare-0.7.5-py_1003.tar.bz2 + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/pickleshare-0.7.5-pyhd8ed1ab_1004.conda hash: - md5: 415f0ebb6198cc2801c73438a9fb5761 - sha256: a1ed1a094dd0d1b94a09ed85c283a0eb28943f2e6f22161fb45e128d35229738 + md5: 11a9d1d09a3615fc07c3faf79bc0b943 + sha256: e2ac3d66c367dada209fc6da43e645672364b9fd5f9d28b9f016e24b81af475b category: main optional: false - name: pillow @@ -11717,7 +11462,7 @@ package: category: main optional: false - name: pillow - version: 10.4.0 + version: 11.0.0 manager: conda platform: osx-arm64 dependencies: @@ -11725,71 +11470,73 @@ package: freetype: '>=2.12.1,<3.0a0' lcms2: '>=2.16,<3.0a0' libjpeg-turbo: '>=3.0.0,<4.0a0' - libtiff: '>=4.6.0,<4.8.0a0' + libtiff: '>=4.7.0,<4.8.0a0' libwebp-base: '>=1.4.0,<2.0a0' - libxcb: '>=1.16,<2.0.0a0' + libxcb: '>=1.17.0,<2.0a0' libzlib: '>=1.3.1,<2.0a0' openjpeg: '>=2.5.2,<3.0a0' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* tk: '>=8.6.13,<8.7.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/pillow-10.4.0-py39hab9ce06_1.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/pillow-11.0.0-py39h4ac03e3_0.conda hash: - md5: cb97314a76af34d50ee989a4dcc11078 - sha256: 1ba2c6b727f641f2060d7a8dc053a9d32a9689a8467e192e532658c8ee033b1f + md5: 879240a84c5b0648192acce6bda484c0 + 
sha256: 727ceb4f3a57eed4b46c364da313199bdd2cb58e19b213a1e8d91237078636b0 category: main optional: false - name: pip - version: '24.2' + version: 24.3.1 manager: conda platform: linux-64 dependencies: python: '>=3.8,<3.13.0a0' setuptools: '' wheel: '' - url: https://conda.anaconda.org/conda-forge/noarch/pip-24.2-pyh8b19718_1.conda + url: https://conda.anaconda.org/conda-forge/noarch/pip-24.3.1-pyh8b19718_0.conda hash: - md5: 6c78fbb8ddfd64bcb55b5cbafd2d2c43 - sha256: d820e5358bcb117fa6286e55d4550c60b0332443df62121df839eab2d11c890b + md5: 5dd546fe99b44fda83963d15f84263b7 + sha256: 499313e72e20225f84c2e9690bbaf5b952c8d7e0bf34b728278538f766b81628 category: main optional: false - name: pip - version: '24.2' + version: 24.3.1 manager: conda platform: osx-arm64 dependencies: - python: '>=3.8,<3.13.0a0' setuptools: '' wheel: '' - url: https://conda.anaconda.org/conda-forge/noarch/pip-24.2-pyh8b19718_1.conda + python: '>=3.8,<3.13.0a0' + url: https://conda.anaconda.org/conda-forge/noarch/pip-24.3.1-pyh8b19718_0.conda hash: - md5: 6c78fbb8ddfd64bcb55b5cbafd2d2c43 - sha256: d820e5358bcb117fa6286e55d4550c60b0332443df62121df839eab2d11c890b + md5: 5dd546fe99b44fda83963d15f84263b7 + sha256: 499313e72e20225f84c2e9690bbaf5b952c8d7e0bf34b728278538f766b81628 category: main optional: false - name: pixman - version: 0.43.2 + version: 0.44.2 manager: conda platform: linux-64 dependencies: - libgcc-ng: '>=12' - libstdcxx-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/pixman-0.43.2-h59595ed_0.conda + __glibc: '>=2.17,<3.0.a0' + libgcc: '>=13' + libstdcxx: '>=13' + url: https://conda.anaconda.org/conda-forge/linux-64/pixman-0.44.2-h29eaf8c_0.conda hash: - md5: 71004cbf7924e19c02746ccde9fd7123 - sha256: 366d28e2a0a191d6c535e234741e0cd1d94d713f76073d8af4a5ccb2a266121e + md5: 5e2a7acfa2c24188af39e7944e1b3604 + sha256: 747c58db800d5583fee78e76240bf89cbaeedf7ab1ef339c2990602332b9c4be category: main optional: false - name: pixman - version: 0.43.4 + version: 0.44.2 manager: 
conda platform: osx-arm64 dependencies: - libcxx: '>=16' - url: https://conda.anaconda.org/conda-forge/osx-arm64/pixman-0.43.4-hebf3989_0.conda + __osx: '>=11.0' + libcxx: '>=18' + url: https://conda.anaconda.org/conda-forge/osx-arm64/pixman-0.44.2-h2f9eb0b_0.conda hash: - md5: 0308c68e711cd295aaa026a4f8c4b1e5 - sha256: df0ba2710ccdea5c909b63635529797f6eb3635b6fb77ae9cb2f183d08818409 + md5: fa8e429fdb9e5b757281f69b8cc4330b + sha256: 28855d4cb2d9fc9a6bd9196dadbaecd6868ec706394cec2f88824a61ba4b1bc0 category: main optional: false - name: pkgutil-resolve-name @@ -11797,11 +11544,11 @@ package: manager: conda platform: linux-64 dependencies: - python: '>=3.6' - url: https://conda.anaconda.org/conda-forge/noarch/pkgutil-resolve-name-1.3.10-pyhd8ed1ab_1.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/pkgutil-resolve-name-1.3.10-pyhd8ed1ab_2.conda hash: - md5: 405678b942f2481cecdb3e010f4925d9 - sha256: fecf95377134b0e8944762d92ecf7b0149c07d8186fb5db583125a2705c7ea0a + md5: 5a5870a74432aa332f7d32180633ad05 + sha256: adb2dde5b4f7da70ae81309cce6188ed3286ff280355cf1931b45d91164d2ad8 category: main optional: false - name: pkgutil-resolve-name @@ -11809,11 +11556,11 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=3.6' - url: https://conda.anaconda.org/conda-forge/noarch/pkgutil-resolve-name-1.3.10-pyhd8ed1ab_1.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/pkgutil-resolve-name-1.3.10-pyhd8ed1ab_2.conda hash: - md5: 405678b942f2481cecdb3e010f4925d9 - sha256: fecf95377134b0e8944762d92ecf7b0149c07d8186fb5db583125a2705c7ea0a + md5: 5a5870a74432aa332f7d32180633ad05 + sha256: adb2dde5b4f7da70ae81309cce6188ed3286ff280355cf1931b45d91164d2ad8 category: main optional: false - name: platformdirs @@ -11821,11 +11568,11 @@ package: manager: conda platform: linux-64 dependencies: - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.3.6-pyhd8ed1ab_0.conda + python: '>=3.9' + 
url: https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.3.6-pyhd8ed1ab_1.conda hash: - md5: fd8f2b18b65bbf62e8f653100690c8d2 - sha256: c81bdeadc4adcda216b2c7b373f0335f5c78cc480d1d55d10f21823590d7e46f + md5: 577852c7e53901ddccc7e6a9959ddebe + sha256: bb50f6499e8bc1d1a26f17716c97984671121608dc0c3ecd34858112bce59a27 category: main optional: false - name: platformdirs @@ -11833,11 +11580,11 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.3.6-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.3.6-pyhd8ed1ab_1.conda hash: - md5: fd8f2b18b65bbf62e8f653100690c8d2 - sha256: c81bdeadc4adcda216b2c7b373f0335f5c78cc480d1d55d10f21823590d7e46f + md5: 577852c7e53901ddccc7e6a9959ddebe + sha256: bb50f6499e8bc1d1a26f17716c97984671121608dc0c3ecd34858112bce59a27 category: main optional: false - name: plotly @@ -11846,12 +11593,12 @@ package: platform: linux-64 dependencies: packaging: '' - python: '>=3.6' + python: '>=3.9' tenacity: '>=6.2.0' - url: https://conda.anaconda.org/conda-forge/noarch/plotly-5.24.1-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/plotly-5.24.1-pyhd8ed1ab_1.conda hash: - md5: 81bb643d6c3ab4cbeaf724e9d68d0a6a - sha256: 39cef6d3056211840709054b90badfa4efd6f61ea37935a89ab0b549a54cc83f + md5: 71ac632876630091c81c50a05ec5e030 + sha256: d1bbf2d80105bfc8a7ed9817888f4a1686ed393d6435572921add09cc9347c1c category: main optional: false - name: plotly @@ -11860,16 +11607,16 @@ package: platform: osx-arm64 dependencies: packaging: '' - python: '>=3.6' + python: '>=3.9' tenacity: '>=6.2.0' - url: https://conda.anaconda.org/conda-forge/noarch/plotly-5.24.1-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/plotly-5.24.1-pyhd8ed1ab_1.conda hash: - md5: 81bb643d6c3ab4cbeaf724e9d68d0a6a - sha256: 39cef6d3056211840709054b90badfa4efd6f61ea37935a89ab0b549a54cc83f + md5: 
71ac632876630091c81c50a05ec5e030 + sha256: d1bbf2d80105bfc8a7ed9817888f4a1686ed393d6435572921add09cc9347c1c category: main optional: false - name: polars - version: 1.9.0 + version: 1.17.1 manager: conda platform: linux-64 dependencies: @@ -11879,14 +11626,14 @@ package: python: '>=3.9,<3.10.0a0' python_abi: 3.9.* typing_extensions: '>=4.0.0' - url: https://conda.anaconda.org/conda-forge/linux-64/polars-1.9.0-py39h74f158a_0.conda + url: https://conda.anaconda.org/conda-forge/linux-64/polars-1.17.1-py39h0cd0d40_0.conda hash: - md5: e507477c6f586b2c1aec3cffbd5d5765 - sha256: 823349a74c40c4629b3aea97179c257ca2d47b84fb9dfaee9970b4d4727499c7 + md5: 61d726e861b268c5d128465645b565f6 + sha256: 9a573d9f29382afd6360a495f85823700b3ef440e3360b73ca7ee9c00d099f00 category: main optional: false - name: polars - version: 1.9.0 + version: 1.17.1 manager: conda platform: osx-arm64 dependencies: @@ -11895,10 +11642,10 @@ package: python: '>=3.9,<3.10.0a0' python_abi: 3.9.* typing_extensions: '>=4.0.0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/polars-1.9.0-py39h040d221_0.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/polars-1.17.1-py39h1a13fb3_0.conda hash: - md5: a2f5f1e363b1cf689c6dee426f2023cc - sha256: c8b80a1a87a1fc9ad9b2218e5b0ea0a4c3c36776016a6ba5d00ba0ee6555f1bb + md5: 634ec17c0eceb45fb1875afcd8599820 + sha256: 2ef0294afa5fdf1624a319aeb2c7a8ba19e7184e9420652d7279ab55e33d90c1 category: main optional: false - name: poppler @@ -11930,36 +11677,6 @@ package: sha256: b313920277aca763b590dddf806c56b0aadcdff82f5ace39827cab4792ae4b20 category: main optional: false -- name: poppler - version: 24.08.0 - manager: conda - platform: osx-arm64 - dependencies: - __osx: '>=11.0' - cairo: '>=1.18.0,<2.0a0' - fontconfig: '>=2.14.2,<3.0a0' - fonts-conda-ecosystem: '' - freetype: '>=2.12.1,<3.0a0' - lcms2: '>=2.16,<3.0a0' - libcurl: '>=8.9.1,<9.0a0' - libcxx: '>=17' - libglib: '>=2.80.3,<3.0a0' - libiconv: '>=1.17,<2.0a0' - libintl: '>=0.22.5,<1.0a0' - libjpeg-turbo: 
'>=3.0.0,<4.0a0' - libpng: '>=1.6.43,<1.7.0a0' - libtiff: '>=4.6.0,<4.8.0a0' - libzlib: '>=1.3.1,<2.0a0' - nspr: '>=4.35,<5.0a0' - nss: '>=3.103,<4.0a0' - openjpeg: '>=2.5.2,<3.0a0' - poppler-data: '' - url: https://conda.anaconda.org/conda-forge/osx-arm64/poppler-24.08.0-h37b219d_1.conda - hash: - md5: 7926153cd183b32ba82966ab548611ab - sha256: a6b5abfcb9b44049f80e85d91fd1de2cfb2c18c9831c8f9efef9923bcac6051d - category: main - optional: false - name: poppler-data version: 0.4.12 manager: conda @@ -11971,17 +11688,6 @@ package: sha256: 2f227e17b3c0346112815faa605502b66c1c4511a856127f2899abf15a98a2cf category: main optional: false -- name: poppler-data - version: 0.4.12 - manager: conda - platform: osx-arm64 - dependencies: {} - url: https://conda.anaconda.org/conda-forge/noarch/poppler-data-0.4.12-hd8ed1ab_0.conda - hash: - md5: d8d7293c5b37f39b2ac32940621c6592 - sha256: 2f227e17b3c0346112815faa605502b66c1c4511a856127f2899abf15a98a2cf - category: main - optional: false - name: postgresql version: '16.4' manager: conda @@ -11997,35 +11703,10 @@ package: readline: '>=8.2,<9.0a0' tzcode: '' tzdata: '' - url: https://conda.anaconda.org/conda-forge/linux-64/postgresql-16.4-hb2eb5c0_2.conda - hash: - md5: 35090da8ada8eb6834c50efde3ead25d - sha256: a21b582129dc69ff44c8ca524542d2713f20c5cf4b842e806eb4173c169ea71e - category: main - optional: false -- name: postgresql - version: '17.0' - manager: conda - platform: osx-arm64 - dependencies: - __osx: '>=11.0' - icu: '>=75.1,<76.0a0' - krb5: '>=1.21.3,<1.22.0a0' - libpq: '17.0' - libxml2: '>=2.12.7,<3.0a0' - libxslt: '>=1.1.39,<2.0a0' - libzlib: '>=1.3.1,<2.0a0' - lz4-c: '>=1.9.3,<1.10.0a0' - openldap: '>=2.6.8,<2.7.0a0' - openssl: '>=3.3.2,<4.0a0' - readline: '>=8.2,<9.0a0' - tzcode: '' - tzdata: '' - zstd: '>=1.5.6,<1.6.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/postgresql-17.0-h25379d5_3.conda + url: https://conda.anaconda.org/conda-forge/linux-64/postgresql-16.4-hb2eb5c0_3.conda hash: - md5: 
0f6351dc09d5410726ed1d5c6d03e3e5 - sha256: f83dd89bbb7c76fee1a65e14ae438312598182b22274d806caded45bc4e6747c + md5: 3ba28956641b0c59640bf2b124d2d731 + sha256: aad93b8cb17380baadd7530962afdb647976502efffa76ff1da5d685855980d9 category: main optional: false - name: pre-commit @@ -12050,11 +11731,11 @@ package: manager: conda platform: osx-arm64 dependencies: - cfgv: '>=2.0.0' - identify: '>=1.0.0' - nodeenv: '>=0.11.1' python: '>=3.9' pyyaml: '>=5.1' + identify: '>=1.0.0' + nodeenv: '>=0.11.1' + cfgv: '>=2.0.0' virtualenv: '>=20.10.0' url: https://conda.anaconda.org/conda-forge/noarch/pre-commit-4.0.1-pyha770c72_0.conda hash: @@ -12106,44 +11787,44 @@ package: category: main optional: false - name: proj - version: 9.5.0 + version: 9.5.1 manager: conda platform: osx-arm64 dependencies: __osx: '>=11.0' - libcurl: '>=8.10.0,<9.0a0' - libcxx: '>=17' - libsqlite: '>=3.46.1,<4.0a0' - libtiff: '>=4.6.0,<4.8.0a0' + libcurl: '>=8.10.1,<9.0a0' + libcxx: '>=18' + libsqlite: '>=3.47.0,<4.0a0' + libtiff: '>=4.7.0,<4.8.0a0' sqlite: '' - url: https://conda.anaconda.org/conda-forge/osx-arm64/proj-9.5.0-h61a8e3e_0.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/proj-9.5.1-h1318a7e_0.conda hash: - md5: 7b9888f46634eb49eece8fa6e16406d6 - sha256: df44f24dc325fff7480f20fb404dad03015b9e646aa25e0eb24d1edd3930164e + md5: 5eb42e77ae79b46fabcb0f6f6d130763 + sha256: c6289d6f1a13f28ff3754ac0cb2553f7e7bc4a3102291115f62a04995d0421eb category: main optional: false - name: prometheus_client - version: 0.21.0 + version: 0.21.1 manager: conda platform: linux-64 dependencies: - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/prometheus_client-0.21.0-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/prometheus_client-0.21.1-pyhd8ed1ab_0.conda hash: - md5: 07e9550ddff45150bfc7da146268e165 - sha256: 01f0c3dd00081637ed920a922b17bcc8ed49608404ee466ced806856e671f6b9 + md5: 3e01e386307acc60b2f89af0b2e161aa + sha256: 
bc8f00d5155deb7b47702cb8370f233935704100dbc23e30747c161d1b6cf3ab category: main optional: false - name: prometheus_client - version: 0.21.0 + version: 0.21.1 manager: conda platform: osx-arm64 dependencies: - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/prometheus_client-0.21.0-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/prometheus_client-0.21.1-pyhd8ed1ab_0.conda hash: - md5: 07e9550ddff45150bfc7da146268e165 - sha256: 01f0c3dd00081637ed920a922b17bcc8ed49608404ee466ced806856e671f6b9 + md5: 3e01e386307acc60b2f89af0b2e161aa + sha256: bc8f00d5155deb7b47702cb8370f233935704100dbc23e30747c161d1b6cf3ab category: main optional: false - name: prometheus_flask_exporter @@ -12179,12 +11860,12 @@ package: manager: conda platform: linux-64 dependencies: - python: '>=3.7' + python: '>=3.9' wcwidth: '' - url: https://conda.anaconda.org/conda-forge/noarch/prompt-toolkit-3.0.48-pyha770c72_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/prompt-toolkit-3.0.48-pyha770c72_1.conda hash: - md5: 4c05134c48b6a74f33bbb9938e4a115e - sha256: 44e4e6108d425a666856a52d1523e5d70890256a8920bb0dcd3d55cc750f3207 + md5: 368d4aa48358439e07a97ae237491785 + sha256: 79fb7d1eeb490d4cc1b79f781bb59fe302ae38cf0a30907ecde75a7d399796cc category: main optional: false - name: prompt-toolkit @@ -12192,12 +11873,12 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=3.7' wcwidth: '' - url: https://conda.anaconda.org/conda-forge/noarch/prompt-toolkit-3.0.48-pyha770c72_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/prompt-toolkit-3.0.48-pyha770c72_1.conda hash: - md5: 4c05134c48b6a74f33bbb9938e4a115e - sha256: 44e4e6108d425a666856a52d1523e5d70890256a8920bb0dcd3d55cc750f3207 + md5: 368d4aa48358439e07a97ae237491785 + sha256: 79fb7d1eeb490d4cc1b79f781bb59fe302ae38cf0a30907ecde75a7d399796cc category: main optional: false - name: prompt_toolkit @@ -12206,10 +11887,10 @@ package: 
platform: linux-64 dependencies: prompt-toolkit: '>=3.0.48,<3.0.49.0a0' - url: https://conda.anaconda.org/conda-forge/noarch/prompt_toolkit-3.0.48-hd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/prompt_toolkit-3.0.48-hd8ed1ab_1.conda hash: - md5: 60a2aeff42b5d629d45cc1be38ec1c5d - sha256: a26eed22badba036b35b8f0a3cc4d17130d7e43c80d3aa258b465dd7d69362a0 + md5: bf730bb1f201e3f5a961c1fb2ffc4f05 + sha256: e4dd1b4eb467589edd51981c341d8ae0b3a71814541bd5fdcf0e55b5be22c4c0 category: main optional: false - name: prompt_toolkit @@ -12218,14 +11899,14 @@ package: platform: osx-arm64 dependencies: prompt-toolkit: '>=3.0.48,<3.0.49.0a0' - url: https://conda.anaconda.org/conda-forge/noarch/prompt_toolkit-3.0.48-hd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/prompt_toolkit-3.0.48-hd8ed1ab_1.conda hash: - md5: 60a2aeff42b5d629d45cc1be38ec1c5d - sha256: a26eed22badba036b35b8f0a3cc4d17130d7e43c80d3aa258b465dd7d69362a0 + md5: bf730bb1f201e3f5a961c1fb2ffc4f05 + sha256: e4dd1b4eb467589edd51981c341d8ae0b3a71814541bd5fdcf0e55b5be22c4c0 category: main optional: false - name: propcache - version: 0.2.0 + version: 0.2.1 manager: conda platform: linux-64 dependencies: @@ -12233,50 +11914,50 @@ package: libgcc: '>=13' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* - url: https://conda.anaconda.org/conda-forge/linux-64/propcache-0.2.0-py39h8cd3c5a_2.conda + url: https://conda.anaconda.org/conda-forge/linux-64/propcache-0.2.1-py39h8cd3c5a_0.conda hash: - md5: 2fd375656ffb2f1bbb44d0eb9f723239 - sha256: 5a5d128ec87e7fa564b14b2adf625aea56e4d7ef880f0224464afc48b3cc2415 + md5: 361659eb3438d4f196008b2e1366c5d3 + sha256: 5299924e011bbc2d434809d8245f733f583402adc7a1de10b7ad221e013ce70b category: main optional: false - name: propcache - version: 0.2.0 + version: 0.2.1 manager: conda platform: osx-arm64 dependencies: __osx: '>=11.0' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* - url: 
https://conda.anaconda.org/conda-forge/osx-arm64/propcache-0.2.0-py39h06df861_2.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/propcache-0.2.1-py39hf3bc14e_0.conda hash: - md5: f1e8608db7862f9dfc0ec07e80f265ff - sha256: f91913368e17f4ceef532a946c437374dbd73b3ea56729462a23986ea032feb5 + md5: 57dc3dec04833d87c778794f70e37da9 + sha256: cb59681beb5179a910765b254b7bedf86c1825a6993cc2409ac83a45b45fe448 category: main optional: false - name: proto-plus - version: 1.23.0 + version: 1.25.0 manager: conda platform: linux-64 dependencies: - protobuf: '>=3.19.0,<5.0.0dev' - python: '>=3.6' - url: https://conda.anaconda.org/conda-forge/noarch/proto-plus-1.23.0-pyhd8ed1ab_0.conda + protobuf: '>=3.19.0,<6.0.0dev' + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/proto-plus-1.25.0-pyhd8ed1ab_1.conda hash: - md5: 26c043ffe1c027eaed894d70ea04a18d - sha256: 2c9ca8233672032fb372792b1e4c2a556205e631dc375c2c606eab478f32349d + md5: 6f6b281b0fb1ee21099cec9ee672730c + sha256: ca3689fdf5f03f0dfc5b7c9ecd8cb1dfbcf4f49b1e8e92e9421d68ebfc42d502 category: main optional: false - name: proto-plus - version: 1.23.0 + version: 1.25.0 manager: conda platform: osx-arm64 dependencies: - protobuf: '>=3.19.0,<5.0.0dev' - python: '>=3.6' - url: https://conda.anaconda.org/conda-forge/noarch/proto-plus-1.23.0-pyhd8ed1ab_0.conda + python: '>=3.9' + protobuf: '>=3.19.0,<6.0.0dev' + url: https://conda.anaconda.org/conda-forge/noarch/proto-plus-1.25.0-pyhd8ed1ab_1.conda hash: - md5: 26c043ffe1c027eaed894d70ea04a18d - sha256: 2c9ca8233672032fb372792b1e4c2a556205e631dc375c2c606eab478f32349d + md5: 6f6b281b0fb1ee21099cec9ee672730c + sha256: ca3689fdf5f03f0dfc5b7c9ecd8cb1dfbcf4f49b1e8e92e9421d68ebfc42d502 category: main optional: false - name: protobuf @@ -12297,25 +11978,22 @@ package: category: main optional: false - name: protobuf - version: 4.25.3 + version: 5.27.5 manager: conda platform: osx-arm64 dependencies: __osx: '>=11.0' - libabseil: '>=20240116.2,<20240117.0a0' 
libcxx: '>=17' - libprotobuf: '>=4.25.3,<4.25.4.0a0' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* - setuptools: '' - url: https://conda.anaconda.org/conda-forge/osx-arm64/protobuf-4.25.3-py39hd6078d7_1.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/protobuf-5.27.5-py39hfa9831e_0.conda hash: - md5: 1409e6033e05e2da32198810b2551098 - sha256: f3a1a8d6b90647890797037fd2db02efcc4c8700f3758b583278e55a15e5e552 + md5: b15c5728f2de69623c696e7199215f56 + sha256: f892b386af3259c73f0aa366a51626a2b82b4f1269070339d728372673c9af54 category: main optional: false - name: psutil - version: 6.0.0 + version: 6.1.0 manager: conda platform: linux-64 dependencies: @@ -12323,24 +12001,24 @@ package: libgcc: '>=13' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* - url: https://conda.anaconda.org/conda-forge/linux-64/psutil-6.0.0-py39h8cd3c5a_2.conda + url: https://conda.anaconda.org/conda-forge/linux-64/psutil-6.1.0-py39h8cd3c5a_0.conda hash: - md5: 658a024659b412cba60eb14a394f0d54 - sha256: c08f2d667bbe80530c614f01da227c1aa33df8e4ec76274fad2c90c7c00f6aef + md5: ef257b7ce1e1cb152639ced6bc653475 + sha256: 057765763fc2b7cc8d429e055240209ae83ae6631c80060bad590bbbc8f01f22 category: main optional: false - name: psutil - version: 6.0.0 + version: 6.1.0 manager: conda platform: osx-arm64 dependencies: __osx: '>=11.0' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* - url: https://conda.anaconda.org/conda-forge/osx-arm64/psutil-6.0.0-py39h57695bc_2.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/psutil-6.1.0-py39h57695bc_0.conda hash: - md5: 68253dcc43431a5e9277602d3240c2c2 - sha256: 0f68f4e9f24f08ee9a923a6d6c34e13a3d545251c6c00022db6ea99396975db0 + md5: 7521b2d7f1337893b7b9a513a264caa1 + sha256: 7caa6892871b78fd609fa24136005a2b34e711076c35abaa70a873aa1ce27fde category: main optional: false - name: psycopg2 @@ -12393,8 +12071,8 @@ package: manager: conda platform: osx-arm64 dependencies: - psycopg2: '>=2.9.9,<2.9.10.0a0' python: '>=3.6' + psycopg2: '>=2.9.9,<2.9.10.0a0' url: 
https://conda.anaconda.org/conda-forge/noarch/psycopg2-binary-2.9.9-pyhd8ed1ab_0.conda hash: md5: c15b2ec0570f8988819eea58286dbc19 @@ -12431,11 +12109,11 @@ package: manager: conda platform: linux-64 dependencies: - python: '' - url: https://conda.anaconda.org/conda-forge/noarch/ptyprocess-0.7.0-pyhd3deb0d_0.tar.bz2 + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/ptyprocess-0.7.0-pyhd8ed1ab_1.conda hash: - md5: 359eeb6536da0e687af562ed265ec263 - sha256: fb31e006a25eb2e18f3440eb8d17be44c8ccfae559499199f73584566d0a444a + md5: 7d9daffbb8d8e0af0f769dbbcd173a54 + sha256: a7713dfe30faf17508ec359e0bc7e0983f5d94682492469bd462cdaae9c64d83 category: main optional: false - name: ptyprocess @@ -12443,11 +12121,11 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '' - url: https://conda.anaconda.org/conda-forge/noarch/ptyprocess-0.7.0-pyhd3deb0d_0.tar.bz2 + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/ptyprocess-0.7.0-pyhd8ed1ab_1.conda hash: - md5: 359eeb6536da0e687af562ed265ec263 - sha256: fb31e006a25eb2e18f3440eb8d17be44c8ccfae559499199f73584566d0a444a + md5: 7d9daffbb8d8e0af0f769dbbcd173a54 + sha256: a7713dfe30faf17508ec359e0bc7e0983f5d94682492469bd462cdaae9c64d83 category: main optional: false - name: pure_eval @@ -12455,11 +12133,11 @@ package: manager: conda platform: linux-64 dependencies: - python: '>=3.5' - url: https://conda.anaconda.org/conda-forge/noarch/pure_eval-0.2.3-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/pure_eval-0.2.3-pyhd8ed1ab_1.conda hash: - md5: 0f051f09d992e0d08941706ad519ee0e - sha256: dcfcb3cee1ae0a89729601582cc3edea20ba13c9493967a03a693c67567af0c8 + md5: 3bfdfb8dbcdc4af1ae3f9a8eb3948f04 + sha256: 71bd24600d14bb171a6321d523486f6a06f855e75e547fa0cb2a0953b02047f0 category: main optional: false - name: pure_eval @@ -12467,11 +12145,11 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=3.5' - url: 
https://conda.anaconda.org/conda-forge/noarch/pure_eval-0.2.3-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/pure_eval-0.2.3-pyhd8ed1ab_1.conda hash: - md5: 0f051f09d992e0d08941706ad519ee0e - sha256: dcfcb3cee1ae0a89729601582cc3edea20ba13c9493967a03a693c67567af0c8 + md5: 3bfdfb8dbcdc4af1ae3f9a8eb3948f04 + sha256: 71bd24600d14bb171a6321d523486f6a06f855e75e547fa0cb2a0953b02047f0 category: main optional: false - name: py4j @@ -12516,40 +12194,38 @@ package: category: main optional: false - name: pyarrow - version: 17.0.0 + version: 18.0.0 manager: conda platform: osx-arm64 dependencies: - libarrow-acero: 17.0.0.* - libarrow-dataset: 17.0.0.* - libarrow-substrait: 17.0.0.* - libparquet: 17.0.0.* - numpy: '>=1.19,<3' - pyarrow-core: 17.0.0 + libarrow-acero: 18.0.0.* + libarrow-dataset: 18.0.0.* + libarrow-substrait: 18.0.0.* + libparquet: 18.0.0.* + pyarrow-core: 18.0.0 python: '>=3.9,<3.10.0a0' python_abi: 3.9.* - url: https://conda.anaconda.org/conda-forge/osx-arm64/pyarrow-17.0.0-py39h2beb185_1.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/pyarrow-18.0.0-py39hdf13c20_2.conda hash: - md5: fdc98a42428697066420b413e9ae840b - sha256: 2e1e3409f85c8b906a64006040b3160595996bb64d9604879676b4a22aa6b878 + md5: 44af9ae873d266ef92f35c8a57733697 + sha256: cd3114a1c0ceab264e0e8712efa7bb1cb45562ecd4c8791b7f29ededf8ad1b8b category: main optional: false - name: pyarrow-core - version: 17.0.0 + version: 18.0.0 manager: conda platform: osx-arm64 dependencies: __osx: '>=11.0' - libarrow: 17.0.0.* - libcxx: '>=17' + libarrow: 18.0.0.* + libcxx: '>=18' libzlib: '>=1.3.1,<2.0a0' - numpy: '>=1.19,<3' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* - url: https://conda.anaconda.org/conda-forge/osx-arm64/pyarrow-core-17.0.0-py39ha397a34_1_cpu.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/pyarrow-core-18.0.0-py39h35f5be7_2_cpu.conda hash: - md5: e1956003a18fbf2a275ba8e82c4b56d7 - sha256: 
d7555745de4253e027052a5935574729c9f3856f0531e8e9dd3b4b7d24a02518 + md5: 82bef129c6c988b4591f270035c7d2f4 + sha256: 3505d75005b2274c23199818b72f017345368435f270db6ed9d9f073e1a40c1b category: main optional: false - name: pyarrow-hotfix @@ -12570,8 +12246,8 @@ package: manager: conda platform: osx-arm64 dependencies: - pyarrow: '>=0.14' python: '>=3.5' + pyarrow: '>=0.14' url: https://conda.anaconda.org/conda-forge/noarch/pyarrow-hotfix-0.6-pyhd8ed1ab_0.conda hash: md5: ccc06e6ef2064ae129fab3286299abda @@ -12583,11 +12259,11 @@ package: manager: conda platform: linux-64 dependencies: - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/pyasn1-0.6.1-pyhd8ed1ab_1.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/pyasn1-0.6.1-pyhd8ed1ab_2.conda hash: - md5: 960ae8a1852b4e0fbeafe439fa7f4eab - sha256: 7f8d61f80e548ed29e452bb51742f0370614f210156cd8355b89803c3f3999d5 + md5: 09bb17ed307ad6ab2fd78d32372fdd4e + sha256: d06051df66e9ab753683d7423fcef873d78bb0c33bd112c3d5be66d529eddf06 category: main optional: false - name: pyasn1 @@ -12595,11 +12271,11 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/pyasn1-0.6.1-pyhd8ed1ab_1.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/pyasn1-0.6.1-pyhd8ed1ab_2.conda hash: - md5: 960ae8a1852b4e0fbeafe439fa7f4eab - sha256: 7f8d61f80e548ed29e452bb51742f0370614f210156cd8355b89803c3f3999d5 + md5: 09bb17ed307ad6ab2fd78d32372fdd4e + sha256: d06051df66e9ab753683d7423fcef873d78bb0c33bd112c3d5be66d529eddf06 category: main optional: false - name: pyasn1-modules @@ -12608,11 +12284,11 @@ package: platform: linux-64 dependencies: pyasn1: '>=0.4.6,<0.7.0' - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/pyasn1-modules-0.4.1-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/pyasn1-modules-0.4.1-pyhd8ed1ab_1.conda hash: - md5: 
f781c31cdff5c086909f8037ed0b0472 - sha256: 2dd9d70e055cdc51b5b2dcf3f0e9c0c44599b6155928033886f4efebfdda03f3 + md5: 1c6476fdb96e6c3db6c3f7693cdba78e + sha256: 565e961fce215ccf14f863c3030eda5b83014489679d27166ff97144bf977810 category: main optional: false - name: pyasn1-modules @@ -12620,12 +12296,12 @@ package: manager: conda platform: osx-arm64 dependencies: + python: '>=3.9' pyasn1: '>=0.4.6,<0.7.0' - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/pyasn1-modules-0.4.1-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/pyasn1-modules-0.4.1-pyhd8ed1ab_1.conda hash: - md5: f781c31cdff5c086909f8037ed0b0472 - sha256: 2dd9d70e055cdc51b5b2dcf3f0e9c0c44599b6155928033886f4efebfdda03f3 + md5: 1c6476fdb96e6c3db6c3f7693cdba78e + sha256: 565e961fce215ccf14f863c3030eda5b83014489679d27166ff97144bf977810 category: main optional: false - name: pycparser @@ -12633,11 +12309,11 @@ package: manager: conda platform: linux-64 dependencies: - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/pycparser-2.22-pyhd8ed1ab_0.conda + python: '' + url: https://conda.anaconda.org/conda-forge/noarch/pycparser-2.22-pyh29332c3_1.conda hash: - md5: 844d9eb3b43095b031874477f7d70088 - sha256: 406001ebf017688b1a1554b49127ca3a4ac4626ec0fd51dc75ffa4415b720b64 + md5: 12c566707c80111f9799308d9e265aef + sha256: 79db7928d13fab2d892592223d7570f5061c192f27b9febd1a418427b719acc6 category: main optional: false - name: pycparser @@ -12645,45 +12321,47 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/pycparser-2.22-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/pycparser-2.22-pyh29332c3_1.conda hash: - md5: 844d9eb3b43095b031874477f7d70088 - sha256: 406001ebf017688b1a1554b49127ca3a4ac4626ec0fd51dc75ffa4415b720b64 + md5: 12c566707c80111f9799308d9e265aef + sha256: 79db7928d13fab2d892592223d7570f5061c192f27b9febd1a418427b719acc6 
category: main optional: false - name: pydantic - version: 2.9.2 + version: 2.10.3 manager: conda platform: linux-64 dependencies: annotated-types: '>=0.6.0' - pydantic-core: 2.23.4 - python: '>=3.7' + pydantic-core: 2.27.1 + python: '>=3.9' typing-extensions: '>=4.6.1' - url: https://conda.anaconda.org/conda-forge/noarch/pydantic-2.9.2-pyhd8ed1ab_0.conda + typing_extensions: '>=4.12.2' + url: https://conda.anaconda.org/conda-forge/noarch/pydantic-2.10.3-pyh3cfb1c2_0.conda hash: - md5: 1eb533bb8eb2199e3fef3e4aa147319f - sha256: 1b7b0dc9f6af4da156bf22b0263be70829364a08145c696d3670facff2f6441a + md5: 194ef7f91286978521350f171b117f01 + sha256: cac9eebd3d5f8d8a497a9025d756257ddc75b8b3393e6737cb45077bd744d4f8 category: main optional: false - name: pydantic - version: 2.9.2 + version: 2.10.3 manager: conda platform: osx-arm64 dependencies: - annotated-types: '>=0.6.0' - pydantic-core: 2.23.4 - python: '>=3.7' + python: '>=3.9' typing-extensions: '>=4.6.1' - url: https://conda.anaconda.org/conda-forge/noarch/pydantic-2.9.2-pyhd8ed1ab_0.conda + typing_extensions: '>=4.12.2' + annotated-types: '>=0.6.0' + pydantic-core: 2.27.1 + url: https://conda.anaconda.org/conda-forge/noarch/pydantic-2.10.3-pyh3cfb1c2_0.conda hash: - md5: 1eb533bb8eb2199e3fef3e4aa147319f - sha256: 1b7b0dc9f6af4da156bf22b0263be70829364a08145c696d3670facff2f6441a + md5: 194ef7f91286978521350f171b117f01 + sha256: cac9eebd3d5f8d8a497a9025d756257ddc75b8b3393e6737cb45077bd744d4f8 category: main optional: false - name: pydantic-core - version: 2.23.4 + version: 2.27.1 manager: conda platform: linux-64 dependencies: @@ -12692,14 +12370,14 @@ package: python: '>=3.9,<3.10.0a0' python_abi: 3.9.* typing-extensions: '>=4.6.0,!=4.7.0' - url: https://conda.anaconda.org/conda-forge/linux-64/pydantic-core-2.23.4-py39he612d8f_0.conda + url: https://conda.anaconda.org/conda-forge/linux-64/pydantic-core-2.27.1-py39he612d8f_0.conda hash: - md5: 2c6c72bcef2551b31798b3f16289b4eb - sha256: 
f233c7d8837355d746e41c04de9a017526216cdb4cc5a3c8d71c060bf12b0fb7 + md5: 08ae181e881f3bef5598b300353b57ba + sha256: 701f663eda9d5cc92bf67b851e72aa9fb4f1287bd375661a3eef3351c7dc9056 category: main optional: false - name: pydantic-core - version: 2.23.4 + version: 2.27.1 manager: conda platform: osx-arm64 dependencies: @@ -12707,10 +12385,10 @@ package: python: '>=3.9,<3.10.0a0' python_abi: 3.9.* typing-extensions: '>=4.6.0,!=4.7.0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/pydantic-core-2.23.4-py39h9c3e640_0.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/pydantic-core-2.27.1-py39hc40b5db_0.conda hash: - md5: efa7fb4c5d37835cf971525606f19b86 - sha256: de002022f35f48b213fa5694813bedbf08aec66d48c669456fdbc25fc3ef11b2 + md5: 8ab310ed760f566831f7d92ce888cf22 + sha256: 2237a91479eb1b7ae9f332b6c1ebb3a90e06ac2ac3a45f014ce33a27d1f95912 category: main optional: false - name: pydata-google-auth @@ -12733,10 +12411,10 @@ package: manager: conda platform: osx-arm64 dependencies: - google-auth: '>=1.25.0,<3.0' - google-auth-oauthlib: '>=0.4.0' - python: '>=3.8' setuptools: '' + python: '>=3.8' + google-auth-oauthlib: '>=0.4.0' + google-auth: '>=1.25.0,<3.0' url: https://conda.anaconda.org/conda-forge/noarch/pydata-google-auth-1.8.2-pyhd8ed1ab_0.conda hash: md5: 20c47c6bae7d5665c87379ca4da905d9 @@ -12748,11 +12426,11 @@ package: manager: conda platform: linux-64 dependencies: - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/pygments-2.18.0-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/pygments-2.18.0-pyhd8ed1ab_1.conda hash: - md5: b7f5c092b8f9800150d998a71b76d5a1 - sha256: 78267adf4e76d0d64ea2ffab008c501156c108bb08fecb703816fb63e279780b + md5: b38dc0206e2a530e5c2cf11dc086b31a + sha256: 0d6133545f268b2b89c2617c196fc791f365b538d4057ecd636d658c3b1e885d category: main optional: false - name: pygments @@ -12760,35 +12438,35 @@ package: manager: conda platform: osx-arm64 dependencies: - 
python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/pygments-2.18.0-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/pygments-2.18.0-pyhd8ed1ab_1.conda hash: - md5: b7f5c092b8f9800150d998a71b76d5a1 - sha256: 78267adf4e76d0d64ea2ffab008c501156c108bb08fecb703816fb63e279780b + md5: b38dc0206e2a530e5c2cf11dc086b31a + sha256: 0d6133545f268b2b89c2617c196fc791f365b538d4057ecd636d658c3b1e885d category: main optional: false - name: pyjwt - version: 2.9.0 + version: 2.10.1 manager: conda platform: linux-64 dependencies: - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/pyjwt-2.9.0-pyhd8ed1ab_1.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/pyjwt-2.10.1-pyhd8ed1ab_0.conda hash: - md5: 5ba575830ec18d5c51c59f403310e2c7 - sha256: b6f47cd0737cb1f5aca10be771641466ec1a3be585382d44877140eb2cb2dd46 + md5: 84c5c40ea7c5bbc6243556e5daed20e7 + sha256: 158d8911e873e2a339c27768933747bf9c2aec1caa038f1b7b38a011734a956f category: main optional: false - name: pyjwt - version: 2.9.0 + version: 2.10.1 manager: conda platform: osx-arm64 dependencies: - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/pyjwt-2.9.0-pyhd8ed1ab_1.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/pyjwt-2.10.1-pyhd8ed1ab_0.conda hash: - md5: 5ba575830ec18d5c51c59f403310e2c7 - sha256: b6f47cd0737cb1f5aca10be771641466ec1a3be585382d44877140eb2cb2dd46 + md5: 84c5c40ea7c5bbc6243556e5daed20e7 + sha256: 158d8911e873e2a339c27768933747bf9c2aec1caa038f1b7b38a011734a956f category: main optional: false - name: pynacl @@ -12827,7 +12505,7 @@ package: category: main optional: false - name: pyobjc-core - version: 10.3.1 + version: 10.3.2 manager: conda platform: osx-arm64 dependencies: @@ -12836,52 +12514,52 @@ package: python: '>=3.9,<3.10.0a0' python_abi: 3.9.* setuptools: '' - url: https://conda.anaconda.org/conda-forge/osx-arm64/pyobjc-core-10.3.1-py39hdc109a9_1.conda + url: 
https://conda.anaconda.org/conda-forge/osx-arm64/pyobjc-core-10.3.2-py39hebff0d6_0.conda hash: - md5: 52acd72bb273b51e65c1d34f1478c89c - sha256: 36a51ab9f88f64b64d4baab0367ba89b6c4349d035d086a9434fdc6a1d3cdba1 + md5: 8293ef383094ef347928de4c588f3861 + sha256: 6dcf84110f5a3abc0a2ab230e8d2c3e57174093c8cdaf56fb4d9304286816aa2 category: main optional: false - name: pyobjc-framework-cocoa - version: 10.3.1 + version: 10.3.2 manager: conda platform: osx-arm64 dependencies: __osx: '>=11.0' libffi: '>=3.4,<4.0a0' - pyobjc-core: 10.3.1.* + pyobjc-core: 10.3.2.* python: '>=3.9,<3.10.0a0' python_abi: 3.9.* - url: https://conda.anaconda.org/conda-forge/osx-arm64/pyobjc-framework-cocoa-10.3.1-py39hdc109a9_1.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/pyobjc-framework-cocoa-10.3.2-py39hebff0d6_0.conda hash: - md5: fb9bec50052e656def45b3f47337f0bd - sha256: f4a4b63959f56110c82224a108dd815a3dc47cb5b5d903a34436140d715bf32d + md5: 5d1fb77d91e29cfa119fb535c6b183a0 + sha256: 4eca292e1c1eb0f66bb05aae1e6bb62755bddb59d0c723c51d6cdb9812922669 category: main optional: false - name: pyopenssl - version: 24.2.1 + version: 24.3.0 manager: conda platform: linux-64 dependencies: - cryptography: '>=41.0.5,<44' - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/pyopenssl-24.2.1-pyhd8ed1ab_2.conda + cryptography: '>=41.0.5,<45' + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/pyopenssl-24.3.0-pyhd8ed1ab_0.conda hash: - md5: 85fa2fdd26d5a38792eb57bc72463f07 - sha256: 6618aaa9780b723abfda95f3575900df99dd137d96c80421ad843a5cbcc70e6e + md5: 269109707b3810adce78b6afb2a82c80 + sha256: a84489a5b4acf8af81dcf0b28542c7bb6362dc87990a51bf08e2fdeb51f4ee9f category: main optional: false - name: pyopenssl - version: 24.2.1 + version: 24.3.0 manager: conda platform: osx-arm64 dependencies: - cryptography: '>=41.0.5,<44' - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/pyopenssl-24.2.1-pyhd8ed1ab_2.conda + python: '>=3.9' + 
cryptography: '>=41.0.5,<45' + url: https://conda.anaconda.org/conda-forge/noarch/pyopenssl-24.3.0-pyhd8ed1ab_0.conda hash: - md5: 85fa2fdd26d5a38792eb57bc72463f07 - sha256: 6618aaa9780b723abfda95f3575900df99dd137d96c80421ad843a5cbcc70e6e + md5: 269109707b3810adce78b6afb2a82c80 + sha256: a84489a5b4acf8af81dcf0b28542c7bb6362dc87990a51bf08e2fdeb51f4ee9f category: main optional: false - name: pyparsing @@ -12890,10 +12568,10 @@ package: platform: linux-64 dependencies: python: '>=3.9' - url: https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.2.0-pyhd8ed1ab_1.conda + url: https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.2.0-pyhd8ed1ab_2.conda hash: - md5: 035c17fbf099f50ff60bf2eb303b0a83 - sha256: b846e3965cd106438cf0b9dc0de8d519670ac065f822a7d66862e9423e0229cb + md5: 4c05a2bcf87bb495512374143b57cf28 + sha256: 09a5484532e24a33649ab612674fd0857bbdcfd6640a79d13a6690fb742a36e1 category: main optional: false - name: pyparsing @@ -12902,10 +12580,10 @@ package: platform: osx-arm64 dependencies: python: '>=3.9' - url: https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.2.0-pyhd8ed1ab_1.conda + url: https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.2.0-pyhd8ed1ab_2.conda hash: - md5: 035c17fbf099f50ff60bf2eb303b0a83 - sha256: b846e3965cd106438cf0b9dc0de8d519670ac065f822a7d66862e9423e0229cb + md5: 4c05a2bcf87bb495512374143b57cf28 + sha256: 09a5484532e24a33649ab612674fd0857bbdcfd6640a79d13a6690fb742a36e1 category: main optional: false - name: pyproj @@ -12946,11 +12624,11 @@ package: platform: linux-64 dependencies: __unix: '' - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha2e5f31_6.tar.bz2 + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha55dd90_7.conda hash: - md5: 2a7de29fb590ca14b5243c4c812c8025 - sha256: a42f826e958a8d22e65b3394f437af7332610e43ee313393d1cf143f0a2d274b + md5: 461219d1a5bd61342293efa2c0c90eac + sha256: 
ba3b032fa52709ce0d9fd388f63d330a026754587a2f461117cac9ab73d8d0d8 category: main optional: false - name: pysocks @@ -12959,47 +12637,47 @@ package: platform: osx-arm64 dependencies: __unix: '' - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha2e5f31_6.tar.bz2 + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha55dd90_7.conda hash: - md5: 2a7de29fb590ca14b5243c4c812c8025 - sha256: a42f826e958a8d22e65b3394f437af7332610e43ee313393d1cf143f0a2d274b + md5: 461219d1a5bd61342293efa2c0c90eac + sha256: ba3b032fa52709ce0d9fd388f63d330a026754587a2f461117cac9ab73d8d0d8 category: main optional: false - name: pyspark - version: 3.5.1 + version: 3.5.3 manager: conda platform: linux-64 dependencies: - numpy: '>=1.15' + numpy: '>=1.15,<2' pandas: '>=1.0.5' py4j: 0.10.9.7 pyarrow: '>=4.0.0' python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/pyspark-3.5.1-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/pyspark-3.5.3-pyhd8ed1ab_0.conda hash: - md5: fc1824942077c7ed5f0e24ff869c6f37 - sha256: 6ba987ac0a2c5c6de98b4ce943e72cfbfca1134678c3984959cdb11070997005 + md5: 5b3821d50c71b7537cb759caf1988742 + sha256: e5eeaae40fd4adc9ca25de11cf16b133cbcaa5e6caa356ec035fdb76379bdb83 category: main optional: false - name: pyspark - version: 3.5.1 + version: 3.5.3 manager: conda platform: osx-arm64 dependencies: - numpy: '>=1.15' + python: '>=3.8' pandas: '>=1.0.5' - py4j: 0.10.9.7 pyarrow: '>=4.0.0' - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/pyspark-3.5.1-pyhd8ed1ab_0.conda + numpy: '>=1.15,<2' + py4j: 0.10.9.7 + url: https://conda.anaconda.org/conda-forge/noarch/pyspark-3.5.3-pyhd8ed1ab_0.conda hash: - md5: fc1824942077c7ed5f0e24ff869c6f37 - sha256: 6ba987ac0a2c5c6de98b4ce943e72cfbfca1134678c3984959cdb11070997005 + md5: 5b3821d50c71b7537cb759caf1988742 + sha256: e5eeaae40fd4adc9ca25de11cf16b133cbcaa5e6caa356ec035fdb76379bdb83 category: main optional: false - 
name: python - version: 3.9.20 + version: 3.9.21 manager: conda platform: linux-64 dependencies: @@ -13008,93 +12686,93 @@ package: ld_impl_linux-64: '>=2.36.1' libffi: '>=3.4,<4.0a0' libgcc: '>=13' + liblzma: '>=5.6.3,<6.0a0' libnsl: '>=2.0.1,<2.1.0a0' - libsqlite: '>=3.46.1,<4.0a0' + libsqlite: '>=3.47.0,<4.0a0' libuuid: '>=2.38.1,<3.0a0' libxcrypt: '>=4.4.36' libzlib: '>=1.3.1,<2.0a0' ncurses: '>=6.5,<7.0a0' - openssl: '>=3.3.2,<4.0a0' + openssl: '>=3.4.0,<4.0a0' readline: '>=8.2,<9.0a0' tk: '>=8.6.13,<8.7.0a0' tzdata: '' - xz: '>=5.2.6,<6.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/python-3.9.20-h13acc7a_1_cpython.conda + url: https://conda.anaconda.org/conda-forge/linux-64/python-3.9.21-h9c0c6dc_1_cpython.conda hash: - md5: 951cff166a5f170e27908811917165f8 - sha256: 6a30aa8df1745eded1e5c24d167cb10e6f379e75d2f2fa2a212e6dab76030698 + md5: b4807744af026fdbe8c05131758fb4be + sha256: 06042ce946a64719b5ce1676d02febc49a48abcab16ef104e27d3ec11e9b1855 category: main optional: false - name: python - version: 3.9.20 + version: 3.9.21 manager: conda platform: osx-arm64 dependencies: __osx: '>=11.0' bzip2: '>=1.0.8,<2.0a0' libffi: '>=3.4,<4.0a0' - libsqlite: '>=3.46.1,<4.0a0' + liblzma: '>=5.6.3,<6.0a0' + libsqlite: '>=3.47.0,<4.0a0' libzlib: '>=1.3.1,<2.0a0' ncurses: '>=6.5,<7.0a0' - openssl: '>=3.3.2,<4.0a0' + openssl: '>=3.4.0,<4.0a0' readline: '>=8.2,<9.0a0' tk: '>=8.6.13,<8.7.0a0' tzdata: '' - xz: '>=5.2.6,<6.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/python-3.9.20-h9e33284_1_cpython.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/python-3.9.21-h5f1b60f_1_cpython.conda hash: - md5: 708bd3a3616e42becb50d77313def984 - sha256: d6c272faa05fb7524aaf59718fa27629b1875e5dfb2fa74100547e8564cce4bc + md5: a7ec592ce8aefc5a681d2c5b8e005a54 + sha256: e9f80120e6bbb6fcbe29eb4afb1fc06c0a9b2802a13114cf7c823fce284f4ebb category: main optional: false - name: python-dateutil - version: 2.9.0 + version: 2.9.0.post0 manager: conda platform: 
linux-64 dependencies: - python: '>=3.7' + python: '>=3.9' six: '>=1.5' - url: https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0.post0-pyhff2d567_1.conda hash: - md5: 2cf4264fffb9e6eff6031c5b6884d61c - sha256: f3ceef02ac164a8d3a080d0d32f8e2ebe10dd29e3a685d240e38b3599e146320 + md5: 5ba79d7c71f03c678c8ead841f347d6e + sha256: a50052536f1ef8516ed11a844f9413661829aa083304dc624c5925298d078d79 category: main optional: false - name: python-dateutil - version: 2.9.0 + version: 2.9.0.post0 manager: conda platform: osx-arm64 dependencies: - python: '>=3.7' + python: '>=3.9' six: '>=1.5' - url: https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0.post0-pyhff2d567_1.conda hash: - md5: 2cf4264fffb9e6eff6031c5b6884d61c - sha256: f3ceef02ac164a8d3a080d0d32f8e2ebe10dd29e3a685d240e38b3599e146320 + md5: 5ba79d7c71f03c678c8ead841f347d6e + sha256: a50052536f1ef8516ed11a844f9413661829aa083304dc624c5925298d078d79 category: main optional: false - name: python-fastjsonschema - version: 2.20.0 + version: 2.21.1 manager: conda platform: linux-64 dependencies: - python: '>=3.3' - url: https://conda.anaconda.org/conda-forge/noarch/python-fastjsonschema-2.20.0-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/python-fastjsonschema-2.21.1-pyhd8ed1ab_0.conda hash: - md5: b98d2018c01ce9980c03ee2850690fab - sha256: 7d8c931b89c9980434986b4deb22c2917b58d9936c3974139b9c10ae86fdfe60 + md5: 38e34d2d1d9dca4fb2b9a0a04f604e2c + sha256: 1b09a28093071c1874862422696429d0d35bd0b8420698003ac004746c5e82a2 category: main optional: false - name: python-fastjsonschema - version: 2.20.0 + version: 2.21.1 manager: conda platform: osx-arm64 dependencies: - python: '>=3.3' - url: 
https://conda.anaconda.org/conda-forge/noarch/python-fastjsonschema-2.20.0-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/python-fastjsonschema-2.21.1-pyhd8ed1ab_0.conda hash: - md5: b98d2018c01ce9980c03ee2850690fab - sha256: 7d8c931b89c9980434986b4deb22c2917b58d9936c3974139b9c10ae86fdfe60 + md5: 38e34d2d1d9dca4fb2b9a0a04f604e2c + sha256: 1b09a28093071c1874862422696429d0d35bd0b8420698003ac004746c5e82a2 category: main optional: false - name: python-flatbuffers @@ -13102,11 +12780,11 @@ package: manager: conda platform: linux-64 dependencies: - python: '>=3.6' - url: https://conda.anaconda.org/conda-forge/noarch/python-flatbuffers-24.3.25-pyh59ac667_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/python-flatbuffers-24.3.25-pyhe33e51e_1.conda hash: - md5: dfc884dcd61ff6543fde37a41b7d7f31 - sha256: 6a9d285fef959480eccbc69e276ede64e292c8eee35ddc727d5a0fb9a4bcc3a2 + md5: f1b4b2cbba87bc4ae6706bdab332450c + sha256: ad32145c4e48cf24e3eb29f8022f9b320b8a3e30d1154277075f36c9a3911611 category: main optional: false - name: python-flatbuffers @@ -13114,11 +12792,11 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=3.6' - url: https://conda.anaconda.org/conda-forge/noarch/python-flatbuffers-24.3.25-pyh59ac667_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/python-flatbuffers-24.3.25-pyhe33e51e_1.conda hash: - md5: dfc884dcd61ff6543fde37a41b7d7f31 - sha256: 6a9d285fef959480eccbc69e276ede64e292c8eee35ddc727d5a0fb9a4bcc3a2 + md5: f1b4b2cbba87bc4ae6706bdab332450c + sha256: ad32145c4e48cf24e3eb29f8022f9b320b8a3e30d1154277075f36c9a3911611 category: main optional: false - name: python-json-logger @@ -13168,23 +12846,23 @@ package: sha256: 0a856358b2fca9e52deed53edff2e36c73bd864a573d09210abaac08215ef0f8 category: main optional: false -- name: python-kubernetes - version: 31.0.0 - manager: conda - platform: osx-arm64 - dependencies: - certifi: '>=14.05.14' - 
durationpy: '>=0.7' - google-auth: '>=1.0.1' - oauthlib: '>=3.2.2' - python: '>=3.7' - python-dateutil: '>=2.5.3' - pyyaml: '>=5.4.1' +- name: python-kubernetes + version: 31.0.0 + manager: conda + platform: osx-arm64 + dependencies: requests: '' requests-oauthlib: '' + python: '>=3.7' six: '>=1.9.0' + pyyaml: '>=5.4.1' + python-dateutil: '>=2.5.3' + certifi: '>=14.05.14' urllib3: '>=1.24.2' + google-auth: '>=1.0.1' websocket-client: '>=0.32.0,!=0.40.0,!=0.41.*,!=0.42.*' + oauthlib: '>=3.2.2' + durationpy: '>=0.7' url: https://conda.anaconda.org/conda-forge/noarch/python-kubernetes-31.0.0-pyhd8ed1ab_0.conda hash: md5: 5f534dd7e9098283a4c5a4a8763e8675 @@ -13196,11 +12874,11 @@ package: manager: conda platform: linux-64 dependencies: - python: '>=3.6' - url: https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2024.2-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2024.2-pyhd8ed1ab_1.conda hash: - md5: 986287f89929b2d629bd6ef6497dc307 - sha256: fe3f62ce2bc714bdaa222ab3f0344a2815ad9e853c6df38d15c9f25de8a3a6d4 + md5: c0def296b2f6d2dd7b030c2a7f66bb1f + sha256: 57c9a02ec25926fb48edca59b9ede107823e5d5c473b94a0e05cc0b9a193a642 category: main optional: false - name: python-tzdata @@ -13208,37 +12886,37 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=3.6' - url: https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2024.2-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2024.2-pyhd8ed1ab_1.conda hash: - md5: 986287f89929b2d629bd6ef6497dc307 - sha256: fe3f62ce2bc714bdaa222ab3f0344a2815ad9e853c6df38d15c9f25de8a3a6d4 + md5: c0def296b2f6d2dd7b030c2a7f66bb1f + sha256: 57c9a02ec25926fb48edca59b9ede107823e5d5c473b94a0e05cc0b9a193a642 category: main optional: false - name: python-utils - version: 3.8.2 + version: 3.9.1 manager: conda platform: linux-64 dependencies: - python: '>=3.8' + python: '>=3.9' typing_extensions: '>3.10.0.2' - 
url: https://conda.anaconda.org/conda-forge/noarch/python-utils-3.8.2-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/python-utils-3.9.1-pyhff2d567_0.conda hash: - md5: 89703b4f38bd1c0353881f085bc8fdaa - sha256: 56aac9317cde48fc8ff59806587afd4d1c262dcd7598f94c0748a2ec51523d09 + md5: 3ec7a7dd072707e61baac6d474349958 + sha256: 0b86419e6efa61738e3cd647d475b40e5d7facd53b30febe9d7dac15b51741df category: main optional: false - name: python-utils - version: 3.8.2 + version: 3.9.1 manager: conda platform: osx-arm64 dependencies: - python: '>=3.8' + python: '>=3.9' typing_extensions: '>3.10.0.2' - url: https://conda.anaconda.org/conda-forge/noarch/python-utils-3.8.2-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/python-utils-3.9.1-pyhff2d567_0.conda hash: - md5: 89703b4f38bd1c0353881f085bc8fdaa - sha256: 56aac9317cde48fc8ff59806587afd4d1c262dcd7598f94c0748a2ec51523d09 + md5: 3ec7a7dd072707e61baac6d474349958 + sha256: 0b86419e6efa61738e3cd647d475b40e5d7facd53b30febe9d7dac15b51741df category: main optional: false - name: python-xxhash @@ -13337,13 +13015,13 @@ package: filelock: '' fsspec: '' jinja2: '' - libabseil: '>=20240116.2,<20240117.0a0' + libabseil: '>=20240722.0,<20240723.0a0' libcblas: '>=3.9.0,<4.0a0' libcxx: '>=17' liblapack: '>=3.9.0,<4.0a0' - libprotobuf: '>=4.25.3,<4.25.4.0a0' + libprotobuf: '>=5.27.5,<5.27.6.0a0' libtorch: 2.4.1.* - libuv: '>=1.49.0,<2.0a0' + libuv: '>=1.49.1,<2.0a0' llvm-openmp: '>=17.0.6' networkx: '' nomkl: '' @@ -13353,10 +13031,10 @@ package: sleef: '>=3.7,<4.0a0' sympy: '>=1.13.1' typing_extensions: '>=4.8.0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/pytorch-2.4.1-cpu_generic_py39h1c1d8dc_0.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/pytorch-2.4.1-cpu_generic_py39hd3c670c_2.conda hash: - md5: 3fdd7e67136bddb21884718e35b5766b - sha256: 6e01aa47827f968563e43b3483479f187bf7bd4a11a74a35aa34de4d9945411b + md5: ca92192bc91b324aaab5f3c73de64884 + sha256: 
4b41b41d5247e4b58dca29c1160d25783c747495bdcc92a314fc5958e988b9e9 category: main optional: false - name: pytz @@ -13388,12 +13066,12 @@ package: manager: conda platform: linux-64 dependencies: - python: '>=2.7' + python: '>=3.9' six: '' - url: https://conda.anaconda.org/conda-forge/noarch/pyu2f-0.1.5-pyhd8ed1ab_0.tar.bz2 + url: https://conda.anaconda.org/conda-forge/noarch/pyu2f-0.1.5-pyhd8ed1ab_1.conda hash: - md5: caabbeaa83928d0c3e3949261daa18eb - sha256: 667a5a30b65a60b15f38fa4cb09efd6d2762b5a0a9563acd9555eaa5e0b953a2 + md5: 644bd4ca9f68ef536b902685d773d697 + sha256: 991caa5408aea018488a2c94e915c11792b9321b0ef64401f4829ebd0abfb3c0 category: main optional: false - name: pyu2f @@ -13401,12 +13079,12 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=2.7' six: '' - url: https://conda.anaconda.org/conda-forge/noarch/pyu2f-0.1.5-pyhd8ed1ab_0.tar.bz2 + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/pyu2f-0.1.5-pyhd8ed1ab_1.conda hash: - md5: caabbeaa83928d0c3e3949261daa18eb - sha256: 667a5a30b65a60b15f38fa4cb09efd6d2762b5a0a9563acd9555eaa5e0b953a2 + md5: 644bd4ca9f68ef536b902685d773d697 + sha256: 991caa5408aea018488a2c94e915c11792b9321b0ef64401f4829ebd0abfb3c0 category: main optional: false - name: pywin32-on-windows @@ -13547,15 +13225,26 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=2.7' requests: '' six: '' + python: '>=2.7' url: https://conda.anaconda.org/conda-forge/noarch/querystring_parser-1.2.4-pyhd8ed1ab_1.conda hash: md5: 124ce2752ddf974efebd074b53675b83 sha256: 7b0b83da00e1e0d22e4d4f26e58844875c5b6a606953d9da65afb0ca015aaaae category: main optional: false +- name: rav1e + version: 0.6.6 + manager: conda + platform: osx-arm64 + dependencies: {} + url: https://conda.anaconda.org/conda-forge/osx-arm64/rav1e-0.6.6-h69fbcac_2.conda + hash: + md5: e309ae86569b1cd55a0285fa4e939844 + sha256: be6174970193cb4d0ffa7d731a93a4c9542881dbc7ab24e74b460ef312161169 + category: main + optional: false - 
name: rdma-core version: '28.9' manager: conda @@ -13584,15 +13273,15 @@ package: category: main optional: false - name: re2 - version: 2023.09.01 + version: 2024.07.02 manager: conda platform: osx-arm64 dependencies: - libre2-11: 2023.09.01 - url: https://conda.anaconda.org/conda-forge/osx-arm64/re2-2023.09.01-h4cba328_2.conda + libre2-11: 2024.07.02 + url: https://conda.anaconda.org/conda-forge/osx-arm64/re2-2024.07.02-hcd0e937_1.conda hash: - md5: 0342882197116478a42fa4ea35af79c1 - sha256: 0e0d44414381c39a7e6f3da442cb41c637df0dcb383a07425f19c19ccffa0118 + md5: 19e29f2ccc9168eb0a39dc40c04c0e21 + sha256: eebddde6cb10b146507810b701ef6df122d5309cd5151a39d0828aa44dc53725 category: main optional: false - name: readline @@ -13640,9 +13329,9 @@ package: manager: conda platform: osx-arm64 dependencies: - commonmark: '>=0.8.1' - docutils: '>=0.11' python: '>=3' + docutils: '>=0.11' + commonmark: '>=0.8.1' sphinx: '>=1.3.1' url: https://conda.anaconda.org/conda-forge/noarch/recommonmark-0.7.1-pyhd8ed1ab_0.tar.bz2 hash: @@ -13656,12 +13345,12 @@ package: platform: linux-64 dependencies: attrs: '>=22.2.0' - python: '>=3.8' + python: '>=3.9' rpds-py: '>=0.7.0' - url: https://conda.anaconda.org/conda-forge/noarch/referencing-0.35.1-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/referencing-0.35.1-pyhd8ed1ab_1.conda hash: - md5: 0fc8b52192a8898627c3efae1003e9f6 - sha256: be8d6d9e86b1a3fef5424127ff81782f8ca63d3058980859609f6f1ecdd34cb3 + md5: 8c9083612c1bfe6878715ed5732605f8 + sha256: f972eecb4dc8e06257af37642f92b0f2df04a7fe4c950f2e1045505e5e93985f category: main optional: false - name: referencing @@ -13669,13 +13358,13 @@ package: manager: conda platform: osx-arm64 dependencies: + python: '>=3.9' attrs: '>=22.2.0' - python: '>=3.8' rpds-py: '>=0.7.0' - url: https://conda.anaconda.org/conda-forge/noarch/referencing-0.35.1-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/referencing-0.35.1-pyhd8ed1ab_1.conda hash: - md5: 
0fc8b52192a8898627c3efae1003e9f6 - sha256: be8d6d9e86b1a3fef5424127ff81782f8ca63d3058980859609f6f1ecdd34cb3 + md5: 8c9083612c1bfe6878715ed5732605f8 + sha256: f972eecb4dc8e06257af37642f92b0f2df04a7fe4c950f2e1045505e5e93985f category: main optional: false - name: requests @@ -13686,12 +13375,12 @@ package: certifi: '>=2017.4.17' charset-normalizer: '>=2,<4' idna: '>=2.5,<4' - python: '>=3.8' + python: '>=3.9' urllib3: '>=1.21.1,<3' - url: https://conda.anaconda.org/conda-forge/noarch/requests-2.32.3-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/requests-2.32.3-pyhd8ed1ab_1.conda hash: - md5: 5ede4753180c7a550a443c430dc8ab52 - sha256: 5845ffe82a6fa4d437a2eae1e32a1ad308d7ad349f61e337c0a890fe04c513cc + md5: a9b9368f3701a417eac9edbcae7cb737 + sha256: d701ca1136197aa121bbbe0e8c18db6b5c94acbd041c2b43c70e5ae104e1d8ad category: main optional: false - name: requests @@ -13699,15 +13388,15 @@ package: manager: conda platform: osx-arm64 dependencies: + python: '>=3.9' + idna: '>=2.5,<4' certifi: '>=2017.4.17' charset-normalizer: '>=2,<4' - idna: '>=2.5,<4' - python: '>=3.8' urllib3: '>=1.21.1,<3' - url: https://conda.anaconda.org/conda-forge/noarch/requests-2.32.3-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/requests-2.32.3-pyhd8ed1ab_1.conda hash: - md5: 5ede4753180c7a550a443c430dc8ab52 - sha256: 5845ffe82a6fa4d437a2eae1e32a1ad308d7ad349f61e337c0a890fe04c513cc + md5: a9b9368f3701a417eac9edbcae7cb737 + sha256: d701ca1136197aa121bbbe0e8c18db6b5c94acbd041c2b43c70e5ae104e1d8ad category: main optional: false - name: requests-oauthlib @@ -13716,12 +13405,12 @@ package: platform: linux-64 dependencies: oauthlib: '>=3.0.0' - python: '>=3.4' + python: '>=3.9' requests: '>=2.0.0' - url: https://conda.anaconda.org/conda-forge/noarch/requests-oauthlib-2.0.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/requests-oauthlib-2.0.0-pyhd8ed1ab_1.conda hash: - md5: 87ce3f09ae7e1d3d0f748a1a634ea3b7 - sha256: 
3d2b0ad106ad5745445c2eb7e7f90b0ce75dc9f4d8c518eb6fd75aad3c80c2cc + md5: a283b764d8b155f81e904675ef5e1f4b + sha256: 75ef0072ae6691f5ca9709fe6a2570b98177b49d0231a6749ac4e610da934cab category: main optional: false - name: requests-oauthlib @@ -13729,13 +13418,13 @@ package: manager: conda platform: osx-arm64 dependencies: - oauthlib: '>=3.0.0' - python: '>=3.4' + python: '>=3.9' requests: '>=2.0.0' - url: https://conda.anaconda.org/conda-forge/noarch/requests-oauthlib-2.0.0-pyhd8ed1ab_0.conda + oauthlib: '>=3.0.0' + url: https://conda.anaconda.org/conda-forge/noarch/requests-oauthlib-2.0.0-pyhd8ed1ab_1.conda hash: - md5: 87ce3f09ae7e1d3d0f748a1a634ea3b7 - sha256: 3d2b0ad106ad5745445c2eb7e7f90b0ce75dc9f4d8c518eb6fd75aad3c80c2cc + md5: a283b764d8b155f81e904675ef5e1f4b + sha256: 75ef0072ae6691f5ca9709fe6a2570b98177b49d0231a6749ac4e610da934cab category: main optional: false - name: rfc3339-validator @@ -13743,12 +13432,12 @@ package: manager: conda platform: linux-64 dependencies: - python: '>=3.5' + python: '>=3.9' six: '' - url: https://conda.anaconda.org/conda-forge/noarch/rfc3339-validator-0.1.4-pyhd8ed1ab_0.tar.bz2 + url: https://conda.anaconda.org/conda-forge/noarch/rfc3339-validator-0.1.4-pyhd8ed1ab_1.conda hash: - md5: fed45fc5ea0813240707998abe49f520 - sha256: 7c7052b51de0b5c558f890bb11f8b5edbb9934a653d76be086b1182b9f54185d + md5: 36de09a8d3e5d5e6f4ee63af49e59706 + sha256: 2e4372f600490a6e0b3bac60717278448e323cab1c0fecd5f43f7c56535a99c5 category: main optional: false - name: rfc3339-validator @@ -13756,12 +13445,12 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=3.5' six: '' - url: https://conda.anaconda.org/conda-forge/noarch/rfc3339-validator-0.1.4-pyhd8ed1ab_0.tar.bz2 + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/rfc3339-validator-0.1.4-pyhd8ed1ab_1.conda hash: - md5: fed45fc5ea0813240707998abe49f520 - sha256: 7c7052b51de0b5c558f890bb11f8b5edbb9934a653d76be086b1182b9f54185d + md5: 
36de09a8d3e5d5e6f4ee63af49e59706 + sha256: 2e4372f600490a6e0b3bac60717278448e323cab1c0fecd5f43f7c56535a99c5 category: main optional: false - name: rfc3986-validator @@ -13793,11 +13482,11 @@ package: manager: conda platform: linux-64 dependencies: - python: '' - url: https://conda.anaconda.org/conda-forge/noarch/rfc3987-1.3.8-py_0.tar.bz2 + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/rfc3987-1.3.8-pyhd8ed1ab_1.conda hash: - md5: 20e7b2ee8ae949f645dd4bc0d8db42f7 - sha256: 06c631116c0298154ae693f51f1777b841e6c13f70351215824b2028b91f98d2 + md5: ac873606a0ad5d2718ed87c1786aaa46 + sha256: 9d99487a9b8099c0ae29951fa0857414808ab2e710ad376625ef4f156c34b2e3 category: main optional: false - name: rfc3987 @@ -13805,45 +13494,45 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '' - url: https://conda.anaconda.org/conda-forge/noarch/rfc3987-1.3.8-py_0.tar.bz2 + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/rfc3987-1.3.8-pyhd8ed1ab_1.conda hash: - md5: 20e7b2ee8ae949f645dd4bc0d8db42f7 - sha256: 06c631116c0298154ae693f51f1777b841e6c13f70351215824b2028b91f98d2 + md5: ac873606a0ad5d2718ed87c1786aaa46 + sha256: 9d99487a9b8099c0ae29951fa0857414808ab2e710ad376625ef4f156c34b2e3 category: main optional: false - name: rich - version: 13.9.2 + version: 13.9.4 manager: conda platform: linux-64 dependencies: markdown-it-py: '>=2.2.0' pygments: '>=2.13.0,<3.0.0' - python: '>=3.8' + python: '>=3.9' typing_extensions: '>=4.0.0,<5.0.0' - url: https://conda.anaconda.org/conda-forge/noarch/rich-13.9.2-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/rich-13.9.4-pyhd8ed1ab_1.conda hash: - md5: e56b63ff450389ba95a86e97816de7a4 - sha256: 7d481312e97df9ab914151c8294caff4a48f6427e109715445897166435de2ff + md5: 7aed65d4ff222bfb7335997aa40b7da5 + sha256: 06a760c5ae572e72e865d5a87e9fe3cc171e1a9c996e63daf3db52ff1a0b4457 category: main optional: false - name: rich - version: 13.9.2 + version: 13.9.4 manager: conda 
platform: osx-arm64 dependencies: - markdown-it-py: '>=2.2.0' - pygments: '>=2.13.0,<3.0.0' - python: '>=3.8' + python: '>=3.9' typing_extensions: '>=4.0.0,<5.0.0' - url: https://conda.anaconda.org/conda-forge/noarch/rich-13.9.2-pyhd8ed1ab_0.conda + pygments: '>=2.13.0,<3.0.0' + markdown-it-py: '>=2.2.0' + url: https://conda.anaconda.org/conda-forge/noarch/rich-13.9.4-pyhd8ed1ab_1.conda hash: - md5: e56b63ff450389ba95a86e97816de7a4 - sha256: 7d481312e97df9ab914151c8294caff4a48f6427e109715445897166435de2ff + md5: 7aed65d4ff222bfb7335997aa40b7da5 + sha256: 06a760c5ae572e72e865d5a87e9fe3cc171e1a9c996e63daf3db52ff1a0b4457 category: main optional: false - name: rpds-py - version: 0.20.0 + version: 0.22.3 manager: conda platform: linux-64 dependencies: @@ -13851,24 +13540,24 @@ package: libgcc: '>=13' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* - url: https://conda.anaconda.org/conda-forge/linux-64/rpds-py-0.20.0-py39he612d8f_1.conda + url: https://conda.anaconda.org/conda-forge/linux-64/rpds-py-0.22.3-py39he612d8f_0.conda hash: - md5: 62e75be4755fb727a7d2ac81dab33293 - sha256: fb94904c6e4cb513e4517e2db5a979cae3fa173a673a1dc7b98eff6603b1236b + md5: f78f4ac18603f12bcabec0219df9ea15 + sha256: c6511ecfa2ed7ee728b58926cfa14b830a7301cd5a0bd9062e6bc085f226ec4d category: main optional: false - name: rpds-py - version: 0.20.0 + version: 0.22.3 manager: conda platform: osx-arm64 dependencies: __osx: '>=11.0' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* - url: https://conda.anaconda.org/conda-forge/osx-arm64/rpds-py-0.20.0-py39h9c3e640_1.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/rpds-py-0.22.3-py39hc40b5db_0.conda hash: - md5: 13d0266dbdd7df0fdc3c3f12fc36767a - sha256: cd251231f8487e5736ef2a128c5f58920abb11962cd12d6a734b240c9569485d + md5: 356cce0b56e7931b2874e87b7247a292 + sha256: d48dfdaac5494924f47c093af5c529831eec07355933c1e2667c07e8b25bdb32 category: main optional: false - name: rsa @@ -13877,11 +13566,11 @@ package: platform: linux-64 dependencies: pyasn1: 
'>=0.1.3' - python: '>=3.6' - url: https://conda.anaconda.org/conda-forge/noarch/rsa-4.9-pyhd8ed1ab_0.tar.bz2 + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/rsa-4.9-pyhd8ed1ab_1.conda hash: - md5: 03bf410858b2cefc267316408a77c436 - sha256: 23214cdc15a41d14136754857fd9cd46ca3c55a7e751da3b3a48c673f0ee2a57 + md5: 91def14612d11100329d53a75993a4d5 + sha256: 210ff0e3aaa8ce8e9d45a5fd578ce7b2d5bcd7d3054dc779c3a159b8f72104d6 category: main optional: false - name: rsa @@ -13889,12 +13578,12 @@ package: manager: conda platform: osx-arm64 dependencies: + python: '>=3.9' pyasn1: '>=0.1.3' - python: '>=3.6' - url: https://conda.anaconda.org/conda-forge/noarch/rsa-4.9-pyhd8ed1ab_0.tar.bz2 + url: https://conda.anaconda.org/conda-forge/noarch/rsa-4.9-pyhd8ed1ab_1.conda hash: - md5: 03bf410858b2cefc267316408a77c436 - sha256: 23214cdc15a41d14136754857fd9cd46ca3c55a7e751da3b3a48c673f0ee2a57 + md5: 91def14612d11100329d53a75993a4d5 + sha256: 210ff0e3aaa8ce8e9d45a5fd578ce7b2d5bcd7d3054dc779c3a159b8f72104d6 category: main optional: false - name: rtree @@ -13999,33 +13688,33 @@ package: category: main optional: false - name: s3transfer - version: 0.10.3 + version: 0.10.4 manager: conda platform: linux-64 dependencies: botocore: '>=1.33.2,<2.0a.0' - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/s3transfer-0.10.3-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/s3transfer-0.10.4-pyhd8ed1ab_1.conda hash: - md5: 0878f8e10cb8b4e069d27db48b95c3b5 - sha256: a8d6061e31cd4e315b26ab1f6a74c618c930d3e14eb3b7c82e4077a11eae2141 + md5: ed873ecbcf00825b51ae5a272083ef2d + sha256: 7903fe87708f151bd2a2782a8ed1714369feadcf4954ed724d1cce0798766399 category: main optional: false - name: s3transfer - version: 0.10.3 + version: 0.10.4 manager: conda platform: osx-arm64 dependencies: + python: '>=3.9' botocore: '>=1.33.2,<2.0a.0' - python: '>=3.8' - url: 
https://conda.anaconda.org/conda-forge/noarch/s3transfer-0.10.3-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/s3transfer-0.10.4-pyhd8ed1ab_1.conda hash: - md5: 0878f8e10cb8b4e069d27db48b95c3b5 - sha256: a8d6061e31cd4e315b26ab1f6a74c618c930d3e14eb3b7c82e4077a11eae2141 + md5: ed873ecbcf00825b51ae5a272083ef2d + sha256: 7903fe87708f151bd2a2782a8ed1714369feadcf4954ed724d1cce0798766399 category: main optional: false - name: scikit-learn - version: 1.5.2 + version: 1.6.0 manager: conda platform: linux-64 dependencies: @@ -14039,30 +13728,30 @@ package: python_abi: 3.9.* scipy: '' threadpoolctl: '>=3.1.0' - url: https://conda.anaconda.org/conda-forge/linux-64/scikit-learn-1.5.2-py39h4b7350c_1.conda + url: https://conda.anaconda.org/conda-forge/linux-64/scikit-learn-1.6.0-py39h4b7350c_0.conda hash: - md5: ee5943d546a2b573f7975ea656e9f54e - sha256: ff2b7cb7745899cad3d8093cb2d757c6ce472f8ff170b43cd43cfd60a7da94c6 + md5: 5aaca81ca321c1a41ba2dbba7a9ec449 + sha256: cb2911c0c91d8ad0bfb2b0f3e44181c74c5e58c6c7fe9fb4b287989b2fb48a36 category: main optional: false - name: scikit-learn - version: 1.5.2 + version: 1.6.0 manager: conda platform: osx-arm64 dependencies: __osx: '>=11.0' joblib: '>=1.2.0' - libcxx: '>=17' - llvm-openmp: '>=17.0.6' + libcxx: '>=18' + llvm-openmp: '>=18.1.8' numpy: '>=1.19,<3' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* scipy: '' threadpoolctl: '>=3.1.0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/scikit-learn-1.5.2-py39h4704dc7_1.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/scikit-learn-1.6.0-py39h451895d_0.conda hash: - md5: 741da1d299a6a63d4a62200ccecaad56 - sha256: dee83177a3527497b5e60031502321c39700134a7a3d04f10918cd0e2386dbee + md5: 5fd2bf23c87dc04386fc60a01a2b78f6 + sha256: 6c80329eb9ae872c6ef7dfdfe086f12e3fe4e3c321869e70dd113b4c0f9a035e category: main optional: false - name: scipy @@ -14113,11 +13802,11 @@ package: platform: linux-64 dependencies: __linux: '' - python: '>=3.7' - url: 
https://conda.anaconda.org/conda-forge/noarch/send2trash-1.8.3-pyh0d859eb_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/send2trash-1.8.3-pyh0d859eb_1.conda hash: - md5: 778594b20097b5a948c59e50ae42482a - sha256: c4401b071e86ddfa0ea4f34b85308db2516b6aeca50053535996864cfdee7b3f + md5: 938c8de6b9de091997145b3bf25cdbf9 + sha256: 00926652bbb8924e265caefdb1db100f86a479e8f1066efe395d5552dde54d02 category: main optional: false - name: send2trash @@ -14127,43 +13816,43 @@ package: dependencies: __osx: '' pyobjc-framework-cocoa: '' - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/send2trash-1.8.3-pyh31c8845_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/send2trash-1.8.3-pyh31c8845_1.conda hash: - md5: c3cb67fc72fb38020fe7923dbbcf69b0 - sha256: f911307db932c92510da6c3c15b461aef935720776643a1fbf3683f61001068b + md5: e67b1b1fa7a79ff9e8e326d0caf55854 + sha256: 5282eb5b462502c38df8cb37cd1542c5bbe26af2453a18a0a0602d084ca39f53 category: main optional: false - name: sentry-sdk - version: 2.16.0 + version: 2.19.2 manager: conda platform: linux-64 dependencies: certifi: '' python: '>=3.7' urllib3: '>=1.25.7' - url: https://conda.anaconda.org/conda-forge/noarch/sentry-sdk-2.16.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/sentry-sdk-2.19.2-pyhd8ed1ab_0.conda hash: - md5: 187f411001806ee0fbae245308ad824b - sha256: 04ba08e856397c1f83a021ba80ec5291583298be3ed3af91184a497a13bc1fe6 + md5: 0c78be96871bacde6d22402a8c430200 + sha256: 806e251cbe9ee962ca0443413ae92cf37e16549a05e60139d813d983b6a27606 category: main optional: false - name: sentry-sdk - version: 2.16.0 + version: 2.19.2 manager: conda platform: osx-arm64 dependencies: certifi: '' python: '>=3.7' urllib3: '>=1.25.7' - url: https://conda.anaconda.org/conda-forge/noarch/sentry-sdk-2.16.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/sentry-sdk-2.19.2-pyhd8ed1ab_0.conda hash: - md5: 
187f411001806ee0fbae245308ad824b - sha256: 04ba08e856397c1f83a021ba80ec5291583298be3ed3af91184a497a13bc1fe6 + md5: 0c78be96871bacde6d22402a8c430200 + sha256: 806e251cbe9ee962ca0443413ae92cf37e16549a05e60139d813d983b6a27606 category: main optional: false - name: setproctitle - version: 1.3.3 + version: 1.3.4 manager: conda platform: linux-64 dependencies: @@ -14171,48 +13860,48 @@ package: libgcc: '>=13' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* - url: https://conda.anaconda.org/conda-forge/linux-64/setproctitle-1.3.3-py39h8cd3c5a_1.conda + url: https://conda.anaconda.org/conda-forge/linux-64/setproctitle-1.3.4-py39h8cd3c5a_0.conda hash: - md5: 2a9f478e1fa44e2dcecf003902e47410 - sha256: 8cd9b68d90efdb25a4fca677e616c2abd4a7c2b04187ebbb193fa8a5ac18f82e + md5: e72a4fef076cfbc008e79599f4548bd8 + sha256: 8e295dce1bfd2e6a182d3df4cc93db86d48c03f6a6f8b5b5d8d36d88f5b2e461 category: main optional: false - name: setproctitle - version: 1.3.3 + version: 1.3.4 manager: conda platform: osx-arm64 dependencies: __osx: '>=11.0' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* - url: https://conda.anaconda.org/conda-forge/osx-arm64/setproctitle-1.3.3-py39h06df861_1.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/setproctitle-1.3.4-py39hf3bc14e_0.conda hash: - md5: eb961c7e227216f384a7e6b76a8d7ac4 - sha256: 28f125a20417b35647526b929b28b2b96fd60d3712e469bb241d373859c8b46d + md5: 2fe7252c8b0451c8134e4d7ddca9f40c + sha256: 94460f60de18d4a923cab81f5f0b3ba7e5c7007cb67932d2ebe2f2d07e7ce691 category: main optional: false - name: setuptools - version: 75.1.0 + version: 75.6.0 manager: conda platform: linux-64 dependencies: - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/setuptools-75.1.0-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/setuptools-75.6.0-pyhff2d567_1.conda hash: - md5: d5cd48392c67fb6849ba459c2c2b671f - sha256: 6725235722095c547edd24275053c615158d6163f396550840aebd6e209e4738 + md5: 
fc80f7995e396cbaeabd23cf46c413dc + sha256: abb12e1dd515b13660aacb5d0fd43835bc2186cab472df25b7716cd65e095111 category: main optional: false - name: setuptools - version: 75.1.0 + version: 75.6.0 manager: conda platform: osx-arm64 dependencies: - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/setuptools-75.1.0-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/setuptools-75.6.0-pyhff2d567_1.conda hash: - md5: d5cd48392c67fb6849ba459c2c2b671f - sha256: 6725235722095c547edd24275053c615158d6163f396550840aebd6e209e4738 + md5: fc80f7995e396cbaeabd23cf46c413dc + sha256: abb12e1dd515b13660aacb5d0fd43835bc2186cab472df25b7716cd65e095111 category: main optional: false - name: shapely @@ -14277,27 +13966,27 @@ package: category: main optional: false - name: six - version: 1.16.0 + version: 1.17.0 manager: conda platform: linux-64 dependencies: - python: '' - url: https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2 + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/six-1.17.0-pyhd8ed1ab_0.conda hash: - md5: e5f25f8dbc060e9a8d912e432202afc2 - sha256: a85c38227b446f42c5b90d9b642f2c0567880c15d72492d8da074a59c8f91dd6 + md5: a451d576819089b0d672f18768be0f65 + sha256: 41db0180680cc67c3fa76544ffd48d6a5679d96f4b71d7498a759e94edc9a2db category: main optional: false - name: six - version: 1.16.0 + version: 1.17.0 manager: conda platform: osx-arm64 dependencies: - python: '' - url: https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2 + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/six-1.17.0-pyhd8ed1ab_0.conda hash: - md5: e5f25f8dbc060e9a8d912e432202afc2 - sha256: a85c38227b446f42c5b90d9b642f2c0567880c15d72492d8da074a59c8f91dd6 + md5: a451d576819089b0d672f18768be0f65 + sha256: 41db0180680cc67c3fa76544ffd48d6a5679d96f4b71d7498a759e94edc9a2db category: main optional: false - name: skl2onnx @@ -14324,14 +14013,14 @@ package: manager: conda 
platform: osx-arm64 dependencies: - numpy: '>=1.15' - onnx: '>=1.2.1' - onnxconverter-common: '>=1.7.0' packaging: '' protobuf: '' python: '>=3.6' - scikit-learn: '>=1.1' + numpy: '>=1.15' scipy: '>=1.0' + onnxconverter-common: '>=1.7.0' + scikit-learn: '>=1.1' + onnx: '>=1.2.1' url: https://conda.anaconda.org/conda-forge/noarch/skl2onnx-1.17.0-pyhd8ed1ab_0.conda hash: md5: 361bc57bd79767dce17de303b3499536 @@ -14347,10 +14036,10 @@ package: _openmp_mutex: '>=4.5' libgcc: '>=13' libstdcxx: '>=13' - url: https://conda.anaconda.org/conda-forge/linux-64/sleef-3.7-h1b44611_0.conda + url: https://conda.anaconda.org/conda-forge/linux-64/sleef-3.7-h1b44611_2.conda hash: - md5: f8b9a3928def0a7f4e37c67045542584 - sha256: 4fbd2d6daa2b9cae0ba704cbbbb45a4f2f412e03e2028ec851205f09caa9139a + md5: 4792f3259c6fdc0b730563a85b211dc0 + sha256: 38ad951d30052522693d21b247105744c7c6fb7cefcf41edca36f0688322e76d category: main optional: false - name: sleef @@ -14359,12 +14048,12 @@ package: platform: osx-arm64 dependencies: __osx: '>=11.0' - libcxx: '>=17' - llvm-openmp: '>=17.0.6' - url: https://conda.anaconda.org/conda-forge/osx-arm64/sleef-3.7-h7783ee8_0.conda + libcxx: '>=18' + llvm-openmp: '>=18.1.8' + url: https://conda.anaconda.org/conda-forge/osx-arm64/sleef-3.7-h8391f65_2.conda hash: - md5: cf4b93e9daf2dc16a23d1f9402b34beb - sha256: e3b72bd02545ecd5b6a5f0578eff527f10e718b8807e83ebecb2cba16e764bba + md5: 00ecdc12398192a5a3a4aaf3d5d10a7c + sha256: 244a788a52c611c91c6b2dc73fdbb4a486261d9d321123d76500a99322bae26a category: main optional: false - name: smmap @@ -14410,11 +14099,11 @@ package: platform: osx-arm64 dependencies: __osx: '>=11.0' - libcxx: '>=16' - url: https://conda.anaconda.org/conda-forge/osx-arm64/snappy-1.2.1-hd02b534_0.conda + libcxx: '>=18' + url: https://conda.anaconda.org/conda-forge/osx-arm64/snappy-1.2.1-h98b9ce2_1.conda hash: - md5: 69d0f9694f3294418ee935da3d5f7272 - sha256: cb7a9440241c6092e0f1c795fdca149c4767023e783eaf9cfebc501f906b4897 + md5: 
ded86dee325290da2967a3fea3800eb5 + sha256: 4242f95b215127a006eb664fe26ed5a82df87e90cbdbc7ce7ff4971f0720997f category: main optional: false - name: sniffio @@ -14422,11 +14111,11 @@ package: manager: conda platform: linux-64 dependencies: - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/sniffio-1.3.1-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/sniffio-1.3.1-pyhd8ed1ab_1.conda hash: - md5: 490730480d76cf9c8f8f2849719c6e2b - sha256: bc12100b2d8836b93c55068b463190505b8064d0fc7d025e89f20ebf22fe6c2b + md5: bf7a226e58dfb8346c70df36065d86c9 + sha256: c2248418c310bdd1719b186796ae50a8a77ce555228b6acd32768e2543a15012 category: main optional: false - name: sniffio @@ -14434,11 +14123,11 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/sniffio-1.3.1-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/sniffio-1.3.1-pyhd8ed1ab_1.conda hash: - md5: 490730480d76cf9c8f8f2849719c6e2b - sha256: bc12100b2d8836b93c55068b463190505b8064d0fc7d025e89f20ebf22fe6c2b + md5: bf7a226e58dfb8346c70df36065d86c9 + sha256: c2248418c310bdd1719b186796ae50a8a77ce555228b6acd32768e2543a15012 category: main optional: false - name: snowballstemmer @@ -14466,7 +14155,7 @@ package: category: main optional: false - name: snowflake-connector-python - version: 3.12.2 + version: 3.12.4 manager: conda platform: linux-64 dependencies: @@ -14484,7 +14173,7 @@ package: packaging: '' platformdirs: '>=2.6.0,<5.0.0' pyjwt: <3.0.0 - pyopenssl: '>=16.2.0,<25.0.0' + pyopenssl: '>=22.0.0,<25.0.0' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* pytz: '' @@ -14493,14 +14182,14 @@ package: tomlkit: '' typing_extensions: '>=4.3,<5' urllib3: '>=1.21.1,<2' - url: https://conda.anaconda.org/conda-forge/linux-64/snowflake-connector-python-3.12.2-py39h3b40f6f_1.conda + url: 
https://conda.anaconda.org/conda-forge/linux-64/snowflake-connector-python-3.12.4-py39h3b40f6f_0.conda hash: - md5: 5fd81424af45edbfa4e70d682b3c3964 - sha256: 477e67bc22443f9cd57006145bfdd866e60d9d2bbded25d693a0ed8b2890f043 + md5: a98d19c4d9bc53a8d516fa653174becc + sha256: a2db43a13ed0cd1ed2579676945e907dc25142fecda9022e9b90d32ba4bc95b9 category: main optional: false - name: snowflake-connector-python - version: 3.12.2 + version: 3.12.4 manager: conda platform: osx-arm64 dependencies: @@ -14512,12 +14201,12 @@ package: cryptography: '>=3.1.0' filelock: '>=3.5,<4' idna: '>=2.5,<4' - libcxx: '>=17' + libcxx: '>=18' numpy: '>=1.19,<3' packaging: '' platformdirs: '>=2.6.0,<5.0.0' pyjwt: <3.0.0 - pyopenssl: '>=16.2.0,<25.0.0' + pyopenssl: '>=22.0.0,<25.0.0' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* pytz: '' @@ -14526,10 +14215,10 @@ package: tomlkit: '' typing_extensions: '>=4.3,<5' urllib3: '>=1.21.1,<2' - url: https://conda.anaconda.org/conda-forge/osx-arm64/snowflake-connector-python-3.12.2-py39hc5ad87a_1.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/snowflake-connector-python-3.12.4-py39hbc50386_0.conda hash: - md5: 699f9fce76ee2054ea404a48c9dc53f2 - sha256: 106d0c3ebcd733d295a5e829d9aea1a001f4623b451888fe463e8fc8787c7988 + md5: a198d2c25ea69529a494d938cb5409b8 + sha256: e352c8e67bab6b3de01590267917276d88daed6cc4729084ec770c98ce96fea8 category: main optional: false - name: sortedcontainers @@ -14580,20 +14269,6 @@ package: sha256: 54ae221033db8fbcd4998ccb07f3c3828b4d77e73b0c72b18c1d6a507059059c category: main optional: false -- name: spdlog - version: 1.14.1 - manager: conda - platform: osx-arm64 - dependencies: - __osx: '>=11.0' - fmt: '>=11.0.1,<12.0a0' - libcxx: '>=16' - url: https://conda.anaconda.org/conda-forge/osx-arm64/spdlog-1.14.1-h6d8af72_1.conda - hash: - md5: 4af518b01539da8e4af17aee5fb92639 - sha256: f981d4f3555125cb913be49397892f43c6b747705c0d72cba3676f7d98709f92 - category: main - optional: false - name: sphinx version: 7.4.7 
manager: conda @@ -14629,25 +14304,25 @@ package: manager: conda platform: osx-arm64 dependencies: - alabaster: '>=0.7.14,<0.8.dev0' - babel: '>=2.13' + sphinxcontrib-jsmath: '' + sphinxcontrib-applehelp: '' + sphinxcontrib-qthelp: '' + sphinxcontrib-devhelp: '' + python: '>=3.9' colorama: '>=0.4.6' - docutils: '>=0.20,<0.22' + packaging: '>=23.0' + sphinxcontrib-htmlhelp: '>=2.0.0' + tomli: '>=2.0' imagesize: '>=1.3' - importlib-metadata: '>=6.0' jinja2: '>=3.1' - packaging: '>=23.0' + sphinxcontrib-serializinghtml: '>=1.1.9' + babel: '>=2.13' + docutils: '>=0.20,<0.22' pygments: '>=2.17' - python: '>=3.9' requests: '>=2.30.0' snowballstemmer: '>=2.2' - sphinxcontrib-applehelp: '' - sphinxcontrib-devhelp: '' - sphinxcontrib-htmlhelp: '>=2.0.0' - sphinxcontrib-jsmath: '' - sphinxcontrib-qthelp: '' - sphinxcontrib-serializinghtml: '>=1.1.9' - tomli: '>=2.0' + alabaster: '>=0.7.14,<0.8.dev0' + importlib-metadata: '>=6.0' url: https://conda.anaconda.org/conda-forge/noarch/sphinx-7.4.7-pyhd8ed1ab_0.conda hash: md5: c568e260463da2528ecfd7c5a0b41bbd @@ -14655,35 +14330,37 @@ package: category: main optional: false - name: sphinx-autoapi - version: 3.3.2 + version: 3.4.0 manager: conda platform: linux-64 dependencies: astroid: '>=2.7' jinja2: '' - python: '>=3.8' + python: '>=3.9' pyyaml: '' sphinx: '>=6.1.0' - url: https://conda.anaconda.org/conda-forge/noarch/sphinx-autoapi-3.3.2-pyhd8ed1ab_0.conda + stdlib-list: '' + url: https://conda.anaconda.org/conda-forge/noarch/sphinx-autoapi-3.4.0-pyhd8ed1ab_0.conda hash: - md5: 42d1a022b4fd9345c29f81bc12b97252 - sha256: 0571b0da9e50848e532c89c1f3936b1842f0c55077b9a68babbeb1a2395006a5 + md5: 3aefff40eecc4aab8b24fe8861f9f37c + sha256: 42e0caf785346a5c23c3739c93571290a7b53b5450c6bf0a33efc9a239fc91d2 category: main optional: false - name: sphinx-autoapi - version: 3.3.2 + version: 3.4.0 manager: conda platform: osx-arm64 dependencies: - astroid: '>=2.7' - jinja2: '' - python: '>=3.8' pyyaml: '' + jinja2: '' + stdlib-list: '' + 
python: '>=3.9' + astroid: '>=2.7' sphinx: '>=6.1.0' - url: https://conda.anaconda.org/conda-forge/noarch/sphinx-autoapi-3.3.2-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/sphinx-autoapi-3.4.0-pyhd8ed1ab_0.conda hash: - md5: 42d1a022b4fd9345c29f81bc12b97252 - sha256: 0571b0da9e50848e532c89c1f3936b1842f0c55077b9a68babbeb1a2395006a5 + md5: 3aefff40eecc4aab8b24fe8861f9f37c + sha256: 42e0caf785346a5c23c3739c93571290a7b53b5450c6bf0a33efc9a239fc91d2 category: main optional: false - name: sphinx-basic-ng @@ -14731,8 +14408,8 @@ package: manager: conda platform: osx-arm64 dependencies: - click: '>=6.0' python: '>=3.6' + click: '>=6.0' sphinx: '>=2.0' url: https://conda.anaconda.org/conda-forge/noarch/sphinx-click-6.0.0-pyhd8ed1ab_0.conda hash: @@ -14793,29 +14470,29 @@ package: category: main optional: false - name: sphinx-issues - version: 1.2.0 + version: 5.0.0 manager: conda platform: linux-64 dependencies: - python: '' + python: '>=3.9' sphinx: '' - url: https://conda.anaconda.org/conda-forge/noarch/sphinx-issues-1.2.0-py_0.tar.bz2 + url: https://conda.anaconda.org/conda-forge/noarch/sphinx-issues-5.0.0-pyhd8ed1ab_0.conda hash: - md5: 2d5c0dddca9bb724dcf5a3fb295a2266 - sha256: 9d98392bff12194c45c6f13c6c93d0b15b2fe489de5746654e732009fce41a86 + md5: 0c814aa8b5767e263a09326e973d415d + sha256: 13b7ed82f45308702a28198c05fffa3e8a2e58a0a219066826c5b41e926309a0 category: main optional: false - name: sphinx-issues - version: 1.2.0 + version: 5.0.0 manager: conda platform: osx-arm64 dependencies: - python: '' sphinx: '' - url: https://conda.anaconda.org/conda-forge/noarch/sphinx-issues-1.2.0-py_0.tar.bz2 + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/sphinx-issues-5.0.0-pyhd8ed1ab_0.conda hash: - md5: 2d5c0dddca9bb724dcf5a3fb295a2266 - sha256: 9d98392bff12194c45c6f13c6c93d0b15b2fe489de5746654e732009fce41a86 + md5: 0c814aa8b5767e263a09326e973d415d + sha256: 13b7ed82f45308702a28198c05fffa3e8a2e58a0a219066826c5b41e926309a0 
category: main optional: false - name: sphinx-prompt @@ -14838,8 +14515,8 @@ package: platform: osx-arm64 dependencies: pygments: '' - python: '>=3.0' sphinx: '' + python: '>=3.0' url: https://conda.anaconda.org/conda-forge/noarch/sphinx-prompt-1.4.0-pyhd8ed1ab_0.tar.bz2 hash: md5: 88ee91e8679603f2a5bd036d52919cc2 @@ -14864,8 +14541,8 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=3.6' sphinx: '' + python: '>=3.6' url: https://conda.anaconda.org/conda-forge/noarch/sphinx-reredirects-0.1.5-pyhd8ed1ab_0.conda hash: md5: e7c9774b034913cb4877190ce2321f24 @@ -14892,10 +14569,10 @@ package: manager: conda platform: osx-arm64 dependencies: - docutils: '>=0.18.0' pygments: '' python: '>=3.6' sphinx: '>=2' + docutils: '>=0.18.0' url: https://conda.anaconda.org/conda-forge/noarch/sphinx-tabs-3.4.1-pyhd8ed1ab_1.conda hash: md5: 8b8362d876396fd967cbb5f404def907 @@ -14963,10 +14640,10 @@ package: dependencies: python: '>=3.9' sphinx: '>=5' - url: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-2.0.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-2.0.0-pyhd8ed1ab_1.conda hash: - md5: 9075bd8c033f0257122300db914e49c9 - sha256: 8ac476358cf26098e3a360b2a9037bd809243f72934c103953e25f4fda4b9f31 + md5: 16e3f039c0aa6446513e94ab18a8784b + sha256: d7433a344a9ad32a680b881c81b0034bc61618d12c39dd6e3309abeffa9577ba category: main optional: false - name: sphinxcontrib-applehelp @@ -14976,10 +14653,10 @@ package: dependencies: python: '>=3.9' sphinx: '>=5' - url: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-2.0.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-2.0.0-pyhd8ed1ab_1.conda hash: - md5: 9075bd8c033f0257122300db914e49c9 - sha256: 8ac476358cf26098e3a360b2a9037bd809243f72934c103953e25f4fda4b9f31 + md5: 16e3f039c0aa6446513e94ab18a8784b + sha256: d7433a344a9ad32a680b881c81b0034bc61618d12c39dd6e3309abeffa9577ba category: 
main optional: false - name: sphinxcontrib-devhelp @@ -14989,10 +14666,10 @@ package: dependencies: python: '>=3.9' sphinx: '>=5' - url: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-2.0.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-2.0.0-pyhd8ed1ab_1.conda hash: - md5: b3bcc38c471ebb738854f52a36059b48 - sha256: 6790efe55f168816dfc9c14235054d5156e5150d28546c5baf2ff4973eff8f6b + md5: 910f28a05c178feba832f842155cbfff + sha256: 55d5076005d20b84b20bee7844e686b7e60eb9f683af04492e598a622b12d53d category: main optional: false - name: sphinxcontrib-devhelp @@ -15002,10 +14679,10 @@ package: dependencies: python: '>=3.9' sphinx: '>=5' - url: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-2.0.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-2.0.0-pyhd8ed1ab_1.conda hash: - md5: b3bcc38c471ebb738854f52a36059b48 - sha256: 6790efe55f168816dfc9c14235054d5156e5150d28546c5baf2ff4973eff8f6b + md5: 910f28a05c178feba832f842155cbfff + sha256: 55d5076005d20b84b20bee7844e686b7e60eb9f683af04492e598a622b12d53d category: main optional: false - name: sphinxcontrib-htmlhelp @@ -15015,10 +14692,10 @@ package: dependencies: python: '>=3.9' sphinx: '>=5' - url: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.1.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.1.0-pyhd8ed1ab_1.conda hash: - md5: e25640d692c02e8acfff0372f547e940 - sha256: 55e14b77ed786ab6ff752b8d75f8448536f385ed250f432bd408d2eff5ea4a9e + md5: e9fb3fe8a5b758b4aff187d434f94f03 + sha256: c1492c0262ccf16694bdcd3bb62aa4627878ea8782d5cd3876614ffeb62b3996 category: main optional: false - name: sphinxcontrib-htmlhelp @@ -15028,10 +14705,10 @@ package: dependencies: python: '>=3.9' sphinx: '>=5' - url: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.1.0-pyhd8ed1ab_0.conda + url: 
https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.1.0-pyhd8ed1ab_1.conda hash: - md5: e25640d692c02e8acfff0372f547e940 - sha256: 55e14b77ed786ab6ff752b8d75f8448536f385ed250f432bd408d2eff5ea4a9e + md5: e9fb3fe8a5b758b4aff187d434f94f03 + sha256: c1492c0262ccf16694bdcd3bb62aa4627878ea8782d5cd3876614ffeb62b3996 category: main optional: false - name: sphinxcontrib-jsmath @@ -15039,11 +14716,11 @@ package: manager: conda platform: linux-64 dependencies: - python: '>=3.5' - url: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-pyhd8ed1ab_1.conda hash: - md5: da1d979339e2714c30a8e806a33ec087 - sha256: d4337d83b8edba688547766fc80f1ac86d6ec86ceeeda93f376acc04079c5ce2 + md5: fa839b5ff59e192f411ccc7dae6588bb + sha256: 578bef5ec630e5b2b8810d898bbbf79b9ae66d49b7938bcc3efc364e679f2a62 category: main optional: false - name: sphinxcontrib-jsmath @@ -15051,11 +14728,11 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=3.5' - url: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-pyhd8ed1ab_1.conda hash: - md5: da1d979339e2714c30a8e806a33ec087 - sha256: d4337d83b8edba688547766fc80f1ac86d6ec86ceeeda93f376acc04079c5ce2 + md5: fa839b5ff59e192f411ccc7dae6588bb + sha256: 578bef5ec630e5b2b8810d898bbbf79b9ae66d49b7938bcc3efc364e679f2a62 category: main optional: false - name: sphinxcontrib-mermaid @@ -15077,9 +14754,9 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=3.7' pyyaml: '' sphinx: '' + python: '>=3.7' url: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-mermaid-1.0.0-pyhd8ed1ab_0.conda hash: md5: a906d0a778a54834ffd15d22bdda9ddd @@ -15093,10 +14770,10 @@ package: dependencies: python: '>=3.9' sphinx: '>=5' - url: 
https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-2.0.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-2.0.0-pyhd8ed1ab_1.conda hash: - md5: d6e5ea5fe00164ac6c2dcc5d76a42192 - sha256: 7ae639b729844de2ec74dbaf1acccc14843868a82fa46cd2ceb735bc8266af5b + md5: 00534ebcc0375929b45c3039b5ba7636 + sha256: c664fefae4acdb5fae973bdde25836faf451f41d04342b64a358f9a7753c92ca category: main optional: false - name: sphinxcontrib-qthelp @@ -15106,10 +14783,10 @@ package: dependencies: python: '>=3.9' sphinx: '>=5' - url: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-2.0.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-2.0.0-pyhd8ed1ab_1.conda hash: - md5: d6e5ea5fe00164ac6c2dcc5d76a42192 - sha256: 7ae639b729844de2ec74dbaf1acccc14843868a82fa46cd2ceb735bc8266af5b + md5: 00534ebcc0375929b45c3039b5ba7636 + sha256: c664fefae4acdb5fae973bdde25836faf451f41d04342b64a358f9a7753c92ca category: main optional: false - name: sphinxcontrib-serializinghtml @@ -15119,10 +14796,10 @@ package: dependencies: python: '>=3.9' sphinx: '>=5' - url: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.10-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.10-pyhd8ed1ab_1.conda hash: - md5: e507335cb4ca9cff4c3d0fa9cdab255e - sha256: bf80e4c0ff97d5e8e5f6db0831ba60007e820a3a438e8f1afd868aa516d67d6f + md5: 3bc61f7161d28137797e038263c04c54 + sha256: 64d89ecc0264347486971a94487cb8d7c65bfc0176750cf7502b8a272f4ab557 category: main optional: false - name: sphinxcontrib-serializinghtml @@ -15132,10 +14809,10 @@ package: dependencies: python: '>=3.9' sphinx: '>=5' - url: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.10-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.10-pyhd8ed1ab_1.conda hash: - md5: 
e507335cb4ca9cff4c3d0fa9cdab255e - sha256: bf80e4c0ff97d5e8e5f6db0831ba60007e820a3a438e8f1afd868aa516d67d6f + md5: 3bc61f7161d28137797e038263c04c54 + sha256: 64d89ecc0264347486971a94487cb8d7c65bfc0176750cf7502b8a272f4ab557 category: main optional: false - name: sphinxcontrib-youtube @@ -15157,8 +14834,8 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=3.6' requests: '' + python: '>=3.6' sphinx: '>=6.1' url: https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-youtube-1.4.1-pyhd8ed1ab_0.conda hash: @@ -15167,7 +14844,7 @@ package: category: main optional: false - name: sqlalchemy - version: 2.0.35 + version: 2.0.36 manager: conda platform: linux-64 dependencies: @@ -15177,14 +14854,14 @@ package: python: '>=3.9,<3.10.0a0' python_abi: 3.9.* typing-extensions: '>=4.6.0' - url: https://conda.anaconda.org/conda-forge/linux-64/sqlalchemy-2.0.35-py39h8cd3c5a_0.conda + url: https://conda.anaconda.org/conda-forge/linux-64/sqlalchemy-2.0.36-py39h8cd3c5a_0.conda hash: - md5: 7d3f84c7487339c10e59188cbf9f7ed6 - sha256: 525f88155a0404e6659eab62a432391cded508f60a27e374d5c47e5fc91f3e63 + md5: f34b37aa5dcc354714d89c2ad1cb1937 + sha256: b282312f0c1c4f82317d76b0fb8e420d027bc9146cef63cf6703a8993c1bf68c category: main optional: false - name: sqlalchemy - version: 2.0.35 + version: 2.0.36 manager: conda platform: osx-arm64 dependencies: @@ -15193,97 +14870,134 @@ package: python: '>=3.9,<3.10.0a0' python_abi: 3.9.* typing-extensions: '>=4.6.0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/sqlalchemy-2.0.35-py39h06df861_0.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/sqlalchemy-2.0.36-py39h57695bc_0.conda hash: - md5: a87df2ec09dd056da2718815b663aff5 - sha256: 028de9c981047aeceff962da63da5bce0b4201d03f612aed7026c47757451715 + md5: b3e23a18bed9d2856f9479dc1342cc21 + sha256: 29f8fc485df63b47de893f0ae9a9c9f0e842318d87ceed672b77787efb318b9b category: main optional: false - name: sqlite - version: 3.46.1 + version: 3.47.2 manager: 
conda platform: linux-64 dependencies: __glibc: '>=2.17,<3.0.a0' libgcc: '>=13' - libsqlite: 3.46.1 + libsqlite: 3.47.2 libzlib: '>=1.3.1,<2.0a0' ncurses: '>=6.5,<7.0a0' readline: '>=8.2,<9.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.46.1-h9eae976_0.conda + url: https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.47.2-h9eae976_0.conda hash: - md5: b2b3e737da0ae347e16ef1970a5d3f14 - sha256: 8c6245f988a2e1f4eef8456726b9cc46f2462448e61daa4bad2f9e4ca601598a + md5: 64a954de15d114281535a26fd4d1f294 + sha256: 8bda8238ee98e318aad2c54ab3c85c533c830ecba72486c616b7c8546b9b51f7 category: main optional: false - name: sqlite - version: 3.46.1 + version: 3.47.2 manager: conda platform: osx-arm64 dependencies: __osx: '>=11.0' - libsqlite: 3.46.1 + libsqlite: 3.47.2 libzlib: '>=1.3.1,<2.0a0' ncurses: '>=6.5,<7.0a0' readline: '>=8.2,<9.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/sqlite-3.46.1-h3b4c4e4_0.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/sqlite-3.47.2-hd7222ec_0.conda hash: - md5: 78996531776f6a277cac5a14cf590b6a - sha256: 91c8becaddc4593adba50eb27c4e47deafd879cfc3a569cc6db767b5ee6d8146 + md5: fcde11e05577e05f3b69b046822b7529 + sha256: 7b7e81b1cfce888d8591c8e4a6df0a1854c291dcd2a623a371f806130bb01048 category: main optional: false - name: sqlparse - version: 0.5.1 + version: 0.5.2 manager: conda platform: linux-64 dependencies: - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/sqlparse-0.5.1-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/sqlparse-0.5.2-pyhd8ed1ab_1.conda hash: - md5: e8af29e73e8b5906d8882c1f67222d34 - sha256: a8542ef3d0cbb5db6407d9b1ae8cb309fc59c182eecc5752449342eed7add969 + md5: b7340ce27df363a563fba2a1f4679220 + sha256: 2248a303d42d3521aa9d9640b195790125519db03266d565780af44dbae8d9c6 category: main optional: false - name: sqlparse - version: 0.5.1 + version: 0.5.2 manager: conda platform: osx-arm64 dependencies: - python: '>=3.8' 
- url: https://conda.anaconda.org/conda-forge/noarch/sqlparse-0.5.1-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/sqlparse-0.5.2-pyhd8ed1ab_1.conda hash: - md5: e8af29e73e8b5906d8882c1f67222d34 - sha256: a8542ef3d0cbb5db6407d9b1ae8cb309fc59c182eecc5752449342eed7add969 + md5: b7340ce27df363a563fba2a1f4679220 + sha256: 2248a303d42d3521aa9d9640b195790125519db03266d565780af44dbae8d9c6 category: main optional: false - name: stack_data - version: 0.6.2 + version: 0.6.3 manager: conda platform: linux-64 dependencies: asttokens: '' executing: '' pure_eval: '' - python: '>=3.5' - url: https://conda.anaconda.org/conda-forge/noarch/stack_data-0.6.2-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/stack_data-0.6.3-pyhd8ed1ab_1.conda hash: - md5: e7df0fdd404616638df5ece6e69ba7af - sha256: a58433e75229bec39f3be50c02efbe9b7083e53a1f31d8ee247564f370191eec + md5: b1b505328da7a6b246787df4b5a49fbc + sha256: 570da295d421661af487f1595045760526964f41471021056e993e73089e9c41 category: main optional: false - name: stack_data - version: 0.6.2 + version: 0.6.3 manager: conda platform: osx-arm64 dependencies: asttokens: '' executing: '' pure_eval: '' - python: '>=3.5' - url: https://conda.anaconda.org/conda-forge/noarch/stack_data-0.6.2-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/stack_data-0.6.3-pyhd8ed1ab_1.conda + hash: + md5: b1b505328da7a6b246787df4b5a49fbc + sha256: 570da295d421661af487f1595045760526964f41471021056e993e73089e9c41 + category: main + optional: false +- name: stdlib-list + version: 0.11.0 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.7' + url: https://conda.anaconda.org/conda-forge/noarch/stdlib-list-0.11.0-pyhd8ed1ab_0.conda + hash: + md5: 0f3ae9b96c6072d680c505425515fad1 + sha256: cb17fc652f2fc22a0d8bce647f239b7a10b002f6b5794447f1f154558f6afc26 + category: main + optional: false +- name: stdlib-list + version: 0.11.0 + 
manager: conda + platform: osx-arm64 + dependencies: + python: '>=3.7' + url: https://conda.anaconda.org/conda-forge/noarch/stdlib-list-0.11.0-pyhd8ed1ab_0.conda + hash: + md5: 0f3ae9b96c6072d680c505425515fad1 + sha256: cb17fc652f2fc22a0d8bce647f239b7a10b002f6b5794447f1f154558f6afc26 + category: main + optional: false +- name: svt-av1 + version: 2.3.0 + manager: conda + platform: osx-arm64 + dependencies: + __osx: '>=11.0' + libcxx: '>=17' + url: https://conda.anaconda.org/conda-forge/osx-arm64/svt-av1-2.3.0-hf24288c_0.conda hash: - md5: e7df0fdd404616638df5ece6e69ba7af - sha256: a58433e75229bec39f3be50c02efbe9b7083e53a1f31d8ee247564f370191eec + md5: 114c33e9eec335a379c9ee6c498bb807 + sha256: ab876ed8bdd20e22a868dcb8d03e9ce9bbba7762d7e652d49bfff6af768a5b8f category: main optional: false - name: swagger-spec-validator @@ -15308,12 +15022,12 @@ package: manager: conda platform: osx-arm64 dependencies: - jsonschema: '' - python: '>=3.6' pyyaml: '' setuptools: '' six: '' typing_extensions: '' + jsonschema: '' + python: '>=3.6' url: https://conda.anaconda.org/conda-forge/noarch/swagger-spec-validator-3.0.4-pyhd8ed1ab_0.conda hash: md5: 01a7efc913916d04c4f2ebc9cdbeaeb1 @@ -15343,9 +15057,9 @@ package: dependencies: __unix: '' cpython: '' - gmpy2: '>=2.0.8' - mpmath: '>=0.19' python: '>=3.8' + mpmath: '>=0.19' + gmpy2: '>=2.0.8' url: https://conda.anaconda.org/conda-forge/noarch/sympy-1.13.3-pyh2585a3b_104.conda hash: md5: 68085d736d2b2f54498832b65059875d @@ -15357,11 +15071,11 @@ package: manager: conda platform: linux-64 dependencies: - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/tabulate-0.9.0-pyhd8ed1ab_1.tar.bz2 + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/tabulate-0.9.0-pyhd8ed1ab_2.conda hash: - md5: 4759805cce2d914c38472f70bf4d8bcb - sha256: f6e4a0dd24ba060a4af69ca79d32361a6678e61d78c73eb5e357909b025b4620 + md5: 959484a66b4b76befcddc4fa97c95567 + sha256: 
090023bddd40d83468ef86573976af8c514f64119b2bd814ee63a838a542720a category: main optional: false - name: tabulate @@ -15369,11 +15083,11 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/tabulate-0.9.0-pyhd8ed1ab_1.tar.bz2 + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/tabulate-0.9.0-pyhd8ed1ab_2.conda hash: - md5: 4759805cce2d914c38472f70bf4d8bcb - sha256: f6e4a0dd24ba060a4af69ca79d32361a6678e61d78c73eb5e357909b025b4620 + md5: 959484a66b4b76befcddc4fa97c95567 + sha256: 090023bddd40d83468ef86573976af8c514f64119b2bd814ee63a838a542720a category: main optional: false - name: tbb @@ -15383,12 +15097,12 @@ package: dependencies: __glibc: '>=2.17,<3.0.a0' libgcc: '>=13' - libhwloc: '>=2.11.1,<2.11.2.0a0' + libhwloc: '>=2.11.2,<2.11.3.0a0' libstdcxx: '>=13' - url: https://conda.anaconda.org/conda-forge/linux-64/tbb-2021.13.0-h84d6215_0.conda + url: https://conda.anaconda.org/conda-forge/linux-64/tbb-2021.13.0-hceb3a55_1.conda hash: - md5: ee6f7fd1e76061ef1fa307d41fa86a96 - sha256: 7d4d3ad608dc6ae5a7e0f431f784985398a18bcde2ba3ce19cc32f61e2defd98 + md5: ba7726b8df7b9d34ea80e82b097a4893 + sha256: 65463732129899770d54b1fbf30e1bb82fdebda9d7553caf08d23db4590cd691 category: main optional: false - name: tblib @@ -15396,11 +15110,11 @@ package: manager: conda platform: linux-64 dependencies: - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/tblib-3.0.0-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/tblib-3.0.0-pyhd8ed1ab_1.conda hash: - md5: 04eedddeb68ad39871c8127dd1c21f4f - sha256: 2e2c255b6f24a6d75b9938cb184520e27db697db2c24f04e18342443ae847c0a + md5: 60ce69f73f3e75b21f1c27b1b471320c + sha256: 6869cd2e043426d30c84d0ff6619f176b39728f9c75dc95dca89db994548bb8a category: main optional: false - name: tblib @@ -15408,11 +15122,11 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=3.7' - url: 
https://conda.anaconda.org/conda-forge/noarch/tblib-3.0.0-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/tblib-3.0.0-pyhd8ed1ab_1.conda hash: - md5: 04eedddeb68ad39871c8127dd1c21f4f - sha256: 2e2c255b6f24a6d75b9938cb184520e27db697db2c24f04e18342443ae847c0a + md5: 60ce69f73f3e75b21f1c27b1b471320c + sha256: 6869cd2e043426d30c84d0ff6619f176b39728f9c75dc95dca89db994548bb8a category: main optional: false - name: tenacity @@ -15420,11 +15134,11 @@ package: manager: conda platform: linux-64 dependencies: - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/tenacity-9.0.0-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/tenacity-9.0.0-pyhd8ed1ab_1.conda hash: - md5: 42af51ad3b654ece73572628ad2882ae - sha256: 0d33171e1d303b57867f0cfcffb8a35031700acb3c52b1862064d8f4e1085538 + md5: a09f66fe95a54a92172e56a4a97ba271 + sha256: dcf2155fb959773fb102066bfab8e7d79aff67054d142716979274a43fc85735 category: main optional: false - name: tenacity @@ -15432,11 +15146,11 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/tenacity-9.0.0-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/tenacity-9.0.0-pyhd8ed1ab_1.conda hash: - md5: 42af51ad3b654ece73572628ad2882ae - sha256: 0d33171e1d303b57867f0cfcffb8a35031700acb3c52b1862064d8f4e1085538 + md5: a09f66fe95a54a92172e56a4a97ba271 + sha256: dcf2155fb959773fb102066bfab8e7d79aff67054d142716979274a43fc85735 category: main optional: false - name: tensorboard @@ -15468,17 +15182,17 @@ package: manager: conda platform: osx-arm64 dependencies: - absl-py: '>=0.4' - grpcio: '>=1.48.2' - markdown: '>=2.6.8' - numpy: '>=1.12.0' packaging: '' - protobuf: '>=3.19.6,!=4.24.0' python: '>=3.8' + six: '>=1.9' + numpy: '>=1.12.0' setuptools: '>=41.0.0' - six: '>=1.9' - tensorboard-data-server: '>=0.7.0,<0.8.0' + markdown: '>=2.6.8' + absl-py: 
'>=0.4' werkzeug: '>=1.0.1' + grpcio: '>=1.48.2' + tensorboard-data-server: '>=0.7.0,<0.8.0' + protobuf: '>=3.19.6,!=4.24.0' url: https://conda.anaconda.org/conda-forge/noarch/tensorboard-2.17.1-pyhd8ed1ab_0.conda hash: md5: 1a6c7a02498c952ba92e08b0abe0c48c @@ -15541,10 +15255,10 @@ package: python_abi: 3.9.* tensorflow-base: 2.17.0 tensorflow-estimator: 2.17.0 - url: https://conda.anaconda.org/conda-forge/osx-arm64/tensorflow-2.17.0-cpu_py39h0355d61_0.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/tensorflow-2.17.0-cpu_py39h0355d61_3.conda hash: - md5: f35c6fd018236aeed3b91acfaeb3e1cb - sha256: f57a8b10fe64e0595eee51553a7424f72613d72099f97a5b2e82defa91ae0148 + md5: d03c18880d8bc98fadd2f44b2fbb983a + sha256: 29aea3b9c1d5b390a164381025ff4079d8017861ba745401d3f328062c608723 category: main optional: false - name: tensorflow-base @@ -15610,25 +15324,25 @@ package: gast: '>=0.2.1,!=0.5.0,!=0.5.1,!=0.5.2' giflib: '>=5.2.2,<5.3.0a0' google-pasta: '>=0.1.1' - grpcio: 1.62.* + grpcio: 1.65.* h5py: '>=3.10' icu: '>=75.1,<76.0a0' keras: '>=3.0' - libabseil: '>=20240116.2,<20240117.0a0' - libcurl: '>=8.8.0,<9.0a0' - libcxx: '>=16' - libgrpc: '>=1.62.2,<1.63.0a0' + libabseil: '>=20240722.0,<20240723.0a0' + libcurl: '>=8.10.1,<9.0a0' + libcxx: '>=17' + libgrpc: '>=1.65.5,<1.66.0a0' libjpeg-turbo: '>=3.0.0,<4.0a0' - libpng: '>=1.6.43,<1.7.0a0' - libprotobuf: '>=4.25.3,<4.25.4.0a0' - libsqlite: '>=3.46.0,<4.0a0' + libpng: '>=1.6.44,<1.7.0a0' + libprotobuf: '>=5.27.5,<5.27.6.0a0' + libsqlite: '>=3.46.1,<4.0a0' libzlib: '>=1.3.1,<2.0a0' ml_dtypes: '>=0.4.0,<0.5' numpy: '>=1.22.4,<2.0a0' - openssl: '>=3.3.1,<4.0a0' + openssl: '>=3.3.2,<4.0a0' opt_einsum: '>=2.3.2' packaging: '' - protobuf: '>=3.20.3,<5,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5' + protobuf: '>=5.26,<6' python: '>=3.9,<3.10.0a0' python-flatbuffers: '>=24.3.25' python_abi: 3.9.* @@ -15639,10 +15353,10 @@ package: termcolor: '>=1.1.0' typing_extensions: '>=3.6.6' wrapt: '>=1.11.0' - url: 
https://conda.anaconda.org/conda-forge/osx-arm64/tensorflow-base-2.17.0-cpu_py39h0c1ebbd_0.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/tensorflow-base-2.17.0-cpu_py39hb5ef50e_3.conda hash: - md5: 8ce3460bfb16541440a81126a9e8b69c - sha256: 89ecb9444344eaf3c9f028a014a83ac4408d5a381ff833193febd19854852e9f + md5: 670208cafba201087f5c610fbc1b7efa + sha256: f71aa41c966ac168e9df72f1c2203f1303fc8275658e791173e0ec6d5e7c1ed0 category: main optional: false - name: tensorflow-estimator @@ -15670,15 +15384,15 @@ package: platform: osx-arm64 dependencies: __osx: '>=11.0' - libcxx: '>=16' - openssl: '>=3.3.1,<4.0a0' + libcxx: '>=17' + openssl: '>=3.3.2,<4.0a0' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* tensorflow-base: 2.17.0 - url: https://conda.anaconda.org/conda-forge/osx-arm64/tensorflow-estimator-2.17.0-cpu_py39h9ff499c_0.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/tensorflow-estimator-2.17.0-cpu_py39h5290675_3.conda hash: - md5: 9bd489af0f486734c5928a413f56b46d - sha256: 691d814468e021a2e417cc0b9b399189969c317216866899e37dfdd5071bdc44 + md5: 0fe83d2be81fb679d492b2ae112a41e6 + sha256: 2e6bd845e9d2056a3ff3f0a09b74e71a70a50f239e09827740ec9d9d7d91b838 category: main optional: false - name: termcolor @@ -15687,10 +15401,10 @@ package: platform: linux-64 dependencies: python: '>=3.9' - url: https://conda.anaconda.org/conda-forge/noarch/termcolor-2.5.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/termcolor-2.5.0-pyhd8ed1ab_1.conda hash: - md5: 29a5d22565b850099cd9959862d1b154 - sha256: 926c6e9f900b1e871456dec7aa2c3150c3d6e4da547c05f667a04cd22e3e3a10 + md5: 1ce02d60767af357e864ce61895268d2 + sha256: 4a7e13776ebd78afcdba3985ea42317e4e0a20722d2c27ecaae3d9f8849e6516 category: main optional: false - name: termcolor @@ -15699,10 +15413,10 @@ package: platform: osx-arm64 dependencies: python: '>=3.9' - url: https://conda.anaconda.org/conda-forge/noarch/termcolor-2.5.0-pyhd8ed1ab_0.conda + url: 
https://conda.anaconda.org/conda-forge/noarch/termcolor-2.5.0-pyhd8ed1ab_1.conda hash: - md5: 29a5d22565b850099cd9959862d1b154 - sha256: 926c6e9f900b1e871456dec7aa2c3150c3d6e4da547c05f667a04cd22e3e3a10 + md5: 1ce02d60767af357e864ce61895268d2 + sha256: 4a7e13776ebd78afcdba3985ea42317e4e0a20722d2c27ecaae3d9f8849e6516 category: main optional: false - name: terminado @@ -15758,13 +15472,13 @@ package: manager: conda platform: osx-arm64 dependencies: - numpy: '>=1.14.1' - onnx: '>=1.4.1' - python: '>=3.8' - python-flatbuffers: '>=1.12' requests: '' six: '' + python: '>=3.8' + numpy: '>=1.14.1' tensorflow: '>=2.6' + python-flatbuffers: '>=1.12' + onnx: '>=1.4.1' url: https://conda.anaconda.org/conda-forge/noarch/tf2onnx-1.16.1-pyhd8ed1ab_0.conda hash: md5: 3882e49e3d01c69231a7e48d09ca0ec6 @@ -15814,61 +15528,30 @@ package: sha256: 59b8ffdff6ed696cd1a7cc84f3fe0bc43f540ae45032b654fc3c7edd6bce2ddf category: main optional: false -- name: tiledb - version: 2.26.1 - manager: conda - platform: osx-arm64 - dependencies: - __osx: '>=11.0' - aws-crt-cpp: '>=0.28.3,<0.28.4.0a0' - aws-sdk-cpp: '>=1.11.407,<1.11.408.0a0' - azure-core-cpp: '>=1.13.0,<1.13.1.0a0' - azure-identity-cpp: '>=1.8.0,<1.8.1.0a0' - azure-storage-blobs-cpp: '>=12.12.0,<12.12.1.0a0' - azure-storage-common-cpp: '>=12.7.0,<12.7.1.0a0' - bzip2: '>=1.0.8,<2.0a0' - fmt: '>=11.0.2,<12.0a0' - libabseil: '>=20240116.2,<20240117.0a0' - libcurl: '>=8.10.1,<9.0a0' - libcxx: '>=17' - libgoogle-cloud: '>=2.29.0,<2.30.0a0' - libgoogle-cloud-storage: '>=2.29.0,<2.30.0a0' - libwebp-base: '>=1.4.0,<2.0a0' - libzlib: '>=1.3.1,<2.0a0' - lz4-c: '>=1.9.3,<1.10.0a0' - openssl: '>=3.3.2,<4.0a0' - spdlog: '>=1.14.1,<1.15.0a0' - zstd: '>=1.5.6,<1.6.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/tiledb-2.26.1-hb36ea6a_1.conda - hash: - md5: 218080254c44db492a1d2e9261821c99 - sha256: a6525286dc92101b44c966499ec6f26d2854da111d6176760df4232a17e8def9 - category: main - optional: false - name: tinycss2 - version: 1.3.0 + 
version: 1.4.0 manager: conda platform: linux-64 dependencies: python: '>=3.5' webencodings: '>=0.4' - url: https://conda.anaconda.org/conda-forge/noarch/tinycss2-1.3.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/tinycss2-1.4.0-pyhd8ed1ab_0.conda hash: - md5: 8662629d9a05f9cff364e31ca106c1ac - sha256: bc55e5899e66805589c02061e315bfc23ae6cc2f2811f5cc13fb189a5ed9d90f + md5: f1acf5fdefa8300de697982bcb1761c9 + sha256: cad582d6f978276522f84bd209a5ddac824742fe2d452af6acf900f8650a73a2 category: main optional: false - name: tinycss2 - version: 1.3.0 + version: 1.4.0 manager: conda platform: osx-arm64 dependencies: python: '>=3.5' webencodings: '>=0.4' - url: https://conda.anaconda.org/conda-forge/noarch/tinycss2-1.3.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/tinycss2-1.4.0-pyhd8ed1ab_0.conda hash: - md5: 8662629d9a05f9cff364e31ca106c1ac - sha256: bc55e5899e66805589c02061e315bfc23ae6cc2f2811f5cc13fb189a5ed9d90f + md5: f1acf5fdefa8300de697982bcb1761c9 + sha256: cad582d6f978276522f84bd209a5ddac824742fe2d452af6acf900f8650a73a2 category: main optional: false - name: tk @@ -15897,27 +15580,27 @@ package: category: main optional: false - name: tomli - version: 2.0.2 + version: 2.2.1 manager: conda platform: linux-64 dependencies: - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.2-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/tomli-2.2.1-pyhd8ed1ab_1.conda hash: - md5: e977934e00b355ff55ed154904044727 - sha256: 5e742ba856168b606ac3c814d247657b1c33b8042371f1a08000bdc5075bc0cc + md5: ac944244f1fed2eb49bae07193ae8215 + sha256: 18636339a79656962723077df9a56c0ac7b8a864329eb8f847ee3d38495b863e category: main optional: false - name: tomli - version: 2.0.2 + version: 2.2.1 manager: conda platform: osx-arm64 dependencies: - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.2-pyhd8ed1ab_0.conda + python: '>=3.9' + url: 
https://conda.anaconda.org/conda-forge/noarch/tomli-2.2.1-pyhd8ed1ab_1.conda hash: - md5: e977934e00b355ff55ed154904044727 - sha256: 5e742ba856168b606ac3c814d247657b1c33b8042371f1a08000bdc5075bc0cc + md5: ac944244f1fed2eb49bae07193ae8215 + sha256: 18636339a79656962723077df9a56c0ac7b8a864329eb8f847ee3d38495b863e category: main optional: false - name: tomlkit @@ -15925,11 +15608,11 @@ package: manager: conda platform: linux-64 dependencies: - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/tomlkit-0.13.2-pyha770c72_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/tomlkit-0.13.2-pyha770c72_1.conda hash: - md5: 0062a5f3347733f67b0f33ca48cc21dd - sha256: 2ccfe8dafdc1f1af944bca6bdf28fa97b5fa6125d84b8895a4e918a020853c12 + md5: 1d9ab4fc875c52db83f9c9b40af4e2c8 + sha256: 986fae65f5568e95dbf858d08d77a0f9cca031345a98550f1d4b51d36d8811e2 category: main optional: false - name: tomlkit @@ -15937,11 +15620,11 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/tomlkit-0.13.2-pyha770c72_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/tomlkit-0.13.2-pyha770c72_1.conda hash: - md5: 0062a5f3347733f67b0f33ca48cc21dd - sha256: 2ccfe8dafdc1f1af944bca6bdf28fa97b5fa6125d84b8895a4e918a020853c12 + md5: 1d9ab4fc875c52db83f9c9b40af4e2c8 + sha256: 986fae65f5568e95dbf858d08d77a0f9cca031345a98550f1d4b51d36d8811e2 category: main optional: false - name: toolz @@ -15949,11 +15632,11 @@ package: manager: conda platform: linux-64 dependencies: - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/toolz-1.0.0-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/toolz-1.0.0-pyhd8ed1ab_1.conda hash: - md5: 34feccdd4177f2d3d53c73fc44fd9a37 - sha256: 6371cf3cf8292f2abdcc2bf783d6e70203d72f8ff0c1625f55a486711e276c75 + md5: 40d0ed782a8aaa16ef248e68c06c168d + sha256: 
eda38f423c33c2eaeca49ed946a8d3bf466cc3364970e083a65eb2fd85258d87 category: main optional: false - name: toolz @@ -15961,15 +15644,15 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/toolz-1.0.0-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/toolz-1.0.0-pyhd8ed1ab_1.conda hash: - md5: 34feccdd4177f2d3d53c73fc44fd9a37 - sha256: 6371cf3cf8292f2abdcc2bf783d6e70203d72f8ff0c1625f55a486711e276c75 + md5: 40d0ed782a8aaa16ef248e68c06c168d + sha256: eda38f423c33c2eaeca49ed946a8d3bf466cc3364970e083a65eb2fd85258d87 category: main optional: false - name: tornado - version: 6.4.1 + version: 6.4.2 manager: conda platform: linux-64 dependencies: @@ -15977,50 +15660,50 @@ package: libgcc: '>=13' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* - url: https://conda.anaconda.org/conda-forge/linux-64/tornado-6.4.1-py39h8cd3c5a_1.conda + url: https://conda.anaconda.org/conda-forge/linux-64/tornado-6.4.2-py39h8cd3c5a_0.conda hash: - md5: 48d269953fcddbbcde078429d4b27afe - sha256: 42a44ab25b062758a8f166ac4d458a40958101c1b8b33467e4dda0f1bde04752 + md5: ebfd05ae1501660e995a8b6bbe02a391 + sha256: 3c9a90f755ce097ab884bf1ea99ac1033007753a6538cae65747fddc4b74481e category: main optional: false - name: tornado - version: 6.4.1 + version: 6.4.2 manager: conda platform: osx-arm64 dependencies: __osx: '>=11.0' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* - url: https://conda.anaconda.org/conda-forge/osx-arm64/tornado-6.4.1-py39h06df861_1.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/tornado-6.4.2-py39hf3bc14e_0.conda hash: - md5: 8e8fef76e7303a0a8e4430a89c440008 - sha256: bb1fc1a65c29386d064193819a468d43dbfb4fb8009088366a2ad29c9f6a6fdc + md5: 868a36c47fe13a70e940c0e40cea578d + sha256: 29b11ca330baf190a478c12a90cd50040e1fff91c419d2604d9964ae4773de81 category: main optional: false - name: tqdm - version: 4.66.5 + version: 4.67.1 manager: conda platform: linux-64 
dependencies: colorama: '' python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/tqdm-4.66.5-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/tqdm-4.67.1-pyhd8ed1ab_0.conda hash: - md5: c6e94fc2b2ec71ea33fe7c7da259acb4 - sha256: f2384902cef72048b0e9bad5c03d7a843de02ba6bc8618a9ecab6ff81a131312 + md5: 4085c9db273a148e149c03627350e22c + sha256: 5673b7104350a6998cb86cccf1d0058217d86950e8d6c927d8530606028edb1d category: main optional: false - name: tqdm - version: 4.66.5 + version: 4.67.1 manager: conda platform: osx-arm64 dependencies: colorama: '' python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/tqdm-4.66.5-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/tqdm-4.67.1-pyhd8ed1ab_0.conda hash: - md5: c6e94fc2b2ec71ea33fe7c7da259acb4 - sha256: f2384902cef72048b0e9bad5c03d7a843de02ba6bc8618a9ecab6ff81a131312 + md5: 4085c9db273a148e149c03627350e22c + sha256: 5673b7104350a6998cb86cccf1d0058217d86950e8d6c927d8530606028edb1d category: main optional: false - name: traitlets @@ -16028,11 +15711,11 @@ package: manager: conda platform: linux-64 dependencies: - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/traitlets-5.14.3-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/traitlets-5.14.3-pyhd8ed1ab_1.conda hash: - md5: 3df84416a021220d8b5700c613af2dc5 - sha256: 8a64fa0f19022828513667c2c7176cfd125001f3f4b9bc00d33732e627dd2592 + md5: 019a7385be9af33791c989871317e1ed + sha256: f39a5620c6e8e9e98357507262a7869de2ae8cc07da8b7f84e517c9fd6c2b959 category: main optional: false - name: traitlets @@ -16040,63 +15723,65 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/traitlets-5.14.3-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/traitlets-5.14.3-pyhd8ed1ab_1.conda hash: - md5: 3df84416a021220d8b5700c613af2dc5 - sha256: 
8a64fa0f19022828513667c2c7176cfd125001f3f4b9bc00d33732e627dd2592 + md5: 019a7385be9af33791c989871317e1ed + sha256: f39a5620c6e8e9e98357507262a7869de2ae8cc07da8b7f84e517c9fd6c2b959 category: main optional: false - name: typeguard - version: 4.3.0 + version: 4.4.1 manager: conda platform: linux-64 dependencies: importlib-metadata: '>=3.6' - python: '>=3.8' + python: '>=3.9' typing-extensions: '>=4.10.0' - url: https://conda.anaconda.org/conda-forge/noarch/typeguard-4.3.0-pyhd8ed1ab_1.conda + typing_extensions: '>=4.10.0' + url: https://conda.anaconda.org/conda-forge/noarch/typeguard-4.4.1-pyhd8ed1ab_1.conda hash: - md5: 10f49ee1beb82947170c5a5e1a8c0ef3 - sha256: 1bbf56b43b7a4f696e6d4027404865519bc676760129580ba558555dedfdcfa9 + md5: 2de116bbe966ec72715f2423337438dd + sha256: 155881d5cdf4e608fa60bbc41442f9872ae4f13089c6d4e6daaab15738f03b35 category: main optional: false - name: typeguard - version: 4.3.0 + version: 4.4.1 manager: conda platform: osx-arm64 dependencies: + python: '>=3.9' importlib-metadata: '>=3.6' - python: '>=3.8' typing-extensions: '>=4.10.0' - url: https://conda.anaconda.org/conda-forge/noarch/typeguard-4.3.0-pyhd8ed1ab_1.conda + typing_extensions: '>=4.10.0' + url: https://conda.anaconda.org/conda-forge/noarch/typeguard-4.4.1-pyhd8ed1ab_1.conda hash: - md5: 10f49ee1beb82947170c5a5e1a8c0ef3 - sha256: 1bbf56b43b7a4f696e6d4027404865519bc676760129580ba558555dedfdcfa9 + md5: 2de116bbe966ec72715f2423337438dd + sha256: 155881d5cdf4e608fa60bbc41442f9872ae4f13089c6d4e6daaab15738f03b35 category: main optional: false - name: types-python-dateutil - version: 2.9.0.20241003 + version: 2.9.0.20241206 manager: conda platform: linux-64 dependencies: - python: '>=3.6' - url: https://conda.anaconda.org/conda-forge/noarch/types-python-dateutil-2.9.0.20241003-pyhff2d567_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/types-python-dateutil-2.9.0.20241206-pyhd8ed1ab_0.conda hash: - md5: 3d326f8a2aa2d14d51d8c513426b5def - sha256: 
8489af986daebfbcd13d3748ba55431259206e37f184ab42a57e107fecd85e02 + md5: 1dbc4a115e2ad9fb7f9d5b68397f66f9 + sha256: 8b98cd9464837174ab58aaa912fc95d5831879864676650a383994033533b8d1 category: main optional: false - name: types-python-dateutil - version: 2.9.0.20241003 + version: 2.9.0.20241206 manager: conda platform: osx-arm64 dependencies: - python: '>=3.6' - url: https://conda.anaconda.org/conda-forge/noarch/types-python-dateutil-2.9.0.20241003-pyhff2d567_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/types-python-dateutil-2.9.0.20241206-pyhd8ed1ab_0.conda hash: - md5: 3d326f8a2aa2d14d51d8c513426b5def - sha256: 8489af986daebfbcd13d3748ba55431259206e37f184ab42a57e107fecd85e02 + md5: 1dbc4a115e2ad9fb7f9d5b68397f66f9 + sha256: 8b98cd9464837174ab58aaa912fc95d5831879864676650a383994033533b8d1 category: main optional: false - name: typing @@ -16104,11 +15789,11 @@ package: manager: conda platform: linux-64 dependencies: - python: '>=3' - url: https://conda.anaconda.org/conda-forge/noarch/typing-3.10.0.0-pyhd8ed1ab_1.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/typing-3.10.0.0-pyhd8ed1ab_2.conda hash: - md5: 4cea64207b982dcab46c40a4d61d41cf - sha256: 19db4284382933e3a6c142824834b52a1f2118d3b3a01afae815e46f99c6cb32 + md5: 28abeb80aea7eb4914f3a7543a47e248 + sha256: 92b084dfd77571be23ef84ad695bbea169e844821484b6d47d99f04ea4de32e8 category: main optional: false - name: typing @@ -16116,11 +15801,11 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=3' - url: https://conda.anaconda.org/conda-forge/noarch/typing-3.10.0.0-pyhd8ed1ab_1.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/typing-3.10.0.0-pyhd8ed1ab_2.conda hash: - md5: 4cea64207b982dcab46c40a4d61d41cf - sha256: 19db4284382933e3a6c142824834b52a1f2118d3b3a01afae815e46f99c6cb32 + md5: 28abeb80aea7eb4914f3a7543a47e248 + sha256: 92b084dfd77571be23ef84ad695bbea169e844821484b6d47d99f04ea4de32e8 category: main 
optional: false - name: typing-extensions @@ -16129,10 +15814,10 @@ package: platform: linux-64 dependencies: typing_extensions: 4.12.2 - url: https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.12.2-hd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.12.2-hd8ed1ab_1.conda hash: - md5: 52d648bd608f5737b123f510bb5514b5 - sha256: d3b9a8ed6da7c9f9553c5fd8a4fca9c3e0ab712fa5f497859f82337d67533b73 + md5: b6a408c64b78ec7b779a3e5c7a902433 + sha256: c8e9c1c467b5f960b627d7adc1c65fece8e929a3de89967e91ef0f726422fd32 category: main optional: false - name: typing-extensions @@ -16141,10 +15826,10 @@ package: platform: osx-arm64 dependencies: typing_extensions: 4.12.2 - url: https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.12.2-hd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.12.2-hd8ed1ab_1.conda hash: - md5: 52d648bd608f5737b123f510bb5514b5 - sha256: d3b9a8ed6da7c9f9553c5fd8a4fca9c3e0ab712fa5f497859f82337d67533b73 + md5: b6a408c64b78ec7b779a3e5c7a902433 + sha256: c8e9c1c467b5f960b627d7adc1c65fece8e929a3de89967e91ef0f726422fd32 category: main optional: false - name: typing_extensions @@ -16152,11 +15837,11 @@ package: manager: conda platform: linux-64 dependencies: - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.12.2-pyha770c72_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.12.2-pyha770c72_1.conda hash: - md5: ebe6952715e1d5eb567eeebf25250fa7 - sha256: 0fce54f8ec3e59f5ef3bb7641863be4e1bf1279623e5af3d3fa726e8f7628ddb + md5: d17f13df8b65464ca316cbc000a3cb64 + sha256: 337be7af5af8b2817f115b3b68870208b30c31d3439bec07bfb2d8f4823e3568 category: main optional: false - name: typing_extensions @@ -16164,11 +15849,11 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=3.8' - url: 
https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.12.2-pyha770c72_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.12.2-pyha770c72_1.conda hash: - md5: ebe6952715e1d5eb567eeebf25250fa7 - sha256: 0fce54f8ec3e59f5ef3bb7641863be4e1bf1279623e5af3d3fa726e8f7628ddb + md5: d17f13df8b65464ca316cbc000a3cb64 + sha256: 337be7af5af8b2817f115b3b68870208b30c31d3439bec07bfb2d8f4823e3568 category: main optional: false - name: typing_inspect @@ -16177,12 +15862,12 @@ package: platform: linux-64 dependencies: mypy_extensions: '>=0.3.0' - python: '>=3.5' + python: '>=3.9' typing_extensions: '>=3.7.4' - url: https://conda.anaconda.org/conda-forge/noarch/typing_inspect-0.9.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/typing_inspect-0.9.0-pyhd8ed1ab_1.conda hash: - md5: 9e924b76b91908a17e28a19a0ab88687 - sha256: 16e0b825c138e14ebc84623248d91d93a8cff29bb93595cc4aa46ca32f24f1de + md5: fa31df4d4193aabccaf09ce78a187faf + sha256: a3fbdd31b509ff16c7314e8d01c41d9146504df632a360ab30dbc1d3ca79b7c0 category: main optional: false - name: typing_inspect @@ -16190,13 +15875,13 @@ package: manager: conda platform: osx-arm64 dependencies: - mypy_extensions: '>=0.3.0' - python: '>=3.5' + python: '>=3.9' typing_extensions: '>=3.7.4' - url: https://conda.anaconda.org/conda-forge/noarch/typing_inspect-0.9.0-pyhd8ed1ab_0.conda + mypy_extensions: '>=0.3.0' + url: https://conda.anaconda.org/conda-forge/noarch/typing_inspect-0.9.0-pyhd8ed1ab_1.conda hash: - md5: 9e924b76b91908a17e28a19a0ab88687 - sha256: 16e0b825c138e14ebc84623248d91d93a8cff29bb93595cc4aa46ca32f24f1de + md5: fa31df4d4193aabccaf09ce78a187faf + sha256: a3fbdd31b509ff16c7314e8d01c41d9146504df632a360ab30dbc1d3ca79b7c0 category: main optional: false - name: typing_utils @@ -16204,11 +15889,11 @@ package: manager: conda platform: linux-64 dependencies: - python: '>=3.6.1' - url: 
https://conda.anaconda.org/conda-forge/noarch/typing_utils-0.1.0-pyhd8ed1ab_0.tar.bz2 + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/typing_utils-0.1.0-pyhd8ed1ab_1.conda hash: - md5: eb67e3cace64c66233e2d35949e20f92 - sha256: 9e3758b620397f56fb709f796969de436d63b7117897159619b87938e1f78739 + md5: f6d7aa696c67756a650e91e15e88223c + sha256: 3088d5d873411a56bf988eee774559335749aed6f6c28e07bf933256afb9eb6c category: main optional: false - name: typing_utils @@ -16216,11 +15901,11 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=3.6.1' - url: https://conda.anaconda.org/conda-forge/noarch/typing_utils-0.1.0-pyhd8ed1ab_0.tar.bz2 + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/typing_utils-0.1.0-pyhd8ed1ab_1.conda hash: - md5: eb67e3cace64c66233e2d35949e20f92 - sha256: 9e3758b620397f56fb709f796969de436d63b7117897159619b87938e1f78739 + md5: f6d7aa696c67756a650e91e15e88223c + sha256: 3088d5d873411a56bf988eee774559335749aed6f6c28e07bf933256afb9eb6c category: main optional: false - name: tzcode @@ -16236,18 +15921,6 @@ package: sha256: 20c72e7ba106338d51fdc29a717a54fcd52340063232e944dcd1d38fb6348a28 category: main optional: false -- name: tzcode - version: 2024b - manager: conda - platform: osx-arm64 - dependencies: - __osx: '>=11.0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/tzcode-2024b-hd74edd7_0.conda - hash: - md5: 2b216eef43a512307c43cf9ddcbb8a87 - sha256: cd765bc7b8637b90a66771af99ea9eef1d532630643c7e4e05afbffb0e9de00c - category: main - optional: false - name: tzdata version: 2024b manager: conda @@ -16349,13 +16022,14 @@ package: manager: conda platform: linux-64 dependencies: - libgcc-ng: '>=12' + __glibc: '>=2.17,<3.0.a0' + libgcc: '>=13' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* - url: https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-15.1.0-py39hd1e30aa_0.conda + url: https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-15.1.0-py39h8cd3c5a_1.conda hash: - 
md5: 1da984bbb6e765743e13388ba7b7b2c8 - sha256: 90077cbf116112d5112b7beedf896e59c98416d09860ba98c06a770c014829b2 + md5: 6346898044e4387631c614290789a434 + sha256: 8859d41d01025ea2d1f5448d459e99818757fee472ee718f83d5fb78328e775f category: main optional: false - name: unicodedata2 @@ -16363,12 +16037,13 @@ package: manager: conda platform: osx-arm64 dependencies: + __osx: '>=11.0' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* - url: https://conda.anaconda.org/conda-forge/osx-arm64/unicodedata2-15.1.0-py39h0f82c59_0.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/unicodedata2-15.1.0-py39h57695bc_1.conda hash: - md5: 39c745ba9443da902afa7f5a9e9dfcac - sha256: 31d33f967f0db811b25a9315bef727cb12a24c76d8ded8947188cc04535b06b0 + md5: ea1c54a65af341878cc7ab4b6275ff7b + sha256: 2aa67870191089c75a2839741e9a76b39751958fa7ec7e19a4f6b8a655b433d5 category: main optional: false - name: uri-template @@ -16376,11 +16051,11 @@ package: manager: conda platform: linux-64 dependencies: - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/uri-template-1.3.0-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/uri-template-1.3.0-pyhd8ed1ab_1.conda hash: - md5: 0944dc65cb4a9b5b68522c3bb585d41c - sha256: b76904b53721dc88a46352324c79d2b077c2f74a9f7208ad2c4249892669ae94 + md5: e7cb0f5745e4c5035a460248334af7eb + sha256: e0eb6c8daf892b3056f08416a96d68b0a358b7c46b99c8a50481b22631a4dfc0 category: main optional: false - name: uri-template @@ -16388,11 +16063,11 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/uri-template-1.3.0-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/uri-template-1.3.0-pyhd8ed1ab_1.conda hash: - md5: 0944dc65cb4a9b5b68522c3bb585d41c - sha256: b76904b53721dc88a46352324c79d2b077c2f74a9f7208ad2c4249892669ae94 + md5: e7cb0f5745e4c5035a460248334af7eb + sha256: 
e0eb6c8daf892b3056f08416a96d68b0a358b7c46b99c8a50481b22631a4dfc0 category: main optional: false - name: uriparser @@ -16440,9 +16115,9 @@ package: manager: conda platform: osx-arm64 dependencies: + python: '>=3.7' brotli-python: '>=1.0.9' pysocks: '>=1.5.6,<2.0,!=1.5.7' - python: '>=3.7' url: https://conda.anaconda.org/conda-forge/noarch/urllib3-1.26.19-pyhd8ed1ab_0.conda hash: md5: 6bb37c314b3cc1515dcf086ffe01c46e @@ -16517,37 +16192,37 @@ package: category: main optional: false - name: virtualenv - version: 20.26.6 + version: 20.28.0 manager: conda platform: linux-64 dependencies: - distlib: <1,>=0.3.7 - filelock: <4,>=3.12.2 - platformdirs: <5,>=3.9.1 - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.26.6-pyhd8ed1ab_0.conda + distlib: '>=0.3.7,<1' + filelock: '>=3.12.2,<4' + platformdirs: '>=3.9.1,<5' + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.28.0-pyhd8ed1ab_0.conda hash: - md5: a7aa70aa30c47aeb84672621a85a4ef8 - sha256: 23128da47bc0b42b0fef0d41efc10d8ea1fb8232f0846bc4513eeba866f20d13 + md5: 1d601bc1d28b5ce6d112b90f4b9b8ede + sha256: 82776f74e90a296b79415361faa6b10f360755c1fb8e6d59ca68509e6fe7e115 category: main optional: false - name: virtualenv - version: 20.26.6 + version: 20.28.0 manager: conda platform: osx-arm64 dependencies: - distlib: <1,>=0.3.7 - filelock: <4,>=3.12.2 - platformdirs: <5,>=3.9.1 - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.26.6-pyhd8ed1ab_0.conda + python: '>=3.9' + distlib: '>=0.3.7,<1' + filelock: '>=3.12.2,<4' + platformdirs: '>=3.9.1,<5' + url: https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.28.0-pyhd8ed1ab_0.conda hash: - md5: a7aa70aa30c47aeb84672621a85a4ef8 - sha256: 23128da47bc0b42b0fef0d41efc10d8ea1fb8232f0846bc4513eeba866f20d13 + md5: 1d601bc1d28b5ce6d112b90f4b9b8ede + sha256: 82776f74e90a296b79415361faa6b10f360755c1fb8e6d59ca68509e6fe7e115 category: main optional: false - name: wandb - version: 
0.18.1 + version: 0.19.0 manager: conda platform: linux-64 dependencies: @@ -16555,27 +16230,31 @@ package: appdirs: '>=1.4.3' click: '>=7.1,!=8.0.0' docker-pycreds: '>=0.4.0' + eval_type_backport: '' gitpython: '>=1.0.0,!=3.1.29' libgcc: '>=13' platformdirs: '' - protobuf: '>=3.19.0,!=4.21.0,<5' + protobuf: '>=3.19.0,!=4.21.0,!=5.28.0,<6' psutil: '>=5.0.0' + pydantic: '>=2.6,<3' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* pyyaml: '' requests: '>=2.0.0,<3' - sentry-sdk: '>=1.0.0' + sentry-sdk: '>=2.0.0' setproctitle: '' setuptools: '' - typing_extensions: '' - url: https://conda.anaconda.org/conda-forge/linux-64/wandb-0.18.1-py39h43652db_0.conda + six: '' + typing: '>=3.6.4' + typing_extensions: '>=4.4,<5' + url: https://conda.anaconda.org/conda-forge/linux-64/wandb-0.19.0-py39he612d8f_0.conda hash: - md5: 455fa3cae2dce70779b60ba4b44b0eb6 - sha256: fb1dcd00083a92537ceff4b5177573a8e2f9013a175353e6720658fc2b002d22 + md5: c12b85cf94e6db22ca341273d6623ced + sha256: ef7b92d5bb5ab6cf47b90b146eb7f753e3f1d9e2435d0a7665c3e4639a3ba4c5 category: main optional: false - name: wandb - version: 0.18.1 + version: 0.19.0 manager: conda platform: osx-arm64 dependencies: @@ -16583,22 +16262,26 @@ package: appdirs: '>=1.4.3' click: '>=7.1,!=8.0.0' docker-pycreds: '>=0.4.0' + eval_type_backport: '' gitpython: '>=1.0.0,!=3.1.29' platformdirs: '' - protobuf: '>=3.19.0,!=4.21.0,<5' + protobuf: '>=3.19.0,!=4.21.0,!=5.28.0,<6' psutil: '>=5.0.0' + pydantic: '>=2.6,<3' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* pyyaml: '' requests: '>=2.0.0,<3' - sentry-sdk: '>=1.0.0' + sentry-sdk: '>=2.0.0' setproctitle: '' setuptools: '' - typing_extensions: '' - url: https://conda.anaconda.org/conda-forge/osx-arm64/wandb-0.18.1-py39h9c3e640_0.conda + six: '' + typing: '>=3.6.4' + typing_extensions: '>=4.4,<5' + url: https://conda.anaconda.org/conda-forge/osx-arm64/wandb-0.19.0-py39hc40b5db_0.conda hash: - md5: 64c684bac6f58ef144313f9884c904a0 - sha256: 
7ca153df1f8fd2cb0b0b97afe49525013e2060a08b5eaac6a5396fd63b9ab0fc + md5: fed32235a3659ed5f6d8721ab21eaf8f + sha256: 2f1b8cc500f1dc6542182f81b62f9a6058359119a42ccc7035bade0298d3eeb1 category: main optional: false - name: wcwidth @@ -16606,11 +16289,11 @@ package: manager: conda platform: linux-64 dependencies: - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/wcwidth-0.2.13-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/wcwidth-0.2.13-pyhd8ed1ab_1.conda hash: - md5: 68f0738df502a14213624b288c60c9ad - sha256: b6cd2fee7e728e620ec736d8dfee29c6c9e2adbd4e695a31f1d8f834a83e57e3 + md5: b68980f2495d096e71c7fd9d7ccf63e6 + sha256: f21e63e8f7346f9074fd00ca3b079bd3d2fa4d71f1f89d5b6934bf31446dc2a5 category: main optional: false - name: wcwidth @@ -16618,35 +16301,35 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/wcwidth-0.2.13-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/wcwidth-0.2.13-pyhd8ed1ab_1.conda hash: - md5: 68f0738df502a14213624b288c60c9ad - sha256: b6cd2fee7e728e620ec736d8dfee29c6c9e2adbd4e695a31f1d8f834a83e57e3 + md5: b68980f2495d096e71c7fd9d7ccf63e6 + sha256: f21e63e8f7346f9074fd00ca3b079bd3d2fa4d71f1f89d5b6934bf31446dc2a5 category: main optional: false - name: webcolors - version: 24.8.0 + version: 24.11.1 manager: conda platform: linux-64 dependencies: - python: '>=3.5' - url: https://conda.anaconda.org/conda-forge/noarch/webcolors-24.8.0-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/webcolors-24.11.1-pyhd8ed1ab_0.conda hash: - md5: eb48b812eb4fbb9ff238a6651fdbbcae - sha256: ec71f97c332a7d328ae038990b8090cbfa772f82845b5d2233defd167b7cc5ac + md5: b49f7b291e15494aafb0a7d74806f337 + sha256: 08315dc2e61766a39219b2d82685fc25a56b2817acf84d5b390176080eaacf99 category: main optional: false - name: webcolors - version: 24.8.0 + version: 
24.11.1 manager: conda platform: osx-arm64 dependencies: - python: '>=3.5' - url: https://conda.anaconda.org/conda-forge/noarch/webcolors-24.8.0-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/webcolors-24.11.1-pyhd8ed1ab_0.conda hash: - md5: eb48b812eb4fbb9ff238a6651fdbbcae - sha256: ec71f97c332a7d328ae038990b8090cbfa772f82845b5d2233defd167b7cc5ac + md5: b49f7b291e15494aafb0a7d74806f337 + sha256: 08315dc2e61766a39219b2d82685fc25a56b2817acf84d5b390176080eaacf99 category: main optional: false - name: webencodings @@ -16654,11 +16337,11 @@ package: manager: conda platform: linux-64 dependencies: - python: '>=2.6' - url: https://conda.anaconda.org/conda-forge/noarch/webencodings-0.5.1-pyhd8ed1ab_2.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/webencodings-0.5.1-pyhd8ed1ab_3.conda hash: - md5: daf5160ff9cde3a468556965329085b9 - sha256: 2adf9bd5482802837bc8814cbe28d7b2a4cbd2e2c52e381329eaa283b3ed1944 + md5: 2841eb5bfc75ce15e9a0054b98dcd64d + sha256: 19ff205e138bb056a46f9e3839935a2e60bd1cf01c8241a5e172a422fed4f9c6 category: main optional: false - name: webencodings @@ -16666,11 +16349,11 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=2.6' - url: https://conda.anaconda.org/conda-forge/noarch/webencodings-0.5.1-pyhd8ed1ab_2.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/webencodings-0.5.1-pyhd8ed1ab_3.conda hash: - md5: daf5160ff9cde3a468556965329085b9 - sha256: 2adf9bd5482802837bc8814cbe28d7b2a4cbd2e2c52e381329eaa283b3ed1944 + md5: 2841eb5bfc75ce15e9a0054b98dcd64d + sha256: 19ff205e138bb056a46f9e3839935a2e60bd1cf01c8241a5e172a422fed4f9c6 category: main optional: false - name: websocket-client @@ -16678,11 +16361,11 @@ package: manager: conda platform: linux-64 dependencies: - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/websocket-client-1.8.0-pyhd8ed1ab_0.conda + python: '>=3.9' + url: 
https://conda.anaconda.org/conda-forge/noarch/websocket-client-1.8.0-pyhd8ed1ab_1.conda hash: - md5: f372c576b8774922da83cda2b12f9d29 - sha256: 44a5e3b97feef24cd719f7851cca9af9799dc9c17d3e0298d5856baab2d682f5 + md5: 84f8f77f0a9c6ef401ee96611745da8f + sha256: 1dd84764424ffc82030c19ad70607e6f9e3b9cb8e633970766d697185652053e category: main optional: false - name: websocket-client @@ -16690,61 +16373,61 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/websocket-client-1.8.0-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/websocket-client-1.8.0-pyhd8ed1ab_1.conda hash: - md5: f372c576b8774922da83cda2b12f9d29 - sha256: 44a5e3b97feef24cd719f7851cca9af9799dc9c17d3e0298d5856baab2d682f5 + md5: 84f8f77f0a9c6ef401ee96611745da8f + sha256: 1dd84764424ffc82030c19ad70607e6f9e3b9cb8e633970766d697185652053e category: main optional: false - name: werkzeug - version: 3.0.4 + version: 3.1.3 manager: conda platform: linux-64 dependencies: markupsafe: '>=2.1.1' - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/werkzeug-3.0.4-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/werkzeug-3.1.3-pyhd8ed1ab_1.conda hash: - md5: 28753b434f2090f174d0c35ea629cc24 - sha256: 05cc8f76cb7b274ab1c78a1a8d421d1c084421e612829c33ce32af4e06039a92 + md5: 0a9b57c159d56b508613cc39022c1b9e + sha256: cd9a603beae0b237be7d9dfae8ae0b36ad62666ac4bb073969bce7da6f55157c category: main optional: false - name: werkzeug - version: 3.0.4 + version: 3.1.3 manager: conda platform: osx-arm64 dependencies: + python: '>=3.9' markupsafe: '>=2.1.1' - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/werkzeug-3.0.4-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/werkzeug-3.1.3-pyhd8ed1ab_1.conda hash: - md5: 28753b434f2090f174d0c35ea629cc24 - sha256: 
05cc8f76cb7b274ab1c78a1a8d421d1c084421e612829c33ce32af4e06039a92 + md5: 0a9b57c159d56b508613cc39022c1b9e + sha256: cd9a603beae0b237be7d9dfae8ae0b36ad62666ac4bb073969bce7da6f55157c category: main optional: false - name: wheel - version: 0.44.0 + version: 0.45.1 manager: conda platform: linux-64 dependencies: - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/wheel-0.44.0-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/wheel-0.45.1-pyhd8ed1ab_1.conda hash: - md5: d44e3b085abcaef02983c6305b84b584 - sha256: d828764736babb4322b8102094de38074dedfc71f5ff405c9dfee89191c14ebc + md5: 75cb7132eb58d97896e173ef12ac9986 + sha256: 1b34021e815ff89a4d902d879c3bd2040bc1bd6169b32e9427497fa05c55f1ce category: main optional: false - name: wheel - version: 0.44.0 + version: 0.45.1 manager: conda platform: osx-arm64 dependencies: - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/wheel-0.44.0-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/wheel-0.45.1-pyhd8ed1ab_1.conda hash: - md5: d44e3b085abcaef02983c6305b84b584 - sha256: d828764736babb4322b8102094de38074dedfc71f5ff405c9dfee89191c14ebc + md5: 75cb7132eb58d97896e173ef12ac9986 + sha256: 1b34021e815ff89a4d902d879c3bd2040bc1bd6169b32e9427497fa05c55f1ce category: main optional: false - name: widgetsnbextension @@ -16752,11 +16435,11 @@ package: manager: conda platform: linux-64 dependencies: - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/widgetsnbextension-4.0.13-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/widgetsnbextension-4.0.13-pyhd8ed1ab_1.conda hash: - md5: 6372cd99502721bd7499f8d16b56268d - sha256: d155adc10f8c96f76d4468dbe37b33b4334dadf5cd4a95841aa009ca9bced5fa + md5: 237db148cc37a466e4222d589029b53e + sha256: a750202ae2a31d8e5ee5a5c127fcc7fa783cd0fbedbc0bf1ab549a109881fa9f category: main optional: false - name: widgetsnbextension @@ 
-16764,11 +16447,11 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/widgetsnbextension-4.0.13-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/widgetsnbextension-4.0.13-pyhd8ed1ab_1.conda hash: - md5: 6372cd99502721bd7499f8d16b56268d - sha256: d155adc10f8c96f76d4468dbe37b33b4334dadf5cd4a95841aa009ca9bced5fa + md5: 237db148cc37a466e4222d589029b53e + sha256: a750202ae2a31d8e5ee5a5c127fcc7fa783cd0fbedbc0bf1ab549a109881fa9f category: main optional: false - name: wrapt @@ -16786,17 +16469,29 @@ package: category: main optional: false - name: wrapt - version: 1.16.0 + version: 1.17.0 manager: conda platform: osx-arm64 dependencies: __osx: '>=11.0' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* - url: https://conda.anaconda.org/conda-forge/osx-arm64/wrapt-1.16.0-py39h06df861_1.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/wrapt-1.17.0-py39hf3bc14e_0.conda + hash: + md5: 267feb90eff089879ef180c1edc1db29 + sha256: 2710edaa54bdf9face470ecc80542330bb352b073b4cf99cf6a2300ef4419750 + category: main + optional: false +- name: x265 + version: '3.5' + manager: conda + platform: osx-arm64 + dependencies: + libcxx: '>=12.0.1' + url: https://conda.anaconda.org/conda-forge/osx-arm64/x265-3.5-hbc6ce65_3.tar.bz2 hash: - md5: 5aff63126e8cc92563f3026e7e46132c - sha256: 58b41539406c9f6c90536a128cb1b548fd45311c072f25d7e3b778d4e96b8dc0 + md5: b1f7f2780feffe310b068c021e8ff9b2 + sha256: 2fed6987dba7dee07bd9adc1a6f8e6c699efb851431bcb6ebad7de196e87841d category: main optional: false - name: xerces-c @@ -17032,10 +16727,10 @@ package: platform: linux-64 dependencies: python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/xyzservices-2024.9.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/xyzservices-2024.9.0-pyhd8ed1ab_1.conda hash: - md5: 156c91e778c1d4d57b709f8c5333fd06 - sha256: 
2dd2825b5a246461a95a0affaf7e1d459f7cc0ae68ad2dd8aab360c2e5859488 + md5: c79cea50b258f652010cb6c8d81591b5 + sha256: 5f8757092fc985d7586f2659505ec28757c05fd65d8d6ae549a5cec7e3376977 category: main optional: false - name: xyzservices @@ -17044,33 +16739,97 @@ package: platform: osx-arm64 dependencies: python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/xyzservices-2024.9.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/xyzservices-2024.9.0-pyhd8ed1ab_1.conda hash: - md5: 156c91e778c1d4d57b709f8c5333fd06 - sha256: 2dd2825b5a246461a95a0affaf7e1d459f7cc0ae68ad2dd8aab360c2e5859488 + md5: c79cea50b258f652010cb6c8d81591b5 + sha256: 5f8757092fc985d7586f2659505ec28757c05fd65d8d6ae549a5cec7e3376977 category: main optional: false - name: xz - version: 5.2.6 + version: 5.6.3 manager: conda platform: linux-64 dependencies: - libgcc-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2 + __glibc: '>=2.17,<3.0.a0' + libgcc: '>=13' + liblzma: 5.6.3 + liblzma-devel: 5.6.3 + xz-gpl-tools: 5.6.3 + xz-tools: 5.6.3 + url: https://conda.anaconda.org/conda-forge/linux-64/xz-5.6.3-hbcc6ac9_1.conda hash: - md5: 2161070d867d1b1204ea749c8eec4ef0 - sha256: 03a6d28ded42af8a347345f82f3eebdd6807a08526d47899a42d62d319609162 + md5: 62aae173382a8aae284726353c6a6a24 + sha256: 9cef529dcff25222427c9d90b9fc376888a59e138794b4336bbcd3331a5eea22 category: main optional: false - name: xz - version: 5.2.6 + version: 5.6.3 manager: conda platform: osx-arm64 - dependencies: {} - url: https://conda.anaconda.org/conda-forge/osx-arm64/xz-5.2.6-h57fd34a_0.tar.bz2 + dependencies: + __osx: '>=11.0' + liblzma: 5.6.3 + liblzma-devel: 5.6.3 + xz-gpl-tools: 5.6.3 + xz-tools: 5.6.3 + url: https://conda.anaconda.org/conda-forge/osx-arm64/xz-5.6.3-h9a6d368_1.conda + hash: + md5: 1d79c34d99f1e839a06b4631df091b64 + sha256: 84f9405312032638a7c6249573c8f50423c314c8a4d149b34b720caecc0dc83c + category: main + optional: false +- name: xz-gpl-tools + 
version: 5.6.3 + manager: conda + platform: linux-64 + dependencies: + __glibc: '>=2.17,<3.0.a0' + libgcc: '>=13' + liblzma: 5.6.3 + url: https://conda.anaconda.org/conda-forge/linux-64/xz-gpl-tools-5.6.3-hbcc6ac9_1.conda + hash: + md5: f529917bab7862aaad6867bf2ea47a99 + sha256: 4e104b7c75c2f26a96032a1c6cda51430da1dea318c74f9e3568902b2f5030e1 + category: main + optional: false +- name: xz-gpl-tools + version: 5.6.3 + manager: conda + platform: osx-arm64 + dependencies: + __osx: '>=11.0' + liblzma: 5.6.3 + url: https://conda.anaconda.org/conda-forge/osx-arm64/xz-gpl-tools-5.6.3-h9a6d368_1.conda + hash: + md5: cf05cc17aa7eb2ff843ca5c45d63a324 + sha256: 98f71ea5d19c9cf4daed3b26a5102862baf8c63210f039e305f283fe399554b0 + category: main + optional: false +- name: xz-tools + version: 5.6.3 + manager: conda + platform: linux-64 + dependencies: + __glibc: '>=2.17,<3.0.a0' + libgcc: '>=13' + liblzma: 5.6.3 + url: https://conda.anaconda.org/conda-forge/linux-64/xz-tools-5.6.3-hb9d3cd8_1.conda + hash: + md5: de3f31a6eed01bc2b8c7dcad07ad9034 + sha256: 6e80f838096345c35e8755b827814c083dd0274594006d6f76bff71bc969c3b8 + category: main + optional: false +- name: xz-tools + version: 5.6.3 + manager: conda + platform: osx-arm64 + dependencies: + __osx: '>=11.0' + liblzma: 5.6.3 + url: https://conda.anaconda.org/conda-forge/osx-arm64/xz-tools-5.6.3-h39f12f2_1.conda hash: - md5: 39c6b54e94014701dd157f4f576ed211 - sha256: 59d78af0c3e071021cfe82dc40134c19dab8cdf804324b62940f5c8cd71803ec + md5: 0fea5aff7b3a33856288c26326d937f7 + sha256: b785955dd3d5eb1b00e3f1b1fbc3a9bf14276b2c0a950d0735a503d9abea7b5d category: main optional: false - name: yaml @@ -17097,7 +16856,7 @@ package: category: main optional: false - name: yarl - version: 1.15.2 + version: 1.18.3 manager: conda platform: linux-64 dependencies: @@ -17105,30 +16864,30 @@ package: idna: '>=2.0' libgcc: '>=13' multidict: '>=4.0' - propcache: '>=0.2.0' + propcache: '>=0.2.1' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* - url: 
https://conda.anaconda.org/conda-forge/linux-64/yarl-1.15.2-py39h8cd3c5a_0.conda + url: https://conda.anaconda.org/conda-forge/linux-64/yarl-1.18.3-py39h8cd3c5a_0.conda hash: - md5: fe2cac0e053f9155af479676f58beeb5 - sha256: da8fe71f583c052fa262f807de233d62cda2cee7fa43b65b749d253715a4ade2 + md5: b3dedbdf10aa0c2f7f33894b0b195421 + sha256: 0289e04cba3c013ecc6d8ded75f876105a3c0c006b3385b40d20093e04e28347 category: main optional: false - name: yarl - version: 1.15.2 + version: 1.18.3 manager: conda platform: osx-arm64 dependencies: __osx: '>=11.0' idna: '>=2.0' multidict: '>=4.0' - propcache: '>=0.2.0' + propcache: '>=0.2.1' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* - url: https://conda.anaconda.org/conda-forge/osx-arm64/yarl-1.15.2-py39h57695bc_0.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/yarl-1.18.3-py39hf3bc14e_0.conda hash: - md5: c426809986f74e266b6cae7c87d8a206 - sha256: 5c7f619a5a86ebb1239d162f9679d016fabb56cbd45d1ca46cd167f6f82b82bf + md5: 555e291881fbb222c1456cf2f58c14a7 + sha256: 3d23bfb073567fb1d88c5483d7b8c05a31f6221dd1e2ec9e4892aa354ad4817b category: main optional: false - name: zeromq @@ -17141,10 +16900,10 @@ package: libgcc: '>=13' libsodium: '>=1.0.20,<1.0.21.0a0' libstdcxx: '>=13' - url: https://conda.anaconda.org/conda-forge/linux-64/zeromq-4.3.5-h3b0a872_6.conda + url: https://conda.anaconda.org/conda-forge/linux-64/zeromq-4.3.5-h3b0a872_7.conda hash: - md5: 113506c8d2d558e733f5c38f6bf08c50 - sha256: e67288b1c98a31ee58a5c07bdd873dbe08e75f752e1ad605d5e8c0697339903e + md5: 3947a35e916fcc6b9825449affbf4214 + sha256: a4dc72c96848f764bb5a5176aa93dd1e9b9e52804137b99daeebba277b31ea10 category: main optional: false - name: zeromq @@ -17154,12 +16913,12 @@ package: dependencies: __osx: '>=11.0' krb5: '>=1.21.3,<1.22.0a0' - libcxx: '>=17' + libcxx: '>=18' libsodium: '>=1.0.20,<1.0.21.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/zeromq-4.3.5-h9f5b81c_6.conda + url: 
https://conda.anaconda.org/conda-forge/osx-arm64/zeromq-4.3.5-hc1bb282_7.conda hash: - md5: 84121ef1717cdfbecedeae70142706cc - sha256: 5c5061c976141eccbbb2aec21483ddd10fd1df4fd9bcf638e3fd57b2bd85721f + md5: f7e6b65943cb73bce0143737fded08f1 + sha256: 9e585569fe2e7d3bea71972cd4b9f06b1a7ab8fa7c5139f92a31cbceecf25a8a category: main optional: false - name: zict @@ -17167,11 +16926,11 @@ package: manager: conda platform: linux-64 dependencies: - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_1.conda hash: - md5: cf30c2c15b82aacb07f9c09e28ff2275 - sha256: 3d65c081514569ab3642ba7e6c2a6b4615778b596db6b1c82ee30a2d912539e5 + md5: e52c2ef711ccf31bb7f70ca87d144b9e + sha256: 5488542dceeb9f2874e726646548ecc5608060934d6f9ceaa7c6a48c61f9cc8d category: main optional: false - name: zict @@ -17179,35 +16938,35 @@ package: manager: conda platform: osx-arm64 dependencies: - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_1.conda hash: - md5: cf30c2c15b82aacb07f9c09e28ff2275 - sha256: 3d65c081514569ab3642ba7e6c2a6b4615778b596db6b1c82ee30a2d912539e5 + md5: e52c2ef711ccf31bb7f70ca87d144b9e + sha256: 5488542dceeb9f2874e726646548ecc5608060934d6f9ceaa7c6a48c61f9cc8d category: main optional: false - name: zipp - version: 3.20.2 + version: 3.21.0 manager: conda platform: linux-64 dependencies: - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/zipp-3.20.2-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/zipp-3.21.0-pyhd8ed1ab_1.conda hash: - md5: 4daaed111c05672ae669f7036ee5bba3 - sha256: 1e84fcfa41e0afdd87ff41e6fbb719c96a0e098c1f79be342293ab0bd8dea322 + md5: 0c3cc595284c5e8f0f9900a9b228a332 + sha256: 
567c04f124525c97a096b65769834b7acb047db24b15a56888a322bf3966c3e1 category: main optional: false - name: zipp - version: 3.20.2 + version: 3.21.0 manager: conda platform: osx-arm64 dependencies: - python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/zipp-3.20.2-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/zipp-3.21.0-pyhd8ed1ab_1.conda hash: - md5: 4daaed111c05672ae669f7036ee5bba3 - sha256: 1e84fcfa41e0afdd87ff41e6fbb719c96a0e098c1f79be342293ab0bd8dea322 + md5: 0c3cc595284c5e8f0f9900a9b228a332 + sha256: 567c04f124525c97a096b65769834b7acb047db24b15a56888a322bf3966c3e1 category: main optional: false - name: zlib @@ -17435,29 +17194,29 @@ package: category: main optional: false - name: azure-core - version: 1.31.0 + version: 1.32.0 manager: pip platform: linux-64 dependencies: requests: '>=2.21.0' six: '>=1.11.0' typing-extensions: '>=4.6.0' - url: https://files.pythonhosted.org/packages/01/8e/fcb6a77d3029d2a7356f38dbc77cf7daa113b81ddab76b5593d23321e44c/azure_core-1.31.0-py3-none-any.whl + url: https://files.pythonhosted.org/packages/39/83/325bf5e02504dbd8b4faa98197a44cdf8a325ef259b48326a2b6f17f8383/azure_core-1.32.0-py3-none-any.whl hash: - sha256: 22954de3777e0250029360ef31d80448ef1be13b80a459bff80ba7073379e2cd + sha256: eac191a0efb23bfa83fddf321b27b122b4ec847befa3091fa736a5c32c50d7b4 category: main optional: false - name: azure-core - version: 1.31.0 + version: 1.32.0 manager: pip platform: osx-arm64 dependencies: requests: '>=2.21.0' six: '>=1.11.0' typing-extensions: '>=4.6.0' - url: https://files.pythonhosted.org/packages/01/8e/fcb6a77d3029d2a7356f38dbc77cf7daa113b81ddab76b5593d23321e44c/azure_core-1.31.0-py3-none-any.whl + url: https://files.pythonhosted.org/packages/39/83/325bf5e02504dbd8b4faa98197a44cdf8a325ef259b48326a2b6f17f8383/azure_core-1.32.0-py3-none-any.whl hash: - sha256: 22954de3777e0250029360ef31d80448ef1be13b80a459bff80ba7073379e2cd + sha256: 
eac191a0efb23bfa83fddf321b27b122b4ec847befa3091fa736a5c32c50d7b4 category: main optional: false - name: azure-datalake-store @@ -17517,7 +17276,7 @@ package: category: main optional: false - name: azure-storage-blob - version: 12.23.1 + version: 12.24.0 manager: pip platform: linux-64 dependencies: @@ -17525,13 +17284,13 @@ package: cryptography: '>=2.1.4' typing-extensions: '>=4.6.0' isodate: '>=0.6.1' - url: https://files.pythonhosted.org/packages/df/bf/f19dd2261dd6193aa53375fcd58929d613e45d14bcdb778567d1fd5e2d6e/azure_storage_blob-12.23.1-py3-none-any.whl + url: https://files.pythonhosted.org/packages/e2/f8/ef0f76f8c424bedd20c685409836ddfb42ac76fd8a0f21c3c3659cf7207d/azure_storage_blob-12.24.0-py3-none-any.whl hash: - sha256: 1c2238aa841d1545f42714a5017c010366137a44a0605da2d45f770174bfc6b4 + sha256: 4f0bb4592ea79a2d986063696514c781c9e62be240f09f6397986e01755bc071 category: main optional: false - name: azure-storage-blob - version: 12.23.1 + version: 12.24.0 manager: pip platform: osx-arm64 dependencies: @@ -17539,9 +17298,9 @@ package: cryptography: '>=2.1.4' typing-extensions: '>=4.6.0' isodate: '>=0.6.1' - url: https://files.pythonhosted.org/packages/df/bf/f19dd2261dd6193aa53375fcd58929d613e45d14bcdb778567d1fd5e2d6e/azure_storage_blob-12.23.1-py3-none-any.whl + url: https://files.pythonhosted.org/packages/e2/f8/ef0f76f8c424bedd20c685409836ddfb42ac76fd8a0f21c3c3659cf7207d/azure_storage_blob-12.24.0-py3-none-any.whl hash: - sha256: 1c2238aa841d1545f42714a5017c010366137a44a0605da2d45f770174bfc6b4 + sha256: 4f0bb4592ea79a2d986063696514c781c9e62be240f09f6397986e01755bc071 category: main optional: false - name: backports.tarfile @@ -17617,27 +17376,27 @@ package: category: main optional: false - name: croniter - version: 3.0.3 + version: 5.0.1 manager: pip platform: linux-64 dependencies: python-dateutil: '*' pytz: '>2021.1' - url: 
https://files.pythonhosted.org/packages/93/6a/f2f68e0f9cf702b6d055ab53cab0d8c100f04e86228ca500a8ca9de94b58/croniter-3.0.3-py2.py3-none-any.whl + url: https://files.pythonhosted.org/packages/3c/68/34c3d74d2af6ea98ff8a0b50d149cff26e88a3f09817121d1186e9185e97/croniter-5.0.1-py2.py3-none-any.whl hash: - sha256: b3bd11f270dc54ccd1f2397b813436015a86d30ffc5a7a9438eec1ed916f2101 + sha256: eb28439742291f6c10b181df1a5ecf421208b1fc62ef44501daec1780a0b09e9 category: main optional: false - name: croniter - version: 3.0.3 + version: 5.0.1 manager: pip platform: osx-arm64 dependencies: python-dateutil: '*' pytz: '>2021.1' - url: https://files.pythonhosted.org/packages/93/6a/f2f68e0f9cf702b6d055ab53cab0d8c100f04e86228ca500a8ca9de94b58/croniter-3.0.3-py2.py3-none-any.whl + url: https://files.pythonhosted.org/packages/3c/68/34c3d74d2af6ea98ff8a0b50d149cff26e88a3f09817121d1186e9185e97/croniter-5.0.1-py2.py3-none-any.whl hash: - sha256: b3bd11f270dc54ccd1f2397b813436015a86d30ffc5a7a9438eec1ed916f2101 + sha256: eb28439742291f6c10b181df1a5ecf421208b1fc62ef44501daec1780a0b09e9 category: main optional: false - name: dacite @@ -17809,53 +17568,53 @@ package: category: main optional: false - name: duckdb - version: 1.1.2 + version: 1.1.3 manager: pip platform: linux-64 dependencies: {} - url: https://files.pythonhosted.org/packages/48/9a/1029a2ec5b6755341372834675dd511c4f49e634d5ef312fa8e671c5b3f9/duckdb-1.1.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + url: https://files.pythonhosted.org/packages/e3/9e/e3995491d4c3bc6b3e3e0f3bad55902225c09f571e296c1eb093f33c5c75/duckdb-1.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl hash: - sha256: 7ca967c5a57b1d0cb0fd5e539ab24110e5a59dcbedd365bb2dc80533d6e44a8d + sha256: 80158f4c7c7ada46245837d5b6869a336bbaa28436fbb0537663fa324a2750cd category: main optional: false - name: duckdb - version: 1.1.2 + version: 1.1.3 manager: pip platform: osx-arm64 dependencies: {} - url: 
https://files.pythonhosted.org/packages/75/6a/ef6cf334680543f1d9ead39fbea8950bf4cd91c4612dd32c33ac4c82fe55/duckdb-1.1.2-cp39-cp39-macosx_12_0_arm64.whl + url: https://files.pythonhosted.org/packages/e5/c4/8a0f629aadfa8e09574e70ceb2d4fa2e81dc36b67d353806e14474983403/duckdb-1.1.3-cp39-cp39-macosx_12_0_arm64.whl hash: - sha256: f87edaf20001530e63a4f7bda13b55dc3152d7171226915f2bf34e0813c8759e + sha256: 09c68522c30fc38fc972b8a75e9201616b96ae6da3444585f14cf0d116008c95 category: main optional: false - name: flyteidl - version: 1.13.4 + version: 1.14.0 manager: pip platform: linux-64 dependencies: googleapis-common-protos: '*' protoc-gen-openapiv2: '*' protobuf: '>=4.21.1' - url: https://files.pythonhosted.org/packages/04/c1/a5070375f050b49aa119ae9dd85aded7c6be42633a878cc57e1026ebe655/flyteidl-1.13.4-py3-none-any.whl + url: https://files.pythonhosted.org/packages/f3/a1/40b8b4e31983f6449764625c8ea3e3752ac2588ff599d67297697e064544/flyteidl-1.14.0-py3-none-any.whl hash: - sha256: cdd254b274e4d6b0817cd306d8d81dd8ba1d9abcc6f48a6452054d48d0b8fdf7 + sha256: 03d7559fba51bc123e969074a50e17f3ad110a328cd9178c7b7e65ac23a6c956 category: main optional: false - name: flyteidl - version: 1.13.4 + version: 1.14.0 manager: pip platform: osx-arm64 dependencies: googleapis-common-protos: '*' protoc-gen-openapiv2: '*' protobuf: '>=4.21.1' - url: https://files.pythonhosted.org/packages/04/c1/a5070375f050b49aa119ae9dd85aded7c6be42633a878cc57e1026ebe655/flyteidl-1.13.4-py3-none-any.whl + url: https://files.pythonhosted.org/packages/f3/a1/40b8b4e31983f6449764625c8ea3e3752ac2588ff599d67297697e064544/flyteidl-1.14.0-py3-none-any.whl hash: - sha256: cdd254b274e4d6b0817cd306d8d81dd8ba1d9abcc6f48a6452054d48d0b8fdf7 + sha256: 03d7559fba51bc123e969074a50e17f3ad110a328cd9178c7b7e65ac23a6c956 category: main optional: false - name: flytekit - version: 1.14.0b1 + version: 1.15.0a1 manager: pip platform: linux-64 dependencies: @@ -17867,12 +17626,12 @@ package: diskcache: '>=5.2.1' docker: '>=4.0.0' 
docstring-parser: '>=0.9.0' - flyteidl: '>=1.13.4' + flyteidl: '>=1.13.9' fsspec: '>=2023.3.0' gcsfs: '>=2023.3.0' googleapis-common-protos: '>=1.57' - grpcio: '*' - grpcio-status: '*' + grpcio: <1.68.0 || >1.68.0,<1.68.1 || >1.68.1 + grpcio-status: <1.68.0 || >1.68.0,<1.68.1 || >1.68.1 importlib-metadata: '*' joblib: '*' jsonlines: '*' @@ -17881,7 +17640,7 @@ package: markdown-it-py: '*' marshmallow-enum: '*' marshmallow-jsonschema: '>=0.12.0' - mashumaro: '>=3.11' + mashumaro: '>=3.15' msgpack: '>=1.1.0' protobuf: '!=4.25.0' pygments: '*' @@ -17895,13 +17654,13 @@ package: statsd: '>=3.0.0' typing-extensions: '*' urllib3: '>=1.22' - url: https://files.pythonhosted.org/packages/33/01/50bacac67fad78c133fd37ba3734b1409f295fbb1730300bab65b7565108/flytekit-1.14.0b1-py3-none-any.whl + url: https://files.pythonhosted.org/packages/96/89/7c7952ed144e9ff2e018dd0b83b30143de2b884c69281cb7349e7163edd8/flytekit-1.15.0a1-py3-none-any.whl hash: - sha256: bdb0299e309f15f66bcde33e7a32c193473b120132754a2bfb95baa3d04c3ab1 + sha256: aabd21d397ed1cd8abd61466488192fcd88278ef83f4c00cf29afa1bf99b3eb2 category: main optional: false - name: flytekit - version: 1.14.0b1 + version: 1.15.0a1 manager: pip platform: osx-arm64 dependencies: @@ -17913,12 +17672,12 @@ package: diskcache: '>=5.2.1' docker: '>=4.0.0' docstring-parser: '>=0.9.0' - flyteidl: '>=1.13.4' + flyteidl: '>=1.13.9' fsspec: '>=2023.3.0' gcsfs: '>=2023.3.0' googleapis-common-protos: '>=1.57' - grpcio: '*' - grpcio-status: '*' + grpcio: <1.68.0 || >1.68.0,<1.68.1 || >1.68.1 + grpcio-status: <1.68.0 || >1.68.0,<1.68.1 || >1.68.1 importlib-metadata: '*' joblib: '*' jsonlines: '*' @@ -17927,7 +17686,7 @@ package: markdown-it-py: '*' marshmallow-enum: '*' marshmallow-jsonschema: '>=0.12.0' - mashumaro: '>=3.11' + mashumaro: '>=3.15' msgpack: '>=1.1.0' protobuf: '!=4.25.0' pygments: '*' @@ -17941,35 +17700,35 @@ package: statsd: '>=3.0.0' typing-extensions: '*' urllib3: '>=1.22' - url: 
https://files.pythonhosted.org/packages/33/01/50bacac67fad78c133fd37ba3734b1409f295fbb1730300bab65b7565108/flytekit-1.14.0b1-py3-none-any.whl + url: https://files.pythonhosted.org/packages/96/89/7c7952ed144e9ff2e018dd0b83b30143de2b884c69281cb7349e7163edd8/flytekit-1.15.0a1-py3-none-any.whl hash: - sha256: bdb0299e309f15f66bcde33e7a32c193473b120132754a2bfb95baa3d04c3ab1 + sha256: aabd21d397ed1cd8abd61466488192fcd88278ef83f4c00cf29afa1bf99b3eb2 category: main optional: false - name: flytekitplugins-deck-standard - version: 1.13.8 + version: 1.14.0 manager: pip platform: linux-64 dependencies: flytekit: '*' - url: https://files.pythonhosted.org/packages/d8/70/01c7ca9a4d9fbb6cae8fdb24d06cef9193a4077f43b0df2993203fd62d4d/flytekitplugins_deck_standard-1.13.8-py3-none-any.whl + url: https://files.pythonhosted.org/packages/c8/ca/f4a2dc3a9336c3f1f37e7a481ac8f12f0f5d43327b01505e20086a1cb8ab/flytekitplugins_deck_standard-1.14.0-py3-none-any.whl hash: - sha256: a0369080b47ac14d0c54e441995f10f739a4ee7b3d6cd2685a3cc9911642ab15 + sha256: 9ce5da86106a982292814ade38d21be0215da093f02a4bf370b2746eb8862df4 category: main optional: false - name: flytekitplugins-deck-standard - version: 1.13.8 + version: 1.14.0 manager: pip platform: osx-arm64 dependencies: flytekit: '*' - url: https://files.pythonhosted.org/packages/d8/70/01c7ca9a4d9fbb6cae8fdb24d06cef9193a4077f43b0df2993203fd62d4d/flytekitplugins_deck_standard-1.13.8-py3-none-any.whl + url: https://files.pythonhosted.org/packages/c8/ca/f4a2dc3a9336c3f1f37e7a481ac8f12f0f5d43327b01505e20086a1cb8ab/flytekitplugins_deck_standard-1.14.0-py3-none-any.whl hash: - sha256: a0369080b47ac14d0c54e441995f10f739a4ee7b3d6cd2685a3cc9911642ab15 + sha256: 9ce5da86106a982292814ade38d21be0215da093f02a4bf370b2746eb8862df4 category: main optional: false - name: flytekitplugins-kfpytorch - version: 1.13.8 + version: 1.14.0 manager: pip platform: linux-64 dependencies: @@ -17977,13 +17736,13 @@ package: flyteidl: '>=1.5.1' flytekit: '>=1.6.1' kubernetes: '*' 
- url: https://files.pythonhosted.org/packages/a5/ef/1ef72a88b1f87a782ce1edc54570951c9992f0136bfcfba65d701df12235/flytekitplugins_kfpytorch-1.13.8-py3-none-any.whl + url: https://files.pythonhosted.org/packages/e3/d0/0fa5483ffdd7a6b37db86e2dbecc4140e3cd1718ef84390c186c62d8fd57/flytekitplugins_kfpytorch-1.14.0-py3-none-any.whl hash: - sha256: 42e5a1fa42fc5c833b0881ff76508a79359cbdbd0a0a3e50c4fc5f2ebac8bf9f + sha256: d265f46b054d144203470852a11a21db8d887bea714a58dc4b5624943c0fa624 category: main optional: false - name: flytekitplugins-kfpytorch - version: 1.13.8 + version: 1.14.0 manager: pip platform: osx-arm64 dependencies: @@ -17991,69 +17750,69 @@ package: flyteidl: '>=1.5.1' flytekit: '>=1.6.1' kubernetes: '*' - url: https://files.pythonhosted.org/packages/a5/ef/1ef72a88b1f87a782ce1edc54570951c9992f0136bfcfba65d701df12235/flytekitplugins_kfpytorch-1.13.8-py3-none-any.whl + url: https://files.pythonhosted.org/packages/e3/d0/0fa5483ffdd7a6b37db86e2dbecc4140e3cd1718ef84390c186c62d8fd57/flytekitplugins_kfpytorch-1.14.0-py3-none-any.whl hash: - sha256: 42e5a1fa42fc5c833b0881ff76508a79359cbdbd0a0a3e50c4fc5f2ebac8bf9f + sha256: d265f46b054d144203470852a11a21db8d887bea714a58dc4b5624943c0fa624 category: main optional: false - name: flytekitplugins-sqlalchemy - version: 1.13.8 + version: 1.14.0 manager: pip platform: linux-64 dependencies: flytekit: '>=1.3.0b2,<2.0.0' sqlalchemy: '>=1.4.7' pandas: '*' - url: https://files.pythonhosted.org/packages/3e/0e/b557a3809e678fe1c1d76237f491b2dcdcaf81bc0348dcfe05260460df5d/flytekitplugins_sqlalchemy-1.13.8-py3-none-any.whl + url: https://files.pythonhosted.org/packages/a0/a7/863d30c3c80d28b620a3266decba183b3dfa244480974c27f44be7e78234/flytekitplugins_sqlalchemy-1.14.0-py3-none-any.whl hash: - sha256: 1d79b9cafbcb93cb9d6cc9e8e0ca6e1e4a1c46a33940586e88984aa84cdbec01 + sha256: b27eed554648c9d25863ab741642cf2f0174f2a8097b82520e8bea919b76b928 category: main optional: false - name: flytekitplugins-sqlalchemy - version: 1.13.8 + version: 
1.14.0 manager: pip platform: osx-arm64 dependencies: flytekit: '>=1.3.0b2,<2.0.0' sqlalchemy: '>=1.4.7' pandas: '*' - url: https://files.pythonhosted.org/packages/3e/0e/b557a3809e678fe1c1d76237f491b2dcdcaf81bc0348dcfe05260460df5d/flytekitplugins_sqlalchemy-1.13.8-py3-none-any.whl + url: https://files.pythonhosted.org/packages/a0/a7/863d30c3c80d28b620a3266decba183b3dfa244480974c27f44be7e78234/flytekitplugins_sqlalchemy-1.14.0-py3-none-any.whl hash: - sha256: 1d79b9cafbcb93cb9d6cc9e8e0ca6e1e4a1c46a33940586e88984aa84cdbec01 + sha256: b27eed554648c9d25863ab741642cf2f0174f2a8097b82520e8bea919b76b928 category: main optional: false - name: gcsfs - version: 2024.9.0.post1 + version: 2024.10.0 manager: pip platform: linux-64 dependencies: aiohttp: <4.0.0a0 || >4.0.0a0,<4.0.0a1 || >4.0.0a1 decorator: '>4.1.2' - fsspec: 2024.9.0 + fsspec: 2024.10.0 google-auth: '>=1.2' google-auth-oauthlib: '*' google-cloud-storage: '*' requests: '*' - url: https://files.pythonhosted.org/packages/72/1d/37ab60da39d3b782b0cf7770ba8c9071be8bb2aee8bc01b6d350c28b51b3/gcsfs-2024.9.0.post1-py2.py3-none-any.whl + url: https://files.pythonhosted.org/packages/dc/96/d60e835fb7d10166c77aef0c1fa30e634153c03a0f486786977b95f88fde/gcsfs-2024.10.0-py2.py3-none-any.whl hash: - sha256: f3ab9d3bedc45da8cf40baed7c3a1e1694e8f599160d9138d78f0ef25e4a3ca1 + sha256: bb2d23547e61203ea2dda5fa6c4b91a0c34b74ebe8bb6ab1926f6c33381bceb2 category: main optional: false - name: gcsfs - version: 2024.9.0.post1 + version: 2024.10.0 manager: pip platform: osx-arm64 dependencies: aiohttp: <4.0.0a0 || >4.0.0a0,<4.0.0a1 || >4.0.0a1 decorator: '>4.1.2' - fsspec: 2024.9.0 + fsspec: 2024.10.0 google-auth: '>=1.2' google-auth-oauthlib: '*' google-cloud-storage: '*' requests: '*' - url: https://files.pythonhosted.org/packages/72/1d/37ab60da39d3b782b0cf7770ba8c9071be8bb2aee8bc01b6d350c28b51b3/gcsfs-2024.9.0.post1-py2.py3-none-any.whl + url: 
https://files.pythonhosted.org/packages/dc/96/d60e835fb7d10166c77aef0c1fa30e634153c03a0f486786977b95f88fde/gcsfs-2024.10.0-py2.py3-none-any.whl hash: - sha256: f3ab9d3bedc45da8cf40baed7c3a1e1694e8f599160d9138d78f0ef25e4a3ca1 + sha256: bb2d23547e61203ea2dda5fa6c4b91a0c34b74ebe8bb6ab1926f6c33381bceb2 category: main optional: false - name: google-cloud @@ -18077,7 +17836,7 @@ package: category: main optional: false - name: google-cloud-storage - version: 2.18.2 + version: 2.19.0 manager: pip platform: linux-64 dependencies: @@ -18087,13 +17846,13 @@ package: google-resumable-media: '>=2.7.2' requests: '>=2.18.0,<3.0.0dev' google-crc32c: '>=1.0,<2.0dev' - url: https://files.pythonhosted.org/packages/fc/da/95db7bd4f0bd1644378ac1702c565c0210b004754d925a74f526a710c087/google_cloud_storage-2.18.2-py2.py3-none-any.whl + url: https://files.pythonhosted.org/packages/d5/94/6db383d8ee1adf45dc6c73477152b82731fa4c4a46d9c1932cc8757e0fd4/google_cloud_storage-2.19.0-py2.py3-none-any.whl hash: - sha256: 97a4d45c368b7d401ed48c4fdfe86e1e1cb96401c9e199e419d289e2c0370166 + sha256: aeb971b5c29cf8ab98445082cbfe7b161a1f48ed275822f59ed3f1524ea54fba category: main optional: false - name: google-cloud-storage - version: 2.18.2 + version: 2.19.0 manager: pip platform: osx-arm64 dependencies: @@ -18103,9 +17862,9 @@ package: google-resumable-media: '>=2.7.2' requests: '>=2.18.0,<3.0.0dev' google-crc32c: '>=1.0,<2.0dev' - url: https://files.pythonhosted.org/packages/fc/da/95db7bd4f0bd1644378ac1702c565c0210b004754d925a74f526a710c087/google_cloud_storage-2.18.2-py2.py3-none-any.whl + url: https://files.pythonhosted.org/packages/d5/94/6db383d8ee1adf45dc6c73477152b82731fa4c4a46d9c1932cc8757e0fd4/google_cloud_storage-2.19.0-py2.py3-none-any.whl hash: - sha256: 97a4d45c368b7d401ed48c4fdfe86e1e1cb96401c9e199e419d289e2c0370166 + sha256: aeb971b5c29cf8ab98445082cbfe7b161a1f48ed275822f59ed3f1524ea54fba category: main optional: false - name: htmlmin @@ -18275,27 +18034,27 @@ package: category: main 
optional: false - name: jsonpickle - version: 3.3.0 + version: 4.0.0 manager: pip platform: linux-64 dependencies: {} - url: https://files.pythonhosted.org/packages/71/1f/224e27180204282c1ea378b86944585616c1978544b9f5277cf907fdb26c/jsonpickle-3.3.0-py3-none-any.whl + url: https://files.pythonhosted.org/packages/a1/64/815460f86d94c9e1431800a75061719824c6fef14d88a6117eba3126cd5b/jsonpickle-4.0.0-py3-none-any.whl hash: - sha256: 287c12143f35571ab00e224fa323aa4b090d5a7f086f5f494d7ee9c7eb1a380a + sha256: 53730b9e094bc41f540bfdd25eaf6e6cf43811590e9e1477abcec44b866ddcd9 category: main optional: false - name: jsonpickle - version: 3.3.0 + version: 4.0.0 manager: pip platform: osx-arm64 dependencies: {} - url: https://files.pythonhosted.org/packages/71/1f/224e27180204282c1ea378b86944585616c1978544b9f5277cf907fdb26c/jsonpickle-3.3.0-py3-none-any.whl + url: https://files.pythonhosted.org/packages/a1/64/815460f86d94c9e1431800a75061719824c6fef14d88a6117eba3126cd5b/jsonpickle-4.0.0-py3-none-any.whl hash: - sha256: 287c12143f35571ab00e224fa323aa4b090d5a7f086f5f494d7ee9c7eb1a380a + sha256: 53730b9e094bc41f540bfdd25eaf6e6cf43811590e9e1477abcec44b866ddcd9 category: main optional: false - name: keyring - version: 25.4.1 + version: 25.5.0 manager: pip platform: linux-64 dependencies: @@ -18305,13 +18064,13 @@ package: importlib-metadata: '>=4.11.4' secretstorage: '>=3.2' jeepney: '>=0.4.2' - url: https://files.pythonhosted.org/packages/83/25/e6d59e5f0a0508d0dca8bb98c7f7fd3772fc943ac3f53d5ab18a218d32c0/keyring-25.4.1-py3-none-any.whl + url: https://files.pythonhosted.org/packages/32/c9/353c156fa2f057e669106e5d6bcdecf85ef8d3536ce68ca96f18dc7b6d6f/keyring-25.5.0-py3-none-any.whl hash: - sha256: 5426f817cf7f6f007ba5ec722b1bcad95a75b27d780343772ad76b17cb47b0bf + sha256: e67f8ac32b04be4714b42fe84ce7dad9c40985b9ca827c592cc303e7c26d9741 category: main optional: false - name: keyring - version: 25.4.1 + version: 25.5.0 manager: pip platform: osx-arm64 dependencies: @@ -18319,9 +18078,9 @@ 
package: jaraco.functools: '*' jaraco.context: '*' importlib-metadata: '>=4.11.4' - url: https://files.pythonhosted.org/packages/83/25/e6d59e5f0a0508d0dca8bb98c7f7fd3772fc943ac3f53d5ab18a218d32c0/keyring-25.4.1-py3-none-any.whl + url: https://files.pythonhosted.org/packages/32/c9/353c156fa2f057e669106e5d6bcdecf85ef8d3536ce68ca96f18dc7b6d6f/keyring-25.5.0-py3-none-any.whl hash: - sha256: 5426f817cf7f6f007ba5ec722b1bcad95a75b27d780343772ad76b17cb47b0bf + sha256: e67f8ac32b04be4714b42fe84ce7dad9c40985b9ca827c592cc303e7c26d9741 category: main optional: false - name: llvmlite @@ -18389,25 +18148,25 @@ package: category: main optional: false - name: mashumaro - version: 3.13.1 + version: '3.15' manager: pip platform: linux-64 dependencies: typing-extensions: '>=4.1.0' - url: https://files.pythonhosted.org/packages/44/e4/23e8febb328c8b63b2f99083a3eec271e466d8d22b0726110143863b36e9/mashumaro-3.13.1-py3-none-any.whl + url: https://files.pythonhosted.org/packages/f9/59/595eabaa779c87a72d65864351e0fdd2359d7d73967d5ed9f2f0c6186fa3/mashumaro-3.15-py3-none-any.whl hash: - sha256: ad0a162b8f4ea232dadd2891d77ff20165b855b9d84610f36ac84462d4576aa0 + sha256: cdd45ef5a4d09860846a3ee37a4c2f5f4bc70eb158caa55648c4c99451ca6c4c category: main optional: false - name: mashumaro - version: 3.13.1 + version: '3.15' manager: pip platform: osx-arm64 dependencies: typing-extensions: '>=4.1.0' - url: https://files.pythonhosted.org/packages/44/e4/23e8febb328c8b63b2f99083a3eec271e466d8d22b0726110143863b36e9/mashumaro-3.13.1-py3-none-any.whl + url: https://files.pythonhosted.org/packages/f9/59/595eabaa779c87a72d65864351e0fdd2359d7d73967d5ed9f2f0c6186fa3/mashumaro-3.15-py3-none-any.whl hash: - sha256: ad0a162b8f4ea232dadd2891d77ff20165b855b9d84610f36ac84462d4576aa0 + sha256: cdd45ef5a4d09860846a3ee37a4c2f5f4bc70eb158caa55648c4c99451ca6c4c category: main optional: false - name: more-itertools @@ -18431,29 +18190,29 @@ package: category: main optional: false - name: msal - version: 1.31.0 + version: 
1.31.1 manager: pip platform: linux-64 dependencies: requests: '>=2.0.0,<3' pyjwt: '>=1.0.0,<3' cryptography: '>=2.5,<46' - url: https://files.pythonhosted.org/packages/cd/40/0a5d743484e1ad00493bdffa8d10d7dbc6a51fec95290ad396e47e79fa43/msal-1.31.0-py3-none-any.whl + url: https://files.pythonhosted.org/packages/30/7c/489cd931a752d05753d730e848039f08f65f86237cf1b8724d0a1cbd700b/msal-1.31.1-py3-none-any.whl hash: - sha256: 96bc37cff82ebe4b160d5fc0f1196f6ca8b50e274ecd0ec5bf69c438514086e7 + sha256: 29d9882de247e96db01386496d59f29035e5e841bcac892e6d7bf4390bf6bd17 category: main optional: false - name: msal - version: 1.31.0 + version: 1.31.1 manager: pip platform: osx-arm64 dependencies: requests: '>=2.0.0,<3' pyjwt: '>=1.0.0,<3' cryptography: '>=2.5,<46' - url: https://files.pythonhosted.org/packages/cd/40/0a5d743484e1ad00493bdffa8d10d7dbc6a51fec95290ad396e47e79fa43/msal-1.31.0-py3-none-any.whl + url: https://files.pythonhosted.org/packages/30/7c/489cd931a752d05753d730e848039f08f65f86237cf1b8724d0a1cbd700b/msal-1.31.1-py3-none-any.whl hash: - sha256: 96bc37cff82ebe4b160d5fc0f1196f6ca8b50e274ecd0ec5bf69c438514086e7 + sha256: 29d9882de247e96db01386496d59f29035e5e841bcac892e6d7bf4390bf6bd17 category: main optional: false - name: msal-extensions @@ -18505,31 +18264,29 @@ package: category: main optional: false - name: patsy - version: 0.5.6 + version: 1.0.1 manager: pip platform: linux-64 dependencies: - six: '*' numpy: '>=1.4' - url: https://files.pythonhosted.org/packages/43/f3/1d311a09c34f14f5973bb0bb0dc3a6e007e1eda90b5492d082689936ca51/patsy-0.5.6-py2.py3-none-any.whl + url: https://files.pythonhosted.org/packages/87/2b/b50d3d08ea0fc419c183a84210571eba005328efa62b6b98bc28e9ead32a/patsy-1.0.1-py2.py3-none-any.whl hash: - sha256: 19056886fd8fa71863fa32f0eb090267f21fb74be00f19f5c70b2e9d76c883c6 + sha256: 751fb38f9e97e62312e921a1954b81e1bb2bcda4f5eeabaf94db251ee791509c category: main optional: false - name: patsy - version: 0.5.6 + version: 1.0.1 manager: pip platform: 
osx-arm64 dependencies: - six: '*' numpy: '>=1.4' - url: https://files.pythonhosted.org/packages/43/f3/1d311a09c34f14f5973bb0bb0dc3a6e007e1eda90b5492d082689936ca51/patsy-0.5.6-py2.py3-none-any.whl + url: https://files.pythonhosted.org/packages/87/2b/b50d3d08ea0fc419c183a84210571eba005328efa62b6b98bc28e9ead32a/patsy-1.0.1-py2.py3-none-any.whl hash: - sha256: 19056886fd8fa71863fa32f0eb090267f21fb74be00f19f5c70b2e9d76c883c6 + sha256: 751fb38f9e97e62312e921a1954b81e1bb2bcda4f5eeabaf94db251ee791509c category: main optional: false - name: perian - version: 0.2.9 + version: 0.2.12 manager: pip platform: linux-64 dependencies: @@ -18538,13 +18295,13 @@ package: python-dateutil: '>=2.8.2' toml: '>=0.10.2,<0.11.0' urllib3: '>=1.25.3' - url: https://files.pythonhosted.org/packages/0a/2c/03c2d33c51dc6ef456b9bb0976dbd518a41d966fd1c5bcc130b52759e026/perian-0.2.9-py3-none-any.whl + url: https://files.pythonhosted.org/packages/12/e1/4f4bfe09ddc30478b4b8bb2b6a58f6e703838aca11bc6ddca3904bde36c6/perian-0.2.12-py3-none-any.whl hash: - sha256: 51072e60cb886a7d33e29565e4bef7a0c6e465d8d090c78c5e643f8af0e97d26 + sha256: 1c6998255aa8b54a3eaceb462b0476bbf7c9a8693fa1b8fc7fa15bb95a121b25 category: main optional: false - name: perian - version: 0.2.9 + version: 0.2.12 manager: pip platform: osx-arm64 dependencies: @@ -18553,9 +18310,9 @@ package: python-dateutil: '>=2.8.2' toml: '>=0.10.2,<0.11.0' urllib3: '>=1.25.3' - url: https://files.pythonhosted.org/packages/0a/2c/03c2d33c51dc6ef456b9bb0976dbd518a41d966fd1c5bcc130b52759e026/perian-0.2.9-py3-none-any.whl + url: https://files.pythonhosted.org/packages/12/e1/4f4bfe09ddc30478b4b8bb2b6a58f6e703838aca11bc6ddca3904bde36c6/perian-0.2.12-py3-none-any.whl hash: - sha256: 51072e60cb886a7d33e29565e4bef7a0c6e465d8d090c78c5e643f8af0e97d26 + sha256: 1c6998255aa8b54a3eaceb462b0476bbf7c9a8693fa1b8fc7fa15bb95a121b25 category: main optional: false - name: phik @@ -18653,39 +18410,37 @@ package: category: main optional: false - name: pydata-sphinx-theme - 
version: 0.15.4 + version: 0.16.0 manager: pip platform: linux-64 dependencies: - sphinx: '>=5' + sphinx: '>=6.1' beautifulsoup4: '*' docutils: '!=0.17.0' - packaging: '*' babel: '*' pygments: '>=2.7' accessible-pygments: '*' typing-extensions: '*' - url: https://files.pythonhosted.org/packages/e7/d3/c622950d87a2ffd1654208733b5bd1c5645930014abed8f4c0d74863988b/pydata_sphinx_theme-0.15.4-py3-none-any.whl + url: https://files.pythonhosted.org/packages/ba/92/38f384061e1361fac7092c35e932c0e08026fb9080bf3fbf05f4c3bb6bda/pydata_sphinx_theme-0.16.0-py3-none-any.whl hash: - sha256: 2136ad0e9500d0949f96167e63f3e298620040aea8f9c74621959eda5d4cf8e6 + sha256: 18c810ee4e67e05281e371e156c1fb5bb0fa1f2747240461b225272f7d8d57d8 category: main optional: false - name: pydata-sphinx-theme - version: 0.15.4 + version: 0.16.0 manager: pip platform: osx-arm64 dependencies: - sphinx: '>=5' + sphinx: '>=6.1' beautifulsoup4: '*' docutils: '!=0.17.0' - packaging: '*' babel: '*' pygments: '>=2.7' accessible-pygments: '*' typing-extensions: '*' - url: https://files.pythonhosted.org/packages/e7/d3/c622950d87a2ffd1654208733b5bd1c5645930014abed8f4c0d74863988b/pydata_sphinx_theme-0.15.4-py3-none-any.whl + url: https://files.pythonhosted.org/packages/ba/92/38f384061e1361fac7092c35e932c0e08026fb9080bf3fbf05f4c3bb6bda/pydata_sphinx_theme-0.16.0-py3-none-any.whl hash: - sha256: 2136ad0e9500d0949f96167e63f3e298620040aea8f9c74621959eda5d4cf8e6 + sha256: 18c810ee4e67e05281e371e156c1fb5bb0fa1f2747240461b225272f7d8d57d8 category: main optional: false - name: pytimeparse @@ -18801,55 +18556,55 @@ package: category: main optional: false - name: rich-click - version: 1.8.3 + version: 1.8.5 manager: pip platform: linux-64 dependencies: click: '>=7' rich: '>=10.7' - typing-extensions: '*' - url: https://files.pythonhosted.org/packages/c6/ea/5a0c5a8e6532e971983d1b0fc99268eb66a10f489da35d9022ce01044191/rich_click-1.8.3-py3-none-any.whl + typing-extensions: '>=4' + url: 
https://files.pythonhosted.org/packages/aa/0b/e2de98c538c0ee9336211d260f88b7e69affab44969750aaca0b48a697c8/rich_click-1.8.5-py3-none-any.whl hash: - sha256: 636d9c040d31c5eee242201b5bf4f2d358bfae4db14bb22ec1cafa717cfd02cd + sha256: 0fab7bb5b66c15da17c210b4104277cd45f3653a7322e0098820a169880baee0 category: main optional: false - name: rich-click - version: 1.8.3 + version: 1.8.5 manager: pip platform: osx-arm64 dependencies: click: '>=7' rich: '>=10.7' - typing-extensions: '*' - url: https://files.pythonhosted.org/packages/c6/ea/5a0c5a8e6532e971983d1b0fc99268eb66a10f489da35d9022ce01044191/rich_click-1.8.3-py3-none-any.whl + typing-extensions: '>=4' + url: https://files.pythonhosted.org/packages/aa/0b/e2de98c538c0ee9336211d260f88b7e69affab44969750aaca0b48a697c8/rich_click-1.8.5-py3-none-any.whl hash: - sha256: 636d9c040d31c5eee242201b5bf4f2d358bfae4db14bb22ec1cafa717cfd02cd + sha256: 0fab7bb5b66c15da17c210b4104277cd45f3653a7322e0098820a169880baee0 category: main optional: false - name: s3fs - version: 2024.9.0 + version: 2024.10.0 manager: pip platform: linux-64 dependencies: aiobotocore: '>=2.5.4,<3.0.0' - fsspec: '>=2024.9.0,<2024.10.0' + fsspec: '>=2024.10.0,<2024.11.0' aiohttp: <4.0.0a0 || >4.0.0a0,<4.0.0a1 || >4.0.0a1 - url: https://files.pythonhosted.org/packages/d7/af/add60ba3a0bb78d900f6d9365000c1b0e06c97284154e20f0bda02dbb717/s3fs-2024.9.0-py3-none-any.whl + url: https://files.pythonhosted.org/packages/99/44/bb9ff095ae7b1b6908480f683b6ca6b71c2105d343a5e5cb25334b01f5fa/s3fs-2024.10.0-py3-none-any.whl hash: - sha256: 3a7dc7acae4358af8e8dfb693e82a8477f9f2c847de5d44cf65fee75752eaca3 + sha256: 7a2025d60d5b1a6025726b3a5e292a8e5aa713abc3b16fd1f81735181f7bb282 category: main optional: false - name: s3fs - version: 2024.9.0 + version: 2024.10.0 manager: pip platform: osx-arm64 dependencies: aiobotocore: '>=2.5.4,<3.0.0' - fsspec: '>=2024.9.0,<2024.10.0' + fsspec: '>=2024.10.0,<2024.11.0' aiohttp: <4.0.0a0 || >4.0.0a0,<4.0.0a1 || >4.0.0a1 - url: 
https://files.pythonhosted.org/packages/d7/af/add60ba3a0bb78d900f6d9365000c1b0e06c97284154e20f0bda02dbb717/s3fs-2024.9.0-py3-none-any.whl + url: https://files.pythonhosted.org/packages/99/44/bb9ff095ae7b1b6908480f683b6ca6b71c2105d343a5e5cb25334b01f5fa/s3fs-2024.10.0-py3-none-any.whl hash: - sha256: 3a7dc7acae4358af8e8dfb693e82a8477f9f2c847de5d44cf65fee75752eaca3 + sha256: 7a2025d60d5b1a6025726b3a5e292a8e5aa713abc3b16fd1f81735181f7bb282 category: main optional: false - name: seaborn @@ -18961,47 +18716,47 @@ package: category: main optional: false - name: sphinxcontrib-video - version: 0.2.1 + version: 0.3.1 manager: pip platform: linux-64 dependencies: sphinx: '*' - url: https://files.pythonhosted.org/packages/e9/0c/8b6c0f64dee92f02543f338ec6b6e6832f895c3112334894408237af7536/sphinxcontrib_video-0.2.1-py3-none-any.whl + url: https://files.pythonhosted.org/packages/b5/1d/304f3aa25334048582b063ec2dd1d990fcd3c8257f2abf8d7e941c54d50d/sphinxcontrib_video-0.3.1-py3-none-any.whl hash: - sha256: ebc31be0bb96e1aee260efe6b3806b81f9db20535c55b2f4ba355404f38d067d + sha256: 953450be3491241c7de889eeba15cd08fc4abab558963694154446b1167264c7 category: main optional: false - name: sphinxcontrib-video - version: 0.2.1 + version: 0.3.1 manager: pip platform: osx-arm64 dependencies: sphinx: '*' - url: https://files.pythonhosted.org/packages/e9/0c/8b6c0f64dee92f02543f338ec6b6e6832f895c3112334894408237af7536/sphinxcontrib_video-0.2.1-py3-none-any.whl + url: https://files.pythonhosted.org/packages/b5/1d/304f3aa25334048582b063ec2dd1d990fcd3c8257f2abf8d7e941c54d50d/sphinxcontrib_video-0.3.1-py3-none-any.whl hash: - sha256: ebc31be0bb96e1aee260efe6b3806b81f9db20535c55b2f4ba355404f38d067d + sha256: 953450be3491241c7de889eeba15cd08fc4abab558963694154446b1167264c7 category: main optional: false - name: sphinxext-remoteliteralinclude - version: 0.4.0 + version: 0.5.0 manager: pip platform: linux-64 dependencies: six: '*' - url: 
https://files.pythonhosted.org/packages/13/0d/7e5009f48c33d5fd533dda80b42c4a79fce8dfdd1b617d0a9df9a9836145/sphinxext_remoteliteralinclude-0.4.0-py3-none-any.whl + url: https://files.pythonhosted.org/packages/1c/3d/d692d2d08b6d9412bea6f9595ba9a0f9ce26fe7f409acbeda74f6be19d56/sphinxext_remoteliteralinclude-0.5.0-py3-none-any.whl hash: - sha256: e91378f07f378e6ca435246e07334c19a8c16b9baf6e180fc3e428a359846ea2 + sha256: d8be8fbff4a23b8412cf3a2270e6a4358ae705c585dc4e3251b34423f93b6c23 category: main optional: false - name: sphinxext-remoteliteralinclude - version: 0.4.0 + version: 0.5.0 manager: pip platform: osx-arm64 dependencies: six: '*' - url: https://files.pythonhosted.org/packages/13/0d/7e5009f48c33d5fd533dda80b42c4a79fce8dfdd1b617d0a9df9a9836145/sphinxext_remoteliteralinclude-0.4.0-py3-none-any.whl + url: https://files.pythonhosted.org/packages/1c/3d/d692d2d08b6d9412bea6f9595ba9a0f9ce26fe7f409acbeda74f6be19d56/sphinxext_remoteliteralinclude-0.5.0-py3-none-any.whl hash: - sha256: e91378f07f378e6ca435246e07334c19a8c16b9baf6e180fc3e428a359846ea2 + sha256: d8be8fbff4a23b8412cf3a2270e6a4358ae705c585dc4e3251b34423f93b6c23 category: main optional: false - name: statsd @@ -19249,33 +19004,33 @@ package: category: main optional: false - name: wordcloud - version: 1.9.3 + version: 1.9.4 manager: pip platform: linux-64 dependencies: numpy: '>=1.6.1' pillow: '*' matplotlib: '*' - url: https://files.pythonhosted.org/packages/32/52/4fb51dde6c6a57669501e51ef205feb7520ce7b2dda100b30bb588e02866/wordcloud-1.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + url: https://files.pythonhosted.org/packages/7e/29/5fd253433d880dd91a0e058e292fae5828277166e988204638ede2a3e6ce/wordcloud-1.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl hash: - sha256: 387dc2bd528ff6bb661451f2a9fd4ccf74b86072d7a2c868285d4c0cf26abeb4 + sha256: 6570cc4e48e8e951d24ef6599cd8bf7ff405fbe995ff6d596bcdfa290a6206a8 category: main optional: false - name: wordcloud - version: 1.9.3 + version: 
1.9.4 manager: pip platform: osx-arm64 dependencies: numpy: '>=1.6.1' pillow: '*' matplotlib: '*' - url: https://files.pythonhosted.org/packages/78/08/3834d47dc30f3add3899e5bdc5a371194d23e649e2923ce15a66c9903976/wordcloud-1.9.3-cp39-cp39-macosx_11_0_arm64.whl + url: https://files.pythonhosted.org/packages/06/dc/87b01d90d62bd6715c864f379ae7ace8a53b0755abf1ad8e822129002528/wordcloud-1.9.4-cp39-cp39-macosx_11_0_arm64.whl hash: - sha256: daa6cfa11ce24e7eb4e42dc896dae4f74ae2166cf90ec997996300566e6811d1 + sha256: 34843fa49135c4ed3739dea050696e707fd00e7335ee4ed62c33639589f90adf category: main optional: false - name: ydata-profiling - version: 4.10.0 + version: 4.12.1 manager: pip platform: linux-64 dependencies: @@ -19299,13 +19054,13 @@ package: wordcloud: '>=1.9.3' dacite: '>=1.8' numba: '>=0.56.0,<1' - url: https://files.pythonhosted.org/packages/5b/9e/18ace1bd7616642d4a933f2957908b5c6aeb612d1dd0d13c774c8667943f/ydata_profiling-4.10.0-py2.py3-none-any.whl + url: https://files.pythonhosted.org/packages/a8/e2/91e8ec48c5a9ba759847d74bb69475d22d79922f686e5252d0eb16e5bb85/ydata_profiling-4.12.1-py2.py3-none-any.whl hash: - sha256: 569231cd35f8f260829a96129ae3ed1a177ec879b290c2d6fdfde40fe993c5ed + sha256: c14e148dfc779540203acd17b2298171a72c8098c7e2481f8030f50d6f0dc4b5 category: main optional: false - name: ydata-profiling - version: 4.10.0 + version: 4.12.1 manager: pip platform: osx-arm64 dependencies: @@ -19329,8 +19084,8 @@ package: wordcloud: '>=1.9.3' dacite: '>=1.8' numba: '>=0.56.0,<1' - url: https://files.pythonhosted.org/packages/5b/9e/18ace1bd7616642d4a933f2957908b5c6aeb612d1dd0d13c774c8667943f/ydata_profiling-4.10.0-py2.py3-none-any.whl + url: https://files.pythonhosted.org/packages/a8/e2/91e8ec48c5a9ba759847d74bb69475d22d79922f686e5252d0eb16e5bb85/ydata_profiling-4.12.1-py2.py3-none-any.whl hash: - sha256: 569231cd35f8f260829a96129ae3ed1a177ec879b290c2d6fdfde40fe993c5ed + sha256: c14e148dfc779540203acd17b2298171a72c8098c7e2481f8030f50d6f0dc4b5 category: main 
optional: false diff --git a/monodocs-environment.yaml b/monodocs-environment.yaml index 717a1523c2..29b1e722e6 100644 --- a/monodocs-environment.yaml +++ b/monodocs-environment.yaml @@ -61,7 +61,7 @@ dependencies: - neptune # neptune - pip: - - flytekit>=1.12.1b2 + - flytekit>=1.15.0a1 - readthedocs-sphinx-ext - sphinx-code-include - sphinxext-remoteliteralinclude diff --git a/rfc/system/5598-deterministic-errors-distributed-training.md b/rfc/system/5598-deterministic-errors-distributed-training.md new file mode 100644 index 0000000000..8a6c864ca0 --- /dev/null +++ b/rfc/system/5598-deterministic-errors-distributed-training.md @@ -0,0 +1,132 @@ +# Deterministic error propagation for distributed (training) tasks + +**Authors:** + +- @bgedik +- @fg91 + +## 1 Executive Summary + +Flyte can schedule distributed training jobs leverging e.g. the [kubeflow training operator](https://github.com/kubeflow/training-operator/tree/f55a91d03f23498cdb465ac26c78566228077c51) and its `PyTorchJob`, `TFJob`, `MPIJob`, ... + +For these distributed jobs, multiple Kubernetes pods are launched. Any of these worker pods can crash, causing all other worker pods in the distributed job to fail subsequently because one worker disappeared. + +Error propagation, in Flyte, happens by the pod entrypoint uploading a file called `error.pb` to blob storage which contains (among other things) the error message and the information whether the error is retriable. + +In a failed distributed training job, all worker pods currently try to create the same `error.pb` file in blob storage - leading to a race condition. It is not guaranteed that the root-cause error is the one being reported to the user and used to determine whether the task can be retried. In fact, the current behavior typically results in the worst outcome, as the latter errors override the former ones, which is the exact opposite of the desired behavior of identifying the first error as the root cause. 
+ +## 2 Motivation + +* As a Flyte user trying to understand why a distributed training task failed, I currently cannot rely on the error reported in the Flyte Console (UI) being the root cause error. + * Instead, I have to search the logs of each worker pod. For distributed training jobs with dozens or even hundreds of worker pods, this can be tedious. + * (Current remedies include combining all worker pods in stackdriver logs using a wildcard in the pod name and then filtering by severity.) +* As a Flyte user marking specific errors as retriable (using a `FlyteRecoverableException`), I want Flyte to deterministically determine the root cause error that killed the distributed job so that the retry behaviour does not suffer from a race condition. + +## 3 Proposed Implementation + +When a distributed training job dies, one of the worker pods often dies due to a certain root-cause error. The other worker pods subsequently crash because one of the workers disappeared. We are interested in the root-cause error, not the error that one worker disappeared. + +As done in torch distributed elastic, we propose to use the timestamp of the exception as a proxy to determine the root-cause. Pytorch distributed (which is used by the `flytekitplugins.kfpytorch.Elastic` task type), for instance, raises a [ChildFailedError](https://github.com/pytorch/pytorch/blob/36d24925c66661037349cad3759dc33850ed0291/torch/distributed/elastic/multiprocessing/errors/__init__.py#L199C16-L199C17) exception which contains a so-called [ProcessFailure](https://github.com/pytorch/pytorch/blob/36d24925c66661037349cad3759dc33850ed0291/torch/distributed/elastic/multiprocessing/errors/__init__.py#L90) which contains the exception timestamp. + +We acknowledge that other frameworks might choose to determine the root cause error in a different way which is why we propose to introduce the concept of an *error aggregation strategy* employed by flytepropeller to identify the root-cause error in a distributed job.
The authors of this RFC aim to implement the strategy `"earliest"` for the two kubeflow pytorch task types (`task_config=PyTorch` and `task_config=Elastic` provided by `flytekitplugins.kfpytorch`) but propose to structure the introduced changes in a way that allows for other potential strategies. + +### Flyteplugins - Creation of Kubernetes resources for tasks + +The [pod entrypoint `pyflyte-execute`](https://github.com/flyteorg/flytekit/blob/master/flytekit/bin/entrypoint.py) needs to be configured to handle multiple error files for a distributed task. + +For this purpose, we propose that distributed plugins in `flyteplugins` like `pytorch` inject two new (optional) `FLYTE_INTERNAL_` environment variables: + +* `FLYTE_INTERNAL_WORKER_NAME`: One of our goals is that the UI eventually tells the user in which worker the root-cause error occurred. For this purpose, the pod entrypoint reads the value from this environment variable. Plugins can choose for themselves what value this environment variable should have. For the pytorch plugin we aim to set it to the pod name via the [Kubernetes downward api](https://kubernetes.io/docs/concepts/workloads/pods/downward-api/). +* `FLYTE_INTERNAL_DIST_ERROR_STRATEGY` tells the pod entrypoint which strategy `flytepropeller` will use to aggregate the error from the worker pods. The pod entrypoint needs this information to determine which information it needs to provide as part of the error file. More on this below. + +(We propose to define the keys for these environment variables [here](https://github.com/flyteorg/flyte/blob/815f85d0ce90a3ace61cce17c0bfb441ac2dbcc3/flyteplugins/go/tasks/pluginmachinery/flytek8s/k8s_resource_adds.go#L20) where the existing `FLYTE_INTERNAL_` environment variables are defined while the respective plugins are responsible for actually setting them to the desired value in the respective pod specs.)
+ +### Flytekit + +#### Preventing the race condition + +For distributed training tasks, the [pod entrypoint `pyflyte-execute`](https://github.com/flyteorg/flytekit/blob/master/flytekit/bin/entrypoint.py) must not upload a single file called [`error.pb`](https://github.com/flyteorg/flytekit/blob/77d056ab9fda40ec6b2312a4d197b9107cdb70dc/flytekit/core/constants.py#L4) (which is the same for all worker pods) but instead choose a file name which differs for each worker pod. We propose to simply include a random uuid in the filename `error-<uuid>.pb` to prevent the race condition. Furthermore, we propose that these error files get grouped in an `errors/` folder under the raw output prefix. + +#### Providing relevant error information to the backend and UI + +The pod entrypoint needs to provide the information in which worker the error occurred in order to display the name in the UI. For the strategy `"earliest"`, it needs to also provide the timestamp when the error occurred. + +We therefore propose to add optional attributes `worker` and `timestamp` (unix epoch time with micro- or nanoseconds granularity) to flyteidl's [`message ContainerError`](https://github.com/flyteorg/flyte/blob/30d33149159c90d0de44f6351b8d5d7309242e59/flyteidl/protos/flyteidl/core/errors.proto#L11). + + +Furthermore, we propose to add an optional `timestamp` attribute to all [flytekit exceptions](https://github.com/flyteorg/flytekit/tree/master/flytekit/exceptions). + +The flytekit pytorch elastic plugin, for instance, catches `ChildFailedError`s [here](https://github.com/flyteorg/flytekit/blob/77d056ab9fda40ec6b2312a4d197b9107cdb70dc/plugins/flytekit-kf-pytorch/flytekitplugins/kfpytorch/task.py#L449), would extract the timestamp, and re-raise it as a Flyte exception which contains a timestamp. (Other plugins, e.g. non-elastic pytorch, which don't come with built-in exception types that include error timestamps, can themselves record the timestamp when the `task_function` raises an exception.)
+ +The entrypoint `pyflyte-execute` will transfer the timestamp from the flytekit exception into the protobuf `ContainerError`. It will also set the `worker` attribute of the `ContainerError` according to the `FLYTE_INTERNAL_WORKER_NAME` environment variable introduced above. + +### Flytepropeller/Flyteplugins - Aggregate the errors in the backend + +In the [kubernetes plugin machinery](https://github.com/flyteorg/flyte/blob/815f85d0ce90a3ace61cce17c0bfb441ac2dbcc3/flyteplugins/go/tasks/pluginmachinery/k8s/plugin.go) we propose to define the error aggregation strategy and allow plugins to configure it via their `PluginProperties`: + +```go +type ErrorAggregationStrategy int + +const ( + // Single error file from a single container + Default ErrorAggregationStrategy = iota + + // Earliest error from potentially multiple error files + Earliest +) + +// System level properties that this Plugin supports +type PluginProperties struct { + ... + ErrorAggregationStrategy ErrorAggregationStrategy +} +``` + +Currently, [here](https://github.com/flyteorg/flyte/blob/4514860cf56ba62717f6c207f269410a8c1a5461/flytepropeller/pkg/controller/nodes/task/k8s/plugin_manager.go#L290) in the plugin manager, upon completion of a node execution, a new [`RemoteFileOutputReader`](https://github.com/flyteorg/flyte/blob/d6da838627d57cd27d60beea004e974ce1fb3ca5/flyteplugins/go/tasks/pluginmachinery/ioutils/remote_file_output_reader.go#L14) is constructed which is responsible for reading the error file uploaded to blob storage. This `RemoteFileOutputReader` implements the [`OutputReader` interface](https://github.com/flyteorg/flyte/blob/1e54d21c4d4ee74245f799a57b4bb8a5534e8368/flyteplugins/go/tasks/pluginmachinery/io/iface.go#L32). + +We propose to implement a new `MultiErrorFileRemoteFileOutputReader` which (for future flexibility) can be configured with the different strategies we define. 
Initially, the only available strategy will be `"earliest"` which the RFC authors aim to use for the kubeflow pytorch plugin. This output reader will search for all error files in the `/errors` folder under the raw output prefix and aggregate the error as specified by the strategy. + +If in [the plugin manager](https://github.com/flyteorg/flyte/blob/4514860cf56ba62717f6c207f269410a8c1a5461/flytepropeller/pkg/controller/nodes/task/k8s/plugin_manager.go#L290) the respective plugin is found to configure an error aggregation strategy other than `Default`, we instantiate such a `MultiErrorFileRemoteFileOutputReader` reader (instead of the existing `RemoteFileOutputReader`) and configure it with the respective strategy. + +For the strategy `Earliest`, it will determine the `ContainerError` with the earliest timestamp, will use this one to determine retriability, and will communicate this specific error message to flyteadmin (and finally the UI). + +#### Backwards compatibility +We propose that the new `MultiErrorFileRemoteFileOutputReader` falls back to reading the `error.pb` (behaviour of the default `RemoteFileOutputReader`) if no `error-<uuid>.pb` files are found in order to solve the problem of backwards compatibility: + +* If flytekit uses a version that supports multiple error files but the backend does not yet, `pyflyte-execute` will not upload multiple error files for distributed tasks since the `FLYTE_INTERNAL_DIST_ERROR_STRATEGY` environment variable will not be set. +* If flytekit uses an older version that does not support multiple error files while the backend does, a single error file will be uploaded despite `FLYTE_INTERNAL_DIST_ERROR_STRATEGY` being set. The output reader will, however, fall back to reading the single `error.pb`.
+ + +### Displaying the name of the worker which experienced the root cause error in the UI + +We propose that in the UI, in addition to the root-cause error message, for distributed tasks we display the name of the worker pod which experienced the root-cause error. As a user trying to debug a failure, this makes it possible to quickly identify the logs of the relevant pod out of potentially hundreds of pods. + +To communicate the name of the worker which experienced the root-cause error from flytepropeller to flyteadmin and eventually the UI, we propose to add the (optional) attribute `worker` also in the [`core.ExecutionError` protobuf message](https://github.com/flyteorg/flyte/blob/815f85d0ce90a3ace61cce17c0bfb441ac2dbcc3/flyteidl/protos/flyteidl/core/execution.proto#L61). + +In `ReadError` of the new `MultiErrorFileRemoteFileOutputReader`, we will then transfer the name of the respective worker pod which experienced the root-cause error from the `ContainerError` in the `ErrorDocument` to the `core.ExecutionError` (as is already done today in the [`RemoteFileOutputReader` for the error message](https://github.com/flyteorg/flyte/blob/815f85d0ce90a3ace61cce17c0bfb441ac2dbcc3/flyteplugins/go/tasks/pluginmachinery/ioutils/remote_file_output_reader.go#L65)). + +With these changes, flyteadmin's `/api/v1/executions/<project>/<domain>/<name>` endpoint, which today provides the error message to the UI, then also provides the information which worker experienced the root cause error. `flyteconsole` needs to be modified to show this information. + +## 4 Metrics & Dashboards + +- + +## 5 Drawbacks + +We don't see any drawbacks to making the error handling of distributed training tasks deterministic and making it easier for users to identify which pod in a distributed job failed first. + +## 6 Alternatives + +A poor man's version would be to not override the error file if it already exists.
While this is a worse solution than proposed above as there still is a race condition, this would still be better than the current behavior because at least we would *favor* earlier errors instead of later ones. + +## 7 Potential Impact and Dependencies + +The authors of this RFC have experience with pytorch (elastic and non-elastic) distributed training jobs and will implement the proposed changes for the pytorch plugin. The improvement proposed in this RFC might be relevant for community members using e.g. the distributed tensorflow or mpi plugins. If possible, they should be included in the RFC and implementation process so that all distributed task plugins can benefit from the improved error handling. + +## 8 Unresolved questions + +- + +## 9 Conclusion + +With ML models getting bigger and bigger, distributed training jobs become increasingly important to the Flyte community. Removing the race condition outlined above from Flyte's error handling for such jobs will significantly improve the UX because we will be able to determine recoverability and report the root-cause error in the Flyte UI in a deterministic way. diff --git a/script/generate_config_docs.sh b/script/generate_config_docs.sh index 2a1485aed8..e7358d9057 100755 --- a/script/generate_config_docs.sh +++ b/script/generate_config_docs.sh @@ -3,7 +3,7 @@ set -e echo "Generating Flyte Configuration Documents" -CUR_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )" +CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null && pwd)" ROOT_DIR=${CUR_DIR}/.. 
OUTPUT_DIR="${ROOT_DIR}"/docs/deployment/configuration/generated GOBIN=${GOPATH:-~/go}/bin @@ -17,30 +17,35 @@ mv flyteadmin/bin/flytescheduler ${GOBIN}/scheduler make -C flytepropeller compile_flytepropeller mv flytepropeller/bin/flytepropeller ${GOBIN}/flytepropeller -output_config () { -CONFIG_NAME=$1 -COMPONENT=$2 -COMMAND=$3 -OUTPUT_PATH=${OUTPUT_DIR}/${COMMAND}_config.rst +# Config files are needed to generate docs, so we generate an empty +# file and reuse it to invoke the docs command in all components. +EMPTY_CONFIG_FILE=empty-config.yaml +touch empty-config.yaml -if [ -z "$CONFIG_NAME" ]; then - log_err "output_config CONFIG_NAME value not specified in arg1" - return 1 -fi +output_config() { + CONFIG_NAME=$1 + COMPONENT=$2 + COMMAND=$3 + OUTPUT_PATH=${OUTPUT_DIR}/${COMMAND}_config.rst -if [ -z "$COMPONENT" ]; then - log_err "output_config COMPONENT value not specified in arg2" - return 1 -fi + if [ -z "$CONFIG_NAME" ]; then + log_err "output_config CONFIG_NAME value not specified in arg1" + return 1 + fi -echo ".. _$COMPONENT-config-specification: + if [ -z "$COMPONENT" ]; then + log_err "output_config COMPONENT value not specified in arg2" + return 1 + fi + + echo ".. 
_$COMPONENT-config-specification: ######################################### Flyte $CONFIG_NAME Configuration ######################################### -" > "${OUTPUT_PATH}" +" >"${OUTPUT_PATH}" -$GOBIN/$COMMAND config docs >> "${OUTPUT_PATH}" + $GOBIN/$COMMAND config --config $EMPTY_CONFIG_FILE docs >>"${OUTPUT_PATH}" } output_config "Admin" flyteadmin flyteadmin diff --git a/script/prepare_artifacts.sh b/script/prepare_artifacts.sh old mode 100644 new mode 100755 index 32cf1e769a..c794493f2e --- a/script/prepare_artifacts.sh +++ b/script/prepare_artifacts.sh @@ -29,6 +29,9 @@ sed "s/v0.1.10/${VERSION}/g" ./charts/flyte-binary/README.md > temp.txt && mv te grep -rlZ "version:[^P]*# VERSION" ./charts/flyteagent/Chart.yaml | xargs -0 sed -i "s/version:[^P]*# VERSION/version: ${VERSION} # VERSION/g" sed "s/v0.1.10/${VERSION}/g" ./charts/flyteagent/README.md > temp.txt && mv temp.txt ./charts/flyteagent/README.md +grep -rlZ "version:[^P]*# VERSION" ./charts/flyte-binary/Chart.yaml | xargs -0 sed -i "s/version:[^P]*# VERSION/version: ${VERSION} # VERSION/g" +sed "s/v0.1.10/${VERSION}/g" ./charts/flyte-binary/README.md > temp.txt && mv temp.txt ./charts/flyte-binary/README.md + helm dep update ./charts/flyte helm dep update ./charts/flyte-core helm dep update ./charts/flyte-deps @@ -67,5 +70,8 @@ sed -i "s,tag:[^P]*# FLYTE_TAG,tag: ${VERSION} # FLYTE_TAG," ./charts/flyte-bina sed -i "s,repository:[^P]*# FLYTECOPILOT_IMAGE,repository: cr.flyte.org/flyteorg/flytecopilot-release # FLYTECOPILOT_IMAGE," ./charts/flyte-binary/values.yaml sed -i "s,tag:[^P]*# FLYTECOPILOT_TAG,tag: ${VERSION} # FLYTECOPILOT_TAG," ./charts/flyte-binary/values.yaml +sed -i "s,tag:[^P]*# FLYTEBINARY_TAG,tag: ${VERSION} # FLYTEBINARY_TAG," ./charts/flyte-binary/values.yaml +sed -i "s,repository:[^P]*# FLYTEBINARY_IMAGE,repository: cr.flyte.org/flyteorg/flyte-binary-release # FLYTEBINARY_IMAGE," ./charts/flyte-binary/values.yaml + sed -i "s,tag:[^P]*# FLYTEAGENT_TAG,tag: ${VERSION} # FLYTEAGENT_TAG," 
./charts/flyteagent/values.yaml sed -i "s,repository:[^P]*# FLYTEAGENT_IMAGE,repository: cr.flyte.org/flyteorg/flyteagent-release # FLYTEAGENT_IMAGE," ./charts/flyteagent/values.yaml diff --git a/script/release.sh b/script/release.sh index 9816fb6a7c..beae0203b7 100755 --- a/script/release.sh +++ b/script/release.sh @@ -25,4 +25,6 @@ sed -i "s,image:[^P]*# FLYTECOPILOT_IMAGE,image: cr.flyte.org/flyteorg/flytecopi sed -i "s,image:[^P]*# FLYTECOPILOT_IMAGE,image: cr.flyte.org/flyteorg/flytecopilot:${VERSION} # FLYTECOPILOT_IMAGE," ./charts/flyte-core/values.yaml sed -i "s,tag:[^P]*# FLYTECOPILOT_TAG,tag: ${VERSION} # FLYTECOPILOT_TAG," ./charts/flyte-binary/values.yaml +sed -i "s,tag:[^P]*# FLYTEBINARY_TAG,tag: ${VERSION} # FLYTEBINARY_TAG," ./charts/flyte-binary/values.yaml + sed -i "s,tag:[^P]*# FLYTEAGENT_TAG,tag: ${FLYTEKIT_TAG} # FLYTEAGENT_TAG," ./charts/flyteagent/values.yaml