diff --git a/lib/workload/components/python-workflow-tools-layer/index.ts b/lib/workload/components/python-workflow-tools-layer/index.ts
new file mode 100644
index 000000000..e91895ff1
--- /dev/null
+++ b/lib/workload/components/python-workflow-tools-layer/index.ts
@@ -0,0 +1,24 @@
+import { Construct } from 'constructs';
+import { PythonLayerVersion } from '@aws-cdk/aws-lambda-python-alpha';
+import path from 'path';
+import { PythonLambdaLayerConstruct } from '../python-lambda-layer';
+
+export interface PythonWorkflowLambdaLayerConstructProps {
+  layerPrefix: string;
+}
+
+export class WorkflowToolsPythonLambdaLayer extends Construct {
+  public readonly lambdaLayerVersionObj: PythonLayerVersion;
+
+  constructor(scope: Construct, id: string, props: PythonWorkflowLambdaLayerConstructProps) {
+    super(scope, id);
+
+    // Generate the workflow tools python lambda layer
+    // and expose the lambda layer version object
+    this.lambdaLayerVersionObj = new PythonLambdaLayerConstruct(this, 'lambda_layer', {
+      layerName: `${props.layerPrefix}-workflow-py-layer`,
+      layerDescription: 'Lambda Layer for handling the workflow api via Python',
+      layerDirectory: path.join(__dirname, 'workflow_tools_layer'),
+    }).lambdaLayerVersionObj;
+  }
+}
diff --git a/lib/workload/components/python-workflow-tools-layer/workflow_tools_layer/poetry.lock b/lib/workload/components/python-workflow-tools-layer/workflow_tools_layer/poetry.lock
new file mode 100644
index 000000000..b866eb7ab
--- /dev/null
+++ b/lib/workload/components/python-workflow-tools-layer/workflow_tools_layer/poetry.lock
@@ -0,0 +1,273 @@
+# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand.
+
+[[package]]
+name = "certifi"
+version = "2024.8.30"
+description = "Python package for providing Mozilla's CA Bundle."
+optional = false
+python-versions = ">=3.6"
+files = [
+    {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"},
+    {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"},
+]
+
+[[package]]
+name = "charset-normalizer"
+version = "3.4.0"
+description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
+optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-win32.whl", hash = "sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea"}, 
+ {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-win32.whl", hash = "sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b"}, + {file = 
"charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = "sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dbe03226baf438ac4fda9e2d0715022fd579cb641c4cf639fa40d53b2fe6f3e2"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd9a8bd8900e65504a305bf8ae6fa9fbc66de94178c420791d0293702fce2df7"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8831399554b92b72af5932cdbbd4ddc55c55f631bb13ff8fe4e6536a06c5c51"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:a14969b8691f7998e74663b77b4c36c0337cb1df552da83d5c9004a93afdb574"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcaf7c1524c0542ee2fc82cc8ec337f7a9f7edee2532421ab200d2b920fc97cf"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425c5f215d0eecee9a56cdb703203dda90423247421bf0d67125add85d0c4455"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:d5b054862739d276e09928de37c79ddeec42a6e1bfc55863be96a36ba22926f6"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:f3e73a4255342d4eb26ef6df01e3962e73aa29baa3124a8e824c5d3364a65748"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:2f6c34da58ea9c1a9515621f4d9ac379871a8f21168ba1b5e09d74250de5ad62"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:f09cb5a7bbe1ecae6e87901a2eb23e0256bb524a79ccc53eb0b7629fbe7677c4"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:0099d79bdfcf5c1f0c2c72f91516702ebf8b0b8ddd8905f97a8aecf49712c621"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-win32.whl", hash = "sha256:9c98230f5042f4945f957d006edccc2af1e03ed5e37ce7c373f00a5a4daa6149"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:62f60aebecfc7f4b82e3f639a7d1433a20ec32824db2199a11ad4f5e146ef5ee"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:af73657b7a68211996527dbfeffbb0864e043d270580c5aef06dc4b659a4b578"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cab5d0b79d987c67f3b9e9c53f54a61360422a5a0bc075f43cab5621d530c3b6"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9289fd5dddcf57bab41d044f1756550f9e7cf0c8e373b8cdf0ce8773dc4bd417"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b493a043635eb376e50eedf7818f2f322eabbaa974e948bd8bdd29eb7ef2a51"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fa2566ca27d67c86569e8c85297aaf413ffab85a8960500f12ea34ff98e4c41"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8e538f46104c815be19c975572d74afb53f29650ea2025bbfaef359d2de2f7f"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fd30dc99682dc2c603c2b315bded2799019cea829f8bf57dc6b61efde6611c8"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2006769bd1640bdf4d5641c69a3d63b71b81445473cac5ded39740a226fa88ab"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:dc15e99b2d8a656f8e666854404f1ba54765871104e50c8e9813af8a7db07f12"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ab2e5bef076f5a235c3774b4f4028a680432cded7cad37bba0fd90d64b187d19"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:4ec9dd88a5b71abfc74e9df5ebe7921c35cbb3b641181a531ca65cdb5e8e4dea"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:43193c5cda5d612f247172016c4bb71251c784d7a4d9314677186a838ad34858"}, + 
{file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:aa693779a8b50cd97570e5a0f343538a8dbd3e496fa5dcb87e29406ad0299654"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-win32.whl", hash = "sha256:7706f5850360ac01d80c89bcef1640683cc12ed87f42579dab6c5d3ed6888613"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:c3e446d253bd88f6377260d07c895816ebf33ffffd56c1c792b13bff9c3e1ade"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-win32.whl", hash = "sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca"}, + {file = "charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079"}, + {file = "charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e"}, +] + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." 
+optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "idna" +version = "3.10" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.6" +files = [ + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, +] + +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "mypy-boto3-secretsmanager" +version = "1.35.0" +description = "Type annotations for boto3.SecretsManager 1.35.0 service generated with mypy-boto3-builder 7.26.0" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mypy_boto3_secretsmanager-1.35.0-py3-none-any.whl", hash = "sha256:ff72d5743061d1d9bf3f5e308990b78c9bede8e02648f6eb8712e3b2e76d2669"}, + {file = "mypy_boto3_secretsmanager-1.35.0.tar.gz", hash = "sha256:c37d181315ba10d8546872304d7f266e7461429b08e63507c23cc508c3ef4264"}, +] + +[[package]] +name = "mypy-boto3-ssm" +version = "1.35.21" +description = "Type annotations for boto3.SSM 1.35.21 service generated with mypy-boto3-builder 8.1.0" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mypy_boto3_ssm-1.35.21-py3-none-any.whl", hash = "sha256:a4a0f197cecedf83b52fe666f3d0bdb3fbe43417fd5707a2a0378400be8eec98"}, + {file = "mypy_boto3_ssm-1.35.21.tar.gz", hash = "sha256:5ed55c7509fadda6155718fe59729b3dab0f8b76ff1e037a87abff9dd2475f49"}, +] + +[[package]] +name = "packaging" +version = "24.2" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, + {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, +] + +[[package]] +name = "pluggy" +version = "1.5.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "pytest" +version = "7.4.4" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, + {file = "pytest-7.4.4.tar.gz", 
hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "requests" +version = "2.32.3" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.8" +files = [ + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "urllib3" +version = "2.2.3" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, + {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[metadata] +lock-version = "2.0" +python-versions = "^3.12, <3.13" +content-hash = "2d3f70fffe5615fe3fcd32bf3997d69496a2dfefa577145ae9af0f8f10ab1914" diff --git a/lib/workload/components/python-workflow-tools-layer/workflow_tools_layer/pyproject.toml b/lib/workload/components/python-workflow-tools-layer/workflow_tools_layer/pyproject.toml new file mode 100644 index 000000000..001b5bcb9 --- /dev/null +++ b/lib/workload/components/python-workflow-tools-layer/workflow_tools_layer/pyproject.toml @@ -0,0 +1,27 @@ +[build-system] +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" + +[tool.poetry] +name = "workflow_tools" +version = "0.0.1" +description = "Workflow Manager Lambda Layers" +license = "GPL-3.0-or-later" +authors = [ + "Alexis Lucattini" +] +homepage = "https://github.com/umccr/orcabus" +repository = "https://github.com/umccr/orcabus" + +[tool.poetry.dependencies] +python = "^3.12, <3.13" +requests = "^2.32.3" + +[tool.poetry.group.dev] +optional = true + +[tool.poetry.group.dev.dependencies] +pytest = "^7.0.0" # For testing only +# For typehinting only, not required at runtime +mypy-boto3-ssm = "^1.34" +mypy-boto3-secretsmanager = "^1.34" diff --git a/lib/workload/components/python-workflow-tools-layer/workflow_tools_layer/src/workflow_tools/utils/__init__.py b/lib/workload/components/python-workflow-tools-layer/workflow_tools_layer/src/workflow_tools/utils/__init__.py new file mode 100644 index 000000000..a07035002 --- /dev/null +++ b/lib/workload/components/python-workflow-tools-layer/workflow_tools_layer/src/workflow_tools/utils/__init__.py @@ -0,0 +1,12 @@ +#!/usr/bin/env python3 + +import re + + +def strip_context_from_orcabus_id(orcabus_id: str) -> str: + """ + Strip the context from the orcabus_id + :param orcabus_id: + :return: + """ + return re.sub(r"^(\w+).", "", orcabus_id) \ No newline at end of file diff --git 
diff --git a/lib/workload/components/python-workflow-tools-layer/workflow_tools_layer/src/workflow_tools/utils/aws_helpers.py b/lib/workload/components/python-workflow-tools-layer/workflow_tools_layer/src/workflow_tools/utils/aws_helpers.py
new file mode 100644
index 000000000..0a5bf2f44
--- /dev/null
+++ b/lib/workload/components/python-workflow-tools-layer/workflow_tools_layer/src/workflow_tools/utils/aws_helpers.py
@@ -0,0 +1,51 @@
+#!/usr/bin/env python3
+
+# Standard imports
+import typing
+import boto3
+import json
+from os import environ
+
+# Type hinting
+if typing.TYPE_CHECKING:
+    from mypy_boto3_secretsmanager import SecretsManagerClient
+    from mypy_boto3_ssm import SSMClient
+
+
+def get_secretsmanager_client() -> 'SecretsManagerClient':
+    return boto3.client('secretsmanager')
+
+
+def get_ssm_client() -> 'SSMClient':
+    return boto3.client('ssm')
+
+
+def get_secret_value(secret_id: str) -> str:
+    """
+    Collect the secret value
+    :param secret_id:
+    :return:
+    """
+    # Get the boto3 response
+    get_secret_value_response = get_secretsmanager_client().get_secret_value(SecretId=secret_id)
+
+    return get_secret_value_response['SecretString']
+
+
+def get_ssm_value(parameter_name: str) -> str:
+    # Get the boto3 response
+    get_ssm_parameter_response = get_ssm_client().get_parameter(Name=parameter_name)
+
+    return get_ssm_parameter_response['Parameter']['Value']
+
+
+def get_orcabus_token() -> str:
+    """
+    From AWS Secrets Manager, retrieve the OrcaBus token.
+    :return:
+    """
+    return json.loads(get_secret_value(environ.get("ORCABUS_TOKEN_SECRET_ID")))['id_token']
+
+
+def get_hostname() -> str:
+    return get_ssm_value(environ.get("HOSTNAME_SSM_PARAMETER"))
diff --git a/lib/workload/components/python-workflow-tools-layer/workflow_tools_layer/src/workflow_tools/utils/globals.py b/lib/workload/components/python-workflow-tools-layer/workflow_tools_layer/src/workflow_tools/utils/globals.py
new file mode 100644
index 000000000..95a22017f
--- /dev/null
+++ b/lib/workload/components/python-workflow-tools-layer/workflow_tools_layer/src/workflow_tools/utils/globals.py
@@ -0,0 +1,8 @@
+#!/usr/bin/env python3
+
+# AWS PARAMETERS
+WORKFLOW_SUBDOMAIN_NAME = "workflow"
+
+# API ENDPOINTS
+WORKFLOW_RUN_ENDPOINT = "api/v1/workflowrun"
+PAYLOAD_ENDPOINT = "api/v1/payload"
\ No newline at end of file
diff --git a/lib/workload/components/python-workflow-tools-layer/workflow_tools_layer/src/workflow_tools/utils/payload_helpers.py b/lib/workload/components/python-workflow-tools-layer/workflow_tools_layer/src/workflow_tools/utils/payload_helpers.py
new file mode 100644
index 000000000..5fe534cd6
--- /dev/null
+++ b/lib/workload/components/python-workflow-tools-layer/workflow_tools_layer/src/workflow_tools/utils/payload_helpers.py
@@ -0,0 +1,19 @@
+#!/usr/bin/env python3
+
+"""
+Helpers for the payload API endpoint
+"""
+from typing import Dict
+
+from workflow_tools.utils.globals import PAYLOAD_ENDPOINT
+from workflow_tools.utils.requests_helpers import get_request_results
+
+
+def get_payload(payload_id: str) -> Dict:
+    """
+    Get the payload from the payload id
+    :param payload_id:
+    :return:
+    """
+    # Get the payload
+    return get_request_results(PAYLOAD_ENDPOINT, payload_id)
\ No newline at end of file
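For context, the helpers above resolve the API hostname and token at call time from SSM and Secrets Manager, so a calling Lambda only needs two environment variables. A rough sketch follows; the parameter and secret names mirror the stack code further down in this diff, the payload id is a placeholder, and AWS credentials with read access are assumed.

    from os import environ

    # Values as wired up in the rnasum stack below
    environ["HOSTNAME_SSM_PARAMETER"] = "/hosted_zone/umccr/name"
    environ["ORCABUS_TOKEN_SECRET_ID"] = "orcabus/token-service-jwt"

    from workflow_tools.utils.payload_helpers import get_payload

    # Fetch a payload by its OrcaBus id (placeholder value)
    payload = get_payload("pld.01PLACEHOLDERID000000000")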
diff --git a/lib/workload/components/python-workflow-tools-layer/workflow_tools_layer/src/workflow_tools/utils/requests_helpers.py b/lib/workload/components/python-workflow-tools-layer/workflow_tools_layer/src/workflow_tools/utils/requests_helpers.py
new file mode 100644
index 000000000..d0b6fc4ac
--- /dev/null
+++ b/lib/workload/components/python-workflow-tools-layer/workflow_tools_layer/src/workflow_tools/utils/requests_helpers.py
@@ -0,0 +1,151 @@
+#!/usr/bin/env python3
+from typing import Dict, Optional, List, Union
+from urllib.parse import urlunparse, urlparse
+
+# Standard imports
+import requests
+import logging
+from copy import deepcopy
+
+from . import strip_context_from_orcabus_id
+# Locals
+from .globals import (
+    WORKFLOW_SUBDOMAIN_NAME,
+)
+
+from .aws_helpers import (
+    get_orcabus_token, get_hostname
+)
+
+# Globals
+DEFAULT_REQUEST_PARAMS = {
+    "rowsPerPage": 1000
+}
+
+# Set logging
+logger = logging.getLogger(__name__)
+logger.setLevel(logging.INFO)
+
+
+def url_path_ext(url: str, path: str) -> str:
+    """
+    Append a path to a URL
+    :param url:
+    :param path:
+    :return:
+    """
+    return str(urlunparse(
+        urlparse(url)._replace(path=f"{urlparse(url).path}/{path}")
+    ))
+
+
+def get_url(endpoint: str) -> str:
+    """
+    Get the URL for the workflow manager endpoint
+    :param endpoint:
+    :return:
+    """
+    # Get the hostname
+    hostname = get_hostname()
+
+    return urlunparse(
+        [
+            "https",
+            ".".join([WORKFLOW_SUBDOMAIN_NAME, hostname]),
+            endpoint,
+            None, None, None
+        ]
+    )
+
+
+def get_request_results(endpoint: str, orcabus_id: str) -> Union[List, Dict]:
+    """
+    Run a GET request against the workflow manager endpoint
+    :param endpoint:
+    :param orcabus_id:
+    :return:
+    """
+    # Get authorization header
+    headers = {
+        "Authorization": f"Bearer {get_orcabus_token()}"
+    }
+
+    # Make the request
+    response = requests.get(
+        url_path_ext(
+            get_url(endpoint) if not urlparse(endpoint).scheme else endpoint,
+            strip_context_from_orcabus_id(orcabus_id)
+        ),
+        headers=headers,
+    )
+
+    response.raise_for_status()
+
+    return response.json()
+
+
+def get_request_results_ext(endpoint: str, orcabus_id: str, url_extension: str) -> Union[List, Dict]:
+    """
+    Run a GET request against the workflow manager endpoint, with a url extension
+    :param endpoint:
+    :param orcabus_id:
+    :return:
+    """
+    # Get authorization header
+    headers = {
+        "Authorization": f"Bearer {get_orcabus_token()}"
+    }
+
+    req_params = deepcopy(DEFAULT_REQUEST_PARAMS)
+
+    # Make the request
+    response = requests.get(
+        url_path_ext(
+            get_url(endpoint) if not urlparse(endpoint).scheme else endpoint,
+            strip_context_from_orcabus_id(orcabus_id) + "/" + url_extension
+        ),
+        headers=headers,
+        params=req_params
+    )
+
+    response.raise_for_status()
+
+    return response.json()
+
+
+def get_request_response_results(endpoint: str, params: Optional[Dict] = None) -> List[Dict]:
+    """
+    Run a GET request against the workflow manager endpoint, following pagination links
+    :param endpoint:
+    :param params:
+    :return:
+    """
+    # Get authorization header
+    headers = {
+        "Authorization": f"Bearer {get_orcabus_token()}"
+    }
+
+    req_params = deepcopy(DEFAULT_REQUEST_PARAMS)
+
+    req_params.update(
+        params if params is not None else {}
+    )
+
+
+    # Make the request
+    response = requests.get(
+        get_url(endpoint) if not urlparse(endpoint).scheme else endpoint,
+        headers=headers,
+        params=req_params
+    )
+
+    response.raise_for_status()
+
+    response_json = response.json()
+
+    if 'links' not in response_json.keys():
+        return [response_json]
+
+    if 'next' in response_json['links'].keys() and response_json['links']['next'] is not None:
+        return response_json['results'] + get_request_response_results(response_json['links']['next'])
+    return response_json['results']
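get_request_response_results above follows the API's pagination by recursing on links.next, and falls back to wrapping a detail response (no "links" key) in a single-item list. The list response shape it assumes is roughly as follows; all values are illustrative, not real ids or hostnames.

    # Paginated list response handled by get_request_response_results (illustrative only)
    example_page = {
        "links": {"next": "https://workflow.example.org/api/v1/workflowrun?page=2"},
        "results": [
            {"orcabusId": "wfr.01PLACEHOLDERID000000000", "portalRunId": "2024111144ce2633"}
        ],
    }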
diff --git a/lib/workload/components/python-workflow-tools-layer/workflow_tools_layer/src/workflow_tools/utils/workflow_run_helpers.py b/lib/workload/components/python-workflow-tools-layer/workflow_tools_layer/src/workflow_tools/utils/workflow_run_helpers.py
new file mode 100644
index 000000000..d61b514ef
--- /dev/null
+++ b/lib/workload/components/python-workflow-tools-layer/workflow_tools_layer/src/workflow_tools/utils/workflow_run_helpers.py
@@ -0,0 +1,58 @@
+#!/usr/bin/env python3
+
+"""
+Helpers for using the workflow run API endpoint
+"""
+
+# Standard imports
+from typing import List, Dict
+from .globals import WORKFLOW_RUN_ENDPOINT
+
+# Local imports
+from .requests_helpers import get_request_response_results, get_request_results_ext, get_request_results
+
+
+def get_workflow_run(workflow_run_orcabus_id: str) -> Dict:
+    """
+    Get the workflow run from the workflow run OrcaBus id
+    :param workflow_run_orcabus_id:
+    :return:
+    """
+    # Get the workflow run
+    return get_request_results(WORKFLOW_RUN_ENDPOINT, workflow_run_orcabus_id)
+
+
+def get_workflow_run_from_portal_run_id(portal_run_id: str) -> Dict:
+    """
+    Get the workflow run from the portal run id
+    :param portal_run_id:
+    :return:
+    """
+    # Look up the workflow run by its portal run id
+    params = {
+        "portalRunId": portal_run_id
+    }
+
+    # Get workflow run id
+    return get_request_results(
+        WORKFLOW_RUN_ENDPOINT,
+        get_request_response_results(WORKFLOW_RUN_ENDPOINT, params)[0].get("orcabusId")
+    )
+
+
+
+def get_workflow_run_state(workflow_run_orcabus_id: str, status: str) -> Dict:
+    """
+    Get the workflow run state object matching the given status
+    :param workflow_run_orcabus_id:
+    :return:
+    """
+    # Get the matching workflow run state
+    return next(
+        filter(
+            lambda workflow_state_iter_: workflow_state_iter_["status"] == status,
+            get_request_results_ext(WORKFLOW_RUN_ENDPOINT, workflow_run_orcabus_id, "state")
+        )
+    )
+
+
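Taken together, these helpers support the lookup pattern used by the rerun lambda further down: portal run id -> workflow run -> READY state -> payload. A condensed sketch is below; the portal run id is the one from the commented example at the bottom of this diff, and the same environment variables and AWS credentials as above are assumed.

    from workflow_tools.utils.workflow_run_helpers import (
        get_workflow_run_from_portal_run_id,
        get_workflow_run_state,
    )
    from workflow_tools.utils.payload_helpers import get_payload

    # Resolve the workflow run, its READY state, then the READY payload
    workflow_run = get_workflow_run_from_portal_run_id("2024111144ce2633")
    ready_state = get_workflow_run_state(workflow_run["orcabusId"], "READY")
    ready_payload = get_payload(ready_state["payload"])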
diff --git a/lib/workload/stateless/stacks/rnasum-pipeline-manager/deploy/index.ts b/lib/workload/stateless/stacks/rnasum-pipeline-manager/deploy/index.ts
index b1db6de81..26239d23e 100644
--- a/lib/workload/stateless/stacks/rnasum-pipeline-manager/deploy/index.ts
+++ b/lib/workload/stateless/stacks/rnasum-pipeline-manager/deploy/index.ts
@@ -13,6 +13,9 @@ import { DefinitionBody } from 'aws-cdk-lib/aws-stepfunctions';
 import { WfmWorkflowStateChangeIcav2ReadyEventHandlerConstruct } from '../../../../components/sfn-icav2-ready-event-handler';
 import { Icav2AnalysisEventHandlerConstruct } from '../../../../components/sfn-icav2-state-change-event-handler';
 import { PythonLambdaGetCwlObjectFromS3InputsConstruct } from '../../../../components/python-lambda-get-cwl-object-from-s3-inputs-py';
+import { Duration } from 'aws-cdk-lib';
+import * as secretsmanager from 'aws-cdk-lib/aws-secretsmanager';
+import { WorkflowToolsPythonLambdaLayer } from '../../../../components/python-workflow-tools-layer';
 
 export interface RnasumIcav2PipelineManagerConfig {
   /* ICAv2 Pipeline analysis essentials */
@@ -48,6 +51,10 @@ export class RnasumIcav2PipelineManagerStack extends cdk.Stack {
   private readonly eventBusObj: events.IEventBus;
   private readonly pipelineIdSsmObj: ssm.IStringParameter;
 
+  // Globals
+  private readonly hostnameSsmParameterPath = '/hosted_zone/umccr/name';
+  private readonly orcabusTokenSecretId = 'orcabus/token-service-jwt'; // pragma: allowlist secret
+
   constructor(scope: Construct, id: string, props: RnasumIcav2PipelineManagerStackProps) {
     super(scope, id, props);
 
@@ -209,5 +216,50 @@ export class RnasumIcav2PipelineManagerStack extends cdk.Stack {
         generateOutputsJsonSfn: configureOutputsSfn,
       }
     ).stateMachineObj;
+
+    /*
+    Collect the secret and SSM parameter required to call the workflow manager API
+    */
+    const hostnameSsmParameterObj = ssm.StringParameter.fromStringParameterName(
+      this,
+      'hostname_ssm_parameter',
+      this.hostnameSsmParameterPath
+    );
+    const orcabusTokenSecretObj = secretsmanager.Secret.fromSecretNameV2(
+      this,
+      'orcabus_token_secret',
+      this.orcabusTokenSecretId
+    );
+
+    /* Lambda for the OrcaUI to trigger a rerun with a new dataset */
+    const rerunWithDataSet = new PythonFunction(this, 'rerun_with_new_dataset_py', {
+      functionName: 'rerunRnaSumWithNewDatasetLambdaPy',
+      entry: path.join(__dirname, '../lambdas/rerun_with_new_dataset_py'),
+      runtime: lambda.Runtime.PYTHON_3_12,
+      architecture: lambda.Architecture.ARM_64,
+      index: 'rerun_with_new_dataset.py',
+      handler: 'handler',
+      memorySize: 1024,
+      timeout: Duration.seconds(60),
+      layers: [
+        new WorkflowToolsPythonLambdaLayer(this, 'workflow_tools_layer', {
+          layerPrefix: 'workflow-tools',
+        }).lambdaLayerVersionObj,
+      ],
+    });
+
+    /* Add env vars */
+    rerunWithDataSet.addEnvironment(
+      'HOSTNAME_SSM_PARAMETER',
+      hostnameSsmParameterObj.parameterName
+    );
+    rerunWithDataSet.addEnvironment('ORCABUS_TOKEN_SECRET_ID', orcabusTokenSecretObj.secretName);
+
+    /* Add permissions */
+    hostnameSsmParameterObj.grantRead(rerunWithDataSet);
+    orcabusTokenSecretObj.grantRead(rerunWithDataSet);
+
+    /* Allow the lambda to put events to the event bus */
+    this.eventBusObj.grantPutEventsTo(rerunWithDataSet);
   }
 }
diff --git a/lib/workload/stateless/stacks/rnasum-pipeline-manager/lambdas/rerun_with_new_dataset_py/rerun_with_new_dataset.py b/lib/workload/stateless/stacks/rnasum-pipeline-manager/lambdas/rerun_with_new_dataset_py/rerun_with_new_dataset.py
new file mode 100644
index 000000000..a5d43f063
--- /dev/null
+++ b/lib/workload/stateless/stacks/rnasum-pipeline-manager/lambdas/rerun_with_new_dataset_py/rerun_with_new_dataset.py
@@ -0,0 +1,172 @@
+#!/usr/bin/env python3
+
+"""
+Rerun the analysis with a new dataset
+
+Given a portal run id and a dataset, regenerate the READY payload
+and rerun the analysis.
+
+Replace all instances of the old portal run id with the new portal run id
+"""
+
+# Standard imports
+from datetime import datetime, timezone
+import random
+from typing import Dict
+from os import environ
+import boto3
+import json
+
+# External imports
+from workflow_tools.utils.payload_helpers import get_payload
+from workflow_tools.utils.workflow_run_helpers import get_workflow_run_from_portal_run_id, get_workflow_run_state
+
+# Globals
+ORCABUS_TOKEN = None
+
+
+# Functions
+def get_event_bridge_session():
+    """
+    Return an EventBridge client
+    :return:
+    """
+    return boto3.Session().client("events")
+
+
+def generate_portal_run_id():
+    """
+    Return a new portal run id in the format
+    YYYYMMDD{8-digit-random-hexadecimal}
+    :return:
+    """
+    return datetime.now(timezone.utc).strftime("%Y%m%d") + "{:08x}".format(random.getrandbits(32))
+
+
+def replace_portal_run_id(old_portal_run_id, new_portal_run_id, payload: Dict) -> Dict:
+    """
+    Replace all instances of the old portal run id with the new portal run id
+    :param old_portal_run_id:
+    :param new_portal_run_id:
+    :param payload:
+    :return:
+    """
+    return json.loads(
+        json.dumps(payload).replace(old_portal_run_id, new_portal_run_id)
+    )
+
+
+def get_utc_timestamp() -> str:
+    """
+    Get a UTC timestamp in ISO format
+    :return:
+    """
+    return datetime.now(timezone.utc).isoformat().replace("+00:00", "Z")
+
+
+def remove_ref_id_from_payload(payload: Dict) -> Dict:
+    """
+    Remove the ref_id from the payload
+    :param payload:
+    :return:
+    """
+    payload.pop("payloadRefId", None)
+    payload.pop("orcabusId", None)
+    return payload
+
+
+def update_dataset(payload: Dict, new_dataset: str) -> Dict:
+    """
+    Update the dataset in the payload
+    :param payload:
+    :return:
+    """
+    payload["data"]["inputs"]["dataset"] = new_dataset
+    return payload
+
+
+def handler(event, context):
+    """
+    Given a portal run id and a dataset, regenerate the ready payload
+    :param event:
+    :param context:
+    :return:
+    """
+
+    # Get the old portal run id
+    old_portal_run_id = event["portal_run_id"]
+
+    # Get the new dataset
+    new_dataset = event["dataset"]
+
+    # Get the workflow object from the old portal run id
+    workflow_obj = get_workflow_run_from_portal_run_id(old_portal_run_id)
+    # Get the READY run state
+    workflow_ready_run_state_obj = get_workflow_run_state(workflow_obj.get("orcabusId"), "READY")
+    # Get the payload from the READY run state
+    payload_obj = get_payload(workflow_ready_run_state_obj.get("payload"))
+
+    # Generate a new portal run id
+    new_portal_run_id = generate_portal_run_id()
+
+    # Replace the old portal run id with the new portal run id
+    new_payload = replace_portal_run_id(old_portal_run_id, new_portal_run_id, payload_obj)
+    # Remove the refId from the previous payload
+    new_payload = remove_ref_id_from_payload(new_payload)
+    # Update the dataset in the payload
+    new_payload = update_dataset(new_payload, new_dataset)
+
+    # Regenerate the event detail
+    detail_dict: Dict = {
+        "portalRunId": new_portal_run_id,
+        "timestamp": get_utc_timestamp(),
+        "status": "READY",
+        "workflowName": workflow_obj.get("workflow").get("workflowName"),
+        "workflowVersion": workflow_obj.get("workflow").get("workflowVersion"),
+        "workflowRunName": workflow_obj.get("workflowRunName").replace(old_portal_run_id, new_portal_run_id),
+        "linkedLibraries": workflow_obj.get("libraries"),
+        "payload": new_payload
+    }
+
+    # Put the event to the event bus
+    return get_event_bridge_session().put_events(
+        Entries=[
+            {
+                "EventBusName": environ["EVENT_BUS_NAME"],
+                "Source": "orcabus.manual",
"orcabus.manual", + "DetailType": "WorkflowRunStateChange", + "Detail": json.dumps(detail_dict), + } + ] + )['Entries'] + + +# if __name__ == "__main__": +# # Import the json module +# import json +# +# # Set the environment variables +# environ['AWS_PROFILE'] = 'umccr-production' +# environ['AWS_REGION'] = 'ap-southeast-2' +# environ['HOSTNAME_SSM_PARAMETER'] = '/hosted_zone/umccr/name' +# environ['ORCABUS_TOKEN_SECRET_ID'] = 'orcabus/token-service-jwt' +# environ['EVENT_BUS_NAME'] = 'OrcaBusMain' +# +# print( +# json.dumps( +# handler( +# { +# "portal_run_id": "2024111144ce2633", +# "dataset": "GBM" +# }, +# None +# ), +# indent=4 +# ) +# ) +# +# # [ +# # { +# # "EventId": "c916b0c7-7476-c96e-1acf-c7caf0eed639" +# # } +# # ]